How to use the nlp.model.qnmodel.QuasiNewtonModel class in nlp

To help you get started, we’ve selected a few nlp examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github PythonOptimizers / NLP.py / nlp / model / pysparsemodel.py View on Github external
Useful to obtain constraint matrix when problem is a linear programming
            problem.
            """
            vals, rows, cols = super(PySparseAmplModel,
                                     self).A(*args, **kwargs)
            A = psp(nrow=self.ncon, ncol=self.nvar,
                    sizeHint=vals.size, symmetric=False)
            A.put(vals, rows, cols)
            return A

        def jop(self, *args, **kwargs):
            """Return the Jacobian at x wrapped as a linear operator."""
            jacobian = self.jac(*args, **kwargs)
            return PysparseLinearOperator(jacobian)

    class QnPySparseAmplModel(QuasiNewtonModel, PySparseAmplModel):
        """PySparse AMPL model with quasi-Newton Hessian approximation.

        All behavior is inherited from the two parent classes;
        `QuasiNewtonModel` appears first so its attributes take
        precedence in the method resolution order.
        """

except:
    pass


class PySparseSlackModel(SlackModel):
    """SlackModel in wich matrices are PySparse matrices.

    :keywords:
        :model:  Original model to be transformed into a slack form.

    """

    def __init__(self, model, **kwargs):
        if not isinstance(model, PySparseNLPModel):
github PythonOptimizers / NLP.py / nlp / model / amplmodel.py View on Github external
def display_basic_info(self):
    """Log vital statistics about the current model."""
    super(AmplModel, self).display_basic_info()

    # Report quantities that the base NLPModel does not know about.
    log = self.logger.info
    log('Number of nonzeros in Jacobian: %d\n' % self.nnzj)
    log('Number of nonzeros in Lagrangian Hessian: %d\n' % self.nnzh)
    if self.islp():
        log('This problem is a linear program.\n')


class QNAmplModel(QuasiNewtonModel, AmplModel):
    """AMPL model whose Hessian is a quasi-Newton approximation.

    All the work is done by the parent classes; `QuasiNewtonModel` is
    listed first so its attributes win in the method resolution order.
    """
github PythonOptimizers / NLP.py / nlp / model / scipymodel.py View on Github external
def hess(self, x, z=None, *args, **kwargs):
    """Return the Lagrangian Hessian at (x, z) as a SciPy COO matrix.

    When the wrapped model maintains a quasi-Newton approximation, the
    Hessian is returned as a linear operator instead of a matrix.
    """
    inner = self.model
    if isinstance(inner, QuasiNewtonModel):
        return self.hop(x, z, *args, **kwargs)

    # Default to zero multipliers when none are supplied.
    multipliers = np.zeros(self.m) if z is None else z

    hessian = inner.hess(x, multipliers, **kwargs)
    return sp.coo_matrix((hessian.data, (hessian.row, hessian.col)),
                         shape=(self.nvar, self.nvar))
github PythonOptimizers / NLP.py / nlp / model / adolcmodel.py View on Github external
def jac(self, *args, **kwargs):
    """Evaluate the sparse constraints Jacobian.

    Returns a SciPy COO matrix of shape (ncon, nvar), or a linear
    operator over an empty array when the problem has no constraints
    (SciPy cannot create a sparse matrix of size 0).
    """
    if self.ncon == 0:  # SciPy cannot create sparse matrix of size 0.
        # `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
        # it was always an alias for the builtin `float`.
        return linop_from_ndarray(np.empty((0, self.nvar), dtype=float))
    vals, rows, cols = super(SciPyAdolcModel, self).jac(*args, **kwargs)
    return sp.coo_matrix((vals, (rows, cols)),
                         shape=(self.ncon, self.nvar))

except:
    pass


class QNAdolcModel(QuasiNewtonModel, SparseAdolcModel):
    """`AdolcModel` whose Hessian is a quasi-Newton approximation.

    All the work is done by the parent classes; `QuasiNewtonModel` is
    listed first so its attributes win in the method resolution order.
    """
github PythonOptimizers / NLP.py / nlp / model / qnmodel.py View on Github external
def __init__(self, *args, **kwargs):
    """Instantiate a model with quasi-Newton Hessian approximation.

    :keywords:
        :H: the `class` of a quasi-Newton linear operator.
            This keyword is mandatory.

    Keywords accepted by the quasi-Newton class will be passed
    directly to its constructor.

    :raises KeyError: if the mandatory `H` keyword is missing.
    """
    # Extract the quasi-Newton class *before* delegating to the parent
    # constructor: 'H' is specific to this class and must not leak into
    # the parent's keyword arguments (the original popped it only after
    # the super() call).  This also fails fast when 'H' is missing.
    qn_cls = kwargs.pop('H')
    super(QuasiNewtonModel, self).__init__(*args, **kwargs)
    # `self.nvar` is established by the parent constructor above.
    self._H = qn_cls(self.nvar, **kwargs)
github PythonOptimizers / NLP.py / nlp / model / pysparsemodel.py View on Github external
def hess(self, x, z=None, obj_num=0, *args, **kwargs):
    """Evaluate Lagrangian Hessian at (x, z).

    Returns a symmetric PySparse matrix whose leading block holds the
    Hessian of the original (un-slacked) model; slack rows/columns are
    left at zero.  When the wrapped model maintains a quasi-Newton
    approximation, a linear operator is returned instead.
    """
    model = self.model  # single shortcut; original assigned this twice
    if isinstance(model, QuasiNewtonModel):
        return self.hop(x, z, *args, **kwargs)

    if z is None:
        z = np.zeros(self.m)

    # Number of variables of the original model (before slacks).
    on = self.original_n

    H = psp(nrow=self.n, ncol=self.n, symmetric=True,
            sizeHint=model.nnzh)
    H[:on, :on] = model.hess(x[:on], z, obj_num, *args, **kwargs)
    return H
github PythonOptimizers / NLP.py / nlp / optimize / regsqp / counterfeitamplmodel.py View on Github external
return LinearOperator(self.n, self.m,
                              lambda v: self.jprod(x, v),
                              matvec_transp=lambda u: self.jtprod(x, u),
                              symmetric=False,
                              dtype=np.float)

    def jprod(self, x, p, **kwargs):
        """Evaluate Jacobian-vector product at x with p."""
        J = self.jac(x, **kwargs)
        return J * p

    def jtprod(self, x, p, **kwargs):
        """Evaluate transposed-Jacobian-vector product at x with p."""
        J = self.jac(x, **kwargs)
        return p * J


class QNCounterFeitAmplModel(QuasiNewtonModel, CounterFeitAmplModel):
    """Counterfeit AMPL model with quasi-Newton Hessian approximation.

    All the work is done by the parent classes; `QuasiNewtonModel` is
    listed first so its attributes win in the method resolution order.
    """
github PythonOptimizers / NLP.py / nlp / model / augmented_lagrangian.py View on Github external
rangeC = orig_model.rangeC
        cons = orig_model.cons(x)
        d = np.zeros(self.n)

        # Compute the direction to the optimal slacks from the current point.
        d[sL] = (cons[lowerC] - self.pi[lowerC]/self.penalty) - x[sL]
        d[sU] = (cons[upperC] - self.pi[upperC]/self.penalty) - x[sU]
        d[sR] = (cons[rangeC] - self.pi[rangeC]/self.penalty) - x[sR]

        # Compute the updated x and the step using projection
        x = project(x + d, model.Lvar, model.Uvar)
        m_step = projected_step(x, d, model.Lvar, model.Uvar)
        return (x, m_step)


class QuasiNewtonAugmentedLagrangian(QuasiNewtonModel, AugmentedLagrangian):
    """Bound-constrained augmented Lagrangian with quasi-Newton Hessian.

    Here the quasi-Newton operator approximates the Hessian of the
    *entire* augmented Lagrangian.  To approximate only the Hessian of
    the Lagrangian itself, initialize the other way around:

            AugmentedLagrangian(QuasiNewtonModel(...))

    All the work is done by the parent classes.
    """
github PythonOptimizers / NLP.py / nlp / model / cysparsemodel.py View on Github external
def hess(self, x, z=None, *args, **kwargs):
    """Return the Lagrangian Hessian at (x, z) as an LLSparseMatrix.

    When the wrapped model maintains a quasi-Newton approximation, the
    Hessian is returned as a linear operator instead.
    """
    inner = self.model
    if isinstance(inner, QuasiNewtonModel):
        return self.hop(x, z, *args, **kwargs)

    # Default to zero multipliers when none are supplied.
    multipliers = np.zeros(self.m) if z is None else z

    # Leading block size: number of variables of the wrapped model.
    on = inner.n

    H = LLSparseMatrix(size=self.nvar, size_hint=self.model.nnzh,
                       store_symmetric=True, itype=types.INT64_T,
                       dtype=types.FLOAT64_T)
    H[:on, :on] = self.model.hess(x[:on], multipliers, *args, **kwargs)
    return H
github PythonOptimizers / NLP.py / nlp / optimize / cqp.py View on Github external
def hess(self, x, z=None, *args, **kwargs):
    """Return the Lagrangian Hessian at (x, z) as a symmetric PysparseMatrix.

    When the wrapped model maintains a quasi-Newton approximation, the
    Hessian is returned as a linear operator instead.
    """
    inner = self.model
    if isinstance(inner, QuasiNewtonModel):
        return self.hop(x, z, *args, **kwargs)

    # Leading block size and converted multipliers for the wrapped model.
    on = inner.n
    multipliers = self.convert_multipliers(z)

    H = PysparseMatrix(nrow=self.n, ncol=self.n, symmetric=True,
                       sizeHint=self.model.nnzh)
    H[:on, :on] = self.model.hess(x[:on], multipliers, *args, **kwargs)
    return H