Source code for openturns.optim

# This file was automatically generated by SWIG (https://www.swig.org).
# Version 4.2.1
#
# Do not make changes to this file unless you know what you are doing - modify
# the SWIG interface file instead.

"""Optimization routines."""

from sys import version_info as _swig_python_version_info
# Import the low-level C/C++ module
if __package__ or "." in __name__:
    from . import _optim
else:
    import _optim

try:
    import builtins as __builtin__
except ImportError:
    import __builtin__

def _swig_repr(self):
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)


def _swig_setattr_nondynamic_instance_variable(set):
    def set_instance_attr(self, name, value):
        if name == "this":
            set(self, name, value)
        elif name == "thisown":
            self.this.own(value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr


def _swig_setattr_nondynamic_class_variable(set):
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr


def _swig_add_metaclass(metaclass):
    """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
    def wrapper(cls):
        return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
    return wrapper


class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)


class SwigPyIterator(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _optim.delete_SwigPyIterator

    def value(self):
        return _optim.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _optim.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _optim.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _optim.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _optim.SwigPyIterator_equal(self, x)

    def copy(self):
        return _optim.SwigPyIterator_copy(self)

    def next(self):
        return _optim.SwigPyIterator_next(self)

    def __next__(self):
        return _optim.SwigPyIterator___next__(self)

    def previous(self):
        return _optim.SwigPyIterator_previous(self)

    def advance(self, n):
        return _optim.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _optim.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _optim.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _optim.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _optim.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _optim.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _optim.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self

# Register SwigPyIterator in _optim:
_optim.SwigPyIterator_swigregister(SwigPyIterator)
import openturns.common
import openturns.typ
import openturns.statistics
import openturns.graph
import openturns.func
import openturns.geom
import openturns.diff
import openturns.experiment
class OptimizationProblemImplementation(openturns.common.PersistentObject):
    r"""
    Base class to define an optimization problem.

    This represents a general optimization problem:

    .. math::

        \min_{x\in B} f(x) \\
        g(x) = 0 \\
        h(x) \ge 0

    where *B* is the problem's bounds, *f* is the objective function, *g* are the equality constraints, and *h* are the inequality constraints.

    Available constructors:
        OptimizationProblem(*objective*)

        OptimizationProblem(*objective, equality, inequality, bounds*)

    Parameters
    ----------
    objective : :class:`~openturns.Function`
        Objective function. Additional constraints and bounds must always be
        consistent with the objective input dimension.
    equality : :class:`~openturns.Function`
        Equality constraints.
    inequality : :class:`~openturns.Function`
        Inequality constraints.
    bounds : :class:`~openturns.Interval`
        Bounds.

    Examples
    --------

    Define an optimization problem to find the minimum of the Rosenbrock function:

    >>> import openturns as ot
    >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
    >>> problem = ot.OptimizationProblem(rosenbrock)
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.OptimizationProblemImplementation_getClassName(self)
    CONTINUOUS = _optim.OptimizationProblemImplementation_CONTINUOUS
    BINARY = _optim.OptimizationProblemImplementation_BINARY
    INTEGER = _optim.OptimizationProblemImplementation_INTEGER

    def getObjective(self):
        r"""
        Accessor to objective function.

        Returns
        -------
        objective : :class:`~openturns.Function`
            Objective function.
        """
        return _optim.OptimizationProblemImplementation_getObjective(self)

    def setObjective(self, objective):
        r"""
        Accessor to objective function.

        Parameters
        ----------
        objectiveFunction : :class:`~openturns.Function`
            Objective function.

        Notes
        -----
        Constraints and bounds are cleared if the new objective has a different input
        dimension, in order to keep the problem valid at all times.
        """
        return _optim.OptimizationProblemImplementation_setObjective(self, objective)

    def hasMultipleObjective(self):
        r"""
        Test whether the objective function is a scalar or vector function.

        Returns
        -------
        value : bool
            *False* if the objective function is scalar, *True* otherwise.
        """
        return _optim.OptimizationProblemImplementation_hasMultipleObjective(self)

    def getEqualityConstraint(self):
        r"""
        Accessor to equality constraints.

        Returns
        -------
        equality : :class:`~openturns.Function`
            Equality constraints.
        """
        return _optim.OptimizationProblemImplementation_getEqualityConstraint(self)

    def setEqualityConstraint(self, equalityConstraint):
        r"""
        Accessor to equality constraints.

        Parameters
        ----------
        equalityConstraint : :class:`~openturns.Function`
            Equality constraints.
        """
        return _optim.OptimizationProblemImplementation_setEqualityConstraint(self, equalityConstraint)

    def hasEqualityConstraint(self):
        r"""
        Test whether equality constraints have been specified.

        Returns
        -------
        value : bool
            *True* if equality constraints have been set for this problem, *False* otherwise.
        """
        return _optim.OptimizationProblemImplementation_hasEqualityConstraint(self)

    def getInequalityConstraint(self):
        r"""
        Accessor to inequality constraints.

        Returns
        -------
        inequality : :class:`~openturns.Function`
            Inequality constraints.
        """
        return _optim.OptimizationProblemImplementation_getInequalityConstraint(self)

    def setInequalityConstraint(self, inequalityConstraint):
        r"""
        Accessor to inequality constraints.

        Parameters
        ----------
        inequalityConstraint : :class:`~openturns.Function`
            Inequality constraints.
        """
        return _optim.OptimizationProblemImplementation_setInequalityConstraint(self, inequalityConstraint)

    def hasInequalityConstraint(self):
        r"""
        Test whether inequality constraints have been specified.

        Returns
        -------
        value : bool
            *True* if inequality constraints have been set for this problem, *False* otherwise.
        """
        return _optim.OptimizationProblemImplementation_hasInequalityConstraint(self)

    def getBounds(self):
        r"""
        Accessor to bounds.

        Returns
        -------
        bounds : :class:`~openturns.Interval`
            Problem bounds.
        """
        return _optim.OptimizationProblemImplementation_getBounds(self)

    def setBounds(self, bounds):
        r"""
        Accessor to bounds.

        Parameters
        ----------
        bounds : :class:`~openturns.Interval`
            Problem bounds.
        """
        return _optim.OptimizationProblemImplementation_setBounds(self, bounds)

    def hasBounds(self):
        r"""
        Test whether bounds have been specified.

        Returns
        -------
        value : bool
            *True* if bounds have been set for this problem, *False* otherwise.
        """
        return _optim.OptimizationProblemImplementation_hasBounds(self)

    def getLevelFunction(self):
        r"""
        Accessor to level function.

        Returns
        -------
        level : :class:`~openturns.Function`
            Level function.
        """
        return _optim.OptimizationProblemImplementation_getLevelFunction(self)

    def setLevelFunction(self, levelFunction):
        r"""
        Accessor to level function.

        Parameters
        ----------
        levelFunction : :class:`~openturns.Function`
            Level function.
        """
        return _optim.OptimizationProblemImplementation_setLevelFunction(self, levelFunction)

    def hasLevelFunction(self):
        r"""
        Test whether a level function has been specified.

        Returns
        -------
        value : bool
            *True* if a level function has been set for this problem, *False* otherwise.
        """
        return _optim.OptimizationProblemImplementation_hasLevelFunction(self)

    def getLevelValue(self):
        r"""
        Accessor to level value.

        Returns
        -------
        value : float
            Level value.
        """
        return _optim.OptimizationProblemImplementation_getLevelValue(self)

    def setLevelValue(self, levelValue):
        r"""
        Accessor to level value.

        Parameters
        ----------
        levelValue : float
            Level value.
        """
        return _optim.OptimizationProblemImplementation_setLevelValue(self, levelValue)

    def getResidualFunction(self):
        r"""
        Accessor to the residual function.

        Returns
        -------
        residualFunction : :class:`~openturns.Function`
            Residual function.
        """
        return _optim.OptimizationProblemImplementation_getResidualFunction(self)

    def setResidualFunction(self, residualFunction):
        r"""
        Accessor to the residual function.

        Parameters
        ----------
        residualFunction : :class:`~openturns.Function`
            Residual function.
        """
        return _optim.OptimizationProblemImplementation_setResidualFunction(self, residualFunction)

    def hasResidualFunction(self):
        r"""
        Test whether a least-squares problem is defined.

        Returns
        -------
        value : bool
            *True* if this is a least-squares problem, *False* otherwise.
        """
        return _optim.OptimizationProblemImplementation_hasResidualFunction(self)

    def getDimension(self):
        r"""
        Accessor to input dimension.

        Returns
        -------
        dimension : int
            Input dimension of objective function.
        """
        return _optim.OptimizationProblemImplementation_getDimension(self)

    def setMinimization(self, minimization, marginalIndex=0):
        r"""
        Tell whether this is a minimization or maximization problem.

        Parameters
        ----------
        minimization : bool
            *True* if this is a minimization problem, *False* otherwise.
        marginalIndex : int, default=0
            Index of the output marginal (for multi-objective only).
        """
        return _optim.OptimizationProblemImplementation_setMinimization(self, minimization, marginalIndex)

    def isMinimization(self, marginalIndex=0):
        r"""
        Test whether this is a minimization or maximization problem.

        Parameters
        ----------
        marginalIndex : int, default=0
            Index of the output marginal (for multi-objective only).

        Returns
        -------
        value : bool
            *True* if this is a minimization problem (default), *False* otherwise.
        """
        return _optim.OptimizationProblemImplementation_isMinimization(self, marginalIndex)

    def setVariablesType(self, variableType):
        r"""
        Accessor to the variables type.

        Parameters
        ----------
        variableType : :class:`~openturns.Indices`
            Types of the variables.

        Notes
        -----
        Possible values for each variable are `ot.OptimizationProblemImplementation.CONTINUOUS`, `ot.OptimizationProblemImplementation.INTEGER` and `ot.OptimizationProblemImplementation.BINARY`.
        """
        return _optim.OptimizationProblemImplementation_setVariablesType(self, variableType)

    def getVariablesType(self):
        r"""
        Accessor to the variables type.

        Returns
        -------
        variablesType : :class:`~openturns.Indices`
            Types of the variables.

        Notes
        -----
        Possible values for each variable are `ot.OptimizationProblemImplementation.CONTINUOUS`, `ot.OptimizationProblemImplementation.INTEGER` and `ot.OptimizationProblemImplementation.BINARY`.
        """
        return _optim.OptimizationProblemImplementation_getVariablesType(self)

    def isContinuous(self):
        r"""
        Check if the problem is continuous.

        Returns
        -------
        isContinuous : bool
            *True* if all variables are continuous, *False* otherwise.
        """
        return _optim.OptimizationProblemImplementation_isContinuous(self)

    def __repr__(self):
        return _optim.OptimizationProblemImplementation___repr__(self)

    def __init__(self, *args):
        _optim.OptimizationProblemImplementation_swiginit(self, _optim.new_OptimizationProblemImplementation(*args))
    __swig_destroy__ = _optim.delete_OptimizationProblemImplementation

# Register OptimizationProblemImplementation in _optim:
_optim.OptimizationProblemImplementation_swigregister(OptimizationProblemImplementation)
class _OptimizationProblemImplementationTypedInterfaceObject(openturns.common.InterfaceObject):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _optim._OptimizationProblemImplementationTypedInterfaceObject_swiginit(self, _optim.new__OptimizationProblemImplementationTypedInterfaceObject(*args))

    def getImplementation(self):
        r"""
        Accessor to the underlying implementation.

        Returns
        -------
        impl : Implementation
            A copy of the underlying implementation object.
        """
        return _optim._OptimizationProblemImplementationTypedInterfaceObject_getImplementation(self)

    def setName(self, name):
        r"""
        Accessor to the object's name.

        Parameters
        ----------
        name : str
            The name of the object.
        """
        return _optim._OptimizationProblemImplementationTypedInterfaceObject_setName(self, name)

    def getName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        name : str
            The name of the object.
        """
        return _optim._OptimizationProblemImplementationTypedInterfaceObject_getName(self)

    def __eq__(self, other):
        return _optim._OptimizationProblemImplementationTypedInterfaceObject___eq__(self, other)

    def __ne__(self, other):
        return _optim._OptimizationProblemImplementationTypedInterfaceObject___ne__(self, other)
    __swig_destroy__ = _optim.delete__OptimizationProblemImplementationTypedInterfaceObject

# Register _OptimizationProblemImplementationTypedInterfaceObject in _optim:
_optim._OptimizationProblemImplementationTypedInterfaceObject_swigregister(_OptimizationProblemImplementationTypedInterfaceObject)
class OptimizationProblem(_OptimizationProblemImplementationTypedInterfaceObject):
    r"""
    Base class to define an optimization problem.

    This represents a general optimization problem:

    .. math::

        \min_{x\in B} f(x) \\
        g(x) = 0 \\
        h(x) \ge 0

    where *B* is the problem's bounds, *f* is the objective function, *g* are the equality constraints, and *h* are the inequality constraints.

    Available constructors:
        OptimizationProblem(*objective*)

        OptimizationProblem(*objective, equality, inequality, bounds*)

    Parameters
    ----------
    objective : :class:`~openturns.Function`
        Objective function. Additional constraints and bounds must always be
        consistent with the objective input dimension.
    equality : :class:`~openturns.Function`
        Equality constraints.
    inequality : :class:`~openturns.Function`
        Inequality constraints.
    bounds : :class:`~openturns.Interval`
        Bounds.

    Examples
    --------

    Define an optimization problem to find the minimum of the Rosenbrock function:

    >>> import openturns as ot
    >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
    >>> problem = ot.OptimizationProblem(rosenbrock)
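
    A constrained variant can be built with the four-argument constructor
    (a minimal sketch: an empty :class:`~openturns.Function` stands for the
    absent equality constraint, and the inequality is understood as :math:`h(x) \ge 0`):

    >>> bounds = ot.Interval([-2.0] * 2, [2.0] * 2)
    >>> inequality = ot.SymbolicFunction(['x1', 'x2'], ['2 - x1 - x2'])
    >>> constrained = ot.OptimizationProblem(rosenbrock, ot.Function(), inequality, bounds)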
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.OptimizationProblem_getClassName(self)

    def getObjective(self):
        r"""
        Accessor to objective function.

        Returns
        -------
        objective : :class:`~openturns.Function`
            Objective function.
        """
        return _optim.OptimizationProblem_getObjective(self)

    def setObjective(self, objective):
        r"""
        Accessor to objective function.

        Parameters
        ----------
        objectiveFunction : :class:`~openturns.Function`
            Objective function.

        Notes
        -----
        Constraints and bounds are cleared if the new objective has a different input
        dimension, in order to keep the problem valid at all times.
        """
        return _optim.OptimizationProblem_setObjective(self, objective)

    def hasMultipleObjective(self):
        r"""
        Test whether the objective function is a scalar or vector function.

        Returns
        -------
        value : bool
            *False* if the objective function is scalar, *True* otherwise.
        """
        return _optim.OptimizationProblem_hasMultipleObjective(self)

    def getEqualityConstraint(self):
        r"""
        Accessor to equality constraints.

        Returns
        -------
        equality : :class:`~openturns.Function`
            Equality constraints.
        """
        return _optim.OptimizationProblem_getEqualityConstraint(self)

    def setEqualityConstraint(self, equalityConstraint):
        r"""
        Accessor to equality constraints.

        Parameters
        ----------
        equalityConstraint : :class:`~openturns.Function`
            Equality constraints.
        """
        return _optim.OptimizationProblem_setEqualityConstraint(self, equalityConstraint)

    def hasEqualityConstraint(self):
        r"""
        Test whether equality constraints have been specified.

        Returns
        -------
        value : bool
            *True* if equality constraints have been set for this problem, *False* otherwise.
        """
        return _optim.OptimizationProblem_hasEqualityConstraint(self)

    def getInequalityConstraint(self):
        r"""
        Accessor to inequality constraints.

        Returns
        -------
        inequality : :class:`~openturns.Function`
            Inequality constraints.
        """
        return _optim.OptimizationProblem_getInequalityConstraint(self)

    def setInequalityConstraint(self, inequalityConstraint):
        r"""
        Accessor to inequality constraints.

        Parameters
        ----------
        inequalityConstraint : :class:`~openturns.Function`
            Inequality constraints.
        """
        return _optim.OptimizationProblem_setInequalityConstraint(self, inequalityConstraint)

    def hasInequalityConstraint(self):
        r"""
        Test whether inequality constraints have been specified.

        Returns
        -------
        value : bool
            *True* if inequality constraints have been set for this problem, *False* otherwise.
        """
        return _optim.OptimizationProblem_hasInequalityConstraint(self)

    def getBounds(self):
        r"""
        Accessor to bounds.

        Returns
        -------
        bounds : :class:`~openturns.Interval`
            Problem bounds.
        """
        return _optim.OptimizationProblem_getBounds(self)

    def setBounds(self, bounds):
        r"""
        Accessor to bounds.

        Parameters
        ----------
        bounds : :class:`~openturns.Interval`
            Problem bounds.
        """
        return _optim.OptimizationProblem_setBounds(self, bounds)

    def hasBounds(self):
        r"""
        Test whether bounds have been specified.

        Returns
        -------
        value : bool
            *True* if bounds have been set for this problem, *False* otherwise.
        """
        return _optim.OptimizationProblem_hasBounds(self)

    def getLevelFunction(self):
        r"""
        Accessor to level function.

        Returns
        -------
        level : :class:`~openturns.Function`
            Level function.
        """
        return _optim.OptimizationProblem_getLevelFunction(self)

    def setLevelFunction(self, levelFunction):
        r"""
        Accessor to level function.

        Parameters
        ----------
        levelFunction : :class:`~openturns.Function`
            Level function.
        """
        return _optim.OptimizationProblem_setLevelFunction(self, levelFunction)

    def hasLevelFunction(self):
        r"""
        Test whether a level function has been specified.

        Returns
        -------
        value : bool
            *True* if a level function has been set for this problem, *False* otherwise.
        """
        return _optim.OptimizationProblem_hasLevelFunction(self)

    def getLevelValue(self):
        r"""
        Accessor to level value.

        Returns
        -------
        value : float
            Level value.
        """
        return _optim.OptimizationProblem_getLevelValue(self)

    def setLevelValue(self, levelValue):
        r"""
        Accessor to level value.

        Parameters
        ----------
        levelValue : float
            Level value.
        """
        return _optim.OptimizationProblem_setLevelValue(self, levelValue)

    def getResidualFunction(self):
        r"""
        Accessor to the residual function.

        Returns
        -------
        residualFunction : :class:`~openturns.Function`
            Residual function.
        """
        return _optim.OptimizationProblem_getResidualFunction(self)

    def setResidualFunction(self, residualFunction):
        r"""
        Accessor to the residual function.

        Parameters
        ----------
        residualFunction : :class:`~openturns.Function`
            Residual function.
        """
        return _optim.OptimizationProblem_setResidualFunction(self, residualFunction)

    def hasResidualFunction(self):
        r"""
        Test whether a least-squares problem is defined.

        Returns
        -------
        value : bool
            *True* if this is a least-squares problem, *False* otherwise.
        """
        return _optim.OptimizationProblem_hasResidualFunction(self)

    def getDimension(self):
        r"""
        Accessor to input dimension.

        Returns
        -------
        dimension : int
            Input dimension of objective function.
        """
        return _optim.OptimizationProblem_getDimension(self)

    def setMinimization(self, minimization, marginalIndex=0):
        r"""
        Tell whether this is a minimization or maximization problem.

        Parameters
        ----------
        minimization : bool
            *True* if this is a minimization problem, *False* otherwise.
        marginalIndex : int, default=0
            Index of the output marginal (for multi-objective only).
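
        Examples
        --------
        A minimal sketch switching a problem to maximization:

        >>> import openturns as ot
        >>> problem = ot.OptimizationProblem(ot.SymbolicFunction(['x'], ['-(x-1)^2']))
        >>> problem.setMinimization(False)
        >>> problem.isMinimization()
        False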
        """
        return _optim.OptimizationProblem_setMinimization(self, minimization, marginalIndex)

    def isMinimization(self, marginalIndex=0):
        r"""
        Test whether this is a minimization or maximization problem.

        Parameters
        ----------
        marginalIndex : int, default=0
            Index of the output marginal (for multi-objective only).

        Returns
        -------
        value : bool
            *True* if this is a minimization problem (default), *False* otherwise.
        """
        return _optim.OptimizationProblem_isMinimization(self, marginalIndex)

    def setVariablesType(self, variableType):
        r"""
        Accessor to the variables type.

        Parameters
        ----------
        variableType : :class:`~openturns.Indices`
            Types of the variables.

        Notes
        -----
        Possible values for each variable are `ot.OptimizationProblemImplementation.CONTINUOUS`, `ot.OptimizationProblemImplementation.INTEGER` and `ot.OptimizationProblemImplementation.BINARY`.
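
        Examples
        --------
        A minimal sketch declaring a mixed continuous/integer problem:

        >>> import openturns as ot
        >>> objective = ot.SymbolicFunction(['x1', 'x2'], ['x1^2 + x2^2'])
        >>> problem = ot.OptimizationProblem(objective)
        >>> problem.setVariablesType([ot.OptimizationProblemImplementation.CONTINUOUS, ot.OptimizationProblemImplementation.INTEGER])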
        """
        return _optim.OptimizationProblem_setVariablesType(self, variableType)

    def getVariablesType(self):
        r"""
        Accessor to the variables type.

        Returns
        -------
        variablesType : :class:`~openturns.Indices`
            Types of the variables.

        Notes
        -----
        Possible values for each variable are `ot.OptimizationProblemImplementation.CONTINUOUS`, `ot.OptimizationProblemImplementation.INTEGER` and `ot.OptimizationProblemImplementation.BINARY`.
        """
        return _optim.OptimizationProblem_getVariablesType(self)

    def isContinuous(self):
        r"""
        Check if the problem is continuous.

        Returns
        -------
        isContinuous : bool
            *True* if all variables are continuous, *False* otherwise.
        """
        return _optim.OptimizationProblem_isContinuous(self)

    def __repr__(self):
        return _optim.OptimizationProblem___repr__(self)

    def __str__(self, *args):
        return _optim.OptimizationProblem___str__(self, *args)

    def __init__(self, *args):
        _optim.OptimizationProblem_swiginit(self, _optim.new_OptimizationProblem(*args))
    __swig_destroy__ = _optim.delete_OptimizationProblem

# Register OptimizationProblem in _optim:
_optim.OptimizationProblem_swigregister(OptimizationProblem)
class NearestPointProblem(OptimizationProblemImplementation):
    r"""
    Nearest point problem.

    This defines a nearest point problem:

    .. math::

        \min_{x} \frac{1}{2}\|x\|_2^2 \\
        g(x) = v

    where :math:`\| \cdot \|_2` is the Euclidean norm.

    In other words, this is a minimum-norm problem
    with a general nonlinear constraint.

    Parameters
    ----------
    level : :class:`~openturns.Function`
        The level function :math:`g`.
    value : float
        The level value :math:`v`.

    Examples
    --------

    Define an optimization problem to find the point :math:`(x_1, x_2, x_3, x_4)` with minimum Euclidean norm that satisfies :math:`x_1+2x_2-3x_3+4x_4=3`.

    >>> import openturns as ot
    >>> levelFunction = ot.SymbolicFunction(
    ...     ['x1', 'x2', 'x3', 'x4'], ['x1+2*x2-3*x3+4*x4'])
    >>> problem = ot.NearestPointProblem(levelFunction, 3.0)
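
    A sketch of solving it, here with :class:`~openturns.Cobyla` (any solver
    handling nearest-point problems works):

    >>> algo = ot.Cobyla(problem)
    >>> algo.setStartingPoint([0.0] * 4)
    >>> algo.run()
    >>> nearestPoint = algo.getResult().getOptimalPoint()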
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.NearestPointProblem_getClassName(self)

    def getLevelFunction(self):
        r"""
        Accessor to level function.

        Returns
        -------
        level : :class:`~openturns.Function`
            Level function.
        """
        return _optim.NearestPointProblem_getLevelFunction(self)

    def setLevelFunction(self, levelFunction):
        r"""
        Accessor to level function.

        Parameters
        ----------
        levelFunction : :class:`~openturns.Function`
            Level function.
        """
        return _optim.NearestPointProblem_setLevelFunction(self, levelFunction)

    def hasLevelFunction(self):
        r"""
        Test whether a level function has been specified.

        Returns
        -------
        value : bool
            *True* if a level function has been set for this problem, *False* otherwise.
        """
        return _optim.NearestPointProblem_hasLevelFunction(self)

    def getLevelValue(self):
        r"""
        Accessor to level value.

        Returns
        -------
        value : float
            Level value.
        """
        return _optim.NearestPointProblem_getLevelValue(self)

    def setLevelValue(self, levelValue):
        r"""
        Accessor to level value.

        Parameters
        ----------
        levelValue : float
            Level value.
        """
        return _optim.NearestPointProblem_setLevelValue(self, levelValue)

    def __repr__(self):
        return _optim.NearestPointProblem___repr__(self)

    def __init__(self, *args):
        _optim.NearestPointProblem_swiginit(self, _optim.new_NearestPointProblem(*args))
    __swig_destroy__ = _optim.delete_NearestPointProblem

# Register NearestPointProblem in _optim:
_optim.NearestPointProblem_swigregister(NearestPointProblem)
class LeastSquaresProblem(OptimizationProblemImplementation):
    r"""
    Least-squares problem.

    This represents a least-squares problem:

    .. math::

        \min_{x} \|f(\vect{x})\|_2^2

    where *f* is the residual function.

    Parameters
    ----------
    residualFunction : :class:`~openturns.Function`
        Residual function.

    Examples
    --------
    >>> import openturns as ot
    >>> residualFunction = ot.SymbolicFunction(['x0', 'x1'], ['10 * (x1 - x0^2)', '0.5 - x0', '0.3 - 2 * x1'])
    >>> problem = ot.LeastSquaresProblem(residualFunction)
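
    A sketch of solving it, letting :class:`~openturns.OptimizationAlgorithm` pick a suitable solver:

    >>> solver = ot.OptimizationAlgorithm(problem)
    >>> solver.setStartingPoint([0.0, 0.0])
    >>> solver.run()
    >>> x_star = solver.getResult().getOptimalPoint()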
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.LeastSquaresProblem_getClassName(self)

    def getResidualFunction(self):
        r"""
        Accessor to the residual function.

        Returns
        -------
        residualFunction : :class:`~openturns.Function`
            Residual function.
        """
        return _optim.LeastSquaresProblem_getResidualFunction(self)

    def setResidualFunction(self, residualFunction):
        r"""
        Accessor to the residual function.

        Parameters
        ----------
        residualFunction : :class:`~openturns.Function`
            Residual function.
        """
        return _optim.LeastSquaresProblem_setResidualFunction(self, residualFunction)

    def hasResidualFunction(self):
        r"""
        Test whether a least-squares problem is defined.

        Returns
        -------
        value : bool
            *True* if this is a least-squares problem, *False* otherwise.
        """
        return _optim.LeastSquaresProblem_hasResidualFunction(self)

    def __repr__(self):
        return _optim.LeastSquaresProblem___repr__(self)

    def __init__(self, *args):
        _optim.LeastSquaresProblem_swiginit(self, _optim.new_LeastSquaresProblem(*args))
    __swig_destroy__ = _optim.delete_LeastSquaresProblem

# Register LeastSquaresProblem in _optim:
_optim.LeastSquaresProblem_swigregister(LeastSquaresProblem)
class OptimizationResult(openturns.common.PersistentObject):
    r"""
    Optimization result.

    Returned by optimization solvers, see :class:`~openturns.OptimizationAlgorithm`.

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Problem being solved.
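
    Examples
    --------
    A result is usually obtained from a solver rather than built directly
    (a minimal sketch):

    >>> import openturns as ot
    >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
    >>> problem = ot.OptimizationProblem(rosenbrock)
    >>> solver = ot.OptimizationAlgorithm(problem)
    >>> solver.setStartingPoint([0.5, 0.5])
    >>> solver.run()
    >>> result = solver.getResult()
    >>> x_star = result.getOptimalPoint()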
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.OptimizationResult_getClassName(self)

    def setOptimalPoint(self, optimalPoint):
        r"""
        Accessor to the optimal point.

        Parameters
        ----------
        optimalPoint : :class:`~openturns.Point`
            Optimal point
        """
        return _optim.OptimizationResult_setOptimalPoint(self, optimalPoint)

    def getOptimalPoint(self):
        r"""
        Accessor to the optimal point.

        Returns
        -------
        optimalPoint : :class:`~openturns.Point`
            Optimal point
        """
        return _optim.OptimizationResult_getOptimalPoint(self)

    def setOptimalValue(self, optimalValue):
        r"""
        Accessor to the optimal value.

        Parameters
        ----------
        optimalValue : :class:`~openturns.Point`
            Value at the optimal point
        """
        return _optim.OptimizationResult_setOptimalValue(self, optimalValue)

    def getOptimalValue(self):
        r"""
        Accessor to the optimal value.

        Returns
        -------
        optimalValue : :class:`~openturns.Point`
            Value at the optimal point
        """
        return _optim.OptimizationResult_getOptimalValue(self)

    def setFinalPoints(self, finalPoints):
        r"""
        Accessor to the final points.

        Parameters
        ----------
        finalPoints : :class:`~openturns.Sample`
            Final population
        """
        return _optim.OptimizationResult_setFinalPoints(self, finalPoints)

    def getFinalPoints(self):
        r"""
        Accessor to the final points.

        Returns
        -------
        finalPoints : :class:`~openturns.Sample`
            Final population.
            For non-evolutionary algorithms this will return the optimal point.
        """
        return _optim.OptimizationResult_getFinalPoints(self)

    def setFinalValues(self, finalValues):
        r"""
        Accessor to the final values.

        Parameters
        ----------
        finalValues : :class:`~openturns.Sample`
            Values at the final points
        """
        return _optim.OptimizationResult_setFinalValues(self, finalValues)

    def getFinalValues(self):
        r"""
        Accessor to the final values.

        Returns
        -------
        finalValues : :class:`~openturns.Sample`
            Values at the final points.
            For non-evolutionary algorithms this will return the optimal value.
        """
        return _optim.OptimizationResult_getFinalValues(self)

    def setCallsNumber(self, callsNumber):
        r"""
        Accessor to the number of calls.

        Parameters
        ----------
        callsNumber : int
            Number of objective calls.
        """
        return _optim.OptimizationResult_setCallsNumber(self, callsNumber)

    def getCallsNumber(self):
        r"""
        Accessor to the number of calls.

        Returns
        -------
        callsNumber : int
            Number of objective calls.
        """
        return _optim.OptimizationResult_getCallsNumber(self)

    def setIterationNumber(self, iterationNumber):
        r"""
        Accessor to the number of iterations.

        Parameters
        ----------
        iterationNumber : int
            Number of iterations.
        """
        return _optim.OptimizationResult_setIterationNumber(self, iterationNumber)

    def getIterationNumber(self):
        r"""
        Accessor to the number of iterations.

        Returns
        -------
        iterationNumber : int
            Number of iterations.
        """
        return _optim.OptimizationResult_getIterationNumber(self)

    def getAbsoluteError(self):
        r"""
        Accessor to the absolute error.

        Returns
        -------
        absoluteError : float
            Absolute error of the input point :math:`\vect{x}`, defined by :math:`\epsilon^a_n=\|\vect{x}_{n+1}-\vect{x}_n\|_{\infty}` where :math:`\vect{x}_{n+1}` and :math:`\vect{x}_n` are two consecutive approximations of the optimum.
        """
        return _optim.OptimizationResult_getAbsoluteError(self)

    def getAbsoluteErrorHistory(self):
        r"""
        Accessor to the evolution of the absolute error.

        Returns
        -------
        absoluteErrorHistory : :class:`~openturns.Sample`
            Value of the absolute error at each function evaluation.
        """
        return _optim.OptimizationResult_getAbsoluteErrorHistory(self)

    def getRelativeError(self):
        r"""
        Accessor to the relative error.

        Returns
        -------
        relativeError : float
            Relative error of the input point :math:`\vect{x}`. If :math:`\|\vect{x}_{n+1}\|_{\infty}\neq 0`, then the relative error is :math:`\epsilon^r_n=\epsilon^a_n/\|\vect{x}_{n+1}\|_{\infty}` where :math:`\epsilon^a_n=\|\vect{x}_{n+1}-\vect{x}_n\|_{\infty}` is the absolute error. Otherwise, the relative error is :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationResult_getRelativeError(self)

    def getRelativeErrorHistory(self):
        r"""
        Accessor to the evolution of the relative error.

        Returns
        -------
        relativeErrorHistory : :class:`~openturns.Sample`
            Value of the relative error at each function evaluation.
        """
        return _optim.OptimizationResult_getRelativeErrorHistory(self)

    def getResidualError(self):
        r"""
        Accessor to the residual error.

        Returns
        -------
        residualError : float
            Residual error, defined by :math:`\epsilon^r_n=\frac{\|f(\vect{x}_{n+1})-f(\vect{x}_{n})\|}{\|f(\vect{x}_{n+1})\|}` if :math:`\|f(\vect{x}_{n+1})\|\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationResult_getResidualError(self)

    def getResidualErrorHistory(self):
        r"""
        Accessor to the evolution of the residual error.

        Returns
        -------
        residualErrorHistory : :class:`~openturns.Sample`
            Value of the residual error at each function evaluation.
        """
        return _optim.OptimizationResult_getResidualErrorHistory(self)

    def getConstraintError(self):
        r"""
        Accessor to the constraint error.

        Returns
        -------
        constraintError : float
            Constraint error, defined by :math:`\gamma_n=\|g(\vect{x}_n)\|_{\infty}` where :math:`\vect{x}_n` is the current approximation of the optimum and :math:`g` is the function that gathers all the equality and inequality constraints (violated values only).
        """
        return _optim.OptimizationResult_getConstraintError(self)

    def getConstraintErrorHistory(self):
        r"""
        Accessor to the evolution of the constraint error.

        Returns
        -------
        constraintErrorHistory : :class:`~openturns.Sample`
            Value of the constraint error at each function evaluation.
        """
        return _optim.OptimizationResult_getConstraintErrorHistory(self)

    def getInputSample(self):
        r"""
        Accessor to the input sample.

        Returns
        -------
        inputSample : :class:`~openturns.Sample`
            Input points used by the solver
        """
        return _optim.OptimizationResult_getInputSample(self)

    def getOutputSample(self):
        r"""
        Accessor to the output sample.

        Returns
        -------
        outputSample : :class:`~openturns.Sample`
            Output points used by the solver
        """
        return _optim.OptimizationResult_getOutputSample(self)

    def setProblem(self, problem):
        r"""
        Accessor to the underlying optimization problem.

        Parameters
        ----------
        problem : :class:`~openturns.OptimizationProblem`
            Problem corresponding to the result
        """
        return _optim.OptimizationResult_setProblem(self, problem)

    def getProblem(self):
        r"""
        Accessor to the underlying optimization problem.

        Returns
        -------
        problem : :class:`~openturns.OptimizationProblem`
            Problem corresponding to the result
        """
        return _optim.OptimizationResult_getProblem(self)

    def computeLagrangeMultipliers(self, *args):
        r"""
        Compute the Lagrange multipliers.

        Parameters
        ----------
        x : sequence of float, optional
            Location where the multipliers are computed.
            If not provided, the optimal point is used.

        Returns
        -------
        lagrangeMultiplier : sequence of float
            Lagrange multipliers of the problem at point x.
            Computing them needs an extra call to the objective function gradient unless it can be
            computed during the optimization (AbdoRackwitz or SQP).

        Notes
        -----
        The Lagrange multipliers :math:`\vect{\lambda}` are associated with the following Lagrangian formulation of the optimization problem:

        .. math::

            \cL(\vect{x}, \vect{\lambda}_{eq}, \vect{\lambda}_{\ell}, \vect{\lambda}_{u}, \vect{\lambda}_{ineq}) = J(\vect{x}) + \Tr{\vect{\lambda}}_{eq} g(\vect{x}) + \Tr{\vect{\lambda}}_{\ell} (\vect{x}-\vect{\ell})^{+} + \Tr{\vect{\lambda}}_{u} (\vect{u}-\vect{x})^{+} + \Tr{\vect{\lambda}}_{ineq}  h^{+}(\vect{x})

        where :math:`\vect{\alpha}^{+}=(\max(0,\alpha_1),\hdots,\max(0,\alpha_n))`.

        The Lagrange multipliers are stored as :math:`(\vect{\lambda}_{eq}, \vect{\lambda}_{\ell}, \vect{\lambda}_{u}, \vect{\lambda}_{ineq})`, where:
            - :math:`\vect{\lambda}_{eq}` is of dimension 0 if there is no equality
              constraint, else its dimension is that of :math:`g(\vect{x})`, i.e. the number of scalar equality constraints
            - :math:`\vect{\lambda}_{\ell}` and :math:`\vect{\lambda}_{u}` are of dimension 0 if there is no bound constraint, else of the dimension of :math:`\vect{x}`
            - :math:`\vect{\lambda}_{ineq}` is of dimension 0 if there is no inequality constraint, else its dimension is that of :math:`h(\vect{x})`, i.e. the number of scalar inequality constraints

        The vector :math:`\vect{\lambda}` is solution of the following linear system:

        .. math::

            \Tr{\vect{\lambda}}_{eq}\left[\dfrac{\partial g}{\partial\vect{x}}(\vect{x})\right]+
            \Tr{\vect{\lambda}}_{\ell}\left[\dfrac{\partial (\vect{x}-\vect{\ell})^{+}}{\partial\vect{x}}(\vect{x})\right]+
            \Tr{\vect{\lambda}}_{u}\left[\dfrac{\partial (\vect{u}-\vect{x})^{+}}{\partial\vect{x}}(\vect{x})\right]+
            \Tr{\vect{\lambda}}_{ineq}\left[\dfrac{\partial h}{\partial\vect{x}}(\vect{x})\right]=-\dfrac{\partial J}{\partial\vect{x}}(\vect{x})

        If there is no constraint of any kind, :math:`\vect{\lambda}` is of dimension 0; the same holds if no constraint is active.
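
        Examples
        --------
        A minimal usage sketch, assuming ``result`` is an
        :class:`~openturns.OptimizationResult` from a constrained solve:

        >>> multipliers = result.computeLagrangeMultipliers()  # doctest: +SKIP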
        """
        return _optim.OptimizationResult_computeLagrangeMultipliers(self, *args)

    def __repr__(self):
        return _optim.OptimizationResult___repr__(self)

    def drawErrorHistory(self):
        r"""
        Draw the convergence criteria history.

        Returns
        -------
        graph : :class:`~openturns.Graph`
            Convergence criteria history graph
        """
        return _optim.OptimizationResult_drawErrorHistory(self)

    def drawOptimalValueHistory(self):
        r"""
        Draw the optimal value history.

        Returns
        -------
        graph : :class:`~openturns.Graph`
            Optimal value history graph
        """
        return _optim.OptimizationResult_drawOptimalValueHistory(self)

    def setParetoFrontsIndices(self, indices):
        r"""
        Accessor to the Pareto fronts indices in the final population.

        Parameters
        ----------
        indices : 2-d sequence of int
            Pareto fronts indices
        """
        return _optim.OptimizationResult_setParetoFrontsIndices(self, indices)

    def getParetoFrontsIndices(self):
        r"""
        Accessor to the Pareto fronts indices in the final population.

        In the multi-objective case, it consists of strata of points in the final population.
        The first front contains the best candidates according to the objectives.

        Returns
        -------
        indices : list of :class:`~openturns.Indices`
            Pareto fronts indices
        """
        return _optim.OptimizationResult_getParetoFrontsIndices(self)

    def setStatusMessage(self, statusMessage):
        r"""
        Accessor to the specialized status string.

        Parameters
        ----------
        statusMessage : str
            Convergence or error message
        """
        return _optim.OptimizationResult_setStatusMessage(self, statusMessage)

    def getStatusMessage(self):
        r"""
        Accessor to the specialized status string.

        Returns
        -------
        statusMessage : str
            Convergence or error message
        """
        return _optim.OptimizationResult_getStatusMessage(self)
    SUCCESS = _optim.OptimizationResult_SUCCESS
    FAILURE = _optim.OptimizationResult_FAILURE
    TIMEOUT = _optim.OptimizationResult_TIMEOUT
    INTERRUPTION = _optim.OptimizationResult_INTERRUPTION
    MAXIMUMCALLS = _optim.OptimizationResult_MAXIMUMCALLS

    def setStatus(self, status):
        r"""
        Accessor to the generic status.

        Parameters
        ----------
        status : int
            Status flag; possible values are SUCCESS, FAILURE, TIMEOUT, INTERRUPTION, MAXIMUMCALLS.
        """
        return _optim.OptimizationResult_setStatus(self, status)

    def getStatus(self):
        r"""
        Accessor to the generic status.

        Returns
        -------
        status : int
            Status flag; possible values are SUCCESS, FAILURE, TIMEOUT, INTERRUPTION, MAXIMUMCALLS.
        """
        return _optim.OptimizationResult_getStatus(self)

    def setTimeDuration(self, time):
        r"""
        Accessor to the elapsed time.

        Parameters
        ----------
        time : float
            Optimization duration in seconds
        """
        return _optim.OptimizationResult_setTimeDuration(self, time)

    def getTimeDuration(self):
        r"""
        Accessor to the elapsed time.

        Returns
        -------
        time : float
            Optimization duration in seconds
        """
        return _optim.OptimizationResult_getTimeDuration(self)

    def __init__(self, *args):
        _optim.OptimizationResult_swiginit(self, _optim.new_OptimizationResult(*args))
    __swig_destroy__ = _optim.delete_OptimizationResult

# Register OptimizationResult in _optim:
_optim.OptimizationResult_swigregister(OptimizationResult)
class OptimizationAlgorithmImplementation(openturns.common.PersistentObject):
    r"""
    Base class for optimization wrappers.

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Optimization problem.

    Notes
    -----
    Class :class:`~openturns.OptimizationAlgorithm` is an abstract class, which has several implementations.
    The default implementation is :class:`~openturns.Cobyla`.

    See also
    --------
    AbdoRackwitz, Cobyla, SQP, TNC, NLopt

    Examples
    --------
    Define an optimization problem to find the minimum of the Rosenbrock function:

    >>> import openturns as ot
    >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
    >>> problem = ot.OptimizationProblem(rosenbrock)
    >>> solver = ot.OptimizationAlgorithm(problem)
    >>> solver.setStartingPoint([0, 0])
    >>> solver.setMaximumResidualError(1.e-3)
    >>> solver.setMaximumCallsNumber(10000)
    >>> solver.run()
    >>> result = solver.getResult()
    >>> x_star = result.getOptimalPoint()
    >>> y_star = result.getOptimalValue()
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.OptimizationAlgorithmImplementation_getClassName(self)

    def __init__(self, *args):
        _optim.OptimizationAlgorithmImplementation_swiginit(self, _optim.new_OptimizationAlgorithmImplementation(*args))

    def run(self):
        r"""Launch the optimization."""
        return _optim.OptimizationAlgorithmImplementation_run(self)

    def getStartingPoint(self):
        r"""
        Accessor to starting point.

        Returns
        -------
        startingPoint : :class:`~openturns.Point`
            Starting point.
        """
        return _optim.OptimizationAlgorithmImplementation_getStartingPoint(self)

    def setStartingPoint(self, startingPoint):
        r"""
        Accessor to starting point.

        Parameters
        ----------
        startingPoint : :class:`~openturns.Point`
            Starting point.
        """
        return _optim.OptimizationAlgorithmImplementation_setStartingPoint(self, startingPoint)

    def getProblem(self):
        r"""
        Accessor to optimization problem.

        Returns
        -------
        problem : :class:`~openturns.OptimizationProblem`
            Optimization problem.
        """
        return _optim.OptimizationAlgorithmImplementation_getProblem(self)

    def setProblem(self, problem):
        r"""
        Accessor to optimization problem.

        Parameters
        ----------
        problem : :class:`~openturns.OptimizationProblem`
            Optimization problem.
        """
        return _optim.OptimizationAlgorithmImplementation_setProblem(self, problem)

    def getResult(self):
        r"""
        Accessor to optimization result.

        Returns
        -------
        result : :class:`~openturns.OptimizationResult`
            Result class.
        """
        return _optim.OptimizationAlgorithmImplementation_getResult(self)

    def setResult(self, result):
        r"""
        Accessor to optimization result.

        Parameters
        ----------
        result : :class:`~openturns.OptimizationResult`
            Result class.
        """
        return _optim.OptimizationAlgorithmImplementation_setResult(self, result)

    def setMaximumIterationNumber(self, maximumIterationNumber):
        r"""
        Accessor to maximum allowed number of iterations.

        Parameters
        ----------
        maximumIterationNumber : int
            Maximum allowed number of iterations.
        """
        return _optim.OptimizationAlgorithmImplementation_setMaximumIterationNumber(self, maximumIterationNumber)

    def getMaximumIterationNumber(self):
        r"""
        Accessor to maximum allowed number of iterations.

        Returns
        -------
        maximumIterationNumber : int
            Maximum allowed number of iterations.
        """
        return _optim.OptimizationAlgorithmImplementation_getMaximumIterationNumber(self)

    def setMaximumCallsNumber(self, maximumCallsNumber):
        r"""
        Accessor to maximum allowed number of calls.

        Parameters
        ----------
        maximumCallsNumber : int
            Maximum allowed number of direct objective function calls through the `()` operator.
            This does not account for indirect calls through finite-difference gradient evaluations.
        """
        return _optim.OptimizationAlgorithmImplementation_setMaximumCallsNumber(self, maximumCallsNumber)

    def getMaximumCallsNumber(self):
        r"""
        Accessor to maximum allowed number of calls.

        Returns
        -------
        maximumCallsNumber : int
            Maximum allowed number of direct objective function calls through the `()` operator.
            This does not account for indirect calls through finite-difference gradient evaluations.
        """
        return _optim.OptimizationAlgorithmImplementation_getMaximumCallsNumber(self)

    def getMaximumAbsoluteError(self):
        r"""
        Accessor to maximum allowed absolute error.

        Returns
        -------
        maximumAbsoluteError : float
            Maximum allowed absolute error, where the absolute error is defined by
            :math:`\epsilon^a_n=\|\vect{x}_{n+1}-\vect{x}_n\|_{\infty}` where :math:`\vect{x}_{n+1}`
            and :math:`\vect{x}_n` are two consecutive approximations of the optimum.
        """
        return _optim.OptimizationAlgorithmImplementation_getMaximumAbsoluteError(self)

    def setMaximumAbsoluteError(self, maximumAbsoluteError):
        r"""
        Accessor to maximum allowed absolute error.

        Parameters
        ----------
        maximumAbsoluteError : float
            Maximum allowed absolute error, where the absolute error is defined by
            :math:`\epsilon^a_n=\|\vect{x}_{n+1}-\vect{x}_n\|_{\infty}` where :math:`\vect{x}_{n+1}`
            and :math:`\vect{x}_n` are two consecutive approximations of the optimum.
        """
        return _optim.OptimizationAlgorithmImplementation_setMaximumAbsoluteError(self, maximumAbsoluteError)

    def getMaximumRelativeError(self):
        r"""
        Accessor to maximum allowed relative error.

        Returns
        -------
        maximumRelativeError : float
            Maximum allowed relative error, where the relative error is defined by
            :math:`\epsilon^r_n=\epsilon^a_n/\|\vect{x}_{n+1}\|_{\infty}`
            if :math:`\|\vect{x}_{n+1}\|_{\infty}\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationAlgorithmImplementation_getMaximumRelativeError(self)

    def setMaximumRelativeError(self, maximumRelativeError):
        r"""
        Accessor to maximum allowed relative error.

        Parameters
        ----------
        maximumRelativeError : float
            Maximum allowed relative error, where the relative error is defined by
            :math:`\epsilon^r_n=\epsilon^a_n/\|\vect{x}_{n+1}\|_{\infty}`
            if :math:`\|\vect{x}_{n+1}\|_{\infty}\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationAlgorithmImplementation_setMaximumRelativeError(self, maximumRelativeError)

    def getMaximumResidualError(self):
        r"""
        Accessor to maximum allowed residual error.

        Returns
        -------
        maximumResidualError : float
            Maximum allowed residual error, where the residual error is defined by
            :math:`\epsilon^r_n=\frac{\|f(\vect{x}_{n+1})-f(\vect{x}_{n})\|}{\|f(\vect{x}_{n+1})\|}`
            if :math:`\|f(\vect{x}_{n+1})\|\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationAlgorithmImplementation_getMaximumResidualError(self)

    def setMaximumResidualError(self, maximumResidualError):
        r"""
        Accessor to maximum allowed residual error.

        Parameters
        ----------
        maximumResidualError : float
            Maximum allowed residual error, where the residual error is defined by
            :math:`\epsilon^r_n=\frac{\|f(\vect{x}_{n+1})-f(\vect{x}_{n})\|}{\|f(\vect{x}_{n+1})\|}`
            if :math:`\|f(\vect{x}_{n+1})\|\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationAlgorithmImplementation_setMaximumResidualError(self, maximumResidualError)

    def getMaximumConstraintError(self):
        r"""
        Accessor to maximum allowed constraint error.

        Returns
        -------
        maximumConstraintError : float
            Maximum allowed constraint error, where the constraint error is defined by
            :math:`\gamma_n=\|g(\vect{x}_n)\|_{\infty}` where :math:`\vect{x}_n` is the current approximation
            of the optimum and :math:`g` is the function that gathers all the equality and inequality
            constraints (violated values only).
        """
        return _optim.OptimizationAlgorithmImplementation_getMaximumConstraintError(self)

    def setMaximumConstraintError(self, maximumConstraintError):
        r"""
        Accessor to maximum allowed constraint error.

        Parameters
        ----------
        maximumConstraintError : float
            Maximum allowed constraint error, where the constraint error is defined by
            :math:`\gamma_n=\|g(\vect{x}_n)\|_{\infty}` where :math:`\vect{x}_n` is the current approximation
            of the optimum and :math:`g` is the function that gathers all the equality and inequality
            constraints (violated values only).
        """
        return _optim.OptimizationAlgorithmImplementation_setMaximumConstraintError(self, maximumConstraintError)

    def setMaximumTimeDuration(self, maximumTime):
        r"""
        Accessor to the maximum duration.

        Parameters
        ----------
        maximumTime : float
            Maximum optimization duration in seconds.
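
        Examples
        --------
        A minimal sketch of bounding the wall-clock time of the optimization, assuming the default solver:

        >>> import openturns as ot
        >>> problem = ot.OptimizationProblem(ot.SymbolicFunction(['x'], ['x^2']))
        >>> solver = ot.OptimizationAlgorithm(problem)
        >>> solver.setMaximumTimeDuration(60.0)  # stop after at most one minute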
        """
        return _optim.OptimizationAlgorithmImplementation_setMaximumTimeDuration(self, maximumTime)

    def getMaximumTimeDuration(self):
        r"""
        Accessor to the maximum duration.

        Returns
        -------
        maximumTime : float
            Maximum optimization duration in seconds.
        """
        return _optim.OptimizationAlgorithmImplementation_getMaximumTimeDuration(self)

    def __repr__(self):
        return _optim.OptimizationAlgorithmImplementation___repr__(self)

    def setCheckStatus(self, checkStatus):
        r"""
        Accessor to check status flag.

        Parameters
        ----------
        checkStatus : bool
            Whether to check the termination status.
            If set to **False**, :meth:`run` does not throw an exception when the algorithm
            fails to fully converge, so that a feasible candidate can still be retrieved.
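
        Examples
        --------
        A minimal sketch of disabling the status check so a partial result remains accessible, assuming the default solver:

        >>> import openturns as ot
        >>> problem = ot.OptimizationProblem(ot.SymbolicFunction(['x'], ['x^2']))
        >>> solver = ot.OptimizationAlgorithm(problem)
        >>> solver.setCheckStatus(False)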
        """
        return _optim.OptimizationAlgorithmImplementation_setCheckStatus(self, checkStatus)

    def getCheckStatus(self):
        r"""
        Accessor to check status flag.

        Returns
        -------
        checkStatus : bool
            Whether to check the termination status.
            If set to **False**, :meth:`run` does not throw an exception when the algorithm
            fails to fully converge, so that a feasible candidate can still be retrieved.
        """
        return _optim.OptimizationAlgorithmImplementation_getCheckStatus(self)

    def setProgressCallback(self, *args):
        r"""
        Set up a progress callback.

        Can be used to programmatically report the progress of an optimization.

        Parameters
        ----------
        callback : callable
            Takes a single float argument: the progress of the optimization, expressed as a percentage.

        Examples
        --------
        >>> import sys
        >>> import openturns as ot
        >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
        >>> problem = ot.OptimizationProblem(rosenbrock)
        >>> solver = ot.OptimizationAlgorithm(problem)
        >>> solver.setStartingPoint([0, 0])
        >>> solver.setMaximumResidualError(1.e-3)
        >>> solver.setMaximumCallsNumber(10000)
        >>> def report_progress(progress):
        ...     sys.stderr.write('-- progress=' + str(progress) + '%\n')
        >>> solver.setProgressCallback(report_progress)
        >>> solver.run()
        """
        return _optim.OptimizationAlgorithmImplementation_setProgressCallback(self, *args)

    def setStopCallback(self, *args):
        r"""
        Set up a stop callback.

        Can be used to programmatically stop an optimization.

        Parameters
        ----------
        callback : callable
            Returns a bool or int; a nonzero (true) value requests the solver to stop.

        Examples
        --------
        >>> import openturns as ot
        >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
        >>> problem = ot.OptimizationProblem(rosenbrock)
        >>> solver = ot.OptimizationAlgorithm(problem)
        >>> solver.setStartingPoint([0, 0])
        >>> solver.setMaximumResidualError(1.e-3)
        >>> solver.setMaximumCallsNumber(10000)
        >>> def ask_stop():
        ...     return True
        >>> solver.setStopCallback(ask_stop)
        >>> solver.run()
        """
        return _optim.OptimizationAlgorithmImplementation_setStopCallback(self, *args)
    __swig_destroy__ = _optim.delete_OptimizationAlgorithmImplementation

# Register OptimizationAlgorithmImplementation in _optim:
_optim.OptimizationAlgorithmImplementation_swigregister(OptimizationAlgorithmImplementation)
class _OptimizationAlgorithmImplementationTypedInterfaceObject(openturns.common.InterfaceObject):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        _optim._OptimizationAlgorithmImplementationTypedInterfaceObject_swiginit(self, _optim.new__OptimizationAlgorithmImplementationTypedInterfaceObject(*args))

    def getImplementation(self):
        r"""
        Accessor to the underlying implementation.

        Returns
        -------
        impl : Implementation
            A copy of the underlying implementation object.
        """
        return _optim._OptimizationAlgorithmImplementationTypedInterfaceObject_getImplementation(self)

    def setName(self, name):
        r"""
        Accessor to the object's name.

        Parameters
        ----------
        name : str
            The name of the object.
        """
        return _optim._OptimizationAlgorithmImplementationTypedInterfaceObject_setName(self, name)

    def getName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        name : str
            The name of the object.
        """
        return _optim._OptimizationAlgorithmImplementationTypedInterfaceObject_getName(self)

    def __eq__(self, other):
        return _optim._OptimizationAlgorithmImplementationTypedInterfaceObject___eq__(self, other)

    def __ne__(self, other):
        return _optim._OptimizationAlgorithmImplementationTypedInterfaceObject___ne__(self, other)
    __swig_destroy__ = _optim.delete__OptimizationAlgorithmImplementationTypedInterfaceObject

# Register _OptimizationAlgorithmImplementationTypedInterfaceObject in _optim:
_optim._OptimizationAlgorithmImplementationTypedInterfaceObject_swigregister(_OptimizationAlgorithmImplementationTypedInterfaceObject)
class OptimizationAlgorithm(_OptimizationAlgorithmImplementationTypedInterfaceObject):
    r"""
    Base class for optimization wrappers.

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Optimization problem.

    Notes
    -----
    Class :class:`~openturns.OptimizationAlgorithm` is an abstract class, which has several implementations.
    The default implementation is :class:`~openturns.Cobyla`.

    See also
    --------
    AbdoRackwitz, Cobyla, SQP, TNC, NLopt

    Examples
    --------
    Define an optimization problem to find the minimum of the Rosenbrock function:

    >>> import openturns as ot
    >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
    >>> problem = ot.OptimizationProblem(rosenbrock)
    >>> solver = ot.OptimizationAlgorithm(problem)
    >>> solver.setStartingPoint([0, 0])
    >>> solver.setMaximumResidualError(1.e-3)
    >>> solver.setMaximumCallsNumber(10000)
    >>> solver.run()
    >>> result = solver.getResult()
    >>> x_star = result.getOptimalPoint()
    >>> y_star = result.getOptimalValue()
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.OptimizationAlgorithm_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.OptimizationAlgorithm_run(self)

    def getStartingPoint(self):
        r"""
        Accessor to starting point.

        Returns
        -------
        startingPoint : :class:`~openturns.Point`
            Starting point.
        """
        return _optim.OptimizationAlgorithm_getStartingPoint(self)

    def setStartingPoint(self, startingPoint):
        r"""
        Accessor to starting point.

        Parameters
        ----------
        startingPoint : :class:`~openturns.Point`
            Starting point.
        """
        return _optim.OptimizationAlgorithm_setStartingPoint(self, startingPoint)

    def getProblem(self):
        r"""
        Accessor to optimization problem.

        Returns
        -------
        problem : :class:`~openturns.OptimizationProblem`
            Optimization problem.
        """
        return _optim.OptimizationAlgorithm_getProblem(self)

    def setProblem(self, problem):
        r"""
        Accessor to optimization problem.

        Parameters
        ----------
        problem : :class:`~openturns.OptimizationProblem`
            Optimization problem.
        """
        return _optim.OptimizationAlgorithm_setProblem(self, problem)

    def getResult(self):
        r"""
        Accessor to optimization result.

        Returns
        -------
        result : :class:`~openturns.OptimizationResult`
            Result class.
        """
        return _optim.OptimizationAlgorithm_getResult(self)

    def setResult(self, result):
        r"""
        Accessor to optimization result.

        Parameters
        ----------
        result : :class:`~openturns.OptimizationResult`
            Result class.
        """
        return _optim.OptimizationAlgorithm_setResult(self, result)

    def getMaximumIterationNumber(self):
        r"""
        Accessor to maximum allowed number of iterations.

        Returns
        -------
        maximumIterationNumber : int
            Maximum allowed number of iterations.
        """
        return _optim.OptimizationAlgorithm_getMaximumIterationNumber(self)

    def setMaximumIterationNumber(self, maximumIterationNumber):
        r"""
        Accessor to maximum allowed number of iterations.

        Parameters
        ----------
        maximumIterationNumber : int
            Maximum allowed number of iterations.
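
        Examples
        --------
        A minimal sketch, assuming the default solver; note that one iteration may trigger several function calls:

        >>> import openturns as ot
        >>> problem = ot.OptimizationProblem(ot.SymbolicFunction(['x'], ['x^2']))
        >>> solver = ot.OptimizationAlgorithm(problem)
        >>> solver.setMaximumIterationNumber(100)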
        """
        return _optim.OptimizationAlgorithm_setMaximumIterationNumber(self, maximumIterationNumber)

    def setMaximumCallsNumber(self, maximumCallsNumber):
        r"""
        Accessor to maximum allowed number of calls.

        Parameters
        ----------
        maximumCallsNumber : int
            Maximum allowed number of direct objective function calls through the `()` operator.
            Indirect calls made when the gradient is approximated by finite differences are not counted.
        """
        return _optim.OptimizationAlgorithm_setMaximumCallsNumber(self, maximumCallsNumber)

    def getMaximumCallsNumber(self):
        r"""
        Accessor to maximum allowed number of calls.

        Returns
        -------
        maximumCallsNumber : int
            Maximum allowed number of direct objective function calls through the `()` operator.
            Indirect calls made when the gradient is approximated by finite differences are not counted.
        """
        return _optim.OptimizationAlgorithm_getMaximumCallsNumber(self)

    def getMaximumAbsoluteError(self):
        r"""
        Accessor to maximum allowed absolute error.

        Returns
        -------
        maximumAbsoluteError : float
            Maximum allowed absolute error, where the absolute error is defined by
            :math:`\epsilon^a_n=\|\vect{x}_{n+1}-\vect{x}_n\|_{\infty}` where :math:`\vect{x}_{n+1}`
            and :math:`\vect{x}_n` are two consecutive approximations of the optimum.
        """
        return _optim.OptimizationAlgorithm_getMaximumAbsoluteError(self)

    def setMaximumAbsoluteError(self, maximumAbsoluteError):
        r"""
        Accessor to maximum allowed absolute error.

        Parameters
        ----------
        maximumAbsoluteError : float
            Maximum allowed absolute error, where the absolute error is defined by
            :math:`\epsilon^a_n=\|\vect{x}_{n+1}-\vect{x}_n\|_{\infty}` where :math:`\vect{x}_{n+1}`
            and :math:`\vect{x}_n` are two consecutive approximations of the optimum.
        """
        return _optim.OptimizationAlgorithm_setMaximumAbsoluteError(self, maximumAbsoluteError)

    def getMaximumRelativeError(self):
        r"""
        Accessor to maximum allowed relative error.

        Returns
        -------
        maximumRelativeError : float
            Maximum allowed relative error, where the relative error is defined by
            :math:`\epsilon^r_n=\epsilon^a_n/\|\vect{x}_{n+1}\|_{\infty}`
            if :math:`\|\vect{x}_{n+1}\|_{\infty}\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationAlgorithm_getMaximumRelativeError(self)

    def setMaximumRelativeError(self, maximumRelativeError):
        r"""
        Accessor to maximum allowed relative error.

        Parameters
        ----------
        maximumRelativeError : float
            Maximum allowed relative error, where the relative error is defined by
            :math:`\epsilon^r_n=\epsilon^a_n/\|\vect{x}_{n+1}\|_{\infty}`
            if :math:`\|\vect{x}_{n+1}\|_{\infty}\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationAlgorithm_setMaximumRelativeError(self, maximumRelativeError)

    def getMaximumResidualError(self):
        r"""
        Accessor to maximum allowed residual error.

        Returns
        -------
        maximumResidualError : float
            Maximum allowed residual error, where the residual error is defined by
            :math:`\epsilon^r_n=\frac{\|f(\vect{x}_{n+1})-f(\vect{x}_{n})\|}{\|f(\vect{x}_{n+1})\|}`
            if :math:`\|f(\vect{x}_{n+1})\|\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationAlgorithm_getMaximumResidualError(self)

    def setMaximumResidualError(self, maximumResidualError):
        r"""
        Accessor to maximum allowed residual error.

        Parameters
        ----------
        maximumResidualError : float
            Maximum allowed residual error, where the residual error is defined by
            :math:`\epsilon^r_n=\frac{\|f(\vect{x}_{n+1})-f(\vect{x}_{n})\|}{\|f(\vect{x}_{n+1})\|}`
            if :math:`\|f(\vect{x}_{n+1})\|\neq 0`, else :math:`\epsilon^r_n=-1`.
        """
        return _optim.OptimizationAlgorithm_setMaximumResidualError(self, maximumResidualError)

    def getMaximumConstraintError(self):
        r"""
        Accessor to maximum allowed constraint error.

        Returns
        -------
        maximumConstraintError : float
            Maximum allowed constraint error, where the constraint error is defined by
            :math:`\gamma_n=\|g(\vect{x}_n)\|_{\infty}` where :math:`\vect{x}_n` is the current approximation
            of the optimum and :math:`g` is the function that gathers all the equality and inequality
            constraints (violated values only).
        """
        return _optim.OptimizationAlgorithm_getMaximumConstraintError(self)

    def setMaximumConstraintError(self, maximumConstraintError):
        r"""
        Accessor to maximum allowed constraint error.

        Parameters
        ----------
        maximumConstraintError : float
            Maximum allowed constraint error, where the constraint error is defined by
            :math:`\gamma_n=\|g(\vect{x}_n)\|_{\infty}` where :math:`\vect{x}_n` is the current approximation
            of the optimum and :math:`g` is the function that gathers all the equality and inequality
            constraints (violated values only).
        """
        return _optim.OptimizationAlgorithm_setMaximumConstraintError(self, maximumConstraintError)

    def setMaximumTimeDuration(self, maximumTime):
        r"""
        Accessor to the maximum duration.

        Parameters
        ----------
        maximumTime : float
            Maximum optimization duration in seconds.
        """
        return _optim.OptimizationAlgorithm_setMaximumTimeDuration(self, maximumTime)

    def getMaximumTimeDuration(self):
        r"""
        Accessor to the maximum duration.

        Returns
        -------
        maximumTime : float
            Maximum optimization duration in seconds.
        """
        return _optim.OptimizationAlgorithm_getMaximumTimeDuration(self)

    def setCheckStatus(self, checkStatus):
        r"""
        Accessor to check status flag.

        Parameters
        ----------
        checkStatus : bool
            Whether to check the termination status.
            If set to **False**, :meth:`run` does not throw an exception when the algorithm
            fails to fully converge, so that a feasible candidate can still be retrieved.
        """
        return _optim.OptimizationAlgorithm_setCheckStatus(self, checkStatus)

    def getCheckStatus(self):
        r"""
        Accessor to check status flag.

        Returns
        -------
        checkStatus : bool
            Whether to check the termination status.
            If set to **False**, :meth:`run` does not throw an exception when the algorithm
            fails to fully converge, so that a feasible candidate can still be retrieved.
        """
        return _optim.OptimizationAlgorithm_getCheckStatus(self)

    @staticmethod
    def GetByName(solverName):
        r"""
        Instantiate an optimization algorithm from its name.

        Parameters
        ----------
        solverName : str
            Name of the algorithm,
            for example `TNC`, `Cobyla` or one of the :class:`~openturns.NLopt` solver names.

        Returns
        -------
        algorithm : :class:`~openturns.OptimizationAlgorithm`
            The optimization algorithm matching *solverName*.
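
        Examples
        --------
        A minimal sketch using the built-in `Cobyla` solver:

        >>> import openturns as ot
        >>> algo = ot.OptimizationAlgorithm.GetByName('Cobyla')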
        """
        return _optim.OptimizationAlgorithm_GetByName(solverName)

    @staticmethod
    def Build(*args):
        r"""
        Instantiate an optimization algorithm from name or problem.

        Parameters
        ----------
        problem : :class:`~openturns.OptimizationProblem`
            Problem to solve.

        Returns
        -------
        algorithm : :class:`~openturns.OptimizationAlgorithm`
            The algorithm selected for the given problem.
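
        Examples
        --------
        A minimal sketch; the default algorithm is selected for the given problem:

        >>> import openturns as ot
        >>> problem = ot.OptimizationProblem(ot.SymbolicFunction(['x'], ['x^2']))
        >>> algo = ot.OptimizationAlgorithm.Build(problem)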
        """
        return _optim.OptimizationAlgorithm_Build(*args)

    @staticmethod
    def GetAlgorithmNames(*args):
        r"""
        Get the list of available solver names.

        Parameters
        ----------
        problem : :class:`~openturns.OptimizationProblem`, optional
            Problem to solve.

        Returns
        -------
        names : :class:`~openturns.Description`
            List of available solver names.
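
        Examples
        --------
        A minimal sketch; the exact list depends on which optional solvers the library was built with:

        >>> import openturns as ot
        >>> names = ot.OptimizationAlgorithm.GetAlgorithmNames()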
        """
        return _optim.OptimizationAlgorithm_GetAlgorithmNames(*args)

    def __repr__(self):
        return _optim.OptimizationAlgorithm___repr__(self)

    def __str__(self, *args):
        return _optim.OptimizationAlgorithm___str__(self, *args)

    def __init__(self, *args):
        _optim.OptimizationAlgorithm_swiginit(self, _optim.new_OptimizationAlgorithm(*args))

    def setProgressCallback(self, *args):
        r"""
        Set up a progress callback.

        Can be used to programmatically report the progress of an optimization.

        Parameters
        ----------
        callback : callable
            Takes a single float argument: the progress of the optimization, expressed as a percentage.

        Examples
        --------
        >>> import sys
        >>> import openturns as ot
        >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
        >>> problem = ot.OptimizationProblem(rosenbrock)
        >>> solver = ot.OptimizationAlgorithm(problem)
        >>> solver.setStartingPoint([0, 0])
        >>> solver.setMaximumResidualError(1.e-3)
        >>> solver.setMaximumCallsNumber(10000)
        >>> def report_progress(progress):
        ...     sys.stderr.write('-- progress=' + str(progress) + '%\n')
        >>> solver.setProgressCallback(report_progress)
        >>> solver.run()
        """
        return _optim.OptimizationAlgorithm_setProgressCallback(self, *args)

    def setStopCallback(self, *args):
        r"""
        Set up a stop callback.

        Can be used to programmatically stop an optimization.

        Parameters
        ----------
        callback : callable
            Returns a bool or int; a nonzero (true) value requests the solver to stop.

        Examples
        --------
        >>> import openturns as ot
        >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
        >>> problem = ot.OptimizationProblem(rosenbrock)
        >>> solver = ot.OptimizationAlgorithm(problem)
        >>> solver.setStartingPoint([0, 0])
        >>> solver.setMaximumResidualError(1.e-3)
        >>> solver.setMaximumCallsNumber(10000)
        >>> def ask_stop():
        ...     return True
        >>> solver.setStopCallback(ask_stop)
        >>> solver.run()
        """
        return _optim.OptimizationAlgorithm_setStopCallback(self, *args)
    __swig_destroy__ = _optim.delete_OptimizationAlgorithm

# Register OptimizationAlgorithm in _optim:
_optim.OptimizationAlgorithm_swigregister(OptimizationAlgorithm)
class AbdoRackwitz(OptimizationAlgorithmImplementation):
    r"""
    Abdo-Rackwitz solver.

    This solver uses first derivative information and can only be used to solve level function problems.

    Available constructors:
        AbdoRackwitz(*problem*)

        AbdoRackwitz(*problem, tau, omega, smooth*)

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Optimization problem to solve.
    tau : float
        Multiplicative decrease of linear step.
    omega : float
        Armijo factor.
    smooth : float
        Growing factor in penalization term.

    See also
    --------
    Cobyla, SQP, TNC, NLopt

    Examples
    --------
    >>> import openturns as ot
    >>> model = ot.SymbolicFunction(['E', 'F', 'L', 'I'], ['-F*L^3/(3*E*I)'])
    >>> problem = ot.NearestPointProblem(model, 5.0)
    >>> algo = ot.AbdoRackwitz(problem)
    >>> algo.setStartingPoint([1.0] * 4)
    >>> algo.run()
    >>> result = algo.getResult()
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.AbdoRackwitz_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.AbdoRackwitz_run(self)

    def getTau(self):
        r"""
        Accessor to tau parameter.

        Returns
        -------
        tau : float
            Multiplicative decrease of linear step.
        """
        return _optim.AbdoRackwitz_getTau(self)

    def setTau(self, tau):
        r"""
        Accessor to tau parameter.

        Parameters
        ----------
        tau : float
            Multiplicative decrease of linear step.
        """
        return _optim.AbdoRackwitz_setTau(self, tau)

    def getOmega(self):
        r"""
        Accessor to omega parameter.

        Returns
        -------
        omega : float
            Armijo factor.
        """
        return _optim.AbdoRackwitz_getOmega(self)

    def setOmega(self, omega):
        r"""
        Accessor to omega parameter.

        Parameters
        ----------
        omega : float
            Armijo factor.
        """
        return _optim.AbdoRackwitz_setOmega(self, omega)

    def getSmooth(self):
        r"""
        Accessor to smooth parameter.

        Returns
        -------
        smooth : float
            Growing factor in penalization term.
        """
        return _optim.AbdoRackwitz_getSmooth(self)

    def setSmooth(self, smooth):
        r"""
        Accessor to smooth parameter.

        Parameters
        ----------
        smooth : float
            Growing factor in penalization term.
        """
        return _optim.AbdoRackwitz_setSmooth(self, smooth)

    def __repr__(self):
        return _optim.AbdoRackwitz___repr__(self)

    def __init__(self, *args):
        _optim.AbdoRackwitz_swiginit(self, _optim.new_AbdoRackwitz(*args))
    __swig_destroy__ = _optim.delete_AbdoRackwitz

# Register AbdoRackwitz in _optim:
_optim.AbdoRackwitz_swigregister(AbdoRackwitz)
class _VariableTypeCollection(object):
    r"""
    Collection of variable types.

    Examples
    --------
    >>> import openturns as ot

    - Collection of **real values**:

    >>> ot.ScalarCollection(2)
    [0,0]
    >>> ot.ScalarCollection(2, 3.25)
    [3.25,3.25]
    >>> vector = ot.ScalarCollection([2.0, 1.5, 2.6])
    >>> vector
    [2,1.5,2.6]
    >>> vector[1] = 4.2
    >>> vector
    [2,4.2,2.6]
    >>> vector.add(3.8)
    >>> vector
    [2,4.2,2.6,3.8]

    - Collection of **complex values**:

    >>> ot.ComplexCollection(2)
    [(0,0),(0,0)]
    >>> ot.ComplexCollection(2, 3+4j)
    [(3,4),(3,4)]
    >>> vector = ot.ComplexCollection([2+3j, 1-4j, 3.0])
    >>> vector
    [(2,3),(1,-4),(3,0)]
    >>> vector[1] = 4+3j
    >>> vector
    [(2,3),(4,3),(3,0)]
    >>> vector.add(5+1j)
    >>> vector
    [(2,3),(4,3),(3,0),(5,1)]

    - Collection of **booleans**:

    >>> ot.BoolCollection(3)
    [0,0,0]
    >>> ot.BoolCollection(3, 1)
    [1,1,1]
    >>> vector = ot.BoolCollection([0, 1, 0])
    >>> vector
    [0,1,0]
    >>> vector[1] = 0
    >>> vector
    [0,0,0]
    >>> vector.add(1)
    >>> vector
    [0,0,0,1]

    - Collection of **distributions**:

    >>> print(ot.DistributionCollection(2))
    [Uniform(a = -1, b = 1),Uniform(a = -1, b = 1)]
    >>> print(ot.DistributionCollection(2, ot.Gamma(2.75, 1.0)))
    [Gamma(k = 2.75, lambda = 1, gamma = 0),Gamma(k = 2.75, lambda = 1, gamma = 0)]
    >>> vector = ot.DistributionCollection([ot.Normal(), ot.Uniform()])
    >>> print(vector)
    [Normal(mu = 0, sigma = 1),Uniform(a = -1, b = 1)]
    >>> vector[1] = ot.Uniform(-0.5, 1)
    >>> print(vector)
    [Normal(mu = 0, sigma = 1),Uniform(a = -0.5, b = 1)]
    >>> vector.add(ot.Gamma(2.75, 1.0))
    >>> print(vector)
    [Normal(mu = 0, sigma = 1),Uniform(a = -0.5, b = 1),Gamma(k = 2.75, lambda = 1, gamma = 0)]
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __swig_destroy__ = _optim.delete__VariableTypeCollection

    def clear(self):
        r"""
        Reset the collection to zero dimension.

        Examples
        --------
        >>> import openturns as ot
        >>> x = ot.Point(2)
        >>> x.clear()
        >>> x
        class=Point name=Unnamed dimension=0 values=[]
        """
        return _optim._VariableTypeCollection_clear(self)

    def __len__(self):
        return _optim._VariableTypeCollection___len__(self)

    def __eq__(self, rhs):
        return _optim._VariableTypeCollection___eq__(self, rhs)

    def __contains__(self, val):
        return _optim._VariableTypeCollection___contains__(self, val)

    def select(self, marginalIndices):
        r"""
        Selection from indices.

        Parameters
        ----------
        indices : sequence of int
            Indices to select.

        Returns
        -------
        coll : sequence
            Sub-collection of values at the selection indices.
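
        Examples
        --------
        A minimal sketch, assuming :class:`~openturns.Point` exposes the same collection interface:

        >>> import openturns as ot
        >>> x = ot.Point([1.0, 2.0, 3.0])
        >>> sub = x.select([0, 2])  # keeps the first and third components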
        """
        return _optim._VariableTypeCollection_select(self, marginalIndices)

    def __getitem__(self, i):
        return _optim._VariableTypeCollection___getitem__(self, i)

    def __setitem__(self, i, val):
        return _optim._VariableTypeCollection___setitem__(self, i, val)

    def __delitem__(self, i):
        return _optim._VariableTypeCollection___delitem__(self, i)

    def at(self, *args):
        r"""
        Access to an element of the collection.

        Parameters
        ----------
        index : positive int
            Position of the element to access.

        Returns
        -------
        element : type depends on the type of the collection
            Element of the collection at the position *index*.
        """
        return _optim._VariableTypeCollection_at(self, *args)

    def add(self, *args):
        r"""
        Append a component (in-place).

        Parameters
        ----------
        value : type depends on the type of the collection.
            The component to append.

        Examples
        --------
        >>> import openturns as ot
        >>> x = ot.Point(2)
        >>> x.add(1.)
        >>> print(x)
        [0,0,1]
        """
        return _optim._VariableTypeCollection_add(self, *args)

    def getSize(self):
        r"""
        Get the collection's dimension (or size).

        Returns
        -------
        n : int
            The number of components in the collection.
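
        Examples
        --------
        A minimal sketch, assuming :class:`~openturns.Point` exposes the same collection interface:

        >>> import openturns as ot
        >>> x = ot.Point([2.0, 1.5])
        >>> x.getSize()
        2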
        """
        return _optim._VariableTypeCollection_getSize(self)

    def resize(self, newSize):
        r"""
        Change the size of the collection.

        Parameters
        ----------
        newSize : positive int
            New size of the collection.

        Notes
        -----
        If the new size is smaller than the old one, the last elements are dropped;
        otherwise the new elements are set to the default value of the element type.

        Examples
        --------
        >>> import openturns as ot
        >>> x = ot.Point(2, 4)
        >>> print(x)
        [4,4]
        >>> x.resize(1)
        >>> print(x)
        [4]
        >>> x.resize(4)
        >>> print(x)
        [4,0,0,0]
        """
        return _optim._VariableTypeCollection_resize(self, newSize)

    def isEmpty(self):
        r"""
        Tell if the collection is empty.

        Returns
        -------
        isEmpty : bool
            *True* if there is no element in the collection.

        Examples
        --------
        >>> import openturns as ot
        >>> x = ot.Point(2)
        >>> x.isEmpty()
        False
        >>> x.clear()
        >>> x.isEmpty()
        True
        """
        return _optim._VariableTypeCollection_isEmpty(self)

    def find(self, val):
        r"""
        Find the index of a given value.

        Parameters
        ----------
        val : collection value type
            The value to find

        Returns
        -------
        index : int
            The index of the first occurrence of the value,
            or the size of the container if not found.
            When several values match, only the first index is returned.
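
        Examples
        --------
        A minimal sketch, assuming :class:`~openturns.Point` exposes the same collection interface:

        >>> import openturns as ot
        >>> x = ot.Point([2.0, 4.2, 2.6])
        >>> x.find(4.2)
        1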
        """
        return _optim._VariableTypeCollection_find(self, val)

    def __repr__(self):
        return _optim._VariableTypeCollection___repr__(self)

    def __str__(self, *args):
        return _optim._VariableTypeCollection___str__(self, *args)

    def __init__(self, *args):
        _optim._VariableTypeCollection_swiginit(self, _optim.new__VariableTypeCollection(*args))

# Register _VariableTypeCollection in _optim:
_optim._VariableTypeCollection_swigregister(_VariableTypeCollection)
class Bonmin(OptimizationAlgorithmImplementation):
    r"""
    Bonmin MINLP solver.

    `Bonmin <https://www.coin-or.org/Bonmin/Intro.html>`_ is an open-source code for solving general MINLP problems.

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`, optional
        Optimization problem to solve. Default is an empty problem.

    algoName : str, optional
        Identifier of the optimization method to use. Default is 'B-BB'.

    Notes
    -----

    *Available algorithms:*

      Bonmin provides algorithms for the resolution of general optimization problems only. In particular, least squares problems and nearest point problems are not supported.


      Bonmin provides the following algorithms:

      +-----------+-------------------------------------------------+
      | Algorithm | Description                                     |
      +===========+=================================================+
      | B-BB      | NLP-based branch-and-bound                      |
      +-----------+-------------------------------------------------+
      | B-OA      | Outer-approximation decomposition               |
      +-----------+-------------------------------------------------+
      | B-QG      | Quesada and Grossmann branch-and-cut            |
      +-----------+-------------------------------------------------+
      | B-Hyb     | Hybrid outer-approximation based branch-and-cut |
      +-----------+-------------------------------------------------+
      | B-Ecp     | ECP cuts based branch-and-cut a la FilMINT      |
      +-----------+-------------------------------------------------+
      | B-iFP     | Iterated feasibility pump algorithm for MINLP   |
      +-----------+-------------------------------------------------+

    *Algorithms parameters:*

      Bonmin algorithms can be adapted using numerous parameters, described  `here <https://www.coin-or.org/Bonmin/options_list.html>`_.
      These parameters can be modified using the :class:`~openturns.ResourceMap`.
      For every option ``optionName``, simply add a key named ``Bonmin-optionName`` with the value to use, as shown below::

        >>> import openturns as ot
        >>> ot.ResourceMap.AddAsUnsignedInteger('Bonmin-bonmin.node_limit', 10000)
        >>> ot.ResourceMap.AddAsScalar('Bonmin-bonmin.cutoff', 1e6)

    *Convergence criteria:*

      To estimate the convergence of the algorithm during the optimization process,
      Bonmin uses specific tolerance parameters, different from the standard absolute/relative/residual errors in the library.
      The definition of Bonmin's parameters can be found in `this paper, page 3 <https://www.optimization-online.org/2004/03/836>`_.
      Thus the attributes ``maximumAbsoluteError``, ``maximumRelativeError``, ``maximumResidualError`` and ``maximumConstraintError``
      defined in ``OptimizationAlgorithm`` are not used in this case.

    See also
    --------
    AbdoRackwitz, Cobyla, Dlib, NLopt

    Examples
    --------
    The code below ensures the optimization of the following problem:

    .. math:: \min \left( - x_0 - x_1 - x_2 \right)

    subject to

    .. math::

        \left(x_1 - \frac{1}{2}\right)^2 + \left(x_2 - \frac{1}{2}\right)^2 \leq \frac{1}{4}

        x_0 - x_1 \leq 0

        x_0 + x_2 + x_3 \leq 2

        x_0 \in \{0,1\}

        (x_1, x_2) \in \mathbb{R}^2

        x_3 \in \mathbb{N}


    >>> import openturns as ot

    >>> # Definition of objective function
    >>> objectiveFunction = ot.SymbolicFunction(['x0','x1','x2','x3'], ['-x0 -x1 -x2'])

    >>> # Definition of variables bounds
    >>> bounds = ot.Interval([0,0,0,0],[1,1e308,1e308,5],[True,True,True,True],[True,False,False,True])

    >>> # Definition of variables types
    >>> variablesType = [ot.OptimizationProblemImplementation.BINARY,
    ... ot.OptimizationProblemImplementation.CONTINUOUS,
    ... ot.OptimizationProblemImplementation.CONTINUOUS,
    ... ot.OptimizationProblemImplementation.INTEGER]

    Inequality constraints are defined by a function :math:`h` such that :math:`h(x) \geq 0`.
    The inequality expression above has to be modified to match this formulation.

    >>> # Definition of constraints
    >>> # Constraints in OpenTURNS are defined as g(x) = 0 and h(x) >= 0
    >>> #    No equality constraint -> nothing to do
    >>> #    Inequality constraints:
    >>> h = ot.SymbolicFunction(['x0','x1','x2','x3'], ['-(x1-1/2)^2 - (x2-1/2)^2 + 1/4', '-x0 + x1', '-x0 - x2 - x3 + 2'])

    >>> # Setting up Bonmin problem
    >>> problem = ot.OptimizationProblem(objectiveFunction)
    >>> problem.setBounds(bounds)
    >>> problem.setVariablesType(variablesType)
    >>> problem.setInequalityConstraint(h)

    >>> bonminAlgorithm = ot.Bonmin(problem,'B-BB')
    >>> bonminAlgorithm.setStartingPoint([0,0,0,0])
    >>> bonminAlgorithm.setMaximumCallsNumber(10000)
    >>> bonminAlgorithm.setMaximumIterationNumber(1000)

    >>> # Running the solver
    >>> bonminAlgorithm.run() # doctest: +SKIP

    >>> # Retrieving the results
    >>> result = bonminAlgorithm.getResult() # doctest: +SKIP
    >>> optimalPoint = result.getOptimalPoint() # doctest: +SKIP
    >>> optimalValue = result.getOptimalValue() # doctest: +SKIP
    >>> evaluationNumber = result.getInputSample().getSize() # doctest: +SKIP 
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.Bonmin_getClassName(self)

    @staticmethod
    def GetAlgorithmNames():
        r"""
        Retrieves the names of the available optimization algorithms.

        Returns
        -------
        algoName : :class:`~openturns.Description`
            The names of the available optimization algorithms.
        """
        return _optim.Bonmin_GetAlgorithmNames()

    def setAlgorithmName(self, algoName):
        r"""
        Accessor to the algorithm name. Sets the optimization algorithm to use.
        Possible values for *algoName* are B-BB, B-OA, B-QG, B-Hyb, B-Ecp and B-iFP; see
        `Bonmin's online documentation <https://www.coin-or.org/Bonmin/index.html>`_ for more details.
        Default is B-BB.

        Parameters
        ----------
        algoName : str
            The name of the optimization algorithm to use.
        """
        return _optim.Bonmin_setAlgorithmName(self, algoName)

    def getAlgorithmName(self):
        r"""
        Accessor to the algorithm name. Retrieves the name of the optimization algorithm used.

        Returns
        -------
        algoName : str
            The name of the optimization algorithm used.
        """
        return _optim.Bonmin_getAlgorithmName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.Bonmin_run(self)

    def __str__(self, *args):
        return _optim.Bonmin___str__(self, *args)

    def __repr__(self):
        return _optim.Bonmin___repr__(self)

    def __init__(self, *args):
        _optim.Bonmin_swiginit(self, _optim.new_Bonmin(*args))
    __swig_destroy__ = _optim.delete_Bonmin

# Register Bonmin in _optim:
_optim.Bonmin_swigregister(Bonmin)
class Ceres(OptimizationAlgorithmImplementation):
    r"""
    Interface to Ceres Solver.

    This class exposes the solvers from the non-linear least squares optimization library [ceres2012]_.

    More details about least squares algorithms are available `here <http://ceres-solver.org/nnls_solving.html>`_.

    Algorithms are also available for general unconstrained optimization.

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Optimization problem to solve, either least-squares or general (unconstrained).
    algoName : str
        The identifier of the algorithm.
        Use :func:`GetAlgorithmNames()` to list available names.

    Notes
    -----
    Solvers use first order derivative information.

    As for constraint support, only the trust-region solvers allow for bound constraints:

    +------------------------------+---------------+------------------------+--------------------+
    | Algorithm                    | Method type   | Problem type support   | Constraint support |
    +==============================+===============+========================+====================+
    | LEVENBERG_MARQUARDT          | trust-region  | least-squares          | bounds             |
    +------------------------------+---------------+------------------------+--------------------+
    | DOGLEG                       | trust-region  | least-squares          | bounds             |
    +------------------------------+---------------+------------------------+--------------------+
    | STEEPEST_DESCENT             | line-search   | least-squares, general | none               |
    +------------------------------+---------------+------------------------+--------------------+
    | NONLINEAR_CONJUGATE_GRADIENT | line-search   | least-squares, general | none               |
    +------------------------------+---------------+------------------------+--------------------+
    | LBFGS                        | line-search   | least-squares, general | none               |
    +------------------------------+---------------+------------------------+--------------------+
    | BFGS                         | line-search   | least-squares, general | none               |
    +------------------------------+---------------+------------------------+--------------------+


    The Ceres least squares solver can be further tweaked via the following
    :class:`~openturns.ResourceMap` parameters; refer to
    `nlls solver options <http://ceres-solver.org/nnls_solving.html#solver-options>`_
    for more details.

    +------------------------------------------------------------+-------+
    | Key                                                        | Type  |
    +============================================================+=======+
    | Ceres-minimizer_type                                       | str   |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_direction_type                           | str   |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_type                                     | str   |
    +------------------------------------------------------------+-------+
    | Ceres-nonlinear_conjugate_gradient_type                    | str   |
    +------------------------------------------------------------+-------+
    | Ceres-max_lbfgs_rank                                       | int   |
    +------------------------------------------------------------+-------+
    | Ceres-use_approximate_eigenvalue_bfgs_scaling              | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_interpolation_type                       | str   |
    +------------------------------------------------------------+-------+
    | Ceres-min_line_search_step_size                            | float |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_sufficient_function_decrease             | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_line_search_step_contraction                     | float |
    +------------------------------------------------------------+-------+
    | Ceres-min_line_search_step_contraction                     | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_num_line_search_step_size_iterations             | int   |
    +------------------------------------------------------------+-------+
    | Ceres-max_num_line_search_direction_restarts               | int   |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_sufficient_curvature_decrease            | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_line_search_step_expansion                       | float |
    +------------------------------------------------------------+-------+
    | Ceres-trust_region_strategy_type                           | str   |
    +------------------------------------------------------------+-------+
    | Ceres-dogleg_type                                          | str   |
    +------------------------------------------------------------+-------+
    | Ceres-use_nonmonotonic_steps                               | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-max_consecutive_nonmonotonic_steps                   | int   |
    +------------------------------------------------------------+-------+
    | Ceres-max_num_iterations                                   | int   |
    +------------------------------------------------------------+-------+
    | Ceres-max_solver_time_in_seconds                           | float |
    +------------------------------------------------------------+-------+
    | Ceres-num_threads                                          | int   |
    +------------------------------------------------------------+-------+
    | Ceres-initial_trust_region_radius                          | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_trust_region_radius                              | float |
    +------------------------------------------------------------+-------+
    | Ceres-min_trust_region_radius                              | float |
    +------------------------------------------------------------+-------+
    | Ceres-min_relative_decrease                                | float |
    +------------------------------------------------------------+-------+
    | Ceres-min_lm_diagonal                                      | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_lm_diagonal                                      | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_num_consecutive_invalid_steps                    | int   |
    +------------------------------------------------------------+-------+
    | Ceres-function_tolerance                                   | float |
    +------------------------------------------------------------+-------+
    | Ceres-gradient_tolerance                                   | float |
    +------------------------------------------------------------+-------+
    | Ceres-parameter_tolerance                                  | float |
    +------------------------------------------------------------+-------+
    | Ceres-preconditioner_type                                  | str   |
    +------------------------------------------------------------+-------+
    | Ceres-visibility_clustering_type                           | str   |
    +------------------------------------------------------------+-------+
    | Ceres-dense_linear_algebra_library_type                    | str   |
    +------------------------------------------------------------+-------+
    | Ceres-sparse_linear_algebra_library_type                   | str   |
    +------------------------------------------------------------+-------+
    | Ceres-use_explicit_schur_complement                        | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-dynamic_sparsity                                     | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-min_linear_solver_iterations                         | int   |
    +------------------------------------------------------------+-------+
    | Ceres-max_linear_solver_iterations                         | int   |
    +------------------------------------------------------------+-------+
    | Ceres-eta                                                  | float |
    +------------------------------------------------------------+-------+
    | Ceres-jacobi_scaling                                       | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-use_inner_iterations                                 | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-inner_iteration_tolerance                            | float |
    +------------------------------------------------------------+-------+
    | Ceres-logging_type                                         | str   |
    +------------------------------------------------------------+-------+
    | Ceres-minimizer_progress_to_stdout                         | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-trust_region_problem_dump_directory                  | str   |
    +------------------------------------------------------------+-------+
    | Ceres-trust_region_problem_dump_format_type                | str   |
    +------------------------------------------------------------+-------+
    | Ceres-check_gradients                                      | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-gradient_check_relative_precision                    | float |
    +------------------------------------------------------------+-------+
    | Ceres-gradient_check_numeric_derivative_relative_step_size | float |
    +------------------------------------------------------------+-------+
    | Ceres-update_state_every_iteration                         | bool  |
    +------------------------------------------------------------+-------+


    The Ceres unconstrained solver can be further tweaked using the following :class:`~openturns.ResourceMap` parameters;
    refer to `gradient solver options <http://ceres-solver.org/gradient_solver.html#solving>`_ for more details.

    +------------------------------------------------------------+-------+
    | Key                                                        | Type  |
    +============================================================+=======+
    | Ceres-line_search_direction_type                           | str   |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_type                                     | str   |
    +------------------------------------------------------------+-------+
    | Ceres-nonlinear_conjugate_gradient_type                    | str   |
    +------------------------------------------------------------+-------+
    | Ceres-max_lbfgs_rank                                       | int   |
    +------------------------------------------------------------+-------+
    | Ceres-use_approximate_eigenvalue_bfgs_scaling              | bool  |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_interpolation_type                       | str   |
    +------------------------------------------------------------+-------+
    | Ceres-min_line_search_step_size                            | float |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_sufficient_function_decrease             | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_line_search_step_contraction                     | float |
    +------------------------------------------------------------+-------+
    | Ceres-min_line_search_step_contraction                     | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_num_line_search_step_size_iterations             | int   |
    +------------------------------------------------------------+-------+
    | Ceres-max_num_line_search_direction_restarts               | int   |
    +------------------------------------------------------------+-------+
    | Ceres-line_search_sufficient_curvature_decrease            | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_line_search_step_expansion                       | float |
    +------------------------------------------------------------+-------+
    | Ceres-max_num_iterations                                   | int   |
    +------------------------------------------------------------+-------+
    | Ceres-max_solver_time_in_seconds                           | float |
    +------------------------------------------------------------+-------+
    | Ceres-function_tolerance                                   | float |
    +------------------------------------------------------------+-------+
    | Ceres-gradient_tolerance                                   | float |
    +------------------------------------------------------------+-------+
    | Ceres-parameter_tolerance                                  | float |
    +------------------------------------------------------------+-------+
    | Ceres-logging_type                                         | str   |
    +------------------------------------------------------------+-------+
    | Ceres-minimizer_progress_to_stdout                         | bool  |
    +------------------------------------------------------------+-------+

    See also
    --------
    AbdoRackwitz, Cobyla, CMinpack, NLopt, SQP, TNC

    Examples
    --------
    List available algorithms:

    >>> import openturns as ot
    >>> print(ot.Ceres.GetAlgorithmNames())
    [LEVENBERG_MARQUARDT,DOGLEG,...

    Solve a least-squares problem:

    >>> dim = 2
    >>> residualFunction = ot.SymbolicFunction(['x0', 'x1'], ['10*(x1-x0^2)', '1-x0'])
    >>> problem = ot.LeastSquaresProblem(residualFunction)
    >>> problem.setBounds(ot.Interval([-3.0] * dim, [5.0] * dim))
    >>> ot.ResourceMap.AddAsScalar('Ceres-gradient_tolerance', 1e-5)  # doctest: +SKIP
    >>> algo = ot.Ceres(problem, 'LEVENBERG_MARQUARDT')  # doctest: +SKIP
    >>> algo.setStartingPoint([0.0] * dim)  # doctest: +SKIP
    >>> algo.run()  # doctest: +SKIP
    >>> result = algo.getResult()  # doctest: +SKIP
    >>> x_star = result.getOptimalPoint()  # doctest: +SKIP
    >>> y_star = result.getOptimalValue()  # doctest: +SKIP

    Or, solve a general optimization problem:

    >>> dim = 4
    >>> linear = ot.SymbolicFunction(['x1', 'x2', 'x3', 'x4'], ['(x1-1)^2+(x2-2)^2+(x3-3)^2+(x4-4)^2'])
    >>> problem = ot.OptimizationProblem(linear)
    >>> ot.ResourceMap.AddAsScalar('Ceres-gradient_tolerance', 1e-5)  # doctest: +SKIP
    >>> algo = ot.Ceres(problem, 'BFGS')  # doctest: +SKIP
    >>> algo.setStartingPoint([0.0] * 4)  # doctest: +SKIP
    >>> algo.run()  # doctest: +SKIP
    >>> result = algo.getResult()  # doctest: +SKIP
    >>> x_star = result.getOptimalPoint()  # doctest: +SKIP
    >>> y_star = result.getOptimalValue()  # doctest: +SKIP
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.Ceres_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.Ceres_run(self)

    @staticmethod
    def GetAlgorithmNames():
        r"""
        Accessor to the list of algorithms provided, by names.

        Returns
        -------
        names : :class:`~openturns.Description`
            List of the available algorithm names, following Ceres' naming convention.

            The trust region methods cannot solve general optimization
            problems; in that case a warning is printed and the default line
            search method is used instead.

        Examples
        --------
        >>> import openturns as ot
        >>> print(ot.Ceres.GetAlgorithmNames())
        [LEVENBERG_MARQUARDT,DOGLEG,STEEPEST_DESCENT,NONLINEAR_CONJUGATE_GRADIENT,LBFGS,BFGS]
        """
        return _optim.Ceres_GetAlgorithmNames()

    def setAlgorithmName(self, algoName):
        r"""
        Accessor to the algorithm name.

        Parameters
        ----------
        algoName : str
            The identifier of the algorithm.
        """
        return _optim.Ceres_setAlgorithmName(self, algoName)

    def getAlgorithmName(self):
        r"""
        Accessor to the algorithm name.

        Returns
        -------
        algoName : str
            The identifier of the algorithm.
        """
        return _optim.Ceres_getAlgorithmName(self)

    def __repr__(self):
        return _optim.Ceres___repr__(self)

    def __str__(self, *args):
        return _optim.Ceres___str__(self, *args)

    def __init__(self, *args):
        _optim.Ceres_swiginit(self, _optim.new_Ceres(*args))
    __swig_destroy__ = _optim.delete_Ceres

# Register Ceres in _optim:
_optim.Ceres_swigregister(Ceres)
class CMinpack(OptimizationAlgorithmImplementation):
    r"""
    Interface to CMinpack.

    This class exposes a least-squares solver from the non-linear optimization library [cminpack2007]_.

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Least-squares optimization problem to solve.

    Notes
    -----
    The algorithm relies on the lmder routine, which uses first-order derivatives.
    Box bound constraints are supported through a change of variables.

    More details are available `here <http://devernay.free.fr/hacks/cminpack/minpack-documentation.txt>`_.

    See also
    --------
    AbdoRackwitz, Cobyla, NLopt, SQP, TNC

    Examples
    --------
    >>> import openturns as ot
    >>> dim = 2
    >>> residualFunction = ot.SymbolicFunction(['x0', 'x1'], ['10*(x1-x0^2)', '0.5-x0', '0.3-2*x1'])
    >>> problem = ot.LeastSquaresProblem(residualFunction)
    >>> problem.setBounds(ot.Interval([-3.0] * dim, [5.0] * dim))
    >>> algo = ot.CMinpack(problem)  # doctest: +SKIP
    >>> algo.setStartingPoint([0.0] * dim)  # doctest: +SKIP
    >>> algo.run()  # doctest: +SKIP
    >>> result = algo.getResult()  # doctest: +SKIP
    >>> x_star = result.getOptimalPoint()  # doctest: +SKIP
    >>> y_star = result.getOptimalValue()  # doctest: +SKIP
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.CMinpack_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.CMinpack_run(self)

    def __repr__(self):
        return _optim.CMinpack___repr__(self)

    def __str__(self, *args):
        return _optim.CMinpack___str__(self, *args)

    def __init__(self, *args):
        _optim.CMinpack_swiginit(self, _optim.new_CMinpack(*args))
    __swig_destroy__ = _optim.delete_CMinpack

# Register CMinpack in _optim:
_optim.CMinpack_swigregister(CMinpack)
class Cobyla(OptimizationAlgorithmImplementation):
    r"""
    Constrained Optimization BY Linear Approximations solver.

    Available constructors:
        Cobyla(*problem*)

        Cobyla(*problem, rhoBeg*)

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Optimization problem to solve.
    rhoBeg : float
        A reasonable initial change to the variables.

    Notes
    -----
    It constructs successive linear approximations of the objective function and
    constraints via a simplex of :math:`d+1` points, and optimizes these
    approximations in a trust region at each step.
    This solver uses no derivative information and supports all types of constraints.

    See also
    --------
    AbdoRackwitz, SQP, TNC, NLopt

    Examples
    --------
    >>> import openturns as ot
    >>> model = ot.SymbolicFunction(['E', 'F', 'L', 'I'], ['-F*L^3/(3*E*I)'])
    >>> problem = ot.NearestPointProblem(model, 5.0)
    >>> algo = ot.Cobyla(problem)
    >>> algo.setMaximumCallsNumber(10000)
    >>> algo.setStartingPoint([1.0] * 4)
    >>> algo.run()
    >>> result = algo.getResult()
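
    The *rhoBeg* parameter can be set at construction or afterwards through the
    accessors documented below (a minimal sketch reusing the problem above; the
    values are arbitrary):

    >>> algo = ot.Cobyla(problem, 0.1)
    >>> algo.setRhoBeg(0.5)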
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.Cobyla_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.Cobyla_run(self)

    def getRhoBeg(self):
        r"""
        Accessor to rhoBeg parameter.

        Returns
        -------
        rhoBeg : float
            A reasonable initial change to the variables.
        """
        return _optim.Cobyla_getRhoBeg(self)

    def setRhoBeg(self, rhoBeg):
        r"""
        Accessor to rhoBeg parameter.

        Parameters
        ----------
        rhoBeg : float
            A reasonable initial change to the variables.
        """
        return _optim.Cobyla_setRhoBeg(self, rhoBeg)

    def __repr__(self):
        return _optim.Cobyla___repr__(self)

    def __init__(self, *args):
        _optim.Cobyla_swiginit(self, _optim.new_Cobyla(*args))
    __swig_destroy__ = _optim.delete_Cobyla

# Register Cobyla in _optim:
_optim.Cobyla_swigregister(Cobyla)
class Dlib(OptimizationAlgorithmImplementation):
    r"""
    Base class for optimization solvers from the [dlib2009]_ library.

    Available constructors:
        Dlib(*algoName*)

        Dlib(*problem, algoName*)

    Parameters
    ----------
    algoName : str, optional
        Identifier of the optimization method to use. Use :func:`GetAlgorithmNames()` to
        list available algorithms. Default is 'BFGS'.
    problem : :class:`~openturns.OptimizationProblem`, optional
        Optimization problem to solve. Default is an empty problem.

    Notes
    -----
    The table below presents some properties of the available algorithms from dlib.
    Details on the optimization methods are available at http://dlib.net/optimization.html.

    +------------------+---------------------+----------------------+-------------------+--------------------+
    | Algorithm        | Description         | Problem type support | Derivatives info  | Constraint support |
    +==================+=====================+======================+===================+====================+
    | cg               | Conjugate gradient  | General              | First derivative  | Bounds             |
    +------------------+---------------------+----------------------+-------------------+--------------------+
    | bfgs             | BFGS                | General              | First derivative  | Bounds             |
    +------------------+---------------------+----------------------+-------------------+--------------------+
    | lbfgs            | Limited memory BFGS | General              | First derivative  | Bounds             |
    +------------------+---------------------+----------------------+-------------------+--------------------+
    | newton           | Newton              | General              | First and second  | Bounds             |
    |                  |                     |                      | derivatives       |                    |
    +------------------+---------------------+----------------------+-------------------+--------------------+
    | global           | Global optimization | General              | No derivative     | Bounds needed      |
    +------------------+---------------------+----------------------+-------------------+--------------------+
    | least_squares    | Least squares (best | Least squares        | First derivative  | None               |
    |                  | for large residual) |                      |                   |                    |
    +------------------+---------------------+----------------------+-------------------+--------------------+
    | least_squares_lm | Least squares LM    | Least squares        | First derivative  | None               |
    |                  | (small residual)    |                      |                   |                    |
    +------------------+---------------------+----------------------+-------------------+--------------------+
    | trust_region     | Trust region        | General              | No derivative     | None               |
    +------------------+---------------------+----------------------+-------------------+--------------------+

    Derivatives are managed automatically by OpenTURNS, according to the available
    data (analytical formula or finite difference computation).

    The global optimization algorithm requires finite fixed bounds for all input
    variables. In this strategy, the solver starts by refining a local extremum
    until no significant improvement is found. Then it tries to find better extrema
    in the rest of the domain defined by the user, until the maximum number of
    function evaluations is reached.

    In least squares and trust region methods, the optimization process continues
    until the user criteria on absolute, relative and residual errors are satisfied,
    or until no significant improvement can be achieved.

    See also
    --------
    AbdoRackwitz, Cobyla, NLopt

    Examples
    --------
    Define an optimization problem to find the minimum of the Rosenbrock function:

    >>> import openturns as ot
    >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
    >>> problem = ot.OptimizationProblem(rosenbrock)
    >>> cgSolver = ot.Dlib(problem, 'cg')  # doctest: +SKIP
    >>> cgSolver.setStartingPoint([0, 0])  # doctest: +SKIP
    >>> cgSolver.setMaximumResidualError(1.e-3)  # doctest: +SKIP
    >>> cgSolver.setMaximumIterationNumber(100)  # doctest: +SKIP
    >>> cgSolver.run()  # doctest: +SKIP
    >>> result = cgSolver.getResult()  # doctest: +SKIP
    >>> x_star = result.getOptimalPoint()  # doctest: +SKIP
    >>> y_star = result.getOptimalValue()  # doctest: +SKIP
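
    The global strategy needs finite bounds on every variable (a minimal sketch
    reusing the problem above; the evaluation budget is an arbitrary choice):

    >>> problem.setBounds(ot.Interval([-2.0] * 2, [2.0] * 2))  # doctest: +SKIP
    >>> globalSolver = ot.Dlib(problem, 'global')  # doctest: +SKIP
    >>> globalSolver.setMaximumCallsNumber(300)  # doctest: +SKIP
    >>> globalSolver.run()  # doctest: +SKIP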
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.Dlib_getClassName(self)

    @staticmethod
    def GetAlgorithmNames():
        r"""
        List of dlib available optimization algorithms.

        Returns
        -------
        algorithmNames : :class:`~openturns.Description`
            List of the names of available dlib search strategies.
        """
        return _optim.Dlib_GetAlgorithmNames()

    def getAlgorithmName(self):
        r"""
        Accessor to the algorithm name.

        Returns
        -------
        algoName : str
            The identifier of the algorithm.
        """
        return _optim.Dlib_getAlgorithmName(self)

    def setAlgorithmName(self, algoName):
        r"""
        Accessor to the algorithm name.

        Parameters
        ----------
        algoName : str
            The identifier of the algorithm.
        """
        return _optim.Dlib_setAlgorithmName(self, algoName)

    def getWolfeRho(self):
        r"""
        Accessor to wolfeRho parameter. Relevant for algorithms CG, BFGS/LBFGS and
        Newton only.

        Returns
        -------
        wolfeRho : float
            The value of the wolfeRho parameter used in the optimization process.
        """
        return _optim.Dlib_getWolfeRho(self)

    def setWolfeRho(self, wolfeRho):
        r"""
        Accessor to the wolfeRho parameter, which sets the value used during the
        line search process.
        Relevant for algorithms CG, BFGS/LBFGS and Newton only.

        Parameters
        ----------
        wolfeRho : float
            The value of the wolfeRho parameter to use in the optimization process.
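
        Examples
        --------
        A minimal sketch (the value 0.01 is an arbitrary choice):

        >>> import openturns as ot
        >>> algo = ot.Dlib('cg')  # doctest: +SKIP
        >>> algo.setWolfeRho(0.01)  # doctest: +SKIP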
        """
        return _optim.Dlib_setWolfeRho(self, wolfeRho)

    def getWolfeSigma(self):
        r"""
        Accessor to wolfeSigma parameter. Relevant for algorithms CG, BFGS/LBFGS
        and Newton only.

        Returns
        -------
        wolfeSigma : float
            The value of the wolfeSigma parameter used in the optimization process.
        """
        return _optim.Dlib_getWolfeSigma(self)

    def setWolfeSigma(self, wolfeSigma):
        r"""
        Accessor to the wolfeSigma parameter, which sets the value used during the
        line search process.
        Relevant for algorithms CG, BFGS/LBFGS and Newton only.

        Parameters
        ----------
        wolfeSigma : float
            The value of the wolfeSigma parameter to use in the optimization process.
        """
        return _optim.Dlib_setWolfeSigma(self, wolfeSigma)

    def getMaxLineSearchIterations(self):
        r"""
        Accessor to maxLineSearchIterations parameter. Relevant for algorithms CG, 
        BFGS/LBFGS and Newton only.

        Returns
        -------
        maxLineSearchIterations : int
            The maximum number of line search iterations to perform at each iteration
            of the optimization process.
            Relevant for algorithms CG, BFGS/LBFGS and Newton only.
        """
        return _optim.Dlib_getMaxLineSearchIterations(self)

    def setMaxLineSearchIterations(self, maxLineSearchIterations):
        r"""
        Accessor to the maxLineSearchIterations parameter, which sets the value
        used during the line search process.
        Relevant for algorithms CG, BFGS/LBFGS and Newton only.

        Parameters
        ----------
        maxLineSearchIterations : int
            The value of the maxLineSearchIterations parameter to use in the
            optimization process.
        """
        return _optim.Dlib_setMaxLineSearchIterations(self, maxLineSearchIterations)

    def getMaxSize(self):
        r"""
        Accessor to maxSize parameter. Relevant for LBFGS algorithm only.

        Returns
        -------
        maxSize : int
            The maximum amount of memory used during the optimization process.
            10 is a typical value for maxSize.
            Relevant for LBFGS algorithm only.
        """
        return _optim.Dlib_getMaxSize(self)

    def setMaxSize(self, maxSize):
        r"""
        Accessor to the maxSize parameter, which sets the value used during the
        optimization process.
        Relevant for LBFGS algorithm only.

        Parameters
        ----------
        maxSize : int
            The maximum amount of memory to use during the optimization process.
            10 is a typical value for maxSize.
            Relevant for LBFGS algorithm only.
        """
        return _optim.Dlib_setMaxSize(self, maxSize)

    def getInitialTrustRegionRadius(self):
        r"""
        Accessor to initialTrustRegionRadius parameter. Relevant for trust region,
        least squares and least squares LM algorithms only.

        Returns
        -------
        initialTrustRegionRadius : float
            The radius of the initial trust region used in optimization algorithms.
        """
        return _optim.Dlib_getInitialTrustRegionRadius(self)

    def setInitialTrustRegionRadius(self, radius):
        r"""
        Accessor to the initialTrustRegionRadius parameter, which sets the value
        used during the optimization process.
        Relevant for trust region, least squares and least squares LM algorithms only.

        Parameters
        ----------
        initialTrustRegionRadius : float
            The radius of the initial trust region to use in the optimization process.
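
        Examples
        --------
        A minimal sketch (the radius value is an arbitrary choice):

        >>> import openturns as ot
        >>> algo = ot.Dlib('trust_region')  # doctest: +SKIP
        >>> algo.setInitialTrustRegionRadius(10.0)  # doctest: +SKIP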
        """
        return _optim.Dlib_setInitialTrustRegionRadius(self, radius)

    def run(self):
        r"""
        Performs the actual optimization process. Results are stored in the :class:`~openturns.OptimizationResult`
        parameter of the :class:`~openturns.Dlib` object.
        """
        return _optim.Dlib_run(self)

    def __repr__(self):
        return _optim.Dlib___repr__(self)

    def __str__(self, *args):
        return _optim.Dlib___str__(self, *args)

    def __init__(self, *args):
        _optim.Dlib_swiginit(self, _optim.new_Dlib(*args))
    __swig_destroy__ = _optim.delete_Dlib

# Register Dlib in _optim:
_optim.Dlib_swigregister(Dlib)
class Ipopt(OptimizationAlgorithmImplementation):
    r"""
    Ipopt nonlinear optimization solver.

    `Ipopt <https://coin-or.github.io/Ipopt/>`_ is a software package for large-scale nonlinear optimization.

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`, optional
        Optimization problem to solve. Default is an empty problem.

    Notes
    -----
    *Algorithms parameters:*

      Ipopt algorithms can be adapted using numerous parameters, described `here <https://coin-or.github.io/Ipopt/OPTIONS.html>`_.
      These parameters can be modified using the :class:`~openturns.ResourceMap`.
      For every option ``optionName``, one simply adds a key named ``Ipopt-optionName`` with the value to use, as shown below::

        >>> import openturns as ot
        >>> ot.ResourceMap.AddAsUnsignedInteger('Ipopt-print_level', 5)
        >>> ot.ResourceMap.AddAsScalar('Ipopt-diverging_iterates_tol', 1e15)

    *Convergence criteria:*

      To estimate the convergence of the algorithm during the optimization process, Ipopt uses specific tolerance parameters, different from the standard absolute/relative/residual errors used in OpenTURNS. The definition of Ipopt's parameters can be found in `this paper, page 3 <http://www.optimization-online.org/DB_FILE/2004/03/836.pdf>`_.

      Thus the attributes ``maximumAbsoluteError``, ``maximumRelativeError``, ``maximumResidualError`` and ``maximumConstraintError`` defined in ``OptimizationAlgorithm`` are not used in this case. The tolerances used by Ipopt can be set using specific options (e.g. ``tol``, ``dual_inf_tol`` ...).


    See also
    --------
    Bonmin

    Examples
    --------
    The code below solves the following optimization problem:

    .. math:: \min \left( - x_0 - x_1 - x_2 \right)

    subject to

    .. math::

        \left(x_1 - \frac{1}{2}\right)^2 + \left(x_2 - \frac{1}{2}\right)^2 \leq \frac{1}{4}

        x_0 - x_1 \leq 0

        x_0 + x_2 + x_3 \leq 2

        x_0 \in \{0,1\}

        (x_1, x_2) \in \mathbb{R}^2

        x_3 \in \mathbb{N}


    >>> import openturns as ot

    >>> # Definition of objective function
    >>> objectiveFunction = ot.SymbolicFunction(['x0','x1','x2','x3'], ['-x0 -x1 -x2'])

    >>> # Definition of variables bounds
    >>> bounds = ot.Interval([0,0,0,0],[1,1e308,1e308,5],[True,True,True,True],[True,False,False,True])

    Inequality constraints are defined by a function :math:`h` such that :math:`h(x) \geq 0`. The inequality expression above has to be modified to match this formulation.

    >>> # Definition of constraints
    >>> h = ot.SymbolicFunction(['x0','x1','x2','x3'], ['-(x1-1/2)^2 - (x2-1/2)^2 + 1/4', '-x0 + x1', '-x0 - x2 - x3 + 2'])

    >>> # Setting up Ipopt problem
    >>> problem = ot.OptimizationProblem(objectiveFunction)
    >>> problem.setBounds(bounds)
    >>> problem.setInequalityConstraint(h)

    >>> algo = ot.Ipopt(problem)
    >>> algo.setStartingPoint([0,0,0,0])
    >>> algo.setMaximumCallsNumber(1000)
    >>> algo.setMaximumIterationNumber(1000)
    >>> algo.setMaximumTimeDuration(5.0)

    >>> # Running the solver
    >>> algo.run() # doctest: +SKIP

    >>> # Retrieving the results
    >>> result = algo.getResult() # doctest: +SKIP
    >>> optimalPoint = result.getOptimalPoint() # doctest: +SKIP
    >>> optimalValue = result.getOptimalValue() # doctest: +SKIP
    >>> evaluationNumber = result.getInputSample().getSize() # doctest: +SKIP 
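
    Ipopt-specific stopping tolerances are set the same way, through the
    ``Ipopt-optionName`` keys (a minimal sketch; ``tol`` is one of the options
    mentioned in the Notes):

    >>> ot.ResourceMap.AddAsScalar('Ipopt-tol', 1e-8)  # doctest: +SKIP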
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.Ipopt_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.Ipopt_run(self)

    def __str__(self, *args):
        return _optim.Ipopt___str__(self, *args)

    def __repr__(self):
        return _optim.Ipopt___repr__(self)

    def __init__(self, *args):
        _optim.Ipopt_swiginit(self, _optim.new_Ipopt(*args))
    __swig_destroy__ = _optim.delete_Ipopt

# Register Ipopt in _optim:
_optim.Ipopt_swigregister(Ipopt)
class TNC(OptimizationAlgorithmImplementation):
    r"""
    Truncated Newton Constrained solver.

    Truncated-Newton method non-linear optimizer.
    This solver uses no derivative information and only supports bound constraints.

    Available constructors:
        TNC(*problem*)

        TNC(*problem, scale, offset, maxCGit, eta, stepmx, accuracy, fmin, rescale*)

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Optimization problem to solve.
    scale : sequence of float
        Scaling factors to apply to each variable.
    offset : sequence of float
        Constant to subtract from each variable.
    maxCGit : int
        Maximum number of hessian*vector evaluations per main iteration.
    eta : float
        Severity of the line search.
    stepmx : float
        Maximum step for the line search, may be increased during the call.
    accuracy : float
        Relative precision for finite difference calculations.
    fmin : float
        Minimum function value estimate.
    rescale : float
        f scaling factor (in log10) used to trigger f value rescaling.

    See also
    --------
    AbdoRackwitz, SQP, Cobyla, NLopt

    Examples
    --------
    >>> import openturns as ot
    >>> model = ot.SymbolicFunction(['E', 'F', 'L', 'I'], ['-F*L^3/(3*E*I)'])
    >>> bounds = ot.Interval([1.0]*4, [2.0]*4)
    >>> problem = ot.OptimizationProblem(model, ot.Function(), ot.Function(), bounds)
    >>> algo = ot.TNC(problem)
    >>> algo.setStartingPoint([1.0] * 4)
    >>> algo.run()
    >>> result = algo.getResult()
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.TNC_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.TNC_run(self)

    def getScale(self):
        r"""
        Accessor to scale parameter.

        Returns
        -------
        scale : :class:`~openturns.Point`
            Scaling factors to apply to each variable. If empty, the factors are
            min-max for interval bounded variables and 1+|x| for the others.
        """
        return _optim.TNC_getScale(self)

    def setScale(self, scale):
        r"""
        Accessor to scale parameter.

        Parameters
        ----------
        scale : sequence of float
            Scaling factors to apply to each variable. If empty, the factors are
            min-max for interval bounded variables and 1+|x| for the others.
        """
        return _optim.TNC_setScale(self, scale)

    def getOffset(self):
        r"""
        Accessor to offset parameter.

        Returns
        -------
        offset : :class:`~openturns.Point`
            Constant to subtract from each variable. If empty, the constants are
            (min-max)/2 for interval bounded variables and x for the others.
        """
        return _optim.TNC_getOffset(self)

    def setOffset(self, offset):
        r"""
        Accessor to offset parameter.

        Parameters
        ----------
        offset : sequence of float
            Constant to subtract from each variable. If empty, the constants are
            (min-max)/2 for interval bounded variables and x for the others.
        """
        return _optim.TNC_setOffset(self, offset)

    def getMaxCGit(self):
        r"""
        Accessor to maxCGit parameter.

        Returns
        -------
        maxCGit : int
            Maximum number of hessian*vector evaluations per main iteration.
            If maxCGit = 0, the direction chosen is -gradient;
            if maxCGit < 0, maxCGit is set to max(1,min(50,n/2)).
        """
        return _optim.TNC_getMaxCGit(self)

    def setMaxCGit(self, maxCGit):
        r"""
        Accessor to maxCGit parameter.

        Parameters
        ----------
        maxCGit : int
            Maximum number of hessian*vector evaluations per main iteration.
            If maxCGit = 0, the direction chosen is -gradient;
            if maxCGit < 0, maxCGit is set to max(1,min(50,n/2)).
        """
        return _optim.TNC_setMaxCGit(self, maxCGit)

    def getEta(self):
        r"""
        Accessor to eta parameter.

        Returns
        -------
        eta : float
            Severity of the line search. If < 0 or > 1, set to 0.25.
        """
        return _optim.TNC_getEta(self)

    def setEta(self, eta):
        r"""
        Accessor to eta parameter.

        Parameters
        ----------
        eta : float
            Severity of the line search. If < 0 or > 1, set to 0.25.
        """
        return _optim.TNC_setEta(self, eta)

    def getStepmx(self):
        r"""
        Accessor to stepmx parameter.

        Returns
        -------
        stepmx : float
            Maximum step for the line search, may be increased during the call.
            If too small, it will be set to 10.0.
        """
        return _optim.TNC_getStepmx(self)

    def setStepmx(self, stepmx):
        r"""
        Accessor to stepmx parameter.

        Parameters
        ----------
        stepmx : float
            Maximum step for the line search, may be increased during the call.
            If too small, it will be set to 10.0.
        """
        return _optim.TNC_setStepmx(self, stepmx)

    def getAccuracy(self):
        r"""
        Accessor to accuracy parameter.

        Returns
        -------
        accuracy : float
            Relative precision for finite difference calculations.
            If <= machine_precision, set to sqrt(machine_precision).
        """
        return _optim.TNC_getAccuracy(self)

    def setAccuracy(self, accuracy):
        r"""
        Accessor to accuracy parameter.

        Parameters
        ----------
        accuracy : float
            Relative precision for finite difference calculations.
            If <= machine_precision, set to sqrt(machine_precision).
        """
        return _optim.TNC_setAccuracy(self, accuracy)

    def getFmin(self):
        r"""
        Accessor to fmin parameter.

        Returns
        -------
        fmin : float
            Minimum function value estimate.
        """
        return _optim.TNC_getFmin(self)

    def setFmin(self, fmin):
        r"""
        Accessor to fmin parameter.

        Parameters
        ----------
        fmin : float
            Minimum function value estimate.
        """
        return _optim.TNC_setFmin(self, fmin)

    def getRescale(self):
        r"""
        Accessor to rescale parameter.

        Returns
        -------
        rescale : float
            f scaling factor (in log10) used to trigger f value rescaling.
            If 0, rescale at each iteration; if a big value, never rescale;
            if < 0, rescale is set to 1.3.
        """
        return _optim.TNC_getRescale(self)

    def setRescale(self, rescale):
        r"""
        Accessor to rescale parameter.

        Parameters
        ----------
        rescale : float
            f scaling factor (in log10) used to trigger f value rescaling.
            If 0, rescale at each iteration; if a big value, never rescale;
            if < 0, rescale is set to 1.3.
        """
        return _optim.TNC_setRescale(self, rescale)

    def __repr__(self):
        return _optim.TNC___repr__(self)

    def __init__(self, *args):
        _optim.TNC_swiginit(self, _optim.new_TNC(*args))
    __swig_destroy__ = _optim.delete_TNC

# Register TNC in _optim:
_optim.TNC_swigregister(TNC)
class SQP(OptimizationAlgorithmImplementation):
    r"""
    Sequential Quadratic Programming solver.

    This solver uses second derivative information and can only be used to
    solve level function problems.

    Available constructors:
        SQP(*problem*)

        SQP(*problem, tau, omega, smooth*)

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Optimization problem to solve.
    tau : float
        Multiplicative decrease of linear step.
    omega : float
        Armijo factor.
    smooth : float
        Growing factor in penalization term.

    Notes
    -----
    SQP methods solve a sequence of optimization subproblems, each of which
    optimizes a quadratic model of the objective subject to a linearization of
    the constraints.

    See also
    --------
    AbdoRackwitz, Cobyla, TNC, NLopt

    Examples
    --------
    >>> import openturns as ot
    >>> model = ot.SymbolicFunction(['x1', 'x2', 'x3', 'x4'], ['x1*cos(x1)+2*x2*x3-3*x3+4*x3*x4'])
    >>> problem = ot.NearestPointProblem(model, -0.5)
    >>> algo = ot.SQP(problem)
    >>> algo.setStartingPoint([1.0] * 4)
    >>> algo.run()
    >>> result = algo.getResult()
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.SQP_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.SQP_run(self)

    def getTau(self):
        r"""
        Accessor to tau parameter.

        Returns
        -------
        tau : float
            Multiplicative decrease of linear step.
        """
        return _optim.SQP_getTau(self)

    def setTau(self, tau):
        r"""
        Accessor to tau parameter.

        Parameters
        ----------
        tau : float
            Multiplicative decrease of linear step.
        """
        return _optim.SQP_setTau(self, tau)

    def getOmega(self):
        r"""
        Accessor to omega parameter.

        Returns
        -------
        omega : float
            Armijo factor.
        """
        return _optim.SQP_getOmega(self)

    def setOmega(self, tau):
        r"""
        Accessor to omega parameter.

        Parameters
        ----------
        omega : float
            Armijo factor.
        """
        return _optim.SQP_setOmega(self, tau)

    def getSmooth(self):
        r"""
        Accessor to smooth parameter.

        Returns
        -------
        smooth : float
            Growing factor in penalization term.
        """
        return _optim.SQP_getSmooth(self)

    def setSmooth(self, tau):
        r"""
        Accessor to smooth parameter.

        Parameters
        ----------
        smooth : float
            Growing factor in penalization term.
        """
        return _optim.SQP_setSmooth(self, tau)

    def __repr__(self):
        return _optim.SQP___repr__(self)

    def __init__(self, *args):
        _optim.SQP_swiginit(self, _optim.new_SQP(*args))
    __swig_destroy__ = _optim.delete_SQP

# Register SQP in _optim:
_optim.SQP_swigregister(SQP)
class OptimizationResultCollection(object):
    r"""
    Collection.

    Examples
    --------
    >>> import openturns as ot

    - Collection of **real values**:

    >>> ot.ScalarCollection(2)
    [0,0]
    >>> ot.ScalarCollection(2, 3.25)
    [3.25,3.25]
    >>> vector = ot.ScalarCollection([2.0, 1.5, 2.6])
    >>> vector
    [2,1.5,2.6]
    >>> vector[1] = 4.2
    >>> vector
    [2,4.2,2.6]
    >>> vector.add(3.8)
    >>> vector
    [2,4.2,2.6,3.8]

    - Collection of **complex values**:

    >>> ot.ComplexCollection(2)
    [(0,0),(0,0)]
    >>> ot.ComplexCollection(2, 3+4j)
    [(3,4),(3,4)]
    >>> vector = ot.ComplexCollection([2+3j, 1-4j, 3.0])
    >>> vector
    [(2,3),(1,-4),(3,0)]
    >>> vector[1] = 4+3j
    >>> vector
    [(2,3),(4,3),(3,0)]
    >>> vector.add(5+1j)
    >>> vector
    [(2,3),(4,3),(3,0),(5,1)]

    - Collection of **booleans**:

    >>> ot.BoolCollection(3)
    [0,0,0]
    >>> ot.BoolCollection(3, 1)
    [1,1,1]
    >>> vector = ot.BoolCollection([0, 1, 0])
    >>> vector
    [0,1,0]
    >>> vector[1] = 0
    >>> vector
    [0,0,0]
    >>> vector.add(1)
    >>> vector
    [0,0,0,1]

    - Collection of **distributions**:

    >>> print(ot.DistributionCollection(2))
    [Uniform(a = -1, b = 1),Uniform(a = -1, b = 1)]
    >>> print(ot.DistributionCollection(2, ot.Gamma(2.75, 1.0)))
    [Gamma(k = 2.75, lambda = 1, gamma = 0),Gamma(k = 2.75, lambda = 1, gamma = 0)]
    >>> vector = ot.DistributionCollection([ot.Normal(), ot.Uniform()])
    >>> print(vector)
    [Normal(mu = 0, sigma = 1),Uniform(a = -1, b = 1)]
    >>> vector[1] = ot.Uniform(-0.5, 1)
    >>> print(vector)
    [Normal(mu = 0, sigma = 1),Uniform(a = -0.5, b = 1)]
    >>> vector.add(ot.Gamma(2.75, 1.0))
    >>> print(vector)
    [Normal(mu = 0, sigma = 1),Uniform(a = -0.5, b = 1),Gamma(k = 2.75, lambda = 1, gamma = 0)]
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __swig_destroy__ = _optim.delete_OptimizationResultCollection

    def clear(self):
        r"""
        Reset the collection to zero dimension.

        Examples
        --------
        >>> import openturns as ot
        >>> x = ot.Point(2)
        >>> x.clear()
        >>> x
        class=Point name=Unnamed dimension=0 values=[]
        """
        return _optim.OptimizationResultCollection_clear(self)

    def __len__(self):
        return _optim.OptimizationResultCollection___len__(self)

    def __eq__(self, rhs):
        return _optim.OptimizationResultCollection___eq__(self, rhs)

    def __contains__(self, val):
        return _optim.OptimizationResultCollection___contains__(self, val)

    def select(self, marginalIndices):
        r"""
        Selection from indices.

        Parameters
        ----------
        indices : sequence of int
            Indices to select.

        Returns
        -------
        coll : sequence
            Sub-collection of values at the selection indices.
        """
        return _optim.OptimizationResultCollection_select(self, marginalIndices)

    def __getitem__(self, i):
        return _optim.OptimizationResultCollection___getitem__(self, i)

    def __setitem__(self, i, val):
        return _optim.OptimizationResultCollection___setitem__(self, i, val)

    def __delitem__(self, i):
        return _optim.OptimizationResultCollection___delitem__(self, i)

    def at(self, *args):
        r"""
        Access to an element of the collection.

        Parameters
        ----------
        index : positive int
            Position of the element to access.

        Returns
        -------
        element : type depends on the type of the collection
            Element of the collection at the position *index*.
        """
        return _optim.OptimizationResultCollection_at(self, *args)

    def add(self, *args):
        r"""
        Append a component (in-place).

        Parameters
        ----------
        value : type depends on the type of the collection.
            The component to append.

        Examples
        --------
        >>> import openturns as ot
        >>> x = ot.Point(2)
        >>> x.add(1.)
        >>> print(x)
        [0,0,1]
        """
        return _optim.OptimizationResultCollection_add(self, *args)

    def getSize(self):
        r"""
        Get the collection's dimension (or size).

        Returns
        -------
        n : int
            The number of components in the collection.
        """
        return _optim.OptimizationResultCollection_getSize(self)

    def resize(self, newSize):
        r"""
        Change the size of the collection.

        Parameters
        ----------
        newSize : positive int
            New size of the collection.

        Notes
        -----
        If the new size is smaller than the older one, the last elements are
        thrown away, else the new elements are set to the default value of the
        element type.

        Examples
        --------
        >>> import openturns as ot
        >>> x = ot.Point(2, 4)
        >>> print(x)
        [4,4]
        >>> x.resize(1)
        >>> print(x)
        [4]
        >>> x.resize(4)
        >>> print(x)
        [4,0,0,0]
        """
        return _optim.OptimizationResultCollection_resize(self, newSize)

    def isEmpty(self):
        r"""
        Tell if the collection is empty.

        Returns
        -------
        isEmpty : bool
            *True* if there is no element in the collection.

        Examples
        --------
        >>> import openturns as ot
        >>> x = ot.Point(2)
        >>> x.isEmpty()
        False
        >>> x.clear()
        >>> x.isEmpty()
        True
        """
        return _optim.OptimizationResultCollection_isEmpty(self)

    def find(self, val):
        r"""
        Find the index of a given value.

        Parameters
        ----------
        val : collection value type
            The value to find.

        Returns
        -------
        index : int
            The index of the first occurrence of the value, or the size of the
            container if not found. When several values match, only the first
            index is returned.
        """
        return _optim.OptimizationResultCollection_find(self, val)

    def __repr__(self):
        return _optim.OptimizationResultCollection___repr__(self)

    def __str__(self, *args):
        return _optim.OptimizationResultCollection___str__(self, *args)

    def __init__(self, *args):
        _optim.OptimizationResultCollection_swiginit(self, _optim.new_OptimizationResultCollection(*args))

# Register OptimizationResultCollection in _optim:
_optim.OptimizationResultCollection_swigregister(OptimizationResultCollection)
class MultiStart(OptimizationAlgorithmImplementation):
    r"""
    Multi-start optimization algorithm.

    The algorithm runs an optimization solver for multiple starting points and
    returns the best result of each local search.
    The algorithm succeeds when at least one local search succeeds.

    Parameters
    ----------
    solver : :class:`~openturns.OptimizationAlgorithm`
        The internal solver.
    startingSample : 2-d sequence of float
        Starting points set.

    Notes
    -----
    The starting point of the internal solver is ignored.
    If you want to use it, add it to *startingSample*.

    Stopping criteria used are the ones set in the internal solver.

    A global number of evaluations can be set; in that case, all starting
    points might not be used, depending on the number of evaluations allocated
    to the internal solver.

    Starting points provided through the *startingSample* parameter should be
    within the bounds of the :class:`~openturns.OptimizationProblem`, but this
    is not enforced.

    Examples
    --------
    First define the :class:`~openturns.OptimizationAlgorithm` to be run from
    multiple starting points.

    >>> import openturns as ot
    >>> dim = 2
    >>> model = ot.SymbolicFunction(['x', 'y'], ['x^2+y^2*(1-x)^3'])
    >>> bounds = ot.Interval([-2.0] * dim, [3.0] * dim)
    >>> problem = ot.OptimizationProblem(model)
    >>> problem.setBounds(bounds)
    >>> solver = ot.TNC(problem)

    Starting points must be manually specified.

    >>> uniform = ot.JointDistribution([ot.Uniform(-2.0, 3.0)] * dim)
    >>> ot.RandomGenerator.SetSeed(0)
    >>> startingSample = uniform.getSample(5)
    >>> print(startingSample)
        [ X0        X1        ]
    0 : [  1.14938   2.84712  ]
    1 : [  2.41403   2.6034   ]
    2 : [ -1.32362   0.515201 ]
    3 : [ -1.83749  -1.68397  ]
    4 : [ -0.264715 -0.536216 ]
    >>> algo = ot.MultiStart(solver, startingSample)
    >>> algo.run()
    >>> result = algo.getResult()
    >>> print(result.getOptimalPoint())
    [3,3]
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.MultiStart_getClassName(self)

    def __repr__(self):
        return _optim.MultiStart___repr__(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.MultiStart_run(self)

    def setProblem(self, problem):
        r"""
        Sets the optimization problem.

        Parameters
        ----------
        problem : :class:`~openturns.OptimizationProblem`
            Optimization problem.
        """
        return _optim.MultiStart_setProblem(self, problem)

    def setOptimizationAlgorithm(self, solver):
        r"""
        Internal solver accessor.

        Parameters
        ----------
        solver : :class:`~openturns.OptimizationAlgorithm`
            The internal solver.
        """
        return _optim.MultiStart_setOptimizationAlgorithm(self, solver)

    def getOptimizationAlgorithm(self):
        r"""
        Internal solver accessor.

        Returns
        -------
        solver : :class:`~openturns.OptimizationAlgorithm`
            The internal solver.
        """
        return _optim.MultiStart_getOptimizationAlgorithm(self)

    def setStartingPoint(self, point):
        r"""
        Inherited but raises an Exception.

        Notes
        -----
        This method is inherited from :class:`OptimizationAlgorithm` but makes
        no sense in a multi-start context.
        """
        return _optim.MultiStart_setStartingPoint(self, point)

    def setStartingSample(self, startingSample):
        r"""
        Accessor to the sample of starting points.

        Parameters
        ----------
        startingSample : 2-d sequence of float
            A new sample of starting points to overwrite the existing sample.
        """
        return _optim.MultiStart_setStartingSample(self, startingSample)

    def getStartingSample(self):
        r"""Accessor to the sample of starting points."""
        return _optim.MultiStart_getStartingSample(self)

    def getStartingPoint(self):
        r"""
        Inherited but raises an Exception.

        Notes
        -----
        This method is inherited from :class:`OptimizationAlgorithm` but makes
        no sense in a multi-start context.
        """
        return _optim.MultiStart_getStartingPoint(self)

    def getKeepResults(self):
        r"""
        Flag to keep intermediate results accessor.

        Returns
        -------
        keepResults : bool
            If *True* all the intermediate results are stored, otherwise they
            are ignored. Default value is *MultiStart-KeepResults* in
            :class:`~openturns.ResourceMap`.
        """
        return _optim.MultiStart_getKeepResults(self)

    def setKeepResults(self, keepResults):
        r"""
        Flag to keep intermediate results accessor.

        Parameters
        ----------
        keepResults : bool
            If *True* all the intermediate results are stored, otherwise they
            are ignored. Default value is *MultiStart-KeepResults* in
            :class:`~openturns.ResourceMap`.
        """
        return _optim.MultiStart_setKeepResults(self, keepResults)

    def getResultCollection(self):
        r"""
        Intermediate optimization results accessor.

        Returns
        -------
        results : :class:`~openturns.OptimizationResultCollection`
            Intermediate optimization results.
        """
        return _optim.MultiStart_getResultCollection(self)

    def __init__(self, *args):
        _optim.MultiStart_swiginit(self, _optim.new_MultiStart(*args))
    __swig_destroy__ = _optim.delete_MultiStart

# Register MultiStart in _optim:
_optim.MultiStart_swigregister(MultiStart)
class NLopt(OptimizationAlgorithmImplementation):
    r"""
    Interface to NLopt.

    This class exposes the solvers from the non-linear optimization library
    [nlopt2009]_.

    More details about available algorithms are available
    `here <http://ab-initio.mit.edu/wiki/index.php/NLopt_Algorithms>`_.

    Parameters
    ----------
    problem : :class:`~openturns.OptimizationProblem`
        Optimization problem to solve.
    algoName : str
        The NLopt identifier of the algorithm.
        Use :func:`GetAlgorithmNames()` to list available names.

    Notes
    -----
    Here are some properties of the different algorithms:

    +----------------------------+-------------------+-----------------------------+
    | Algorithm                  | Derivative info   | Constraint support          |
    +============================+===================+=============================+
    | AUGLAG                     | no derivative     | all                         |
    +----------------------------+-------------------+-----------------------------+
    | AUGLAG_EQ                  | no derivative     | all                         |
    +----------------------------+-------------------+-----------------------------+
    | GD_MLSL                    | first derivative  | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GD_MLSL_LDS                | first derivative  | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GD_STOGO (disabled)        | first derivative  | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GD_STOGO_RAND (disabled)   | first derivative  | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_AGS (disabled)          | no derivative     | bounds required, inequality |
    +----------------------------+-------------------+-----------------------------+
    | GN_CRS2_LM                 | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_DIRECT                  | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_DIRECT_L                | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_DIRECT_L_NOSCAL         | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_DIRECT_L_RAND           | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_DIRECT_L_RAND_NOSCAL    | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_ESCH                    | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_ISRES                   | no derivative     | bounds required, all        |
    +----------------------------+-------------------+-----------------------------+
    | GN_MLSL                    | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_MLSL_LDS                | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | GN_ORIG_DIRECT             | no derivative     | bounds required, inequality |
    +----------------------------+-------------------+-----------------------------+
    | GN_ORIG_DIRECT_L           | no derivative     | bounds required, inequality |
    +----------------------------+-------------------+-----------------------------+
    | G_MLSL                     | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | G_MLSL_LDS                 | no derivative     | bounds required             |
    +----------------------------+-------------------+-----------------------------+
    | LD_AUGLAG                  | first derivative  | all                         |
    +----------------------------+-------------------+-----------------------------+
    | LD_AUGLAG_EQ               | first derivative  | all                         |
    +----------------------------+-------------------+-----------------------------+
    | LD_CCSAQ                   | first derivative  | bounds, inequality          |
    +----------------------------+-------------------+-----------------------------+
    | LD_LBFGS                   | first derivative  | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LD_MMA                     | first derivative  | bounds, inequality          |
    +----------------------------+-------------------+-----------------------------+
    | LD_SLSQP                   | first derivative  | all                         |
    +----------------------------+-------------------+-----------------------------+
    | LD_TNEWTON                 | first derivative  | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LD_TNEWTON_PRECOND         | first derivative  | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LD_TNEWTON_PRECOND_RESTART | first derivative  | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LD_TNEWTON_RESTART         | first derivative  | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LD_VAR1                    | first derivative  | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LD_VAR2                    | first derivative  | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LN_AUGLAG                  | no derivative     | all                         |
    +----------------------------+-------------------+-----------------------------+
    | LN_AUGLAG_EQ               | no derivative     | all                         |
    +----------------------------+-------------------+-----------------------------+
    | LN_BOBYQA                  | no derivative     | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LN_COBYLA                  | no derivative     | all                         |
    +----------------------------+-------------------+-----------------------------+
    | LN_NELDERMEAD              | no derivative     | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LN_NEWUOA                  | no derivative     | none                        |
    +----------------------------+-------------------+-----------------------------+
    | LN_NEWUOA_BOUND (disabled) | no derivative     | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LN_PRAXIS (disabled)       | no derivative     | bounds                      |
    +----------------------------+-------------------+-----------------------------+
    | LN_SBPLX                   | no derivative     | bounds                      |
    +----------------------------+-------------------+-----------------------------+

    Availability of algorithms marked as disabled may vary depending on the
    NLopt version or compilation options used.

    See also
    --------
    AbdoRackwitz, Cobyla, SQP, TNC

    Examples
    --------
    >>> import openturns as ot
    >>> dim = 4
    >>> bounds = ot.Interval([-3.0] * dim, [5.0] * dim)
    >>> linear = ot.SymbolicFunction(['x1', 'x2', 'x3', 'x4'], ['x1+2*x2-3*x3+4*x4'])
    >>> problem = ot.OptimizationProblem(linear, ot.Function(), ot.Function(), bounds)
    >>> print(ot.NLopt.GetAlgorithmNames())  # doctest: +SKIP
    [AUGLAG,AUGLAG_EQ,GD_MLSL,GD_MLSL_LDS,...
    >>> algo = ot.NLopt(problem, 'LD_MMA')  # doctest: +SKIP
    >>> algo.setStartingPoint([0.0] * 4)  # doctest: +SKIP
    >>> algo.run()  # doctest: +SKIP
    >>> result = algo.getResult()  # doctest: +SKIP
    >>> x_star = result.getOptimalPoint()  # doctest: +SKIP
    >>> y_star = result.getOptimalValue()  # doctest: +SKIP
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.NLopt_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim.NLopt_run(self)

    @staticmethod
    def GetAlgorithmNames():
        r"""
        Accessor to the list of algorithms provided by NLopt, by names.

        Returns
        -------
        names : :class:`~openturns.Description`
            List of algorithm names provided by NLopt, according to its naming
            convention.

        Examples
        --------
        >>> import openturns as ot
        >>> print(ot.NLopt.GetAlgorithmNames())  # doctest: +SKIP
        [AUGLAG,AUGLAG_EQ,GD_MLSL,...
        """
        return _optim.NLopt_GetAlgorithmNames()

    def setAlgorithmName(self, algoName):
        r"""
        Accessor to the algorithm name.

        Parameters
        ----------
        algoName : str
            The NLopt identifier of the algorithm.
        """
        return _optim.NLopt_setAlgorithmName(self, algoName)

    def getAlgorithmName(self):
        r"""
        Accessor to the algorithm name.

        Returns
        -------
        algoName : str
            The NLopt identifier of the algorithm.
        """
        return _optim.NLopt_getAlgorithmName(self)

    def setInitialStep(self, initialStep):
        r"""
        Initial local derivative-free algorithms step accessor.

        Parameters
        ----------
        dx : sequence of float
            The initial step.
        """
        return _optim.NLopt_setInitialStep(self, initialStep)

    def getInitialStep(self):
        r"""
        Initial local derivative-free algorithms step accessor.

        Returns
        -------
        dx : :class:`~openturns.Point`
            The initial step.
        """
        return _optim.NLopt_getInitialStep(self)

    def setLocalSolver(self, localSolver):
        r"""
        Local solver accessor.

        Parameters
        ----------
        solver : :class:`~openturns.NLopt`
            The local solver.
        """
        return _optim.NLopt_setLocalSolver(self, localSolver)

    def getLocalSolver(self):
        r"""
        Local solver accessor.

        Returns
        -------
        solver : :class:`~openturns.NLopt`
            The local solver.
        """
        return _optim.NLopt_getLocalSolver(self)

    def setSeed(self, seed):
        r"""
        Random generator seed accessor.

        Parameters
        ----------
        seed : int
            The RNG seed.

        Notes
        -----
        The default is set by the `NLopt-InitialSeed` ResourceMap entry.
        """
        return _optim.NLopt_setSeed(self, seed)

    def getSeed(self):
        r"""
        Random generator seed accessor.

        Returns
        -------
        seed : int
            Seed.
        """
        return _optim.NLopt_getSeed(self)

    def __repr__(self):
        return _optim.NLopt___repr__(self)

    def __str__(self, *args):
        return _optim.NLopt___str__(self, *args)

    def __init__(self, *args):
        _optim.NLopt_swiginit(self, _optim.new_NLopt(*args))
    __swig_destroy__ = _optim.delete_NLopt

# Register NLopt in _optim:
_optim.NLopt_swigregister(NLopt)
class Pagmo(OptimizationAlgorithmImplementation):
    r"""
    Pagmo algorithms.

    This class exposes bio-inspired and evolutionary global optimization
    algorithms from the `Pagmo <https://esa.github.io/pagmo2/>`_ library.
These algorithms start from an initial population and make it evolve to obtain a final population after a defined number of generations (by :meth:`setMaximumIterationNumber`). A few of these algorithms allow for multi-objective optimization, and in that case the result is not the best point among the final population but a set of dominant points: a pareto front. Parameters ---------- problem : :class:`~openturns.OptimizationProblem` Optimization problem to solve algoName : str, default='gaco' Identifier of the optimization method to use. startingSample : 2-d sequence of float, optional Initial population Notes ----- The total number of evaluations is the size of the initial population multiplied by the iteration number plus one. Starting points provided through the *startingSample* parameter should be within the bounds of the :class:`~openturns.OptimizationProblem`, but this is not enforced. Pagmo provides the following global `heuristics <https://esa.github.io/pagmo2/overview.html#list-of-algorithms>`_: +---------------------+-------------------------------------------------+-----------------+-------+-------+ | Algorithm | Description | Multi-objective | MINLP | Batch | +=====================+=================================================+=================+=======+=======+ | gaco | Extended Ant Colony Optimization | no | yes | yes | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | de | Differential Evolution | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | sade | Self-adaptive DE (jDE and iDE) | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | de1220 | Self-adaptive DE (de_1220 aka pDE) | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | gwo | Grey wolf optimizer | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | ihs | Improved Harmony Search | no | yes | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | pso | Particle Swarm Optimization | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | pso_gen | Particle Swarm Optimization Generational | no | no | yes | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | sea | (N+1)-ES Simple Evolutionary Algorithm | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | sga | Simple Genetic Algorithm | no | yes | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | simulated_annealing | Corana's Simulated Annealing | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | bee_colony | Artificial Bee Colony | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | cmaes | Covariance Matrix Adaptation Evo. 
Strategy | no | no | yes | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | xnes | Exponential Evolution Strategies | no | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | nsga2 | Non-dominated Sorting GA | yes | yes | yes | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | moead | Multi-objective EA with Decomposition | yes | no | no | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | moead_gen | Multi-objective EA with Decomposition Gen. | yes | no | yes | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | mhaco | Multi-objective Hypervolume-based ACO | yes | yes | yes | +---------------------+-------------------------------------------------+-----------------+-------+-------+ | nspso | Non-dominated Sorting PSO | yes | no | yes | +---------------------+-------------------------------------------------+-----------------+-------+-------+ Only gaco and ihs natively support constraints, but for the other algorithms constraints are emulated through penalization. For mhaco, the initial population must satisfy constraints, else it is built by boostrap on valid points with the same population size as the one provided. Some algorithms support batch evaluation, see :meth:`setBlockSize`. Default parameters are available in the :class:`~openturns.ResourceMap` for each algorithm, refer to the correspondings keys in the Pagmo `documentation <https://esa.github.io/pagmo2/>`_. Examples -------- Define an optimization problem to find the minimum of the Rosenbrock function: >>> import openturns as ot >>> dim = 2 >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2']) >>> bounds = ot.Interval([-5.0] * dim, [5.0] * dim) >>> problem = ot.OptimizationProblem(rosenbrock) >>> problem.setBounds(bounds) Sample the initial population inside a box: >>> uniform = ot.JointDistribution([ot.Uniform(-2.0, 2.0)] * dim) >>> ot.RandomGenerator.SetSeed(0) >>> init_pop = uniform.getSample(5) Run GACO on our problem: >>> algo = ot.Pagmo(problem, 'gaco', init_pop) # doctest: +SKIP >>> algo.setMaximumIterationNumber(5) # doctest: +SKIP >>> algo.run() # doctest: +SKIP >>> result = algo.getResult() # doctest: +SKIP >>> x_star = result.getOptimalPoint() # doctest: +SKIP >>> y_star = result.getOptimalValue() # doctest: +SKIP Get the final population: >>> final_pop_x = result.getFinalPoints() # doctest: +SKIP >>> final_pop_y = result.getFinalValues() # doctest: +SKIP Define a multi-objective problem: >>> dim = 2 >>> model = ot.SymbolicFunction(['x', 'y'], ['x^2+y^2*(1-x)^3', '-x^2']) >>> bounds = ot.Interval([-2.0] * dim, [3.0] * dim) >>> problem = ot.OptimizationProblem(model) >>> problem.setBounds(bounds) Sample the initial population inside a box: >>> uniform = ot.JointDistribution([ot.Uniform(-2.0, 3.0)] * dim) >>> ot.RandomGenerator.SetSeed(0) >>> init_pop = uniform.getSample(5) Run NSGA2 on our problem: >>> algo = ot.Pagmo(problem, 'nsga2', init_pop) # doctest: +SKIP >>> algo.setMaximumIterationNumber(5) # doctest: +SKIP >>> algo.run() # doctest: +SKIP >>> result = algo.getResult() # doctest: +SKIP >>> final_pop_x = result.getFinalPoints() # doctest: +SKIP >>> final_pop_y = result.getFinalValues() # doctest: +SKIP Get the best front points and values: >>> front0 = 
result.getParetoFrontsIndices()[0] # doctest: +SKIP >>> front0_x = final_pop_x.select(front0) # doctest: +SKIP >>> front0_y = final_pop_y.select(front0) # doctest: +SKIP """ thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") def getClassName(self): r""" Accessor to the object's name. Returns ------- class_name : str The object class name (`object.__class__.__name__`). """ return _optim.Pagmo_getClassName(self) def __repr__(self): return _optim.Pagmo___repr__(self) def run(self): r"""Launch the optimization.""" return _optim.Pagmo_run(self) @staticmethod def GetAlgorithmNames(): r""" Accessor to the list of algorithm names provided. Returns ------- names : :class:`~openturns.Description` List of algorithm names provided, according to its naming convention. """ return _optim.Pagmo_GetAlgorithmNames() def setAlgorithmName(self, algoName): r""" Accessor to the algorithm name. Parameters ---------- algoName : str The identifier of the algorithm. """ return _optim.Pagmo_setAlgorithmName(self, algoName) def getAlgorithmName(self): r""" Accessor to the algorithm name. Returns ------- algoName : str The identifier of the algorithm. """ return _optim.Pagmo_getAlgorithmName(self) def setSeed(self, seed): r""" Random generator seed accessor. Parameters ---------- seed : int Seed. Notes ----- The default is set by the `Pagmo-InitialSeed` ResourceMap entry. """ return _optim.Pagmo_setSeed(self, seed) def getSeed(self): r""" Random generator seed accessor. Returns ------- seed : int Seed. """ return _optim.Pagmo_getSeed(self) def setBlockSize(self, blockSize): r""" Block size accessor. Parameters ---------- blockSize : int Batch evaluation granularity. """ return _optim.Pagmo_setBlockSize(self, blockSize) def getBlockSize(self): r""" Block size accessor. Returns ------- blockSize : int Batch evaluation granularity. """ return _optim.Pagmo_getBlockSize(self) def setStartingPoint(self, point): r""" Accessor to starting point. Parameters ---------- startingPoint : :class:`~openturns.Point` Starting point. """ return _optim.Pagmo_setStartingPoint(self, point) def getStartingPoint(self): r""" Accessor to starting point. Returns ------- startingPoint : :class:`~openturns.Point` Starting point. """ return _optim.Pagmo_getStartingPoint(self) def setStartingSample(self, startingSample): r""" Accessor to the sample of starting points. Parameters ---------- startingSample : 2-d sequence of float The initial population. """ return _optim.Pagmo_setStartingSample(self, startingSample) def getStartingSample(self): r""" Accessor to the sample of starting points. Returns ------- startingSample : :class:`~openturns.Sample` The initial population. """ return _optim.Pagmo_getStartingSample(self) def __init__(self, *args): _optim.Pagmo_swiginit(self, _optim.new_Pagmo(*args)) __swig_destroy__ = _optim.delete_Pagmo # Register Pagmo in _optim: _optim.Pagmo_swigregister(Pagmo) class LevelSet(openturns.typ.DomainImplementation): r""" Level set. Parameters ---------- function : :class:`~openturns.Function` A function such that: :math:`f: \Rset^{dim} \mapsto \Rset` defining the LevelSet. operator : :class:`~openturns.ComparisonOperator`, optional Comparison operator against the level. The default value is :class:`~openturns.LessOrEqual`. level : float, optional Level :math:`s` defining the LevelSet. The default value is 0.0. Notes ----- A LevelSet is a :class:`~openturns.Domain` defined as follows: .. 
        """
        return _optim.Pagmo_setBlockSize(self, blockSize)

    def getBlockSize(self):
        r"""
        Block size accessor.

        Returns
        -------
        blockSize : int
            Batch evaluation granularity.
        """
        return _optim.Pagmo_getBlockSize(self)

    def setStartingPoint(self, point):
        r"""
        Accessor to starting point.

        Parameters
        ----------
        startingPoint : :class:`~openturns.Point`
            Starting point.
        """
        return _optim.Pagmo_setStartingPoint(self, point)

    def getStartingPoint(self):
        r"""
        Accessor to starting point.

        Returns
        -------
        startingPoint : :class:`~openturns.Point`
            Starting point.
        """
        return _optim.Pagmo_getStartingPoint(self)

    def setStartingSample(self, startingSample):
        r"""
        Accessor to the sample of starting points.

        Parameters
        ----------
        startingSample : 2-d sequence of float
            The initial population.
        """
        return _optim.Pagmo_setStartingSample(self, startingSample)

    def getStartingSample(self):
        r"""
        Accessor to the sample of starting points.

        Returns
        -------
        startingSample : :class:`~openturns.Sample`
            The initial population.
        """
        return _optim.Pagmo_getStartingSample(self)

    def __init__(self, *args):
        _optim.Pagmo_swiginit(self, _optim.new_Pagmo(*args))
    __swig_destroy__ = _optim.delete_Pagmo

# Register Pagmo in _optim:
_optim.Pagmo_swigregister(Pagmo)

class LevelSet(openturns.typ.DomainImplementation):
    r"""
    Level set.

    Parameters
    ----------
    function : :class:`~openturns.Function`
        A function such that: :math:`f: \Rset^{dim} \mapsto \Rset` defining the LevelSet.
    operator : :class:`~openturns.ComparisonOperator`, optional
        Comparison operator against the level.
        The default value is :class:`~openturns.LessOrEqual`.
    level : float, optional
        Level :math:`s` defining the LevelSet.
        The default value is 0.0.

    Notes
    -----
    A LevelSet is a :class:`~openturns.Domain` defined as follows:

    .. math::

        \{ \vect{x} \in \Rset^{dim} \, | \, f(\vect{x}) \leq s \}

    Examples
    --------
    >>> import openturns as ot
    >>> function = ot.SymbolicFunction(['x1', 'x2'], ['x1^4 + x2^4'])
    >>> s = 1.0
    >>> op = ot.LessOrEqual()
    >>> levelSet = ot.LevelSet(function, op, s)
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.LevelSet_getClassName(self)

    def contains(self, *args):
        r"""
        Check if the given point is inside of the domain.

        Parameters
        ----------
        point or sample : sequence of float or 2-d sequence of float
            Point or Sample with the same dimension as the current domain's dimension.

        Returns
        -------
        isInside : bool or sequence of bool
            Flag telling whether the given point is inside of the domain.
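
        Examples
        --------
        For instance (the function and the test points are arbitrary):

        >>> import openturns as ot
        >>> function = ot.SymbolicFunction(['x1', 'x2'], ['x1^4 + x2^4'])
        >>> levelSet = ot.LevelSet(function, ot.LessOrEqual(), 1.0)
        >>> print([0.5, 0.5] in levelSet)
        True
        >>> print([2.0, 2.0] in levelSet)
        False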
        """
        return _optim.LevelSet_contains(self, *args)

    def intersect(self, other):
        r"""
        Return the intersection with another LevelSet.

        Parameters
        ----------
        other : :class:`~openturns.LevelSet`
            A LevelSet defined by :math:`(f_2, s_2)`.

        Returns
        -------
        levelSet : :class:`~openturns.LevelSet`
            The intersection between this LevelSet and the LevelSet *other*,
            i.e. the LevelSet defined as:
            :math:`\{\vect{x} \in \Rset^{dim} | f(\vect{x}) \leq s \, \mbox{and} \, f_2(\vect{x}) \leq s_2\}`.

        Examples
        --------
        >>> import openturns as ot
        >>> # First level set
        >>> function = ot.SymbolicFunction(['x'], ['3*x-1'])
        >>> levelSet1 = ot.LevelSet(function, ot.LessOrEqual(), 0.5)
        >>> # Second level set
        >>> function = ot.SymbolicFunction(['x'], ['x'])
        >>> levelSet2 = ot.LevelSet(function, ot.LessOrEqual(), 0.5)
        >>> # Intersection between levelSet1 and levelSet2
        >>> intersection = levelSet1.intersect(levelSet2)
        >>> # Tests
        >>> print([1.0] in intersection)
        False
        >>> print([0.25] in intersection)
        True
        """
        return _optim.LevelSet_intersect(self, other)

    def join(self, other):
        r"""
        Return the union with another LevelSet.

        Parameters
        ----------
        other : :class:`~openturns.LevelSet`
            A LevelSet defined by :math:`(f_2, s_2)`.

        Returns
        -------
        levelSet : :class:`~openturns.LevelSet`
            The union between this LevelSet and the LevelSet *other*,
            i.e. the LevelSet defined as:
            :math:`\{\vect{x} \in \Rset^{dim} | f(\vect{x}) \leq s \, \mbox{or} \, f_2(\vect{x}) \leq s_2\}`.

        Examples
        --------
        >>> import openturns as ot
        >>> # First level set
        >>> function = ot.SymbolicFunction(['x'], ['3*x-1'])
        >>> levelSet1 = ot.LevelSet(function, ot.LessOrEqual(), 0.0)
        >>> # Second level set
        >>> function = ot.SymbolicFunction(['x'], ['x'])
        >>> levelSet2 = ot.LevelSet(function, ot.LessOrEqual(), 0.0)
        >>> # Union between levelSet1 and levelSet2
        >>> union = levelSet1.join(levelSet2)
        >>> # Tests
        >>> print([0.5] in union)
        False
        >>> print([0.25] in union)
        True
        """
        return _optim.LevelSet_join(self, other)

    def __eq__(self, *args):
        return _optim.LevelSet___eq__(self, *args)

    def getFunction(self):
        r"""
        Get the function defining the level set.

        Returns
        -------
        function : :class:`~openturns.Function`
            A function such that: :math:`f: \Rset^{dim} \mapsto \Rset` defining the LevelSet.

        Examples
        --------
        >>> import openturns as ot
        >>> function = ot.SymbolicFunction(['x'], ['3*x-1'])
        >>> levelSet = ot.LevelSet(function, ot.LessOrEqual(), 0.0)
        >>> print(levelSet.getFunction().getEvaluation())
        [x]->[3*x-1]
        """
        return _optim.LevelSet_getFunction(self)

    def setFunction(self, function):
        r"""
        Set the function defining the level set.

        Parameters
        ----------
        function : :class:`~openturns.Function`
            A function such that: :math:`f: \Rset^{dim} \mapsto \Rset` defining the LevelSet.

        Examples
        --------
        >>> import openturns as ot
        >>> levelSet = ot.LevelSet()
        >>> function = ot.SymbolicFunction(['x'], ['3*x-1'])
        >>> levelSet.setFunction(function)
        """
        return _optim.LevelSet_setFunction(self, function)

    def getOperator(self):
        r"""
        Operator accessor.

        Returns
        -------
        op : :class:`~openturns.ComparisonOperator`
            Comparison operator against the level.
        """
        return _optim.LevelSet_getOperator(self)

    def setOperator(self, op):
        r"""
        Operator accessor.

        Parameters
        ----------
        op : :class:`~openturns.ComparisonOperator`
            Comparison operator against the level.
        """
        return _optim.LevelSet_setOperator(self, op)

    def getLevel(self):
        r"""
        Get the level defining the level set.

        Returns
        -------
        level : float
            Level :math:`s` defining the LevelSet.

        Examples
        --------
        >>> import openturns as ot
        >>> function = ot.SymbolicFunction(['x'], ['3*x-1'])
        >>> levelSet = ot.LevelSet(function, ot.LessOrEqual(), 0.0)
        >>> print(levelSet.getLevel())
        0.0
        """
        return _optim.LevelSet_getLevel(self)

    def setLevel(self, level):
        r"""
        Set the level defining the level set.

        Parameters
        ----------
        level : float
            Level :math:`s` defining the LevelSet.

        Examples
        --------
        >>> import openturns as ot
        >>> levelSet = ot.LevelSet()
        >>> levelSet.setLevel(3.0)
        """
        return _optim.LevelSet_setLevel(self, level)

    def setLowerBound(self, bound):
        r"""
        Set the lower bound of the bounding box.

        Parameters
        ----------
        bound : sequence of float
            Lower bound of the bounding box of the level set.
            It allows one to clip the level set.
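
        Examples
        --------
        A small sketch of clipping (the function and the bound value are
        arbitrary): the level set of :math:`\sin(x) \leq 0` is unbounded, so a
        lower bound can be used to clip it.

        >>> import openturns as ot
        >>> function = ot.SymbolicFunction(['x'], ['sin(x)'])
        >>> levelSet = ot.LevelSet(function, ot.LessOrEqual(), 0.0)
        >>> # clip the level set to [-1, +inf) along x
        >>> levelSet.setLowerBound([-1.0])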
        """
        return _optim.LevelSet_setLowerBound(self, bound)

    def getLowerBound(self):
        r"""
        Get the lower bound of the bounding box.

        Returns
        -------
        bound : :class:`~openturns.Point`
            Lower bound of the bounding box of the level set.
            It allows one to clip the level set.
        """
        return _optim.LevelSet_getLowerBound(self)

    def setUpperBound(self, bound):
        r"""
        Set the upper bound of the bounding box.

        Parameters
        ----------
        bound : sequence of float
            Upper bound of the bounding box of the level set.
            It allows one to clip the level set.
        """
        return _optim.LevelSet_setUpperBound(self, bound)

    def getUpperBound(self):
        r"""
        Get the upper bound of the bounding box.

        Returns
        -------
        bound : :class:`~openturns.Point`
            Upper bound of the bounding box of the level set.
            It allows one to clip the level set.
        """
        return _optim.LevelSet_getUpperBound(self)

    def __repr__(self):
        return _optim.LevelSet___repr__(self)

    def __str__(self, *args):
        return _optim.LevelSet___str__(self, *args)

    def __init__(self, *args):
        _optim.LevelSet_swiginit(self, _optim.new_LevelSet(*args))
    __swig_destroy__ = _optim.delete_LevelSet

# Register LevelSet in _optim:
_optim.LevelSet_swigregister(LevelSet)

class LevelSetMesher(openturns.common.PersistentObject):
    r"""
    Creation of mesh from a level set.

    Parameters
    ----------
    discretization : sequence of int
        Discretization of the level set bounding box.
    solver : :class:`~openturns.OptimizationAlgorithm`, optional
        Optimization solver used to project the vertices onto the level set.
        It must be able to solve nearest point problems.
        Default is :class:`~openturns.AbdoRackwitz`.

    Notes
    -----
    The meshing algorithm is based on the :class:`~openturns.IntervalMesher`
    class. First, the bounding box of the level set (provided by the user or
    automatically computed) is meshed. Then, all the simplices with all
    vertices outside of the level set are rejected, while the simplices with
    all vertices inside of the level set are kept. The remaining simplices are
    adapted in the following way:

    * The mean point of the vertices inside of the level set is computed.
    * Each vertex outside of the level set is projected onto the level set
      using a linear interpolation.
    * If the *project* flag is *True*, then the projection is refined using
      an optimization solver.

    Examples
    --------
    Create a mesh:

    >>> import openturns as ot
    >>> mesher = ot.LevelSetMesher([5, 10])
    >>> level = 1.0
    >>> function = ot.SymbolicFunction(['x0', 'x1'], ['x0^2+x1^2'])
    >>> levelSet = ot.LevelSet(function, ot.LessOrEqual(), level)
    >>> mesh = mesher.build(levelSet, ot.Interval([-2.0]*2, [2.0]*2))
    """

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim.LevelSetMesher_getClassName(self)

    def __init__(self, *args):
        _optim.LevelSetMesher_swiginit(self, _optim.new_LevelSetMesher(*args))

    def setDiscretization(self, discretization):
        r"""
        Accessor to the discretization.

        Parameters
        ----------
        discretization : sequence of int
            Discretization of the bounding box of the level sets.
        """
        return _optim.LevelSetMesher_setDiscretization(self, discretization)

    def getDiscretization(self):
        r"""
        Accessor to the discretization.

        Returns
        -------
        discretization : :class:`~openturns.Indices`
            Discretization of the bounding box of the level sets.
        """
        return _optim.LevelSetMesher_getDiscretization(self)

    def __repr__(self):
        return _optim.LevelSetMesher___repr__(self)

    def __str__(self, *args):
        return _optim.LevelSetMesher___str__(self, *args)

    def setOptimizationAlgorithm(self, solver):
        r"""
        Accessor to the optimization solver.

        Parameters
        ----------
        solver : :class:`~openturns.OptimizationAlgorithm`
            The optimization solver used to project vertices onto the level set.
        """
        return _optim.LevelSetMesher_setOptimizationAlgorithm(self, solver)

    def getOptimizationAlgorithm(self):
        r"""
        Accessor to the optimization solver.

        Returns
        -------
        solver : :class:`~openturns.OptimizationAlgorithm`
            The optimization solver used to project vertices onto the level set.
        """
        return _optim.LevelSetMesher_getOptimizationAlgorithm(self)

    def build(self, *args):
        r"""
        Build the mesh of level set type.

        Parameters
        ----------
        levelSet : :class:`~openturns.LevelSet`
            The level set to be meshed, of dimension equal to the dimension
            of `discretization`.
        boundingBox : :class:`~openturns.Interval`
            The bounding box used to mesh the level set.
        project : bool
            Flag to tell if the vertices outside of the level set of a simplex
            partially included into the level set have to be projected onto
            the level set. Default is *True*.

        Returns
        -------
        mesh : :class:`~openturns.Mesh`
            The mesh built.
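
        Examples
        --------
        A small sketch (the discretization, level set and bounding box are
        arbitrary); here the optimization-based projection is disabled:

        >>> import openturns as ot
        >>> mesher = ot.LevelSetMesher([8, 8])
        >>> function = ot.SymbolicFunction(['x0', 'x1'], ['x0^2+x1^2'])
        >>> levelSet = ot.LevelSet(function, ot.LessOrEqual(), 1.0)
        >>> mesh = mesher.build(levelSet, ot.Interval([-2.0] * 2, [2.0] * 2), False)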
""" return _optim.LevelSetMesher_build(self, *args) __swig_destroy__ = _optim.delete_LevelSetMesher # Register LevelSetMesher in _optim: _optim.LevelSetMesher_swigregister(LevelSetMesher) class _SolverImplementationPointer(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr ptr_ = property(_optim._SolverImplementationPointer_ptr__get, _optim._SolverImplementationPointer_ptr__set) def __init__(self, *args): _optim._SolverImplementationPointer_swiginit(self, _optim.new__SolverImplementationPointer(*args)) __swig_destroy__ = _optim.delete__SolverImplementationPointer def reset(self): return _optim._SolverImplementationPointer_reset(self) def __ref__(self, *args): return _optim._SolverImplementationPointer___ref__(self, *args) def __deref__(self, *args): return _optim._SolverImplementationPointer___deref__(self, *args) def isNull(self): return _optim._SolverImplementationPointer_isNull(self) def __nonzero__(self): return _optim._SolverImplementationPointer___nonzero__(self) __bool__ = __nonzero__ def get(self): return _optim._SolverImplementationPointer_get(self) def getImplementation(self): return _optim._SolverImplementationPointer_getImplementation(self) def unique(self): return _optim._SolverImplementationPointer_unique(self) def use_count(self): return _optim._SolverImplementationPointer_use_count(self) def swap(self, other): return _optim._SolverImplementationPointer_swap(self, other) # Register _SolverImplementationPointer in _optim: _optim._SolverImplementationPointer_swigregister(_SolverImplementationPointer) class _OptimizationProblemImplementationPointer(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") ptr_ = property(_optim._OptimizationProblemImplementationPointer_ptr__get, _optim._OptimizationProblemImplementationPointer_ptr__set) def __init__(self, *args): _optim._OptimizationProblemImplementationPointer_swiginit(self, _optim.new__OptimizationProblemImplementationPointer(*args)) __swig_destroy__ = _optim.delete__OptimizationProblemImplementationPointer def reset(self): return _optim._OptimizationProblemImplementationPointer_reset(self) def __ref__(self, *args): return _optim._OptimizationProblemImplementationPointer___ref__(self, *args) def __deref__(self, *args): return _optim._OptimizationProblemImplementationPointer___deref__(self, *args) def isNull(self): return _optim._OptimizationProblemImplementationPointer_isNull(self) def __nonzero__(self): return _optim._OptimizationProblemImplementationPointer___nonzero__(self) __bool__ = __nonzero__ def get(self): return _optim._OptimizationProblemImplementationPointer_get(self) def getImplementation(self): return _optim._OptimizationProblemImplementationPointer_getImplementation(self) def unique(self): return _optim._OptimizationProblemImplementationPointer_unique(self) def use_count(self): return _optim._OptimizationProblemImplementationPointer_use_count(self) def swap(self, other): return _optim._OptimizationProblemImplementationPointer_swap(self, other) def getClassName(self): r""" Accessor to the object's name. Returns ------- class_name : str The object class name (`object.__class__.__name__`). """ return _optim._OptimizationProblemImplementationPointer_getClassName(self) def getObjective(self): r""" Accessor to objective function. Returns ------- objective : :class:`~openturns.Function` Objective function. 
""" return _optim._OptimizationProblemImplementationPointer_getObjective(self) def setObjective(self, objective): r""" Accessor to objective function. Parameters ---------- objectiveFunction : :class:`~openturns.Function` Objective function. Notes ----- Constraints and bounds are cleared if the objective has a different input dimension in order to keep the problem valid at all time. """ return _optim._OptimizationProblemImplementationPointer_setObjective(self, objective) def hasMultipleObjective(self): r""" Test whether objective function is a scalar or vector function. Returns ------- value : bool *False* if objective function is scalar, *True* otherwise. """ return _optim._OptimizationProblemImplementationPointer_hasMultipleObjective(self) def getEqualityConstraint(self): r""" Accessor to equality constraints. Returns ------- equality : :class:`~openturns.Function` Describe equality constraints. """ return _optim._OptimizationProblemImplementationPointer_getEqualityConstraint(self) def setEqualityConstraint(self, equalityConstraint): r""" Accessor to equality constraints. Parameters ---------- equalityConstraint : :class:`~openturns.Function` Equality constraints. """ return _optim._OptimizationProblemImplementationPointer_setEqualityConstraint(self, equalityConstraint) def hasEqualityConstraint(self): r""" Test whether equality constraints had been specified. Returns ------- value : bool *True* if equality constraints had been set for this problem, *False* otherwise. """ return _optim._OptimizationProblemImplementationPointer_hasEqualityConstraint(self) def getInequalityConstraint(self): r""" Accessor to inequality constraints. Returns ------- inequality : :class:`~openturns.Function` Describe inequality constraints. """ return _optim._OptimizationProblemImplementationPointer_getInequalityConstraint(self) def setInequalityConstraint(self, inequalityConstraint): r""" Accessor to inequality constraints. Parameters ---------- inequalityConstraint : :class:`~openturns.Function` Inequality constraints. """ return _optim._OptimizationProblemImplementationPointer_setInequalityConstraint(self, inequalityConstraint) def hasInequalityConstraint(self): r""" Test whether inequality constraints had been specified. Returns ------- value : bool *True* if inequality constraints had been set for this problem, *False* otherwise. """ return _optim._OptimizationProblemImplementationPointer_hasInequalityConstraint(self) def getBounds(self): r""" Accessor to bounds. Returns ------- bounds : :class:`~openturns.Interval` Problem bounds. """ return _optim._OptimizationProblemImplementationPointer_getBounds(self) def setBounds(self, bounds): r""" Accessor to bounds. Parameters ---------- bounds : :class:`~openturns.Interval` Problem bounds. """ return _optim._OptimizationProblemImplementationPointer_setBounds(self, bounds) def hasBounds(self): r""" Test whether bounds had been specified. Returns ------- value : bool *True* if bounds had been set for this problem, *False* otherwise. """ return _optim._OptimizationProblemImplementationPointer_hasBounds(self) def getLevelFunction(self): r""" Accessor to level function. Returns ------- level : :class:`~openturns.Function` Level function. """ return _optim._OptimizationProblemImplementationPointer_getLevelFunction(self) def setLevelFunction(self, levelFunction): r""" Accessor to level function. Parameters ---------- levelFunction : :class:`~openturns.Function` Level function. 
""" return _optim._OptimizationProblemImplementationPointer_setLevelFunction(self, levelFunction) def hasLevelFunction(self): r""" Test whether level function had been specified. Returns ------- value : bool *True* if level function had been set for this problem, *False* otherwise. """ return _optim._OptimizationProblemImplementationPointer_hasLevelFunction(self) def getLevelValue(self): r""" Accessor to level value. Returns ------- value : float Level value. """ return _optim._OptimizationProblemImplementationPointer_getLevelValue(self) def setLevelValue(self, levelValue): r""" Accessor to level value. Parameters ---------- levelValue : float Level value. """ return _optim._OptimizationProblemImplementationPointer_setLevelValue(self, levelValue) def getResidualFunction(self): r""" Accessor to level function. Returns ------- level : :class:`~openturns.Function` Level function. """ return _optim._OptimizationProblemImplementationPointer_getResidualFunction(self) def setResidualFunction(self, residualFunction): r""" Accessor to level function. Parameters ---------- levelFunction : :class:`~openturns.Function` Level function. """ return _optim._OptimizationProblemImplementationPointer_setResidualFunction(self, residualFunction) def hasResidualFunction(self): r""" Test whether a least-square problem is defined. Returns ------- value : bool *True* if this is a least-squares problem, *False* otherwise. """ return _optim._OptimizationProblemImplementationPointer_hasResidualFunction(self) def getDimension(self): r""" Accessor to input dimension. Returns ------- dimension : int Input dimension of objective function. """ return _optim._OptimizationProblemImplementationPointer_getDimension(self) def setMinimization(self, minimization, marginalIndex=0): r""" Tell whether this is a minimization or maximization problem. Parameters ---------- minimization : bool *True* if this is a minimization problem, *False* otherwise. marginal_index : int, default=0 Index of the output marginal (for multi-objective only) """ return _optim._OptimizationProblemImplementationPointer_setMinimization(self, minimization, marginalIndex) def isMinimization(self, marginalIndex=0): r""" Test whether this is a minimization or maximization problem. Parameters ---------- marginal_index : int, default=0 Index of the output marginal (for multi-objective only) Returns ------- value : bool *True* if this is a minimization problem (default), *False* otherwise. """ return _optim._OptimizationProblemImplementationPointer_isMinimization(self, marginalIndex) def setVariablesType(self, variableType): r""" Accessor to the variables type. Parameters ---------- variablesType : :class:`~openturns.Indices` Types of the variables. Notes ----- Possible values for each variable are `ot.OptimizationProblemImplementation.CONTINUOUS`, `ot.OptimizationProblemImplementation.INTEGER` and `ot.OptimizationProblemImplementation.BINARY`. """ return _optim._OptimizationProblemImplementationPointer_setVariablesType(self, variableType) def getVariablesType(self): r""" Accessor to the variables type. Returns ------- variablesType : :class:`~openturns.Indices` Types of the variables. Notes ----- Possible values for each variable are `ot.OptimizationProblemImplementation.CONTINUOUS`, `ot.OptimizationProblemImplementation.INTEGER` and ot.OptimizationProblemImplementation.`BINARY`. """ return _optim._OptimizationProblemImplementationPointer_getVariablesType(self) def isContinuous(self): r""" Check if the problem is continuous. 
        """
        return _optim._OptimizationProblemImplementationPointer_setVariablesType(self, variableType)

    def getVariablesType(self):
        r"""
        Accessor to the variables type.

        Returns
        -------
        variablesType : :class:`~openturns.Indices`
            Types of the variables.

        Notes
        -----
        Possible values for each variable are
        `ot.OptimizationProblemImplementation.CONTINUOUS`,
        `ot.OptimizationProblemImplementation.INTEGER` and
        `ot.OptimizationProblemImplementation.BINARY`.
        """
        return _optim._OptimizationProblemImplementationPointer_getVariablesType(self)

    def isContinuous(self):
        r"""
        Check if the problem is continuous.

        Returns
        -------
        isContinuous : bool
            *True* if all variables are continuous.
        """
        return _optim._OptimizationProblemImplementationPointer_isContinuous(self)

    def __repr__(self):
        return _optim._OptimizationProblemImplementationPointer___repr__(self)

    def __eq__(self, other):
        return _optim._OptimizationProblemImplementationPointer___eq__(self, other)

    def __ne__(self, other):
        return _optim._OptimizationProblemImplementationPointer___ne__(self, other)

    def __str__(self, *args):
        return _optim._OptimizationProblemImplementationPointer___str__(self, *args)

    def __repr_markdown__(self):
        return _optim._OptimizationProblemImplementationPointer___repr_markdown__(self)

    def _repr_html_(self):
        return _optim._OptimizationProblemImplementationPointer__repr_html_(self)

    def hasName(self):
        r"""
        Test if the object is named.

        Returns
        -------
        hasName : bool
            True if the name is not empty.
        """
        return _optim._OptimizationProblemImplementationPointer_hasName(self)

    def getName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        name : str
            The name of the object.
        """
        return _optim._OptimizationProblemImplementationPointer_getName(self)

    def setName(self, name):
        r"""
        Accessor to the object's name.

        Parameters
        ----------
        name : str
            The name of the object.
        """
        return _optim._OptimizationProblemImplementationPointer_setName(self, name)

# Register _OptimizationProblemImplementationPointer in _optim:
_optim._OptimizationProblemImplementationPointer_swigregister(_OptimizationProblemImplementationPointer)

class _OptimizationAlgorithmImplementationPointer(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    ptr_ = property(_optim._OptimizationAlgorithmImplementationPointer_ptr__get, _optim._OptimizationAlgorithmImplementationPointer_ptr__set)

    def __init__(self, *args):
        _optim._OptimizationAlgorithmImplementationPointer_swiginit(self, _optim.new__OptimizationAlgorithmImplementationPointer(*args))
    __swig_destroy__ = _optim.delete__OptimizationAlgorithmImplementationPointer

    def reset(self):
        return _optim._OptimizationAlgorithmImplementationPointer_reset(self)

    def __ref__(self, *args):
        return _optim._OptimizationAlgorithmImplementationPointer___ref__(self, *args)

    def __deref__(self, *args):
        return _optim._OptimizationAlgorithmImplementationPointer___deref__(self, *args)

    def isNull(self):
        return _optim._OptimizationAlgorithmImplementationPointer_isNull(self)

    def __nonzero__(self):
        return _optim._OptimizationAlgorithmImplementationPointer___nonzero__(self)
    __bool__ = __nonzero__

    def get(self):
        return _optim._OptimizationAlgorithmImplementationPointer_get(self)

    def getImplementation(self):
        return _optim._OptimizationAlgorithmImplementationPointer_getImplementation(self)

    def unique(self):
        return _optim._OptimizationAlgorithmImplementationPointer_unique(self)

    def use_count(self):
        return _optim._OptimizationAlgorithmImplementationPointer_use_count(self)

    def swap(self, other):
        return _optim._OptimizationAlgorithmImplementationPointer_swap(self, other)

    def getClassName(self):
        r"""
        Accessor to the object's name.

        Returns
        -------
        class_name : str
            The object class name (`object.__class__.__name__`).
        """
        return _optim._OptimizationAlgorithmImplementationPointer_getClassName(self)

    def run(self):
        r"""Launch the optimization."""
        return _optim._OptimizationAlgorithmImplementationPointer_run(self)

    def getStartingPoint(self):
        r"""
        Accessor to starting point.

        Returns
        -------
        startingPoint : :class:`~openturns.Point`
            Starting point.
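
        Examples
        --------
        For instance (the problem and the starting point are arbitrary):

        >>> import openturns as ot
        >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2'])
        >>> solver = ot.OptimizationAlgorithm(ot.OptimizationProblem(rosenbrock))
        >>> solver.setStartingPoint([0.1, 0.2])
        >>> start = solver.getStartingPoint()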
""" return _optim._OptimizationAlgorithmImplementationPointer_getStartingPoint(self) def setStartingPoint(self, startingPoint): r""" Accessor to starting point. Parameters ---------- startingPoint : :class:`~openturns.Point` Starting point. """ return _optim._OptimizationAlgorithmImplementationPointer_setStartingPoint(self, startingPoint) def getProblem(self): r""" Accessor to optimization problem. Returns ------- problem : :class:`~openturns.OptimizationProblem` Optimization problem. """ return _optim._OptimizationAlgorithmImplementationPointer_getProblem(self) def setProblem(self, problem): r""" Accessor to optimization problem. Parameters ---------- problem : :class:`~openturns.OptimizationProblem` Optimization problem. """ return _optim._OptimizationAlgorithmImplementationPointer_setProblem(self, problem) def getResult(self): r""" Accessor to optimization result. Returns ------- result : :class:`~openturns.OptimizationResult` Result class. """ return _optim._OptimizationAlgorithmImplementationPointer_getResult(self) def setResult(self, result): r""" Accessor to optimization result. Parameters ---------- result : :class:`~openturns.OptimizationResult` Result class. """ return _optim._OptimizationAlgorithmImplementationPointer_setResult(self, result) def setMaximumIterationNumber(self, maximumIterationNumber): r""" Accessor to maximum allowed number of iterations. Parameters ---------- maximumIterationNumber : int Maximum allowed number of iterations. """ return _optim._OptimizationAlgorithmImplementationPointer_setMaximumIterationNumber(self, maximumIterationNumber) def getMaximumIterationNumber(self): r""" Accessor to maximum allowed number of iterations. Returns ------- maximumIterationNumber : int Maximum allowed number of iterations. """ return _optim._OptimizationAlgorithmImplementationPointer_getMaximumIterationNumber(self) def setMaximumCallsNumber(self, maximumCallsNumber): r""" Accessor to maximum allowed number of calls Parameters ---------- maximumEvaluationNumber : int Maximum allowed number of direct objective function calls through the `()` operator. Does not take into account eventual indirect calls through finite difference gradient calls. """ return _optim._OptimizationAlgorithmImplementationPointer_setMaximumCallsNumber(self, maximumCallsNumber) def getMaximumCallsNumber(self): r""" Accessor to maximum allowed number of calls. Returns ------- maximumEvaluationNumber : int Maximum allowed number of direct objective function calls through the `()` operator. Does not take into account eventual indirect calls through finite difference gradient calls. """ return _optim._OptimizationAlgorithmImplementationPointer_getMaximumCallsNumber(self) def getMaximumAbsoluteError(self): r""" Accessor to maximum allowed absolute error. Returns ------- maximumAbsoluteError : float Maximum allowed absolute error, where the absolute error is defined by :math:`\epsilon^a_n=\|\vect{x}_{n+1}-\vect{x}_n\|_{\infty}` where :math:`\vect{x}_{n+1}` and :math:`\vect{x}_n` are two consecutive approximations of the optimum. """ return _optim._OptimizationAlgorithmImplementationPointer_getMaximumAbsoluteError(self) def setMaximumAbsoluteError(self, maximumAbsoluteError): r""" Accessor to maximum allowed absolute error. Parameters ---------- maximumAbsoluteError : float Maximum allowed absolute error, where the absolute error is defined by :math:`\epsilon^a_n=\|\vect{x}_{n+1}-\vect{x}_n\|_{\infty}` where :math:`\vect{x}_{n+1}` and :math:`\vect{x}_n` are two consecutive approximations of the optimum. 
""" return _optim._OptimizationAlgorithmImplementationPointer_setMaximumAbsoluteError(self, maximumAbsoluteError) def getMaximumRelativeError(self): r""" Accessor to maximum allowed relative error. Returns ------- maximumRelativeError : float Maximum allowed relative error, where the relative error is defined by :math:`\epsilon^r_n=\epsilon^a_n/\|\vect{x}_{n+1}\|_{\infty}` if :math:`\|\vect{x}_{n+1}\|_{\infty}\neq 0`, else :math:`\epsilon^r_n=-1`. """ return _optim._OptimizationAlgorithmImplementationPointer_getMaximumRelativeError(self) def setMaximumRelativeError(self, maximumRelativeError): r""" Accessor to maximum allowed relative error. Parameters ---------- maximumRelativeError : float Maximum allowed relative error, where the relative error is defined by :math:`\epsilon^r_n=\epsilon^a_n/\|\vect{x}_{n+1}\|_{\infty}` if :math:`\|\vect{x}_{n+1}\|_{\infty}\neq 0`, else :math:`\epsilon^r_n=-1`. """ return _optim._OptimizationAlgorithmImplementationPointer_setMaximumRelativeError(self, maximumRelativeError) def getMaximumResidualError(self): r""" Accessor to maximum allowed residual error. Returns ------- maximumResidualError : float Maximum allowed residual error, where the residual error is defined by :math:`\epsilon^r_n=\frac{\|f(\vect{x}_{n+1})-f(\vect{x}_{n})\|}{\|f(\vect{x}_{n+1})\|}` if :math:`\|f(\vect{x}_{n+1})\|\neq 0`, else :math:`\epsilon^r_n=-1`. """ return _optim._OptimizationAlgorithmImplementationPointer_getMaximumResidualError(self) def setMaximumResidualError(self, maximumResidualError): r""" Accessor to maximum allowed residual error. Parameters ---------- maximumResidualError : float Maximum allowed residual error, where the residual error is defined by :math:`\epsilon^r_n=\frac{\|f(\vect{x}_{n+1})-f(\vect{x}_{n})\|}{\|f(\vect{x}_{n+1})\|}` if :math:`\|f(\vect{x}_{n+1})\|\neq 0`, else :math:`\epsilon^r_n=-1`. """ return _optim._OptimizationAlgorithmImplementationPointer_setMaximumResidualError(self, maximumResidualError) def getMaximumConstraintError(self): r""" Accessor to maximum allowed constraint error. Returns ------- maximumConstraintError : float Maximum allowed constraint error, where the constraint error is defined by :math:`\gamma_n=\|g(\vect{x}_n)\|_{\infty}` where :math:`\vect{x}_n` is the current approximation of the optimum and :math:`g` is the function that gathers all the equality and inequality constraints (violated values only) """ return _optim._OptimizationAlgorithmImplementationPointer_getMaximumConstraintError(self) def setMaximumConstraintError(self, maximumConstraintError): r""" Accessor to maximum allowed constraint error. Parameters ---------- maximumConstraintError : float Maximum allowed constraint error, where the constraint error is defined by :math:`\gamma_n=\|g(\vect{x}_n)\|_{\infty}` where :math:`\vect{x}_n` is the current approximation of the optimum and :math:`g` is the function that gathers all the equality and inequality constraints (violated values only) """ return _optim._OptimizationAlgorithmImplementationPointer_setMaximumConstraintError(self, maximumConstraintError) def setMaximumTimeDuration(self, maximumTime): r""" Accessor to the maximum duration. Parameters ---------- maximumTime : float Maximum optimization duration in seconds. """ return _optim._OptimizationAlgorithmImplementationPointer_setMaximumTimeDuration(self, maximumTime) def getMaximumTimeDuration(self): r""" Accessor to the maximum duration. Returns ------- maximumTime : float Maximum optimization duration in seconds. 
""" return _optim._OptimizationAlgorithmImplementationPointer_getMaximumTimeDuration(self) def __repr__(self): return _optim._OptimizationAlgorithmImplementationPointer___repr__(self) def setProgressCallback(self, *args): r""" Set up a progress callback. Can be used to programmatically report the progress of an optimization. Parameters ---------- callback : callable Takes a float as argument as percentage of progress. Examples -------- >>> import sys >>> import openturns as ot >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2']) >>> problem = ot.OptimizationProblem(rosenbrock) >>> solver = ot.OptimizationAlgorithm(problem) >>> solver.setStartingPoint([0, 0]) >>> solver.setMaximumResidualError(1.e-3) >>> solver.setMaximumCallsNumber(10000) >>> def report_progress(progress): ... sys.stderr.write('-- progress=' + str(progress) + '%\n') >>> solver.setProgressCallback(report_progress) >>> solver.run() """ return _optim._OptimizationAlgorithmImplementationPointer_setProgressCallback(self, *args) def setStopCallback(self, *args): r""" Set up a stop callback. Can be used to programmatically stop an optimization. Parameters ---------- callback : callable Returns an int deciding whether to stop or continue. Examples -------- >>> import openturns as ot >>> rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+100*(x2-x1^2)^2']) >>> problem = ot.OptimizationProblem(rosenbrock) >>> solver = ot.OptimizationAlgorithm(problem) >>> solver.setStartingPoint([0, 0]) >>> solver.setMaximumResidualError(1.e-3) >>> solver.setMaximumCallsNumber(10000) >>> def ask_stop(): ... return True >>> solver.setStopCallback(ask_stop) >>> solver.run() """ return _optim._OptimizationAlgorithmImplementationPointer_setStopCallback(self, *args) def setCheckStatus(self, checkStatus): r""" Accessor to check status flag. Parameters ---------- checkStatus : bool Whether to check the termination status. If set to **False**, :meth:`run` will not throw an exception if the algorithm does not fully converge and will allow one to still find a feasible candidate. """ return _optim._OptimizationAlgorithmImplementationPointer_setCheckStatus(self, checkStatus) def getCheckStatus(self): r""" Accessor to check status flag. Returns ------- checkStatus : bool Whether to check the termination status. If set to **False**, :meth:`run` will not throw an exception if the algorithm does not fully converge and will allow one to still find a feasible candidate. """ return _optim._OptimizationAlgorithmImplementationPointer_getCheckStatus(self) def __eq__(self, other): return _optim._OptimizationAlgorithmImplementationPointer___eq__(self, other) def __ne__(self, other): return _optim._OptimizationAlgorithmImplementationPointer___ne__(self, other) def __str__(self, *args): return _optim._OptimizationAlgorithmImplementationPointer___str__(self, *args) def __repr_markdown__(self): return _optim._OptimizationAlgorithmImplementationPointer___repr_markdown__(self) def _repr_html_(self): return _optim._OptimizationAlgorithmImplementationPointer__repr_html_(self) def hasName(self): r""" Test if the object is named. Returns ------- hasName : bool True if the name is not empty. """ return _optim._OptimizationAlgorithmImplementationPointer_hasName(self) def getName(self): r""" Accessor to the object's name. Returns ------- name : str The name of the object. """ return _optim._OptimizationAlgorithmImplementationPointer_getName(self) def setName(self, name): r""" Accessor to the object's name. 
        Parameters
        ----------
        name : str
            The name of the object.
        """
        return _optim._OptimizationAlgorithmImplementationPointer_setName(self, name)

# Register _OptimizationAlgorithmImplementationPointer in _optim:
_optim._OptimizationAlgorithmImplementationPointer_swigregister(_OptimizationAlgorithmImplementationPointer)