How to use the casadi.MX class in casadi

To help you get started, we’ve selected a few casadi examples based on popular ways casadi.MX is used in public projects.
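Before the project snippets below, here is a minimal, self-contained sketch of the MX workflow they all share: declare symbolic variables with ca.MX.sym, build an expression graph from them, and wrap the result in a ca.Function for numerical evaluation (the names and the toy expression are illustrative only).

import casadi as ca

# Declare MX symbols: a 2x1 state, a scalar parameter and a scalar control
x = ca.MX.sym("x", 2)
p = ca.MX.sym("p", 1)
u = ca.MX.sym("u", 1)

# Combine the symbols into a symbolic expression (a 2x1 MX)
f = ca.vertcat(x[1], p[0] * (u - x[0]))

# Wrap the expression in a Function and evaluate it numerically
f_fun = ca.Function("f", [x, p, u], [f])
print(f_fun([0.1, 0.2], 2.0, 0.5))   # prints [0.2, 0.8]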

github adbuerger / casiopeia / concept_tests / sd_check_pendulum_linear.py (View on GitHub)
#
#     - m: the mass of the ball in kg
#     - L: the length of the pendulum bar in meters
#     - g: the gravitational acceleration in m/s^2
#     - psi: the actuation angle of the maneuver in radians, which stays
#            constant for this problem

m = 1.0
L = 3.0
g = 9.81
# psi = pl.pi / 2.0
psi = pl.pi / (180.0 * 2)

# System

x = ca.MX.sym("x", 2)
p = ca.MX.sym("p", 1)
u = ca.MX.sym("u", 1)

# f = ca.vertcat([x[1], p[0]/(m*(L**2))*(u-x[0]) - g/L * pl.sin(x[0])])
f = ca.vertcat(x[1], p[0]/(m*(L**2))*(u-x[0]) - g/L * x[0])

phi = x

system = cp.system.System(x = x, u = u, p = p, f = f, phi = phi)

data = pl.loadtxt('data_pendulum.txt')
time_points = data[:500, 0]
numeas = data[:500, 1]
wmeas = data[:500, 2]
N = time_points.size
ydata = pl.array([numeas,wmeas])
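The excerpt stops after defining the symbolic right-hand side f. A typical next step (not part of the excerpt, shown here only as a sketch reusing x, u, p, f and psi from above, with the usual import casadi as ca) is to wrap the MX expressions in a ca.Function so they can be evaluated numerically:

# Hypothetical continuation: evaluate the pendulum right-hand side numerically
ode = ca.Function("ode", [x, u, p], [f])
print(ode([0.1, 0.0], psi, 3.0))   # time derivative of the state at x = [0.1, 0]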
github helgeanl / GP-MPC / gp_mpc / gp_functions.py (View on GitHub)
    var_func = ca.Function('var', [kss_s, ksT_invK_s, ks_s],
                           [kss_s - ca.mtimes(ksT_invK_s, ks_s)])

    for output in range(Ny):
        m = get_mean_function(hyper[output, :], inputmean, func=meanFunc)
        ell = ca.MX(hyper[output, 0:Nx])
        sf2 = ca.MX(hyper[output, Nx]**2)

        kss = covSE(inputmean, inputmean, ell, sf2)
        ks = ca.MX.zeros(N, 1)
        for i in range(N):
            ks[i] = covSE(X[i, :], inputmean, ell, sf2)

        ksT_invK = ksT_invK_func(ks, ca.MX(invK[output]))
        if alpha is not None:
            mean[output] = mean_func(ks, ca.MX(alpha[output]))
        else:
            mean[output] = mean_func(ksT_invK, Y[:, output])
        var[output] = var_func(kss, ks, ksT_invK)

    if log:
        mean = ca.exp(mean)
        var = ca.exp(var)

    covar = ca.diag(var)
    return mean, covar
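Two MX idioms do most of the work in this excerpt: ca.MX.zeros allocates a symbolic matrix that is then filled element by element, and ca.MX(array) embeds constant numerical data (invK, alpha, the hyperparameters) as nodes in the expression graph. A small standalone illustration of both, with made-up sizes:

import casadi as ca
import numpy as np

z = ca.MX.sym("z", 3)
A = ca.MX(np.eye(3))          # numerical matrix embedded as a constant MX node
v = ca.MX.zeros(3, 1)         # symbolic vector, filled element by element
for i in range(3):
    v[i] = z[i] ** 2
quad = ca.mtimes(v.T, ca.mtimes(A, v))
print(ca.Function("q", [z], [quad])([1.0, 2.0, 3.0]))   # prints 98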
github adbuerger / casiopeia / examples / lotka_volterra_multi.py (View on GitHub)
T = pl.linspace(0, 10, 11)

yN = pl.array([[1.0, 0.9978287, 2.366363, 6.448709, 5.225859, 2.617129, \
           1.324945, 1.071534, 1.058930, 3.189685, 6.790586], \

           [1.0, 2.249977, 3.215969, 1.787353, 1.050747, 0.2150848, \
           0.109813, 1.276422, 2.493237, 3.079619, 1.665567]])

# T = T[:2]
# yN = yN[:, :2]

sigma_x1 = 0.1
sigma_x2 = 0.2

x = ca.MX.sym("x", 2)

alpha = 1.0
gamma = 1.0

p = ca.MX.sym("p", 2)

f = ca.vertcat(
    -alpha * x[0] + p[0] * x[0] * x[1],
    gamma * x[1] - p[1] * x[0] * x[1])

phi = x

system = cp.system.System(x = x, p = p, f = f, phi = phi)

# The weightings for the measurement errors given to casiopeia are calculated
# from the standard deviations of the measurements, so that the least squares
github helgeanl / GP-MPC / gp_mpc / gp_class.py (View on GitHub)
        if gp_method == 'ME':
            self.__predict = ca.Function('gp_mean', [x, u, covar_s],
                                [self.__mean(ca.vertcat(x,u)),
                                 self.__covar(ca.vertcat(x,u))])
        elif gp_method == 'TA':
            self.__predict = ca.Function('gp_taylor', [x, u, covar_s],
                                [self.__mean(ca.vertcat(x,u)),
                                 self.__TA_covar(ca.vertcat(x,u), covar_s)])
        elif gp_method == 'EM':
            self.__predict = ca.Function('gp_exact_moment', [x, u, covar_s],
                                gp_exact_moment(self.__invK, ca.MX(self.__X),
                                        ca.MX(self.__Y), ca.MX(self.__hyper),
                                        ca.vertcat(x, u).T, covar_s))
        elif gp_method == 'old_ME':
            self.__predict = ca.Function('gp_mean', [x, u, covar_s],
                                gp(self.__invK, ca.MX(self.__X), ca.MX(self.__Y),
                                   ca.MX(self.__hyper),
                                   ca.vertcat(x, u).T, meanFunc=self.__mean_func))
        elif gp_method == 'old_TA':
            self.__predict = ca.Function('gp_taylor_approx', [x, u, covar_s],
                                gp_taylor_approx(self.__invK, ca.MX(self.__X),
                                        ca.MX(self.__Y), ca.MX(self.__hyper),
                                        ca.vertcat(x, u).T, covar_s,
                                        meanFunc=self.__mean_func, diag=True))
        else:
            raise NameError('No GP method called: ' + gp_method)

        self.__discrete_jac_x = ca.Function('jac_x', [x, u, covar_s],
                                      [ca.jacobian(self.__predict(x,u, covar_s)[0], x)])
        self.__discrete_jac_u = ca.Function('jac_u', [x, u, covar_s],
                                      [ca.jacobian(self.__predict(x,u,covar_s)[0], u)])
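Note the pattern at the end of the excerpt: because self.__predict is itself a ca.Function, it can be called on MX symbols and the resulting expression differentiated with ca.jacobian to obtain Jacobians with respect to x and u. A reduced standalone sketch of the same idea (the dynamics below are made up):

import casadi as ca

x = ca.MX.sym("x", 2)
u = ca.MX.sym("u", 1)
step = ca.Function("step", [x, u], [x + ca.vertcat(u, ca.sin(x[0]))])

# Call the Function on MX symbols and differentiate its output
jac_x = ca.Function("jac_x", [x, u], [ca.jacobian(step(x, u), x)])
jac_u = ca.Function("jac_u", [x, u], [ca.jacobian(step(x, u), u)])
print(jac_x([0.0, 1.0], 0.5))   # 2x2 Jacobian of the step map w.r.t. x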
github adbuerger / casiopeia / examples / 2d_vehicle_doe_validation.py (View on GitHub)
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with casiopeia. If not, see <http://www.gnu.org/licenses/>.

# Model and data taken from: Verschueren, Robin: Design and implementation of a 
# time-optimal controller for model race cars, Master’s thesis, KU Leuven, 2014.

import casadi as ca
import pylab as pl
import casiopeia as cp

# System

x = ca.MX.sym("x", 4)
p = ca.MX.sym("p", 6)
u = ca.MX.sym("u", 2)

f = ca.vertcat(
    x[3] * pl.cos(x[2] + p[0] * u[0]),
    x[3] * pl.sin(x[2] + p[0] * u[0]),
    x[3] * u[0] * p[1],
    p[2] * u[1]
        - p[3] * u[1] * x[3]
        - p[4] * x[3]**2
        - p[5]
        - (x[3] * u[0])**2 * p[1] * p[0])
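A short aside on the trigonometric calls above: since the states are MX expressions, the pl.cos / pl.sin calls end up building CasADi's symbolic cos and sin nodes; using ca.cos / ca.sin directly is equivalent and makes the symbolic dependency explicit. For example, the first two components could be written as (illustrative only, reusing x, p and u from the excerpt):

f0 = x[3] * ca.cos(x[2] + p[0] * u[0])
f1 = x[3] * ca.sin(x[2] + p[0] * u[0])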
github adbuerger / casiopeia / examples / 2d_vehicle_doe_scaled.py (View on GitHub)
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with casiopeia. If not, see <http://www.gnu.org/licenses/>.

# Model and data taken from: Verschueren, Robin: Design and implementation of a 
# time-optimal controller for model race cars, Master’s thesis, KU Leuven, 2014.

import casadi as ca
import pylab as pl
import casiopeia as cp

# System

x = ca.MX.sym("x", 4)
p = ca.MX.sym("p", 6)
u = ca.MX.sym("u", 2)

pdata_scale = [0.273408, 11.5602, 2.45652, 7.90959, -0.44353, -0.249098]

f = ca.vertcat(
    x[3] * pl.cos(x[2] + pdata_scale[0] * p[0] * u[0]),
    x[3] * pl.sin(x[2] + pdata_scale[0] * p[0] * u[0]),
    x[3] * u[0] * pdata_scale[1] * p[1],
    pdata_scale[2] * p[2] * u[1]
        - pdata_scale[3] * p[3] * u[1] * x[3]
        - pdata_scale[4] * p[4] * x[3]**2
        - pdata_scale[5] * p[5] \
github pymoca / pymoca / src / pymoca / backends / casadi / model.py (View on GitHub)
                    sign = 1

                    alias_state = all_states[alias]

                    variables.append(alias_state.symbol)
                    values.append(sign * canonical_state.symbol)

                    # If any of the aliases has a nonstandard type, apply it to
                    # the canonical state as well
                    if alias_state.python_type != float:
                        python_type = alias_state.python_type

                    # If any of the aliases has a nondefault start value, apply it to
                    # the canonical state as well
                    if not isinstance(alias_state.start, _DefaultValue):
                        alias_start_mx = ca.MX(alias_state.start)
                        if not isinstance(start, _DefaultValue):
                            start_mx = ca.MX(start)
                            # If the state already has a non-default start
                            # attribute we check for conflicts.
                            if (start_mx.is_constant() != ca.MX(alias_start_mx).is_constant()
                                or (start_mx.is_symbolic() and str(start_mx) != str(sign * alias_start_mx))
                                or start != alias_start_mx):

                                logger.warning(
                                    "Current start attribute of canonical variable '{}' ({})"
                                    " conflicts with that of its alias '{}' ({})."
                                    " Will keep existing value of {}."
                                    .format(canonical, start, alias, alias_start_mx, start))
                            else:
                                # Alias has equal start attribute, so nothing to do
                                pass
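The pymoca excerpt relies on a few MX introspection helpers: ca.MX(value) lifts a plain Python number (or another expression) into an MX node, while is_constant() and is_symbolic() distinguish constant nodes from symbols. A tiny standalone illustration:

import casadi as ca

a = ca.MX(1.5)           # constant MX node
b = ca.MX.sym("b")       # symbolic MX node
print(a.is_constant(), a.is_symbolic())   # True False
print(b.is_constant(), b.is_symbolic())   # False True
print(str(b))                             # b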
github meco-group / omg-tools / omgtools / problems / dualmethod.py (View on GitHub)
    def _dict2struct(self, var, stru):
        if isinstance(var, list):
            return [self._dict2struct(v, stru) for v in var]
        elif 'dd' in list(var.keys())[0] or 'admm' in list(var.keys())[0]:
            chck = list(list(list(var.values())[0].values())[0].values())[0]
            if isinstance(chck, SX):
                ret = struct_SX(stru)
            elif isinstance(chck, MX):
                ret = struct_MX_mutable(stru)
            elif isinstance(chck, DM):
                ret = stru(0)
            for nghb in var.keys():
                for child, q in var[nghb].items():
                    for name in q.keys():
                        ret[nghb, child, name] = var[nghb][child][name]
            return ret
        else:
            chck = list(list(var.values())[0].values())[0]
            if isinstance(chck, SX):
                ret = struct_SX(stru)
            elif isinstance(chck, MX):
                ret = struct_MX_mutable(stru)
            elif isinstance(chck, DM):
                ret = stru(0)
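This last excerpt is mostly about telling CasADi's matrix types apart at runtime: SX (scalar expression graphs), MX (matrix expression graphs) and DM (purely numerical matrices). A standalone reminder of what those isinstance checks separate:

import casadi as ca

print(isinstance(ca.SX.sym("a"), ca.SX))     # True: scalar expression graph
print(isinstance(ca.MX.sym("b"), ca.MX))     # True: matrix expression graph
print(isinstance(ca.DM([1.0, 2.0]), ca.DM))  # True: numerical values only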