How to use the chaospy.J function in chaospy

To help you get started, we’ve selected a few chaospy.J examples based on popular ways it is used in public projects.

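As a minimal sketch of what the examples below have in common: chaospy.J joins univariate (or mutually dependent) distributions into a joint multivariate distribution that supports sampling, density evaluation, and the forward/inverse Rosenblatt transforms.

import chaospy as cp

# Join two independent univariate distributions into a bivariate joint distribution.
joint = cp.J(cp.Uniform(0, 1), cp.Normal(0, 1))

samples = joint.sample(100)     # array with shape (2, 100)
density = joint.pdf(samples)    # joint probability density at each sample
uniforms = joint.fwd(samples)   # forward Rosenblatt transform onto the unit square
restored = joint.inv(uniforms)  # inverse transform recovers the original samples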

github jonathf/chaospy/tests/test_stress.py
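# `dim` and `samples` are presumably module-level constants in the original test
# file; each appended Normal uses the previous one as its location, so cp.J
# builds a chain of dependent components.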
def test_dist():
    dist = [cp.Normal()]
    for d in range(dim-1):
        dist.append(cp.Normal(dist[-1]))
    dist = cp.J(*dist)
    out = dist.sample(samples)
    out = dist.fwd(out)
    out = dist.inv(out)

github jonathf/chaospy/tests/distributions/operators/test_matmul.py
"""
Testing of the matmul operator '@' for distributions.

For py27 reasons, the dunder-methods '__matmul__' and '__rmatmul__' are used
instead of the literal '@' operator.
"""
from pytest import raises
import numpy

import chaospy

UNIVARIATE = chaospy.Uniform(2, 3)
MULTIVARIATE = chaospy.J(chaospy.Uniform(1, 2), chaospy.Uniform(2, 4))
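# The two dependent variants join UNIVARIATE with a second component that
# depends on it through multiplication, differing only in operand order.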
DEPENDENT1 = chaospy.J(UNIVARIATE, chaospy.Uniform(-2, 1)*UNIVARIATE)
DEPENDENT2 = chaospy.J(UNIVARIATE, UNIVARIATE*chaospy.Uniform(-2, 1))

EPS = 1E-1


def test_dist_matmul_illigals():
    with raises(ValueError):
        UNIVARIATE.__matmul__(4)
    with raises(ValueError):
        UNIVARIATE.__rmatmul__(4)
    with raises(ValueError):
        MULTIVARIATE.__matmul__(4)
    with raises(ValueError):
        MULTIVARIATE.__rmatmul__(4)
    with raises(ValueError):
        UNIVARIATE.__matmul__([2, 3])
    with raises(ValueError):

github jonathf/chaospy/tests/distributions/operators/test_multiply.py
from copy import deepcopy

from pytest import raises
import numpy

import chaospy

UNIVARIATE = chaospy.Uniform(-2, 3)
MULTIVARIATE = chaospy.J(chaospy.Uniform(-1, 2), chaospy.Uniform(2, 4))
DEPENDENT1 = chaospy.J(UNIVARIATE, chaospy.Uniform(-2, 1)*UNIVARIATE)
DEPENDENT2 = chaospy.J(UNIVARIATE, UNIVARIATE*chaospy.Uniform(-2, 1))

EPS = 1E-10


def test_dist_multiply_illigals():
    with raises(ValueError):
        _ = UNIVARIATE * [1, 1]
    with raises(ValueError):
        _ = [1, 1] * UNIVARIATE
    with raises(ValueError):
        _ = MULTIVARIATE * [[1, 1], [0, 1]]
    with raises(ValueError):
        _ = [[1, 1], [0, 1]] * MULTIVARIATE
    with raises(ValueError):
        chaospy.Mul(2, 3)

github simetenn/uncertainpy/tests/test_uncertainty.py
def create_PCE_custom(self, uncertain_parameters=None):
            data = Data()

            q0, q1 = cp.variable(2)
            parameter_space = [cp.Uniform(), cp.Uniform()]
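            # cp.J joins the two uniform priors into the joint input distribution
            # that is returned together with the hand-built cp.Poly surrogates.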
            distribution = cp.J(*parameter_space)

            data.uncertain_parameters = ["a", "b"]

            data.test_value = "custom PCE method"
            data.add_features(["TestingModel1d", "feature0d_var", "feature1d_var", "feature2d_var"])

            U_hat = {}
            U_hat["TestingModel1d"] = cp.Poly([q0, q1*q0, q1])
            U_hat["feature0d_var"] = cp.Poly([q0, q1*q0, q1])
            U_hat["feature1d_var"] = cp.Poly([q0, q1*q0, q1])
            U_hat["feature2d_var"] = cp.Poly([q0, q1*q0, q1])

            return U_hat, distribution, data

github redmod-team/profit/examples/old/mossco/postprocess.py
def read_input(inputtxt):
    data = np.genfromtxt(inputtxt, names = True)
    return data.view((float, len(data.dtype.names))).T

eval_points = read_input('%s/input.txt'%cdir)

# read data and create distribution:
nrun = eval_points.shape[1]

data = get_data(nrun,cdir)
#rescale oxygen flux
data[:,0,:] = -data[:,0,:]*86400.

uq = profit.UQ(yaml='uq.yaml')
distribution = cp.J(*uq.params.values())
sparse=uq.backend.sparse
if sparse:
  order=2*3
else:
  order=3+1

# actually start the postprocessing now:
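# Gaussian quadrature nodes and weights for the joint distribution, an
# orthogonal polynomial expansion (three-term recurrence), and pseudo-spectral
# fits of the expansion coefficients follow: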

nodes, weights = cp.generate_quadrature(order, distribution, rule='G',sparse=sparse)
expansion,norms = cp.orth_ttr(3, distribution,retall=True)
approx_denit = cp.fit_quadrature(expansion, nodes, weights, np.mean(data[:,1,:], axis=1))
approx_oxy = cp.fit_quadrature(expansion, nodes, weights, np.mean(data[:,0,:], axis=1))

annual_oxy = cp.fit_quadrature(expansion,nodes,weights,data[:,0,:])
annual_denit = cp.fit_quadrature(expansion,nodes,weights,data[:,1,:])

github simetenn/uncertainpy/src/uncertainpy/core/uncertainty_calculations.py
-----
        If a multivariate distribution is defined in the Parameters.distribution,
        that multivariate distribution is returned. Otherwise the joint
        multivariate distribution for the selected parameters is created from
        the univariate distributions.

        See also
        --------
        uncertainpy.Parameters
        """
        uncertain_parameters = self.convert_uncertain_parameters(uncertain_parameters)

        if self.parameters.distribution is None:
            parameter_distributions = self.parameters.get("distribution", uncertain_parameters)

            distribution = cp.J(*parameter_distributions)
        else:
            distribution = self.parameters.distribution

        return distribution

github UCL-CCS/EasyVVUQ/easyvvuq/sampling/qmc.py
        self.n_samples = n_samples

        # List of the probability distributions of uncertain parameters
        params_distribution = list(vary.values())

        # Multivariate distribution
        self.distribution = cp.J(*params_distribution)

        # Generate samples
        self.n_uncertain_params = len(vary)
        n_sobol_samples = int(np.round(self.n_samples / 2.))

        dist_U = []
        for i in range(self.n_uncertain_params):
            dist_U.append(cp.Uniform())
        dist_U = cp.J(*dist_U)

        problem = {
            "num_vars": self.n_uncertain_params,
            "names": list(vary.keys()),
            "bounds": [[0, 1]] * self.n_uncertain_params
        }

        nodes = saltelli.sample(problem, n_sobol_samples, calc_second_order=False)
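        # dist_U.fwd maps the Saltelli nodes through the unit-uniform CDFs and
        # self.distribution.inv maps the result onto the target joint
        # distribution (an inverse Rosenblatt transform).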
        self._samples = self.distribution.inv(dist_U.fwd(nodes.transpose()))

        self.n_total_samples = n_sobol_samples * (self.n_uncertain_params + 2)

        # Fast forward to specified count, if possible
        self.count = 0
        if self.count >= self.n_total_samples:
            msg = (f"Attempt to start sampler fastforwarded to count {self.count}, "

github jonathf/chaospy/docs/distributions/multivariate.py
    s,t = meshgrid(linspace(0,5,200), linspace(-6,6,200))
    contourf(s,t,Q.pdf([s,t]),50)
    xlabel("$q_1$")
    ylabel("$q_2$")
    subplot(122)
    Qr = Q.sample(500)
    scatter(*Qr, s=10, c="k", marker="s")
    xlabel("$Q_1$")
    ylabel("$Q_2$")
    axis([0,5,-6,6])

    savefig("multivariate.png"); clf()

    Q2 = cp.Gamma(1)
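    # Q1's location is Q2**2 and its scale Q2+1, so cp.J builds a dependent
    # bivariate distribution.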
    Q1 = cp.Normal(Q2**2, Q2+1)
    Q = cp.J(Q1, Q2)
    #end

    subplot(121)
    s,t = meshgrid(linspace(-4,7,200), linspace(0,3,200))
    contourf(s,t,Q.pdf([s,t]),30)
    xlabel("$q_1$")
    ylabel("$q_2$")
    subplot(122)
    Qr = Q.sample(500)
    scatter(*Qr)
    xlabel("$Q_1$")
    ylabel("$Q_2$")
    axis([-4,7,0,3])

    savefig("multivariate2.png"); clf()

github jonathf/chaospy/docs/distributions/multivariate.py
def plot_figures():
    """Plot figures for multivariate distribution section."""
    rc("figure", figsize=[8.,4.])
    rc("figure.subplot", left=.08, top=.95, right=.98)
    rc("image", cmap="gray")
    seed(1000)

    Q1 = cp.Gamma(2)
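    # Q2's scale is the Gamma-distributed Q1, so cp.J yields a joint
    # distribution with dependent components.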
    Q2 = cp.Normal(0, Q1)
    Q = cp.J(Q1, Q2)
    #end

    subplot(121)
    s,t = meshgrid(linspace(0,5,200), linspace(-6,6,200))
    contourf(s,t,Q.pdf([s,t]),50)
    xlabel("$q_1$")
    ylabel("$q_2$")
    subplot(122)
    Qr = Q.sample(500)
    scatter(*Qr, s=10, c="k", marker="s")
    xlabel("$Q_1$")
    ylabel("$Q_2$")
    axis([0,5,-6,6])

    savefig("multivariate.png"); clf()

github simetenn/uncertainpy/src/uncertainpy/core/uncertainty_calculations.py
        distribution = self.create_distribution(uncertain_parameters=uncertain_parameters)

        # nodes = distribution.sample(nr_samples, "M")

        problem = {
            "num_vars": len(uncertain_parameters),
            "names": uncertain_parameters,
            "bounds": [[0,1]]*len(uncertain_parameters)
        }

        # Create the Multivariate normal distribution
        dist_R = []
        for parameter in uncertain_parameters:
            dist_R.append(cp.Uniform())

        dist_R = cp.J(*dist_R)

        nr_sobol_samples = int(np.round(nr_samples/2.))

        nodes_R = saltelli.sample(problem, nr_sobol_samples, calc_second_order=False)
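        # nodes_R lives on the unit hypercube; below it is mapped into the joint
        # parameter distribution via the inverse Rosenblatt transform.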

        nodes = distribution.inv(dist_R.fwd(nodes_R.transpose()))


        data = self.runmodel.run(nodes, uncertain_parameters)

        data.method = "monte carlo method. nr_samples={}".format(nr_samples)
        data.seed = seed

        logger = get_logger(self)
        for feature in data:
            if feature == self.model.name and self.model.ignore: