How to use the deepxde.legacy.data.Data class in DeepXDE

To help you get started, we’ve selected a few DeepXDE examples based on popular ways DeepXDE is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
return x, self.func(x)

    @runifnone('train_x', 'train_y')
    def train_next_batch(self, batch_size, *args, **kwargs):
        """Generate and cache the training batch.

        The ``runifnone`` decorator presumably skips the body once
        ``train_x``/``train_y`` are set, so sampling happens only on the
        first call.  ``nbc`` slots of the batch are reserved for the
        boundary/anchor points that ``get_x`` prepends.
        """
        x, y = self.get_x(batch_size - self.nbc)
        self.train_x = x
        self.train_y = y
        return x, y

    @runifnone('test_x', 'test_y')
    def test(self, n, *args, **kwargs):
        """Build and cache the test set on a uniform grid of ``n`` points.

        Cached via ``runifnone``: regenerated only while ``test_x``/``test_y``
        are unset.
        """
        grid = self.geom.uniform_points(n, True)
        # Rotate the grid one position along axis 0.
        # NOTE(review): preserved verbatim; the reason for the roll is not
        # evident from this excerpt.
        grid = np.roll(grid, 1, axis=0)
        self.test_x = grid
        self.test_y = self.func(grid)
        return self.test_x, self.test_y


class DataIDE(Data):
    """Training data for solving IDE
    """

    def __init__(self, ide, func, geom, nbc, quad_deg):
        """Initialize training data for an integro-differential equation.

        Args:
            ide: Residual callable defining the IDE.
            func: Reference solution used to generate targets.
            geom: Geometry object that samples points from the domain.
            nbc: Number of boundary points; must be 2 (presumably the two
                interval endpoints — TODO confirm).
            quad_deg: Degree of the Gauss-Legendre quadrature rule.

        Raises:
            ValueError: if ``nbc`` is not 2.
        """
        # Explicit exception instead of ``assert``: asserts are stripped
        # under ``python -O`` and must not be used for input validation.
        if nbc != 2:
            raise ValueError('nbc must be 2, got %r' % (nbc,))
        super(DataIDE, self).__init__('ide')
        self.ide, self.func, self.geom = ide, func, geom
        self.nbc = nbc
        self.quad_deg = quad_deg

        self.train_x, self.train_y = None, None
        self.test_x, self.test_y = None, None
        # Gauss-Legendre nodes and weights on [-1, 1] for the integral term.
        self.quad_x, self.quad_w = np.polynomial.legendre.leggauss(quad_deg)

    def gen_data(self, size):
        def get_quad_points(x):
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
return scaler, X1, X2

        self.scaler_x, self.train_x, self.test_x = standardize_one(self.train_x, self.test_x)
        self.scaler_y, self.train_y, self.test_y = standardize_one(self.train_y, self.test_y)

    def inverse_transform_y(self, y):
        """Map standardized targets ``y`` back to their original scale."""
        scaler = self.scaler_y
        return scaler.inverse_transform(y)

    def train_next_batch(self, batch_size, *args, **kwargs):
        """Return the fixed training arrays; ``batch_size`` is ignored."""
        return (self.train_x, self.train_y)

    def test(self, n, *args, **kwargs):
        """Return the fixed test arrays; ``n`` is ignored."""
        return (self.test_x, self.test_y)


class DataSet2(Data):
    def __init__(self, X_train, y_train, X_test, y_test):
        """Wrap fixed train/test arrays and standardize them in place.

        Args:
            X_train, y_train: Training inputs and targets.
            X_test, y_test: Test inputs and targets.
        """
        super(DataSet2, self).__init__('func')

        self.train_x, self.train_y = X_train, y_train
        self.test_x, self.test_y = X_test, y_test

        # Predefine both scaler attributes so they always exist before
        # _standardize runs; the original preset only scaler_x even though
        # inverse_transform_y reads scaler_y.
        self.scaler_x = None
        self.scaler_y = None
        self._standardize()

    def _standardize(self):
        def standardize_one(X1, X2):
            scaler = preprocessing.StandardScaler(with_mean=True, with_std=True)
            X1 = scaler.fit_transform(X1)
            X2 = scaler.transform(X2)
            return scaler, X1, X2
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
self.train_x = self.geom.random_points(batch_size, 'pseudo')
            self.train_y = self.func(self.train_x)
        elif self.train_x is None:
            # self.train_x = self.geom.random_points(batch_size, 'sobol')
            self.train_x = self.geom.uniform_points(batch_size, True)
            self.train_y = self.func(self.train_x)
        return self.train_x, self.train_y

    @runifnone('test_x', 'test_y')
    def test(self, n, *args, **kwargs):
        """Generate and cache the test set on a uniform grid of ``n`` points."""
        xs = self.geom.uniform_points(n, True)
        self.test_x = xs
        self.test_y = self.func(xs)
        return self.test_x, self.test_y


class DataPDE(Data):
    """Training data for solving PDE
    """

    def __init__(self, pde, func, geom, anchors):
        """Set up PDE training data.

        Args:
            pde: Residual callable defining the PDE.
            func: Reference solution used to generate targets.
            geom: Geometry object that samples domain points.
            anchors: Fixed points prepended to every sampled set.
        """
        super(DataPDE, self).__init__('pde')
        self.pde = pde
        self.func = func
        self.geom = geom
        self.anchors = anchors

        self.train_x, self.train_y = None, None
        self.test_x, self.test_y = None, None
        # The anchors act as the boundary block of each batch.
        self.nbc = len(anchors)

    def get_x(self, n):
        """Return ``n`` uniform points with anchors prepended, plus targets."""
        xs = self.geom.uniform_points(n, True)
        xs = np.append(self.anchors, xs, axis=0)
        ys = self.func(xs)
        return xs, ys
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
def train_next_batch(self, batch_size):
    """Generate (once) and cache a noise-corrupted training set.

    Per the original comment, only x_dim = 1, y_dim = 1 is supported.
    """
    if self.train_x is None:
        self.train_x, self.train_y = self.gen_data(batch_size)
        # Corrupt the targets with zero-mean Gaussian noise (std 0.01).
        noise = 0.01 * np.random.randn(*self.train_y.shape)
        self.train_y = self.train_y + noise
    return self.train_x, self.train_y

    def test(self, n, dist=None):
        """Return the cached test set, generating it on the first call.

        ``dist`` is accepted for interface compatibility but unused here.
        """
        if self.test_x is not None:
            return self.test_x, self.test_y
        self.test_x, self.test_y = self.gen_data(n)
        return self.test_x, self.test_y


class DataFunctional(Data):
    """Training data for functional approximation
    """

    def __init__(self, functional, x_dim, y_dim, x_min, x_max, func2sensors, nsensor):
        """Set up functional-approximation data with evenly spaced sensors."""
        super(DataFunctional, self).__init__('functional')
        self.functional = functional
        self.x_dim = x_dim
        self.y_dim = y_dim
        self.x_min = x_min
        self.x_max = x_max
        self.func2sensors = func2sensors
        self.nsensor = nsensor

        # ``nsensor`` sensor locations evenly spaced over [0, 1].
        self.sensors = np.linspace(0, 1, num=nsensor)

    def train_next_batch(self, batch_size, *args, **kwargs):
        """Training batches reuse the deterministic grid built by ``test``."""
        return self.test(batch_size, 'grid')
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
for _ in range(500):
            self.train_x = np.vstack((self.train_x, x))
            self.train_y = np.vstack((self.train_y, np.hstack((ylow, yhi))))

        return self.train_x, self.train_y

    @runifnone('test_x', 'test_y')
    def test(self, n, *args, **kwargs):
        """Build and cache a test set with stacked multi-fidelity targets.

        Targets are the low- and high-fidelity outputs concatenated
        column-wise: ``[flow(x), fhi(x)]``.
        """
        xs = self.geom.uniform_points(n, True)
        self.test_x = xs
        self.test_y = np.hstack((self.flow(xs), self.fhi(xs)))
        return self.test_x, self.test_y


class DataClassification(Data):
    """Training data for classification
    """

    def __init__(self, func, geom, online=False):
        """Set up classification data.

        Args:
            func: Labeling function applied to sampled points.
            geom: Geometry object that samples domain points.
            online: When True, training batches are resampled on every call.
        """
        super(DataClassification, self).__init__('classification')
        self.func = func
        self.geom = geom
        self.online = online

        self.train_x, self.train_y = None, None
        self.test_x, self.test_y = None, None

    def train_next_batch(self, batch_size, *args, **kwargs):
        if self.online:
            self.train_x = self.geom.random_points(batch_size, 'pseudo')
            self.train_y = self.func(self.train_x)
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
self.x_dim, self.y_dim = x_dim, y_dim
        self.x_min, self.x_max = x_min, x_max
        self.func2sensors, self.nsensor = func2sensors, nsensor

        # sensors in [0, 1]
        self.sensors = np.linspace(0, 1, num=nsensor)

    def train_next_batch(self, batch_size, *args, **kwargs):
        """Training batches reuse the deterministic grid built by ``test``."""
        return self.test(batch_size, 'grid')

    def test(self, n, *args, **kwargs):
        """Delegate to the base test set, then map inputs to sensor values."""
        x, y = super(DataFunctional, self).test(n)
        sensor_values = self.func2sensors(x, self.sensors)
        return sensor_values, y


class DataFunctional2(Data):
    """Training data for functional approximation
    """

    def __init__(self, functional, x_dim, y_dim, x_min, x_max, func2sensors, nsensor):
        """Set up functional-approximation data with evenly spaced sensors."""
        super(DataFunctional2, self).__init__('functional')
        self.functional = functional
        self.x_dim = x_dim
        self.y_dim = y_dim
        self.x_min = x_min
        self.x_max = x_max
        self.func2sensors = func2sensors
        self.nsensor = nsensor

        # ``nsensor`` sensor locations evenly spaced over [0, 1].
        self.sensors = np.linspace(0, 1, num=nsensor)

    def train_next_batch(self, batch_size, *args, **kwargs):
        """Training batches reuse the deterministic grid built by ``test``."""
        return self.test(batch_size, 'grid')
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
if training:
            if self.train_x is None:
                self.train_next_batch(size)
            x = self.train_x
        else:
            if self.test_x is None:
                self.test(size)
            x = self.test_x
        int_mat = np.zeros((size, x.size), dtype=config.real(np))
        for i in range(size):
            int_mat[i, size+self.quad_deg*i: size + self.quad_deg*(i+1)] = \
                get_quad_weights(x[i, 0])
        return int_mat


class DataFrac(Data):
    """Training data for solving fractional DE
    """

    def __init__(self, frac, alpha, func, geom, disc):
        """Initialize training data for a fractional differential equation.

        Args:
            frac: Fractional-operator residual callable.
            alpha: Fractional order.
            func: Reference solution used to generate targets.
            geom: Geometry object that samples domain points.
            disc: Discretization; ``disc.nanchor`` gives the boundary count.

        Raises:
            ValueError: if a static mesh is requested on a non-Interval
                geometry (only Interval supports static meshes).
        """
        # Explicit exception instead of ``assert``, which is stripped when
        # Python runs with -O and must not guard input validation.
        if disc.meshtype == 'static' and geom.idstr != 'Interval':
            raise ValueError('Only Interval supports static mesh.')

        super(DataFrac, self).__init__('frac')
        self.frac, self.alpha, self.func, self.geom = frac, alpha, func, geom
        self.disc = disc

        self.nbc = disc.nanchor
        self.train_x, self.train_y, self.frac_train = None, None, None
        self.test_x, self.test_y, self.frac_test = None, None, None

    def get_x(self, size):
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
self.test_x, self.test_y, self.frac_test = self.get_x(n)
        return self.test_x, self.test_y

    def get_int_matrix(self, size, training):
        """Return the fractional integral matrix for train or test points.

        Lazily generates the corresponding data set on first use.
        """
        if training:
            if self.train_x is None:
                self.train_next_batch(size)
            return self.frac_train.get_matrix(True)
        if self.test_x is None:
            self.test(size)
        return self.frac_test.get_matrix(True)


class DataFracInv(Data):
    """Training data for solving fractional DE inverse problem
    """

    def __init__(self, frac, func, geom, disc):
        """Initialize training data for a fractional-DE inverse problem.

        Args:
            frac: Fractional-operator residual callable.
            func: Reference solution used to generate targets.
            geom: Geometry object that samples domain points.
            disc: Discretization; ``disc.nanchor`` gives the boundary count.

        Raises:
            ValueError: if a static mesh is requested on a non-Interval
                geometry (only Interval supports static meshes).
        """
        # Explicit exception instead of ``assert`` (stripped under -O).
        if disc.meshtype == 'static' and geom.idstr != 'Interval':
            raise ValueError('Only Interval supports static mesh.')

        super(DataFracInv, self).__init__('frac inv')
        self.frac, self.func, self.geom = frac, func, geom
        self.disc = disc

        self.nbc = disc.nanchor
        self.train_x, self.train_y, self.frac_train = None, None, None
        self.test_x, self.test_y, self.frac_test = None, None, None

        # NOTE(review): 1.5 appears to be an initial guess for the unknown
        # fractional order in the inverse problem — TODO confirm.
        self.alpha = 1.5
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
def get_int_matrix(self, size, training):
    """Return the fractional integral matrix for train or test points.

    Lazily generates the corresponding data set on first use.  For static
    meshes the matrix is post-processed before being returned.
    """
    if training:
        if self.train_x is None:
            self.train_next_batch(size)
        frac = self.frac_train
    else:
        if self.test_x is None:
            self.test(size)
        frac = self.frac_test
    int_mat = frac.get_matrix(True)
    if self.disc.meshtype == 'static':
        # Rotate columns one step left and drop the first and last rows.
        # NOTE(review): preserved verbatim; the rationale is not evident
        # from this excerpt.
        int_mat = np.roll(int_mat, int_mat.shape[1] - 1, axis=1)
        int_mat = int_mat[1:-1]
    return int_mat


class DataFracTime(Data):
    """Training data for solving time-dependent fractional DE
    """

    def __init__(self, frac, alpha, func, geom, t_min, t_max, disc):
        """Set up data for a time-dependent fractional DE on [t_min, t_max]."""
        super(DataFracTime, self).__init__('frac time')
        self.frac = frac
        self.alpha = alpha
        self.func = func
        self.geom = geom
        self.t_min, self.t_max = t_min, t_max
        self.disc = disc

        self.train_x, self.train_y, self.frac_train = None, None, None
        self.test_x, self.test_y, self.frac_test = None, None, None
        # Filled in later (presumably by get_x): time-slice count and
        # boundary-point count.
        self.nt, self.nbc = None, None

    def get_x(self, size):
        if self.disc.meshtype == 'static':
            self.nt = int(round(size / self.disc.resolution[0]))
github lululxvi / deepxde / deepxde / legacy / data.py View on Github external
def standardize_one(X1, X2):
            scaler = preprocessing.StandardScaler(with_mean=True, with_std=True)
            X1 = scaler.fit_transform(X1)
            X2 = scaler.transform(X2)
            return scaler, X1, X2

        self.scaler_x, self.train_x, self.test_x = standardize_one(self.train_x, self.test_x)

    def train_next_batch(self, batch_size, *args, **kwargs):
        """Return the fixed training arrays; ``batch_size`` is ignored."""
        return (self.train_x, self.train_y)

    def test(self, n, *args, **kwargs):
        """Return the fixed test arrays; ``n`` is ignored."""
        return (self.test_x, self.test_y)


class DataFunc(Data):
    """Training data for function approximation
    """

    def __init__(self, func, geom, online=False):
        """Set up function-approximation data.

        Args:
            func: Target function applied to sampled points.
            geom: Geometry object that samples domain points.
            online: When True, training batches are resampled on every call.
        """
        super(DataFunc, self).__init__('func')
        self.func = func
        self.geom = geom
        self.online = online

        self.train_x, self.train_y = None, None
        self.test_x, self.test_y = None, None

    def train_next_batch(self, batch_size, *args, **kwargs):
        if self.online:
            self.train_x = self.geom.random_points(batch_size, 'pseudo')
            self.train_y = self.func(self.train_x)