# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_generator1d_4():
    """Sampling with replace=True may request more models than the unique pool.

    6 neuron-counts x 3 dropout rates x 2 batch-norm options gives 36 unique
    combinations; with replacement, asking for 20 five-layer models succeeds.
    """
    g = Generator1d(290, 1, n_neuron=[100, 90, 80, 70, 60, 50],
                    drop_out=(0.2, 0.3, 0.4),
                    batch_normalize=(None, L1.batch_norm()))
    m = g(5, n_models=20, replace=True)
    # Fixed assertion message: the call uses replace=True, but the old
    # message claimed "when 'replace=False' is OK".
    assert len(list(m)) == 20, "when 'replace=True' is OK"
def test_generator1d_1():
    """Exhaustive generation over all hyper-parameter combinations.

    3 neuron choices x 3 dropout rates x 2 batch-norm options = 18 models.
    """
    norm_options = (None, L1.batch_norm())
    generator = Generator1d(290, 1,
                            n_neuron=(100, 70, 50),
                            drop_out=(0.2, 0.3, 0.4),
                            batch_normalize=norm_options)
    produced = generator(1)
    assert len(list(produced)) == 18, '3x3x2'
def test_generator1d_2():
    """Requesting a subset: n_models below the 18-combination pool is honored."""
    norm_options = (None, L1.batch_norm())
    generator = Generator1d(290, 1,
                            n_neuron=[100, 70, 50],
                            drop_out=(0.2, 0.3, 0.4),
                            batch_normalize=norm_options)
    produced = generator(1, n_models=10)
    assert len(list(produced)) == 10, '0 < n_models <= 3x3x2'
def test_generator1d_3():
    """Asking for more unique models than exist (20 > 18) must raise ValueError."""
    g = Generator1d(290, 1, n_neuron=[100, 70, 50],
                    drop_out=(0.2, 0.3, 0.4),
                    batch_normalize=(None, L1.batch_norm()))
    m = g(1, n_models=20)
    with pytest.raises(ValueError):
        # Consuming the generator triggers the over-request check.  The old
        # body compared `len(list(m)) == 18` and discarded the result — only
        # the iteration itself matters here.
        list(m)
def __init__(self, n_features: int, n_predict: int, *,
output_layer=L1.linear(),
n_neuron,
drop_out=(0.0,),
layer_func=(L1.linear(),),
act_func=(nn.ReLU(),),
batch_normalize=(L1.batch_norm(),)
):
"""
Parameters
----------
n_features: int
Input dimension.
n_predict: int
Output dimension.
output_layer: func
Output layer.
n_neuron: [int]
Number of neuron.
drop_out: [float]
Dropout rate.
layer_func: [func]
Layer functions. such like: :meth:`~.L1.linear`.
def __init__(self, n_in, n_out, *,
drop_out=0.,
layer_func=L1.linear(bias=True),
act_func=nn.ReLU(),
batch_nor=L1.batch_norm(eps=1e-05, momentum=0.1, affine=True)
):
"""
Parameters
----------
n_in: int
Size of each input sample.
n_out: int
Size of each output sample
drop_out: float
Probability of an element to be zeroed. Default: 0.5
layer_func: func
Layers come with PyTorch.
act_func: func
Activation function.
batch_nor: func
Normalization layers