How to use the nnabla.parameter.get_parameter_or_create function in nnabla

To help you get started, we've selected a few nnabla examples based on popular ways get_parameter_or_create is used in public projects.

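As a quick orientation before the project snippets: get_parameter_or_create looks up a parameter by name in the current parameter scope and registers a new one, built from the given shape and initializer, only if it does not exist yet. A minimal sketch (the scope name "affine1" and the shape are arbitrary illustration values):

import nnabla as nn
from nnabla.initializer import ConstantInitializer

with nn.parameter_scope("affine1"):
    # Returns the parameter registered as "affine1/W" if it already
    # exists; otherwise creates it with this shape and initializer.
    W = nn.parameter.get_parameter_or_create(
        "W", (16, 8), ConstantInitializer(0), need_grad=True)

print(W.shape)  # (16, 8)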

github: sony/nnabla/python/src/nnabla/parametric_functions.py
        n_b (int): Bit width used for bias.
        m_b (int): :math:`2^m` is upper bound and :math:`-2^m` is lower bound for bias. Default is 2.
        ste_fine_grained_b (bool): STE is fine-grained if `True`.

    Returns:
        :class:`~nnabla.Variable`: N-D array.

    """
    if w_init is None:
        w_init = UniformInitializer(
            calc_uniform_lim_glorot(inp.shape[base_axis], outmaps, tuple(kernel)), rng=rng)
    if with_bias and b_init is None:
        b_init = ConstantInitializer()

    # Floating Weight
    w = get_parameter_or_create(
        "W", (outmaps, inp.shape[base_axis] // group) + tuple(kernel),
        w_init, True, not fix_parameters)

    # Quantized Weight
    if quantize_w:
        w_q = get_parameter_or_create(
            "W_q", (outmaps, inp.shape[base_axis] // group) + tuple(kernel),
            w_init, False)

        # Link computation graph
        real_w_q = F.pow2_quantize(w, quantize=quantize_w,
                                   sign=sign_w, with_zero=with_zero_w,
                                   n=n_w, m=m_w, ste_fine_grained=ste_fine_grained_w,
                                   outputs=[w_q.data])
        real_w_q.persistent = True
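        # Note: passing outputs=[w_q.data] makes the quantization op write
        # its result into w_q's array, so "W_q" always holds the latest
        # quantized copy of the floating weight "W"; persistent=True keeps
        # that buffer from being cleared during forward/backward.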
    else:
github: sony/nnabla/python/src/nnabla/experimental/graph_converters/batch_normalization_folded.py
    def _inner_prod_bn_conversion(self, inner_prod_func, bn_func):
        # Fold parameters
        w_data, b_data = self._compute_folded_parameters(
            inner_prod_func, bn_func)

        # W
        w = inner_prod_func.inputs[1]
        idx = list(self.params.values()).index(w)
        name = list(self.params.keys())[idx]
        w = nn.parameter.get_parameter_or_create(name,
                                                 w.shape,
                                                 w_data,
                                                 w.need_grad)
        # b (borrow from w)
        name = os.path.join("/".join(name.rstrip().split("/")[:-1]), "b")
        b = nn.parameter.get_parameter_or_create(name,
                                                 b_data.shape,
                                                 b_data,
                                                 need_grad=True)

        # Input conversion
        x = inner_prod_func.inputs[0]
        x = self.input_map[x] if x in self.input_map else x
        inputs = [x, w, b]

        # Function call
github: sony/nnabla-examples/GANs/munit/models.py
def IN(inp, axes=[1], decay_rate=0.9, eps=1e-5, fix_parameters=True):
    """Instance Normalization
    """
    if inp.shape[0] == 1:
        return INByBatchNorm(inp, axes, decay_rate, eps, fix_parameters)

    b, c = inp.shape[0:2]
    spatial_shape = inp.shape[2:]

    shape_stat = [1 for _ in inp.shape]
    shape_stat[axes[0]] = inp.shape[axes[0]]
    beta = get_parameter_or_create(
        "beta", shape_stat, ConstantInitializer(0), not fix_parameters)
    gamma = get_parameter_or_create(
        "gamma", shape_stat, ConstantInitializer(1), not fix_parameters)

    # Instance normalization: normalize over spatial dimensions
    axis = [i for i in range(len(inp.shape)) if i > 1]
    n_elems = np.prod([inp.shape[i] for i in axis])
    mean = F.sum(inp, axis=axis, keepdims=True) / n_elems
    var = F.sum(F.pow_scalar(inp - mean, 2.0), axis=axis,
                keepdims=True) / n_elems
    h = (inp - mean) / F.pow_scalar(var + eps, 0.5)
    return gamma * h + beta
github: sony/nnabla-examples/GANs/cycle-gan/models.py
def instance_normalization(inp, axes=[1], decay_rate=0.9, eps=1e-5,
                           batch_stat=True, output_stat=False, fix_parameters=False):
    """Instance Normalization (implemented using BatchNormalization)

    Instance normalization is equivalent to batch normalization with a batch
    size of one; in other words, it normalizes over the spatial dimension(s),
    i.e. all dimensions except the batch and feature dimensions.

    """
    assert len(axes) == 1
    shape_stat = [1 for _ in inp.shape]
    shape_stat[axes[0]] = inp.shape[axes[0]]
    beta = get_parameter_or_create(
        "beta", shape_stat, ConstantInitializer(0), not fix_parameters)
    gamma = get_parameter_or_create(
        "gamma", shape_stat, ConstantInitializer(1), not fix_parameters)
    mean = get_parameter_or_create(
        "mean", shape_stat, ConstantInitializer(0), False)
    var = get_parameter_or_create(
        "var", shape_stat, ConstantInitializer(0), False)
    return F.batch_normalization(inp, beta, gamma, mean, var, axes,
                                 decay_rate, eps, batch_stat, output_stat)
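A minimal usage sketch for the function above (assumes the definition is in scope; the input shape and scope name are illustrative):

import nnabla as nn

x = nn.Variable((8, 3, 32, 32))  # NCHW input
with nn.parameter_scope("in1"):
    # Registers "beta", "gamma", "mean" and "var" in the scope
    # via get_parameter_or_create on the first call.
    h = instance_normalization(x, axes=[1])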
github: sony/nnabla-examples/GANs/munit/models.py
def LN(inp, fix_parameters=False):
    """Layer normalization.
    """
    beta_shape = (1, inp.shape[1], 1, 1)
    gamma_shape = (1, inp.shape[1], 1, 1)
    beta = get_parameter_or_create(
        "beta", beta_shape, ConstantInitializer(0), not fix_parameters)
    gamma = get_parameter_or_create(
        "gamma", gamma_shape, ConstantInitializer(1), not fix_parameters)
    return f_layer_normalization(inp, beta, gamma)
github: sony/nnabla-examples/GANs/munit/models.py
def BN(inp, axes=[1], decay_rate=0.9, eps=1e-5,
       batch_stat=True, output_stat=False, fix_parameters=False):
    """Batch Normalization
    """
    shape_stat = [1 for _ in inp.shape]
    shape_stat[axes[0]] = inp.shape[axes[0]]
    beta = get_parameter_or_create(
        "beta", shape_stat, ConstantInitializer(0), not fix_parameters)
    gamma = get_parameter_or_create(
        "gamma", shape_stat, ConstantInitializer(1), not fix_parameters)
    mean = get_parameter_or_create(
        "mean", shape_stat, ConstantInitializer(0), False)
    var = get_parameter_or_create(
        "var", shape_stat, ConstantInitializer(0), False)
    return F.batch_normalization(inp, beta, gamma, mean, var, axes,
                                 decay_rate, eps, batch_stat, output_stat)
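For a 4-D NCHW input with axes=[1], shape_stat becomes [1, C, 1, 1], so the created parameters broadcast over the batch and spatial dimensions. A minimal usage sketch (assumes the definition above is in scope; shapes and scope name are illustrative):

import nnabla as nn

x = nn.Variable((8, 16, 32, 32))
with nn.parameter_scope("bn1"):
    # Creates "bn1/beta", "bn1/gamma", "bn1/mean" and "bn1/var".
    h = BN(x)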
github: sony/nnabla/python/src/nnabla/parametric_functions.py
        rng (numpy.random.RandomState): Random generator for Initializer.
        with_bias (bool): Specify whether to include the bias term.

    Returns:
        :class:`~nnabla.Variable`

    """
    if w_init is None:
        w_init = UniformInitializer(
            calc_uniform_lim_glorot(inp.shape[base_axis], outmaps, tuple(kernel)), rng=rng)
    if wb_init is None:
        wb_init = UniformInitializer(
            calc_uniform_lim_glorot(inp.shape[base_axis], outmaps, tuple(kernel)), rng=rng)
    if b_init is None:
        b_init = ConstantInitializer()
    w = get_parameter_or_create(
        "W", (outmaps, inp.shape[base_axis]) + tuple(kernel),
        w_init, True, not fix_parameters)
    wb = get_parameter_or_create(
        "Wb", (outmaps, inp.shape[base_axis]) + tuple(kernel),
        wb_init, False)
    b = None
    if with_bias:
        b = get_parameter_or_create(
            "b", (outmaps,), b_init, True, not fix_parameters)
    return F.binary_connect_convolution(inp, w, wb, b, base_axis, pad, stride, dilation, group, quantize_zero_to)
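This snippet is the body of nnabla's packaged parametric function, exposed as nnabla.parametric_functions.binary_connect_convolution. A minimal usage sketch (illustrative shapes and scope name):

import nnabla as nn
import nnabla.parametric_functions as PF

x = nn.Variable((4, 3, 32, 32))
with nn.parameter_scope("bc_conv1"):
    # Registers "W", "Wb" and (with bias) "b" via get_parameter_or_create.
    y = PF.binary_connect_convolution(x, outmaps=16, kernel=(3, 3), pad=(1, 1))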
github: sony/nnabla/python/src/nnabla/parametric_functions.py
(using numpy expression as an example).

    """
    assert len(axes) == 1
    shape_stat = [1 for _ in inp.shape]
    shape_stat[axes[0]] = inp.shape[axes[0]]

    if param_init is None:
        param_init = {}
    beta_init = param_init.get('beta', ConstantInitializer(0))
    gamma_init = param_init.get('gamma', ConstantInitializer(1))
    mean_init = param_init.get('mean', ConstantInitializer(0))
    var_init = param_init.get('var', ConstantInitializer(1))
    beta = get_parameter_or_create(
        "beta", shape_stat, beta_init, True, not fix_parameters)
    gamma = get_parameter_or_create(
        "gamma", shape_stat, gamma_init, True, not fix_parameters)
    mean = get_parameter_or_create(
        "mean", shape_stat, mean_init, False)
    var = get_parameter_or_create(
        "var", shape_stat, var_init, False)
    return F.batch_normalization(inp, beta, gamma, mean, var, axes,
                                 decay_rate, eps, batch_stat, output_stat)
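This is the body of the standard parametric batch normalization; a minimal call through the public API looks like this (illustrative shapes and scope name):

import nnabla as nn
import nnabla.parametric_functions as PF

x = nn.Variable((8, 16, 32, 32))
with nn.parameter_scope("bn1"):
    # batch_stat=True normalizes with batch statistics and updates
    # the running "mean"/"var" parameters created in the scope.
    h = PF.batch_normalization(x, axes=[1], batch_stat=True)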
github: sony/nnabla/python/src/nnabla/parametric_functions.py
"""

    if not hasattr(n_outmaps, '__iter__'):
        n_outmaps = [n_outmaps]
    n_outmaps = list(n_outmaps)
    n_outmap = int(np.prod(n_outmaps))
    if w_init is None:
        inmaps = np.prod(inp.shape[base_axis:])
        w_init = UniformInitializer(
            calc_uniform_lim_glorot(inmaps, n_outmap), rng=rng)
    if with_bias and b_init is None:
        b_init = ConstantInitializer()

    # Floating Weight
    w = get_parameter_or_create(
        "W", [int(np.prod(inp.shape[base_axis:]))] + n_outmaps,
        w_init, True, not fix_parameters)

    # Quantized Weight
    if quantize_w:
        w_q = get_parameter_or_create(
            "W_q", [int(np.prod(inp.shape[base_axis:]))] + n_outmaps,
            w_init, False)
        # Link computation graph
        real_w_q = F.fixed_point_quantize(w, quantize=quantize_w,
                                          sign=sign_w, n=n_w, delta=delta_w,
                                          ste_fine_grained=ste_fine_grained_w,
                                          outputs=[w_q.data])
        real_w_q.persistent = True
    else:
        real_w_q = w
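When quantize_w is disabled, the floating weight is used directly (real_w_q = w). This snippet comes from the packaged function nnabla.parametric_functions.fixed_point_quantized_affine; a minimal sketch of calling it (illustrative shapes and scope name):

import nnabla as nn
import nnabla.parametric_functions as PF

x = nn.Variable((4, 64))
with nn.parameter_scope("fpq_affine1"):
    # Creates the floating "W"/"b" plus the linked quantized copy "W_q".
    y = PF.fixed_point_quantized_affine(x, 10)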