How to use the nnabla.parametric_functions.convolution function in nnabla

To help you get started, we’ve selected a few nnabla examples that show how nnabla.parametric_functions.convolution (imported as PF.convolution below) is commonly used in public projects.

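As a minimal sketch of the basic call (the input shape, scope name, and channel counts below are illustrative assumptions, not taken from the examples), PF.convolution takes an input Variable, the number of output maps, and a kernel shape, and creates its filter weights under the current parameter scope:

import nnabla as nn
import nnabla.parametric_functions as PF

# Hypothetical NCHW input: a batch of 8 RGB images of size 32x32.
x = nn.Variable((8, 3, 32, 32))

# 3x3 convolution producing 16 output maps; pad=(1, 1) keeps the spatial size.
# With no explicit `name`, the weight is registered under "block1/conv".
with nn.parameter_scope("block1"):
    y = PF.convolution(x, 16, kernel=(3, 3), pad=(1, 1), with_bias=False)

print(y.shape)  # (8, 16, 32, 32)

The snippets below, collected from Sony's public repositories, show the same call embedded in real network definitions.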

Example from sony/nnabla-examples (reduction/cifar10/factorized-layers/models.py):
def res_unit(x, scope_name, dn=False):
        C = x.shape[1]
        with nn.parameter_scope(scope_name):

            # Conv -> BN -> Relu
            with nn.parameter_scope("conv1"):
                h = PF.convolution(x, C // 2, kernel=(1, 1), pad=(0, 0),
                                   with_bias=False)
                h = PF.batch_normalization(h, batch_stat=not test)
                h = F.relu(h)
            # Conv -> BN -> Relu
            with nn.parameter_scope("conv2"):
                h = PF.convolution(h, C // 2, kernel=(3, 3), pad=(1, 1),
                                   with_bias=False)
                h = PF.batch_normalization(h, batch_stat=not test)
                h = F.relu(h)
            # Conv -> BN
            with nn.parameter_scope("conv3"):
                h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0),
                                   with_bias=False)
                h = PF.batch_normalization(h, batch_stat=not test)
            # Residual -> Relu
            h = F.relu(h + x)

            # Maxpooling
            if dn:
                h = F.max_pooling(h, kernel=(2, 2), stride=(2, 2))

        return h

Example from sony/nnabla-examples (mnist-collection/classification.py):
def res_unit(x, scope):
        C = x.shape[1]
        with nn.parameter_scope(scope):
            with nn.parameter_scope('conv1'):
                h = F.elu(bn(PF.convolution(x, C // 2, (1, 1), with_bias=False)))
            with nn.parameter_scope('conv2'):
                h = F.elu(
                    bn(PF.convolution(h, C // 2, (3, 3), pad=(1, 1), with_bias=False)))
            with nn.parameter_scope('conv3'):
                h = bn(PF.convolution(h, C, (1, 1), with_bias=False))
        return F.elu(F.add2(h, x, inplace=True))
    # Conv1 --> 64 x 32 x 32

Example from sony/nnabla (examples/cpp/forward_check/mnist/classification.py):
def res_unit(x, scope):
        C = x.shape[1]
        with nn.parameter_scope(scope):
            with nn.parameter_scope('conv1'):
                h = F.elu(bn(PF.convolution(x, C // 2, (1, 1), with_bias=False)))
            with nn.parameter_scope('conv2'):
                h = F.elu(
                    bn(PF.convolution(h, C // 2, (3, 3), pad=(1, 1), with_bias=False)))
            with nn.parameter_scope('conv3'):
                h = bn(PF.convolution(h, C, (1, 1), with_bias=False))
        return F.elu(F.add2(h, x, inplace=True))
    # Conv1 --> 64 x 32 x 32

Example from sony/nnabla-examples (reduction/cifar10/structured-sparsity/models.py):
            # Maxpooling
            if dn:
                h = F.max_pooling(h, kernel=(2, 2), stride=(2, 2))

        return h

    ncls = 10

    # Conv -> BN -> Relu
    with nn.parameter_scope("conv1"):
        # Preprocess
        image /= 255.0
        if not test:
            image = F.image_augmentation(image, contrast=1.0,
                                         angle=0.25,
                                         flip_lr=True)
            image.need_grad = False
        h = PF.convolution(image, maps, kernel=(3, 3), pad=(1, 1),
                           with_bias=False)
        h = PF.batch_normalization(h, batch_stat=not test)
        h = F.relu(h)

    h = res_unit(h, "conv2", False)    # -> 32x32
    h = res_unit(h, "conv3", True)     # -> 16x16
    h = res_unit(h, "conv4", False)    # -> 16x16
    h = res_unit(h, "conv5", True)     # -> 8x8
    h = res_unit(h, "conv6", False)    # -> 8x8
    h = res_unit(h, "conv7", True)     # -> 4x4
    h = res_unit(h, "conv8", False)    # -> 4x4
    h = F.average_pooling(h, kernel=(4, 4))  # -> 1x1
    pred = PF.affine(h, ncls)

    return pred

Example from sony/nnabla-examples (reduction/cifar10/shiftnet/models.py):
def sc2(x, scope_name, dn=False):
        C = x.shape[1]
        h = x
        with nn.parameter_scope(scope_name):

            with nn.parameter_scope("shift1"):  # no meaning but semantics
                h = shift(h)

            with nn.parameter_scope("conv1"):
                h = PF.batch_normalization(h, batch_stat=not test)
                h = F.relu(h, True)
                h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0),
                                   with_bias=False)

            with nn.parameter_scope("shift2"):  # no meaning but semantics
                h = shift(h)

            with nn.parameter_scope("conv2"):
                h = PF.batch_normalization(h, batch_stat=not test)
                h = F.relu(h, True)
                stride = (2, 2) if dn else (1, 1)
                if p > 0:
                    h = F.dropout(h, p=0.5) if not test else h
                h = PF.convolution(h, C, kernel=(1, 1), pad=(0, 0),
                                   stride=stride,
                                   with_bias=False)
        s = F.average_pooling(x, (2, 2)) if dn else x
        return h + s

Example from sony/nnabla (examples/vision/imagenet/model_resnet.py):
def shortcut(x, ochannels, stride, shortcut_type, test):
    ichannels = x.shape[1]
    use_conv = shortcut_type.lower() == 'c'
    if ichannels != ochannels:
        assert (ichannels * 2 == ochannels) or (ichannels * 4 == ochannels)
        if shortcut_type.lower() == 'b':
            use_conv = True
    if use_conv:
        # Convolution does everything.
        # Matching channels, striding.
        with nn.parameter_scope("shortcut_conv"):
            x = PF.convolution(x, ochannels, (1, 1),
                               stride=stride, with_bias=False)
            x = PF.batch_normalization(x, batch_stat=not test)
    else:
        if stride != (1, 1):
            # Stride
            x = F.average_pooling(x, (1, 1), stride)
        if ichannels != ochannels:
            # Zero-padding along the channel axis so the result has `ochannels` maps
            ishape = x.shape
            zeros = nn.Variable((ishape[0], ochannels - ichannels) + ishape[-2:])
            zeros.data.zero()
            x = F.concatenate(x, zeros, axis=1)
    return x

Example from sony/nnabla (examples/cpp/forward_check/mnist/dcgan.py):
    assert maxh // 8 > 0
    with nn.parameter_scope("dis"):
        # (1, 28, 28) --> (32, 16, 16)
        with nn.parameter_scope("conv1"):
            c1 = F.elu(bn(PF.convolution(x, maxh // 8,
                                         (3, 3), pad=(3, 3), stride=(2, 2), with_bias=False)))
        # (32, 16, 16) --> (64, 8, 8)
        with nn.parameter_scope("conv2"):
            c2 = F.elu(bn(downsample2(c1, maxh // 4)))
        # (64, 8, 8) --> (128, 4, 4)
        with nn.parameter_scope("conv3"):
            c3 = F.elu(bn(downsample2(c2, maxh // 2)))
        # (128, 4, 4) --> (256, 4, 4)
        with nn.parameter_scope("conv4"):
            c4 = bn(PF.convolution(c3, maxh, (3, 3),
                                   pad=(1, 1), with_bias=False))
        # (256, 4, 4) --> (1,)
        with nn.parameter_scope("fc1"):
            f = PF.affine(c4, 1)
    if output_hidden:
        return f, [c1, c2, c3, c4]
    return f

Example from sony/nnabla-examples (mnist-collection/classification.py):
def mnist_lenet_prediction(image, test=False, aug=None):
    """
    Construct LeNet for MNIST.
    """
    image /= 255.0
    image = augmentation(image, test, aug)
    c1 = PF.convolution(image, 16, (5, 5), name='conv1')
    c1 = F.relu(F.max_pooling(c1, (2, 2)), inplace=True)
    c2 = PF.convolution(c1, 16, (5, 5), name='conv2')
    c2 = F.relu(F.max_pooling(c2, (2, 2)), inplace=True)
    c3 = F.relu(PF.affine(c2, 50, name='fc3'), inplace=True)
    c4 = PF.affine(c3, 10, name='fc4')
    return c4

Example from sony/nnabla-examples (mnist-collection/dcgan.py):
def downsample2(xx, c):
        return PF.convolution(xx, c, (3, 3), pad=(1, 1), stride=(2, 2), with_bias=False)

Example from sony/nnabla-examples (semantic-segmentation/deeplabv3plus/xception_65.py):
    with nn.parameter_scope("depthwise"):
        if (stride == True):
            h = PF.depthwise_convolution(x, (3, 3), stride=(2, 2), pad=(
                1, 1), with_bias=False, fix_parameters=fix_params)
        elif(aspp == True):
            h = PF.depthwise_convolution(x, (3, 3), pad=(atrous_rate, atrous_rate), stride=(
                1, 1), dilation=(atrous_rate, atrous_rate), with_bias=False, fix_parameters=fix_params)

        else:
            h = PF.depthwise_convolution(x, (3, 3), pad=(
                1, 1), with_bias=False, fix_parameters=fix_params)

        h = PF.batch_normalization(
            h, batch_stat=not test, eps=eps, fix_parameters=fix_params)
        if last_block == True:
            h = F.relu(h)

    with nn.parameter_scope("pointwise"):
        h = PF.convolution(h, f, (1, 1), stride=(
            1, 1), with_bias=False, fix_parameters=fix_params)
        h = PF.batch_normalization(
            h, batch_stat=not test, eps=eps, fix_parameters=fix_params)
        if end_point == True:
            global endpoints
            endpoints['Decoder End Point 1'] = h

        if act_fn == True:
            h = F.relu(h)

    return h
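All of these examples create their filters through nn.parameter_scope, which is what lets a network be rebuilt with shared parameters. As a small sketch of how to inspect what a single call registers (the scope name and shapes here are illustrative; with no explicit name argument, PF.convolution typically places its parameters under a "conv" sub-scope of the enclosing scope):

import nnabla as nn
import nnabla.parametric_functions as PF

x = nn.Variable((1, 3, 28, 28))
with nn.parameter_scope("conv1"):
    h = PF.convolution(x, 8, kernel=(3, 3), pad=(1, 1))

# Print what the call above registered, e.g. "conv1/conv/W" for the filter
# and "conv1/conv/b" for the bias (with_bias defaults to True).
for key, param in nn.get_parameters().items():
    print(key, param.shape)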