How to use the dragon.vm.torch.nn.Module class in dragon

To help you get started, we’ve selected a few dragon examples based on popular ways it is used in public projects.

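All of the snippets below share one pattern: subclass dragon.vm.torch.nn.Module, describe the backing operator in register_op through an op_meta dict, and route tensors through unify_devices and run inside forward. Here is a minimal sketch of that pattern; the 'Scale' op_type and its 'factor' argument are illustrative assumptions, not a documented Dragon operator.

from dragon.vm.torch.nn import Module


class MyScale(Module):
    """Multiply the input by a constant factor (hypothetical operator)."""

    def __init__(self, factor=2.0, inplace=False):
        super(MyScale, self).__init__()
        self.factor = factor
        self.inplace = inplace
        self.register_op()

    def register_op(self):
        # 'Scale' and 'factor' are assumed names, used for illustration only.
        self.op_meta = {'op_type': 'Scale', 'arguments': {'factor': self.factor}}

    def forward(self, x):
        inputs = [x]
        self.unify_devices(inputs)
        outputs = [x if self.inplace else self.register_output()]
        return self.run(inputs, outputs)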

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/linear.py (View on GitHub)
#
#      
#
# ------------------------------------------------------------

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math

from dragon.vm.torch.tensor import Tensor
from dragon.vm.torch.nn import Module, Parameter


class Linear(Module):
    def __init__(self, in_features, out_features, bias=True):
        super(Linear, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.weight = Parameter(Tensor(out_features, in_features))
        if bias:
            self.bias = Parameter(Tensor(out_features))
        else:
            self.bias = None
        self.reset_parameters()
        self.register_op()

    def register_op(self):
        self.op_meta = {
            'op_type': 'FullyConnected',
            'arguments': {
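The snippet is cut off inside op_meta, but the constructor already shows the essentials: parameters are declared as Parameter tensors, then register_op binds the module to the 'FullyConnected' operator. A short usage sketch, assuming nn re-exports Linear and that Module.__call__ dispatches to forward as in PyTorch:

from dragon.vm.torch.tensor import Tensor
from dragon.vm.torch.nn import Linear

fc = Linear(in_features=128, out_features=10, bias=True)
x = Tensor(32, 128)  # an uninitialized 32 x 128 input, like torch.Tensor
y = fc(x)            # runs the registered 'FullyConnected' op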

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/affine.py (View on GitHub)
# You should have received a copy of the BSD 2-Clause License
# along with the software. If not, See,
#
#      
#
# ------------------------------------------------------------

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from dragon.vm.torch.nn import Module, Parameter
from dragon.vm.torch.ops.builtin import zeros, ones


class Affine(Module):
    def __init__(
        self,
        num_features,
        bias=True,
        fix_weight=False,
        fix_bias=False,
        inplace=False,
    ):
        super(Affine, self).__init__()
        self.num_features = num_features
        self.inplace = inplace
        if not fix_weight:
            self.weight = Parameter(ones(num_features))
            if inplace:
                raise ValueError('Inplace computation requires fixed weight.')
        else:
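The truncated else branch handles the fixed-weight case. The constraint to notice is in the code above: in-place computation is rejected unless the weight is fixed. A hedged sketch, assuming nn re-exports Affine:

from dragon.vm.torch.nn import Affine

Affine(64)                                 # learnable weight and bias
Affine(64, fix_weight=True, inplace=True)  # allowed: fixed weight permits in-place
Affine(64, inplace=True)                   # ValueError: inplace requires fixed weight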

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/loss.py (View on GitHub)
#
# Codes are based on:
#
#      
#
# ------------------------------------------------------------

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from dragon.vm.torch.nn import Module
from dragon.vm.torch.nn.functional import _Reduction


class _Loss(Module):
    def __init__(
        self,
        size_average=None,
        reduce=None,
        reduction='elementwise_mean',
    ):
        super(_Loss, self).__init__()
        if size_average is not None or reduce is not None:
            self.reduction = _Reduction.legacy_get_string(size_average, reduce)
        else:
            self.reduction = reduction


class _WeightedLoss(_Loss):
    def __init__(
        self,
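_Loss resolves the legacy size_average/reduce flags into a single reduction string via _Reduction.legacy_get_string; the reduction argument only applies when both flags are None. A hedged sketch of the mapping, assuming it follows the PyTorch convention this code mirrors:

from dragon.vm.torch.nn.modules.loss import _Loss


class MyLoss(_Loss):
    """Placeholder subclass, just to exercise the base constructor."""


# Assumed legacy mapping (PyTorch convention):
#   size_average=True,  reduce=True  -> 'elementwise_mean'
#   size_average=False, reduce=True  -> 'sum'
#   any size_average,   reduce=False -> 'none'
print(MyLoss(size_average=False, reduce=True).reduction)  # 'sum' (assumed)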

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/activation.py (View on GitHub)
        self.register_op()

    def register_op(self):
        self.op_meta = {'op_type': 'SElu', 'arguments': {}}

    def extra_repr(self):
        inplace_str = 'inplace' if self.inplace else ''
        return inplace_str

    def forward(self, x):
        inputs = [x]
        self.unify_devices(inputs)
        outputs = [x if self.inplace else self.register_output()]
        return self.run(inputs, outputs)


class Sigmoid(Module):
    def __init__(self, inplace=False):
        super(Sigmoid, self).__init__()
        self.inplace = inplace
        self.register_op()

    def register_op(self):
        self.op_meta = {'op_type': 'Sigmoid', 'arguments': {}}

    def extra_repr(self):
        inplace_str = 'inplace' if self.inplace else ''
        return inplace_str

    def forward(self, x):
        inputs = [x]
        self.unify_devices(inputs)
        outputs = [x if self.inplace else self.register_output()]
        return self.run(inputs, outputs)
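Every activation in this file shares the forward above: unify_devices checks that the inputs live on one device, and the output is either a freshly registered output tensor or, when inplace=True, the input itself. A hedged usage sketch, assuming nn re-exports Sigmoid:

from dragon.vm.torch.nn import Sigmoid

m = Sigmoid()
y = m(x)    # x: any dragon.vm.torch tensor (assumed defined); y is a new output

m_inplace = Sigmoid(inplace=True)
y2 = m_inplace(x)  # overwrites x; y2 is x itself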

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/activation.py (View on GitHub)
        self.register_op()

    def register_op(self):
        self.op_meta = {'op_type': 'Tanh', 'arguments': {}}

    def extra_repr(self):
        inplace_str = 'inplace' if self.inplace else ''
        return inplace_str

    def forward(self, x):
        inputs = [x]
        self.unify_devices(inputs)
        outputs = [x if self.inplace else self.register_output()]
        return self.run(inputs, outputs)


class Softmax(Module):
    def __init__(self, dim=None, inplace=False):
        super(Softmax, self).__init__()
        self.dim = dim
        self.inplace = inplace
        if dim is None:
            raise ValueError('Expected a valid dim, got None.')
        self.register_op()

    def register_op(self):
        self.op_meta = {
            'op_type': 'Softmax',
            'arguments': {'axis': self.dim},
        }

    def extra_repr(self):
        inplace_str = ', inplace' if self.inplace else ''
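Unlike the other activations, Softmax requires an explicit dim, which register_op forwards to the operator as 'axis'. A hedged sketch, assuming nn re-exports Softmax:

from dragon.vm.torch.nn import Softmax

Softmax(dim=1)  # normalizes along axis 1; op_meta carries {'axis': 1}
Softmax()       # ValueError: a valid dim is required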

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/activation.py (View on GitHub)
# Licensed under the BSD 2-Clause License.
# You should have received a copy of the BSD 2-Clause License
# along with the software. If not, See,
#
#      
#
# ------------------------------------------------------------

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from dragon.vm.torch.nn import Module


class ReLU(Module):
    def __init__(self, inplace=False):
        super(ReLU, self).__init__()
        self.inplace = inplace
        self.register_op()

    def register_op(self):
        self.op_meta = {'op_type': 'Relu', 'arguments': {}}

    def extra_repr(self):
        inplace_str = 'inplace' if self.inplace else ''
        return inplace_str

    def forward(self, x):
        inputs = [x]
        self.unify_devices(inputs)
        outputs = [x if self.inplace else self.register_output()]
        return self.run(inputs, outputs)
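extra_repr feeds the module's printable representation, so the inplace flag becomes visible when the module is printed. A hedged sketch, assuming Module.__repr__ composes the class name and extra_repr as PyTorch does:

from dragon.vm.torch.nn import ReLU

print(ReLU())              # ReLU()         (assumed repr format)
print(ReLU(inplace=True))  # ReLU(inplace)  extra_repr supplies 'inplace'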

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/container.py (View on GitHub)
# ------------------------------------------------------------

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import warnings
from collections import OrderedDict
try:
    from collections.abc import Iterable  # Python 3.3+
except ImportError:  # Python 2 fallback
    from collections import Iterable
from itertools import islice
import operator

from dragon.vm.torch.environ import get_module_name
from dragon.vm.torch.nn import Module


class Container(Module):
    def __init__(self, **kwargs):
        super(Container, self).__init__()
        warnings.warn("nn.Container is deprecated. All of its functionality "
                      "is now implemented in nn.Module. Subclass that instead.")
        for key, value in kwargs.items():
            self.add_module(key, value)


class Sequential(Module):
    """A sequential container.
    Modules will be added to it in the order they are passed in the constructor.
    Alternatively, an ordered dict of modules can also be passed in.

    To make it easier to understand, here is a small example::

        # Example of using Sequential
        model = nn.Sequential(
                  nn.Conv2d(1,20,5),
                  nn.ReLU(),
                  nn.Conv2d(20,64,5),
                  nn.ReLU()
                )

        # Example of using Sequential with OrderedDict
        model = nn.Sequential(OrderedDict([
                  ('conv1', nn.Conv2d(1,20,5)),
                  ('relu1', nn.ReLU()),
                  ('conv2', nn.Conv2d(20,64,5)),
                  ('relu2', nn.ReLU())
                ]))
    """

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/depthwise_conv.py (View on GitHub)
#      
#
# ------------------------------------------------------------

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math

from dragon.vm.torch.tensor import Tensor
from dragon.vm.torch.nn import Module, Parameter
from dragon.vm.torch.nn.modules.utils import _pair


class _DepthwiseConvNd(Module):
    def __init__(
        self,
        in_channels,
        out_channels,
        kernel_size,
        stride,
        padding,
        dilation,
        bias,
    ):
        super(_DepthwiseConvNd, self).__init__()
        if in_channels != out_channels:
            raise ValueError('in/out channels must be the same')
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = kernel_size
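The base class enforces in_channels == out_channels before anything else, which is exactly what makes the convolution depthwise: one filter per channel. A hedged sketch of that check; real code would use a concrete subclass rather than _DepthwiseConvNd directly:

from dragon.vm.torch.nn.modules.depthwise_conv import _DepthwiseConvNd
from dragon.vm.torch.nn.modules.utils import _pair

# Constructing the base class directly, only to show the channel check.
_DepthwiseConvNd(32, 64, _pair(3), _pair(1), _pair(0), _pair(1), True)
# -> ValueError: in/out channels must be the same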

github seetaresearch/Dragon · Dragon/python/dragon/vm/torch/nn/modules/dropout.py (View on GitHub)
# Licensed under the BSD 2-Clause License.
# You should have received a copy of the BSD 2-Clause License
# along with the software. If not, See,
#
#      
#
# ------------------------------------------------------------

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from dragon.vm.torch.nn import Module


class Dropout(Module):
    def __init__(self, p=0.5, inplace=False):
        super(Dropout, self).__init__()
        self.p = p
        self.inplace = inplace
        self.register_op()

    def register_op(self):
        self.op_meta = {
            'op_type': 'Dropout',
            'arguments': {
                'prob': self.p,
                'phase': 'TRAIN',
            }
        }

    def extra_repr(self):
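register_op pins the operator's phase to 'TRAIN', so the drop is applied while training and the module is expected to pass activations through untouched at inference. A hedged usage sketch, assuming nn re-exports Dropout:

from dragon.vm.torch.nn import Dropout

drop = Dropout(p=0.5)  # op_meta: {'op_type': 'Dropout', 'arguments': {'prob': 0.5, 'phase': 'TRAIN'}}
y = drop(x)            # x: any dragon.vm.torch tensor (assumed defined)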