How to use batchflow - 10 common examples

To help you get started, we've selected a few batchflow examples based on popular ways it is used in public projects.


Example 1: batchflow/models/torch/blocks.py (analysiscenter/batchflow, view on GitHub)
            if filters > self.input_num_channels:
                growth_rate = (filters - self.input_num_channels) // num_layers
            else:
                growth_rate = filters // num_layers
        filters = growth_rate

        if bottleneck:
            bottleneck = 4 if bottleneck is True else bottleneck
            layout = 'cna' + layout
            kernel_size = [1, kernel_size]
            strides = [1, strides]
            filters = [growth_rate * bottleneck, filters]

        layout = 'R' + layout + '.'
        self.layer = ConvBlock(layout=layout, kernel_size=kernel_size, strides=strides, dropout_rate=dropout_rate,
                               filters=filters, n_repeats=num_layers, inputs=inputs, **kwargs)
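
The layers above are assembled by ConvBlock, batchflow's universal block described by a layout string ('c' convolution, 'n' batch normalization, 'a' activation). A minimal usage sketch, assuming ConvBlock is re-exported from batchflow.models.torch.layers and that convolutions default to 'same' padding:

import torch
from batchflow.models.torch.layers import ConvBlock  # assumed import path

x = torch.randn(8, 3, 64, 64)                         # NCHW input batch
# 'cna' = conv -> batch norm -> activation, repeated n_repeats times
block = ConvBlock(inputs=x, layout='cna', filters=16, kernel_size=3, n_repeats=2)
y = block(x)                                          # expected (8, 16, 64, 64) with 'same' padding
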
Example 2: batchflow/models/tf/linknet.py (analysiscenter/batchflow, view on GitHub)
    @classmethod
    def default_config(cls):
        config = TFModel.default_config()

        filters = 64   # number of filters in the first block

        config['initial_block'] = dict(layout='cnap', filters=filters, kernel_size=7, strides=2,
                                       pool_size=3, pool_strides=2)
        config['body/num_blocks'] = 4
        config['body/upsample'] = dict(layout='tna', factor=2, kernel_size=3)

        config['head/filters'] = filters // 2
        config['head/upsample1'] = dict(layout='tna cna', factor=2, kernel_size=3, strides=[2, 1])
        config['head/upsample2'] = dict(layout='t', factor=2)

        config['loss'] = 'ce'

        return config
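
Since default_config is a classmethod, the nested defaults above can be read and overridden through the same '/'-style keys before the model is built. A hedged sketch, assuming the class defined in this module is exported as LinkNet from batchflow.models.tf:

from batchflow.models.tf import LinkNet   # assumed export

config = LinkNet.default_config()
print(config['body/num_blocks'])          # -> 4
config['body/num_blocks'] = 3             # shrink the encoder/decoder stack
config['head/upsample2'] = dict(layout='t', factor=4)
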
Example 3: batchflow/models/torch/layers/core.py (analysiscenter/batchflow, view on GitHub)
def _calc_output_shape(inputs, kernel_size=None, stride=None, dilation=1, padding=0, transposed=False, **kwargs):
    shape = get_shape(inputs)
    output_shape = list(shape)
    for i in range(2, len(shape)):
        if shape[i]:
            k = kernel_size[i - 2] if isinstance(kernel_size, tuple) else kernel_size
            p = padding[i - 2] if isinstance(padding, tuple) else padding
            p = sum(p) if isinstance(p, tuple) else p * 2
            s = stride[i - 2] if isinstance(stride, tuple) else stride
            d = dilation[i - 2] if isinstance(dilation, tuple) else dilation
            if transposed:
                output_shape[i] = (shape[i] - 1) * s + k - p
            else:
                output_shape[i] = (shape[i] + p - d * (k - 1) - 1) // s + 1
        else:
            output_shape[i] = None

    output_shape[1] = kwargs.get('out_channels') or output_shape[1]
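
The two branches encode the standard convolution output-shape formulas; note that `p` already counts padding on both sides (the `p * 2` line). A worked example for a single spatial axis with illustrative values:

in_size, kernel, stride, pad, dilation = 32, 3, 2, 2, 1
# regular convolution: (in + pad - dilation * (kernel - 1) - 1) // stride + 1
out_regular = (in_size + pad - dilation * (kernel - 1) - 1) // stride + 1      # -> 16
# transposed convolution: (in - 1) * stride + kernel - pad
out_transposed = (in_size - 1) * stride + kernel - pad                         # -> 63
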
Example 4: batchflow/models/torch/base.py (analysiscenter/batchflow, view on GitHub)
            steps = len(targets) // microbatch
            splitted_inputs = [[item[i:i + microbatch] for item in inputs] for i in range(0, len(targets), microbatch)]
            splitted_targets = [targets[i:i + microbatch] for i in range(0, len(targets), microbatch)]
        else:
            steps = 1
            splitted_inputs = [inputs]
            splitted_targets = [targets]


        if self.model is None:
            if isinstance(splitted_inputs[0], (list, tuple)):
                self.input_shapes = [get_shape(item) for item in splitted_inputs[0]]
            else:
                self.input_shapes = get_shape(splitted_inputs[0])

            self.target_shape = get_shape(splitted_targets[0])
            if self.classes is None:
                if len(self.target_shape) > 1: # segmentation
                    self.classes = self.target_shape[1]

            self.build_config()
            self._build(splitted_inputs[0])

        self.model.train()

        if use_lock:
            self.train_lock.acquire()

        outputs = []
        for i in range(steps):
            _inputs = splitted_inputs[i]
            _targets = splitted_targets[i]
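
The list comprehensions above implement micro-batching: a large batch is sliced into chunks of at most `microbatch` items and each chunk is run through the model in its own step. A standalone sketch of the same slicing, with a NumPy array standing in for the real targets:

import numpy as np

targets = np.arange(10)
microbatch = 4
chunks = [targets[i:i + microbatch] for i in range(0, len(targets), microbatch)]
# -> [array([0, 1, 2, 3]), array([4, 5, 6, 7]), array([8, 9])]
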
Example 5: batchflow/models/torch/layers/resize.py (analysiscenter/batchflow, view on GitHub)
    def __init__(self, inputs=None, ratio=4, squeeze_layout='Vfafa', squeeze_units=None, squeeze_activations=None):
        from .conv_block import ConvBlock  # can't be imported at the top of the file due to circular imports
        super().__init__()
        in_units = get_shape(inputs)[1]
        units = squeeze_units or [in_units // ratio, in_units]
        activations = squeeze_activations or ['relu', 'sigmoid']

        self.layer = ConvBlock(layout=squeeze_layout, units=units, activations=activations, inputs=inputs)
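
Assuming batchflow's layout letters 'V', 'f' and 'a' stand for global average pooling, a dense layer and an activation, the default 'Vfafa' with ['relu', 'sigmoid'] is a squeeze-and-excitation style gate. A rough plain-PyTorch equivalent, for intuition only:

import torch
import torch.nn as nn

class SqueezeSketch(nn.Module):
    """ Global pool -> dense (channels // ratio) -> relu -> dense -> sigmoid, as configured above. """
    def __init__(self, channels, ratio=4):
        super().__init__()
        self.pool = nn.AdaptiveAvgPool2d(1)                        # 'V'
        self.fc = nn.Sequential(
            nn.Flatten(),
            nn.Linear(channels, channels // ratio), nn.ReLU(),     # 'fa'
            nn.Linear(channels // ratio, channels), nn.Sigmoid(),  # 'fa'
        )

    def forward(self, x):
        return self.fc(self.pool(x))
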
Example 6: batchflow/models/torch/layers/pyramid.py (analysiscenter/batchflow, view on GitHub)
def __init__(self, inputs, layout='cna', filters=None, kernel_size=1, pool_op='mean',
                 pyramid=(0, 1, 2, 3, 6), **kwargs):
        super().__init__()

        spatial_shape = np.array(get_shape(inputs)[2:])
        filters = filters if filters else 'same // {}'.format(len(pyramid))

        modules = nn.ModuleList()
        for level in pyramid:
            if level == 0:
                module = nn.Identity()
            else:
                x = inputs
                pool_size = tuple(np.ceil(spatial_shape / level).astype(np.int32).tolist())
                pool_strides = tuple(np.floor((spatial_shape - 1) / level + 1).astype(np.int32).tolist())

                layer = ConvBlock(inputs=x, layout='p' + layout, filters=filters, kernel_size=kernel_size,
                                  pool_op=pool_op, pool_size=pool_size, pool_strides=pool_strides, **kwargs)
                x = layer(x)

                upsample_layer = Upsample(inputs=x, factor=None, layout='b',
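
For each non-zero pyramid level the feature map is pooled with a kernel and stride derived from that level and then upsampled back, so the block aggregates context at several scales. A worked example of the pool-size arithmetic above for a 33x33 feature map and level 6 (values are illustrative):

import numpy as np

spatial_shape = np.array([33, 33])
level = 6
pool_size = tuple(np.ceil(spatial_shape / level).astype(np.int32).tolist())                # (6, 6)
pool_strides = tuple(np.floor((spatial_shape - 1) / level + 1).astype(np.int32).tolist())  # (6, 6)
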
Example 7: batchflow/config.py (analysiscenter/batchflow, view on GitHub)
        variable = variable.strip('/')
        if '/' in variable:
            var = variable.split('/')
            prefix = var[:-1]
            var_name = var[-1]
        else:
            prefix = []
            var_name = variable

        for i, p in enumerate(prefix):
            if p not in config:
                config[p] = dict()
            if isinstance(config[p], dict):
                config = config[p]
            else: # for example, we put value with key 'a/b' into `{a: c}`
                value = Config({'/'.join(prefix[i+1:] + [var_name]): value})
                var_name = p
                break
        if var_name in config and isinstance(config[var_name], dict) and isinstance(value, Config):
            config[var_name] = Config(config[var_name])
            config[var_name].update(value)
            config[var_name] = config[var_name].config
        else:
            if isinstance(value, Config):
                config[var_name] = value.config
            else:
                config[var_name] = value
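
This is the machinery behind nested '/' keys in batchflow configs. A short illustration, assuming Config is importable from the package root:

from batchflow import Config   # assumed import path

config = Config()
config['model/optimizer/lr'] = 0.001     # '/' creates the intermediate dicts on the fly
config['model/optimizer/name'] = 'Adam'
print(config['model/optimizer/lr'])      # -> 0.001
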
Example 8: batchflow/config.py (analysiscenter/batchflow, view on GitHub)
""" Put a new variable into config

        Parameters
        ----------
        variable : str
            variable to add. '/' is used to put value into nested dict
        value : misc
        config : dict, Config or None
            if None, the value is put into self.config, otherwise into the given config
        """
        if config is None:
            config = self.config
        elif isinstance(config, Config):
            config = config.config
        if isinstance(value, dict):
            value = Config(value)
        variable = variable.strip('/')
        if '/' in variable:
            var = variable.split('/')
            prefix = var[:-1]
            var_name = var[-1]
        else:
            prefix = []
            var_name = variable

        for i, p in enumerate(prefix):
            if p not in config:
                config[p] = dict()
            if isinstance(config[p], dict):
                config = config[p]
            else: # for example, we put value with key 'a/b' into `{a: c}`
                value = Config({'/'.join(prefix[i+1:] + [var_name]): value})
Example 9: batchflow/research/named_expr.py (analysiscenter/batchflow, view on GitHub)
""" Contains named expression classes for Research """

import os

from .results import Results
from ..named_expr import NamedExpression

class ResearchNamedExpression(NamedExpression):
    """ NamedExpression base class for Research objects """
    def _get(self, **kwargs):
        name = self._get_name(**kwargs)
        return name, kwargs

class ResearchExecutableUnit(ResearchNamedExpression):
    """ NamedExpression for ExecutableUnit """
    def _get(self, **kwargs):
        _, kwargs = super()._get(**kwargs)
        experiment = kwargs['experiment']
        return experiment

    def get(self, **kwargs):
        experiment = self._get(**kwargs)
        if isinstance(experiment, (list, tuple)):
            _experiment = experiment
Example 10: batchflow/models/tf/inception_v3.py (analysiscenter/batchflow, view on GitHub)
        name : str
            scope name

        Returns
        -------
        tf.Tensor
        """
        with tf.variable_scope(name):
            axis = cls.channels_axis(kwargs['data_format'])
            branch_1 = conv_block(inputs, layout, filters[0], 1, name='conv_1', **kwargs)

            branch_pool = conv_block(inputs, 'p'+layout, filters[3], 1, name='c_pool',
                                     **{**kwargs, 'pool_strides': 1})

            branch_a1 = conv_block(inputs, layout, filters[1], 1, name='conv_a1', **kwargs)
            branch_a1_31 = conv_block(branch_a1, layout, filters[1], [3, 1], name='conv_1_31', **kwargs)
            branch_a1_13 = conv_block(branch_a1, layout, filters[1], [1, 3], name='conv_1_13', **kwargs)
            branch_a = tf.concat([branch_a1_31, branch_a1_13], axis=axis)

            branch_b13 = conv_block(inputs, layout*2, [filters[2], filters[1]], [1, 3], name='conv_b13', **kwargs)
            branch_b13_31 = conv_block(branch_b13, layout, filters[1], [3, 1], name='conv_b13_31', **kwargs)
            branch_b13_13 = conv_block(branch_b13, layout, filters[1], [1, 3], name='conv_b13_13', **kwargs)
            branch_b = tf.concat([branch_b13_31, branch_b13_13], axis=axis)

            output = tf.concat([branch_1, branch_pool, branch_a, branch_b], axis=axis, name='output')
        return output
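
Every branch above is built with the functional TF conv_block, whose positional signature, as used here, is (inputs, layout, filters, kernel_size, ...). A hedged TF1-style usage sketch; the import path is an assumption:

import tensorflow as tf
from batchflow.models.tf.layers import conv_block   # assumed import path

inputs = tf.placeholder(tf.float32, shape=(None, 64, 64, 3))   # TF1 graph-mode input
# 'ca' = convolution -> activation, 32 filters, 3x3 kernel
x = conv_block(inputs, 'ca', 32, 3, name='block_1', data_format='channels_last')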