How to use the dragon.core.logging.info function in dragon

To help you get started, we’ve selected a few dragon examples based on popular ways the library is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github seetaresearch / Dragon / Dragon / python / dragon / vm / theano / compile / function.py View on Github external
# NOTE(review): truncated excerpt from dragon/vm/theano/compile/function.py —
# the enclosing function signature and the loop that yields `e` are not
# visible in this scrape, and the snippet is cut off before its closing
# parentheses.
(param, grad), arguments = e
        # Only emit an update op when the gradient tensor actually exists in
        # the workspace; otherwise the parameter is skipped with a log line.
        if _workspace.HasTensor(grad):
            grads.append(grad)
            # Merge per-parameter arguments with the globally shared extras.
            arguments = dict(arguments, **extra_arguments)
            update_ops.append(
                _proto_utils.
                    MakeOperatorDef(
                        op_type=updater.type(),
                        inputs=[grad],
                        outputs=[param],
                        name=_helper.OperatorHelper.get_name(),
                        **arguments
                    )
                )
        else:
            _logging.info('Skip to update Tensor({}).'.format(param))

    # Check data parallel if necessary
    if _mpi.Is_Init():
        (rank, group), arguments = _mpi.AllowParallel(), {}
        # rank == -1 appears to mean this process is outside any parallel
        # group — TODO confirm against _mpi.AllowParallel.
        if rank != -1:
            arguments['mode'] = '%s_ALLREDUCE' % _mpi.GetParallelMode()
            arguments['root'], (arguments['comm'], arguments['group']) \
                = group[0], _mpi.CreateGroup(root=group[0], incl=group)
            # The collective (all-reduce) update is inserted at index 0 so it
            # runs before the per-parameter update ops appended above.
            update_ops.insert(
                0, _proto_utils.
                    MakeOperatorDef(
                        op_type='CollectiveUpdate',
                        inputs=grads,
                        outputs=grads,
                        name=_helper.OperatorHelper.get_name(),
                        **arguments
github seetaresearch / Dragon / Dragon / python / dragon / core / workspace.py View on Github external
# NOTE(review): truncated excerpt from dragon/core/workspace.py (Snapshot);
# the function signature and the `prefix`/`filename`/`suffix`/`tensors`
# definitions are not visible in this scrape.
file_path = prefix + filename + suffix

    # Under MPI, write only from snapshot-allowed ranks, each to its own
    # rank-suffixed file.
    if _mpi.Is_Init():
        if not _mpi.AllowSnapshot(): return
        file_path = file_path + '.rank.{}'.format(_mpi.Rank())

    # Ensure the target directory exists before writing.
    dir = os.path.split(file_path)[0]
    if len(dir) > 0 and not os.path.exists(dir): os.makedirs(dir)

    if format == 'pickle':
        # Fetch each tensor's value and pickle the {name: value} mapping.
        state_dict = {}
        for tensor in tensors:
            state_dict[tensor.name] = FetchTensor(tensor)
        with open(file_path, 'wb') as f:
            pickle.dump(state_dict, f, pickle.HIGHEST_PROTOCOL)
        _logging.info('Snapshot Model@: ' + file_path)
        _logging.info('Model Format: Pickle')
    elif format == 'caffe':
        # Delegate caffe-format serialization to the workspace backend.
        names = [tensor.name for tensor in tensors]
        get_default_workspace().Snapshot(file_path, names, 1)
    else:
        raise TypeError('Unknown binary format: ' + format)
github seetaresearch / Dragon / Dragon / python / dragon / utils / vision / data_batch.py View on Github external
def cleanup():
    """Tear down the data pipeline: fetchers, then transformers, then readers.

    Each stage's worker processes are terminated and joined before the
    previous stage is stopped; only local rank 0 logs the teardown.
    """
    def shutdown(workers):
        # Force each worker process to exit, then reap it.
        for worker in workers:
            worker.terminate()
            worker.join()
    shutdown(self._fetchers)
    if local_rank == 0:
        _logging.info('Terminate BlobFetcher.')
    shutdown(self._transformers)
    if local_rank == 0:
        _logging.info('Terminate DataTransformer.')
    shutdown(self._readers)
    if local_rank == 0:
        _logging.info('Terminate DataReader.')
        import atexit
github seetaresearch / Dragon / Dragon / python / dragon / core / workspace.py View on Github external
# NOTE(review): truncated excerpt from dragon/core/workspace.py; only the
# tail of the docstring and the function body are visible in this scrape.
The graph name to run.

    """
    options = _cfg.GetGlobalOptions()
    # Optionally echo the meta graph definition to stdout.
    if options['log_meta_graph']: print(graph_def)
    if options['export_meta_graph']:
        # Export the textual meta graph under the configured prefix directory,
        # creating it on demand.
        if not os.path.exists(options['export_meta_graph']):
            try:
                os.makedirs(options['export_meta_graph'])
            except Exception:
                raise ValueError('The given prefix is invalid.')
        path = os.path.join(
            options['export_meta_graph'],
                graph_def.name + '.metatxt')
        with open(path, 'w') as f: f.write(str(graph_def))
        _logging.info('Export meta graph to: {}'.format(path))
    return get_default_workspace().CreateGraph(
        _stringify_proto(graph_def), options['log_optimized_graph'])
github seetaresearch / Dragon / Dragon / python / dragon / utils / vision / data_batch.py View on Github external
def cleanup():
    """Tear down the data pipeline: fetchers, then transformers, then readers.

    Each stage's worker processes are terminated and joined before the
    previous stage is stopped; only local rank 0 logs the teardown.
    """
    def shutdown(workers):
        # Force each worker process to exit, then reap it.
        for worker in workers:
            worker.terminate()
            worker.join()
    shutdown(self._fetchers)
    if local_rank == 0:
        _logging.info('Terminate BlobFetcher.')
    shutdown(self._transformers)
    if local_rank == 0:
        _logging.info('Terminate DataTransformer.')
    shutdown(self._readers)
    if local_rank == 0:
        _logging.info('Terminate DataReader.')
        import atexit
github seetaresearch / Dragon / Dragon / python / dragon / utils / vision / data_batch.py View on Github external
def cleanup():
    """Tear down the data pipeline: fetchers, then transformers, then readers.

    Each stage's worker processes are terminated and joined before the
    previous stage is stopped; only local rank 0 logs the teardown.
    """
    def shutdown(workers):
        # Force each worker process to exit, then reap it.
        for worker in workers:
            worker.terminate()
            worker.join()
    shutdown(self._fetchers)
    if local_rank == 0:
        _logging.info('Terminate BlobFetcher.')
    shutdown(self._transformers)
    if local_rank == 0:
        _logging.info('Terminate DataTransformer.')
    shutdown(self._readers)
    if local_rank == 0:
        _logging.info('Terminate DataReader.')
        import atexit
github seetaresearch / Dragon / Dragon / python / dragon / core / workspace.py View on Github external
# NOTE(review): truncated excerpt from dragon/core/workspace.py (Restore);
# the signature and the head of the docstring are not visible in this scrape.
The format of this binary file.

    Returns
    -------
    None

    """
    assert os.path.exists(binary_file), \
        'Binary file({}) does not exist.'.format(binary_file)
    if format == 'pickle':
        # Retry with latin-1 decoding so pickles written under Python 2 can
        # still be loaded under Python 3.
        try:
            state_dict = pickle.load(open(binary_file, 'rb'))
        except UnicodeDecodeError:
            state_dict = pickle.load(
                open(binary_file, 'rb'), encoding='iso-8859-1')
        _logging.info('Restore From Model@: ' + binary_file)
        _logging.info('Model Format: Pickle')
        # Feed only tensors that already exist in the workspace; unknown
        # keys in the checkpoint are silently skipped.
        for k, v in state_dict.items():
            if HasTensor(k):
                FeedTensor(k, v)
                _logging.info('Tensor({}) is restored.'.format(k))
    elif format == 'caffe':
        # Delegate caffe-format deserialization to the workspace backend.
        get_default_workspace().Restore(binary_file, 1)
    else:
        raise TypeError('Unknown binary format: ' + format)
github seetaresearch / Dragon / Dragon / python / dragon / vm / torch / utils / data / data_batch.py View on Github external
def cleanup():
    """Tear down the data pipeline: fetchers, then transformers, then readers.

    Each stage's worker processes are terminated and joined before the
    previous stage is stopped; only local rank 0 logs the teardown.
    """
    def shutdown(workers):
        # Force each worker process to exit, then reap it.
        for worker in workers:
            worker.terminate()
            worker.join()
    shutdown(self._fetchers)
    if local_rank == 0:
        _logging.info('Terminate BlobFetcher.')
    shutdown(self._transformers)
    if local_rank == 0:
        _logging.info('Terminate DataTransformer.')
    shutdown(self._readers)
    if local_rank == 0:
        _logging.info('Terminate DataReader.')
        import atexit
github seetaresearch / Dragon / Dragon / python / dragon / vm / torch / utils / data / data_batch.py View on Github external
def cleanup():
    """Tear down the data pipeline: fetchers, then transformers, then readers.

    Each stage's worker processes are terminated and joined before the
    previous stage is stopped; only local rank 0 logs the teardown.
    """
    def shutdown(workers):
        # Force each worker process to exit, then reap it.
        for worker in workers:
            worker.terminate()
            worker.join()
    shutdown(self._fetchers)
    if local_rank == 0:
        _logging.info('Terminate BlobFetcher.')
    shutdown(self._transformers)
    if local_rank == 0:
        _logging.info('Terminate DataTransformer.')
    shutdown(self._readers)
    if local_rank == 0:
        _logging.info('Terminate DataReader.')
        import atexit