How to use the `datacube.gdf._gdfutils.log_multiline` function in datacube

To help you get started, we’ve selected a few datacube examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github opendatacube / datacube-core / datacube / gdf / _arguments.py View on Github external
'default': ,
            'action': 'store_const',
            'const': ,
            'help': 

    Returns:
        argparse namespace object
    """
    logger.debug('Calling _parse_args()')
    log_multiline(logger.debug, arg_descriptors, 'arg_descriptors', '\t')

    _arg_parser = argparse.ArgumentParser(description=os.path.basename(sys.argv[0]))

    for arg_dest in sorted(arg_descriptors.keys()):
        arg_descriptor = arg_descriptors[arg_dest]
        log_multiline(logger.debug, arg_descriptor, 'arg_descriptor for %s' % arg_dest, '\t')

        _arg_parser.add_argument(arg_descriptor['short_flag'],
                                 arg_descriptor['long_flag'],
                                 dest=arg_dest,
                                 default=arg_descriptor['default'],
                                 action=arg_descriptor['action'],
                                 const=arg_descriptor['const'],
                                 help=arg_descriptor['help']
                                 )

    args, _unknown_args = _arg_parser.parse_known_args()

    return args.__dict__
github opendatacube / datacube-core / datacube / gdf / _arguments.py View on Github external
"""Virtual function to parse command line arguments.

    Parameters:
        arg_descriptors: dict keyed by dest variable name containing sub-dicts as follows:
            'short_flag': '-d',
            'long_flag': '--debug',
            'default': ,
            'action': 'store_const',
            'const': ,
            'help': 

    Returns:
        argparse namespace object
    """
    logger.debug('Calling _parse_args()')
    log_multiline(logger.debug, arg_descriptors, 'arg_descriptors', '\t')

    _arg_parser = argparse.ArgumentParser(description=os.path.basename(sys.argv[0]))

    for arg_dest in sorted(arg_descriptors.keys()):
        arg_descriptor = arg_descriptors[arg_dest]
        log_multiline(logger.debug, arg_descriptor, 'arg_descriptor for %s' % arg_dest, '\t')

        _arg_parser.add_argument(arg_descriptor['short_flag'],
                                 arg_descriptor['long_flag'],
                                 dest=arg_dest,
                                 default=arg_descriptor['default'],
                                 action=arg_descriptor['action'],
                                 const=arg_descriptor['const'],
                                 help=arg_descriptor['help']
                                 )
github opendatacube / datacube-core / datacube / gdf / __init__.py View on Github external
name=storage_type
                                              )
            thread_list.append(process_thread)
            process_thread.setDaemon(False)
            process_thread.start()
            logger.debug('Started thread %s', storage_type)

        # Wait for all threads to finish
        for process_thread in thread_list:
            check_thread_exception()
            process_thread.join()

        check_thread_exception()
        logger.debug('All threads finished')

        log_multiline(logger.debug, result_dict, 'result_dict', '\t')
        return result_dict
github opendatacube / datacube-core / datacube / gdf / __init__.py View on Github external
name=db_ref
                                          )
        thread_list.append(process_thread)
        process_thread.setDaemon(False)
        process_thread.start()
        logger.debug('Started thread %s', db_ref)

    # Wait for all threads to finish
    for process_thread in thread_list:
        check_thread_exception()
        process_thread.join()

    check_thread_exception()
    logger.debug('All threads finished')

    log_multiline(logger.debug, result_dict, 'result_dict', '\t')
    return result_dict
github opendatacube / datacube-core / datacube / gdf / __init__.py View on Github external
def thread_execute(storage_type_function, *args, **kwargs):
    """Run *storage_type_function* inside a worker thread, capturing any exception.

    On failure, the exception is stored in the module-global
    ``thread_exception`` so the main thread can detect it after joining,
    the full traceback is logged via ``log_multiline``, and the exception
    is re-raised so the thread itself also terminates with the error.
    N.B: THIS FUNCTION RUNS WITHIN THE SPAWNED THREAD

    Parameters:
        storage_type_function: callable to execute in this thread
        *args, **kwargs: passed through to *storage_type_function*
    """
    global thread_exception
    try:
        storage_type_function(*args, **kwargs)
    except Exception as e:
        thread_exception = e
        # BUG FIX: Exception.message was removed in Python 3 (PEP 352);
        # using e.message here raised AttributeError and masked the real
        # error. %s-format the exception object instead.
        # NOTE: storage_type is read from the enclosing scope.
        log_multiline(logger.error, traceback.format_exc(),
                      'Error in thread %s: %s' % (storage_type, e), '\t')
        raise  # Bare re-raise preserves the original traceback
    finally:
        logger.debug('Thread finished')
github opendatacube / datacube-core / datacube / gdf / __init__.py View on Github external
def thread_execute(db_function, *args, **kwargs):
    """Run *db_function* inside a worker thread, capturing any exception.

    On failure, the exception is stored in the module-global
    ``thread_exception`` so the main thread can detect it after joining,
    the full traceback is logged via ``log_multiline``, and the exception
    is re-raised so the thread itself also terminates with the error.
    N.B: THIS FUNCTION RUNS WITHIN THE SPAWNED THREAD

    Parameters:
        db_function: callable to execute in this thread
        *args, **kwargs: passed through to *db_function*
    """
    global thread_exception
    try:
        db_function(*args, **kwargs)
    except Exception as e:
        thread_exception = e
        # BUG FIX: Exception.message was removed in Python 3 (PEP 352);
        # using e.message here raised AttributeError and masked the real
        # error. Use str(e) instead.
        log_multiline(logger.error, traceback.format_exc(), 'Error in thread: ' + str(e), '\t')
        raise  # Bare re-raise preserves the original traceback
    finally:
        logger.debug('Thread finished')
github opendatacube / datacube-core / datacube / gdf / __init__.py View on Github external
from dataset
      join dataset_dimension using(dataset_type_id, dataset_id)
      join dimension using(dimension_id)
      where dimension_tag = '%s'
    ) dataset_index using(dataset_type_id, dataset_id)
''' % slice_dimension

                # Restrict slices to those within range if required
                if slice_dimension in range_dimensions:
                    sql += '''where slice_index_value between %f and %f
''' % (dimension_range_dict[slice_dimension][0], dimension_range_dict[slice_dimension][1])  # Min, Max

                sql += '''
order by ''' + '_index, '.join(storage_type_dimensions) + '''_index, slice_index_value;
'''
                log_multiline(logger.debug, sql, 'SQL', '\t')

                slice_result_set = database.submit_query(sql)

                storage_units_descriptor = {}  # Dict to hold all storage unit descriptors for this storage type

                regular_storage_type_dimensions = [dimension for dimension in storage_type_dimensions if
                                                   self._storage_config[storage_type]['dimensions'][dimension][
                                                       'indexing_type'] == 'regular']
                irregular_storage_type_dimensions = [dimension for dimension in storage_type_dimensions if
                                                     self._storage_config[storage_type]['dimensions'][dimension][
                                                         'indexing_type'] == 'irregular']
                fixed_storage_type_dimensions = [dimension for dimension in storage_type_dimensions if
                                                 self._storage_config[storage_type]['dimensions'][dimension][
                                                     'indexing_type'] == 'fixed']

                # Define initial max/min/shape values