How to use the datacube.model.Range class in datacube

To help you get started, we’ve selected a few examples showing how datacube.model.Range is used in public projects.

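Before the project examples, here is a minimal sketch of the basic pattern: Range is a simple (begin, end) pair that the datacube index accepts for any range-searchable field such as time, lat or lon. The product name 'ls8_example' below is a placeholder, not a real product.

from datetime import datetime

from datacube import Datacube
from datacube.model import Range

dc = Datacube(app='range-example')
datasets = dc.index.datasets.search_eager(
    product='ls8_example',   # placeholder product name
    lat=Range(-30.5, -29.5),
    time=Range(datetime(2014, 7, 26, 23, 0, 0),
               datetime(2014, 7, 26, 23, 59, 0)),
)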

github opendatacube / datacube-core / tests / ui / test_expression_parsing.py
def test_parse_date_ranges():
    eighth_march_2014 = {
        'time': Range(datetime(2014, 3, 8, tzinfo=tzutc()), datetime(2014, 3, 8, 23, 59, 59, 999999, tzinfo=tzutc()))
    }
    assert parse_expressions('time in 2014-03-08') == eighth_march_2014
    assert parse_expressions('time in 2014-03-8') == eighth_march_2014

    march_2014 = {
        'time': Range(datetime(2014, 3, 1, tzinfo=tzutc()), datetime(2014, 3, 31, 23, 59, 59, 999999, tzinfo=tzutc()))
    }
    assert parse_expressions('time in 2014-03') == march_2014
    assert parse_expressions('time in 2014-3') == march_2014
    # Not a leap year, so 28 days in February
    feb_2014 = {
        'time': Range(datetime(2014, 2, 1, tzinfo=tzutc()), datetime(2014, 2, 28, 23, 59, 59, 999999, tzinfo=tzutc()))
    }
    assert parse_expressions('time in 2014-02') == feb_2014
    assert parse_expressions('time in 2014-2') == feb_2014
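As the assertions above show, a date expression expands to a Range covering the whole day or month. A quick way to inspect that yourself, assuming parse_expressions is importable from datacube.ui as the test module does:

from datacube.ui import parse_expressions

query = parse_expressions('time in 2014-03')
time_range = query['time']               # a datacube.model.Range
print(time_range.begin, time_range.end)  # 2014-03-01 00:00 UTC to 2014-03-31 23:59:59.999999 UTC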

github opendatacube / datacube-core / integration_tests / index / test_search.py
def test_search_dataset_ranges(index: Index, pseudo_ls8_dataset: Dataset) -> None:
    # In the lat bounds.
    datasets = index.datasets.search_eager(
        lat=Range(-30.5, -29.5),
        time=Range(
            datetime.datetime(2014, 7, 26, 23, 0, 0),
            datetime.datetime(2014, 7, 26, 23, 59, 0)
        )
    )
    assert len(datasets) == 1
    assert datasets[0].id == pseudo_ls8_dataset.id

    # Out of the lat bounds.
    datasets = index.datasets.search_eager(
        lat=Range(28, 32),
        time=Range(
            datetime.datetime(2014, 7, 26, 23, 48, 0),
            datetime.datetime(2014, 7, 26, 23, 50, 0)
        )
    )
    assert len(datasets) == 0

    # Out of the time bounds
    datasets = index.datasets.search_eager(
        lat=Range(-30.5, -29.5),
        time=Range(
            datetime.datetime(2014, 7, 26, 21, 48, 0),
            datetime.datetime(2014, 7, 26, 21, 50, 0)
        )
    )
    assert len(datasets) == 0

github opendatacube / datacube-core / integration_tests / index / test_config_docs.py
    # No arguments, return all.
    res = list(index.products.search())
    assert res == [ls5_telem_type]

    # Matching fields
    res = list(index.products.search(
        product_type='satellite_telemetry_data',
        product='ls5_telem_test'
    ))
    assert res == [ls5_telem_type]

    # Matching fields and non-available fields
    res = list(index.products.search(
        product_type='satellite_telemetry_data',
        product='ls5_telem_test',
        lat=Range(142.015625, 142.015625),
        lon=Range(-12.046875, -12.046875)
    ))
    assert res == []

    # Matching fields and available fields
    [(res, q)] = list(index.products.search_robust(
        product_type='satellite_telemetry_data',
        product='ls5_telem_test',
        sat_path=Range(142.015625, 142.015625),
        sat_row=Range(-12.046875, -12.046875)
    ))
    assert res == ls5_telem_type
    assert 'sat_path' in q
    assert 'sat_row' in q

    # Or expression test

github opendatacube / datacube-core / tests / storage / test_access.py
def test_storage_unit_stack():
    stack = StorageUnitStack([ds1, ds2], 't')
    expected = numpy.array([
        [
            [624, 625, 626],
            [634, 635, 636]
        ],
        [
            [24, 25, 26],
            [34, 35, 36]
        ]
    ])

    data = stack.get('B10', t=Range(400, 500), x=Range(3, 5), y=Range(1, 1.5))
    assert len(data.coords['t']) == 2
    assert len(data.coords['x']) == 3
    assert len(data.coords['y']) == 2
    assert (data.values == expected).all()

    data = stack.get('B10', t=slice(3, 5), x=slice(4, 7), y=slice(2, 4))
    assert len(data.coords['t']) == 2
    assert len(data.coords['x']) == 3
    assert len(data.coords['y']) == 2
    assert (data.values == expected).all()

github GeoscienceAustralia / COG-Conversion / work_list.py
def get_dataset_values(product, year=None, month=None, from_date=None, datacube_env=None):
    """
    Extract the file list corresponding to a product for the given year and month using datacube API.
    """

    query = {'product': product}
    if from_date:
        query['time'] = Range(datetime(year=from_date.year, month=from_date.month, day=from_date.day),
                              datetime.now())
    elif year and month:
        query['time'] = Range(datetime(year=year, month=month, day=1), datetime(year=year, month=month + 1, day=1))
    elif year:
        query['time'] = Range(datetime(year=year, month=1, day=1), datetime(year=year + 1, month=1, day=1))
    dc = Datacube(app='cog-worklist query', env=datacube_env)

    field_names = get_field_names(product)
    files = dc.index.datasets.search_returning(field_names=tuple(field_names), **query)

    # Extract file name from search_result
    def filename_from_uri(uri):
        return uri.split('//')[1]

    for result in files:
        yield filename_from_uri(result.uri), compute_prefix_from_query_result(result, product)
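The helper above builds a time Range from an explicit start date, a month, or a whole year. A trimmed-down sketch of the same query, where the product name and the field names are assumptions used only for illustration:

from datetime import datetime

from datacube import Datacube
from datacube.model import Range

dc = Datacube(app='worklist-sketch')
results = dc.index.datasets.search_returning(
    field_names=('id', 'uri'),       # assumed field names
    product='ls8_example',           # placeholder product name
    time=Range(datetime(2018, 1, 1), datetime(2019, 1, 1)),
)
for result in results:
    print(result.uri)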

github opendatacube / datacube-core / datacube / scripts / ingest.py
def create_task_list(index, output_type, year, source_type, config):
    config['taskfile_utctime'] = int(time.time())

    query = {}
    if year:
        query['time'] = Range(datetime(year=year[0], month=1, day=1), datetime(year=year[1] + 1, month=1, day=1))
    if 'ingestion_bounds' in config:
        bounds = config['ingestion_bounds']
        query['x'] = Range(bounds['left'], bounds['right'])
        query['y'] = Range(bounds['bottom'], bounds['top'])

    tasks = find_diff(source_type, output_type, index, **query)
    _LOG.info('%s tasks discovered', len(tasks))

    def check_valid(tile, tile_index):
        if FUSER_KEY in config:
            return True

        require_fusing = [source for source in tile.sources.values if len(source) > 1]
        if require_fusing:
            _LOG.warning('Skipping %s - no "%s" specified in config: %s', tile_index, FUSER_KEY, require_fusing)

        return not require_fusing

    def update_sources(sources):

github opendatacube / datacube-core / datacube / scripts / run_ingest.py
def _stack_storage_type(storage_type, start_date, end_date, index):
    period, date_format = {
        'year': (relativedelta(years=1), '%Y'),
        'month': (relativedelta(months=1), '%Y%m'),
    }[storage_type.aggregation_period]
    # TODO: order by time will remove the need to run multiple searches
    while start_date < end_date:
        storage_units_by_tile_index = {}
        for storage_unit in index.storage.search(type=storage_type.id, time=Range(start_date, start_date + period)):
            storage_units_by_tile_index.setdefault(storage_unit.tile_index, []).append(storage_unit)

        for tile_index, storage_units in storage_units_by_tile_index.items():
            if len(storage_units) < 2:
                continue

            storage_units.sort(key=lambda su: su.coordinates['time'].begin)
            filename = storage_type.generate_uri(tile_index=tile_index,
                                                 start_time=start_date.strftime(date_format),
                                                 end_time=(start_date + period).strftime(date_format))
            yield (storage_units, filename)
        start_date += period
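The pattern here is worth noting: a coarse date span is walked period by period, and each step becomes a Range for a narrower search. A standalone sketch of the same idea, with the actual search call left as a comment:

from datetime import datetime
from dateutil.relativedelta import relativedelta

from datacube.model import Range

start, end = datetime(2014, 1, 1), datetime(2014, 4, 1)
period = relativedelta(months=1)
while start < end:
    window = Range(start, start + period)
    # run a time-bounded search here, e.g. index.datasets.search(time=window)
    start += period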

github opendatacube / datacube-core / datacube / api.py
                    input_coords['right'] = float(data['range'])
                else:
                    input_coords['left'] = data['range'][0]
                    input_coords['right'] = data['range'][-1]
            elif dim in ['time', 't']:
                # TODO: Handle time formatting strings & other CRSs
                # Assume datetime object or seconds since UNIX epoch 1970-01-01 for now...
                search_query['time'] = Range(to_datetime(data['range'][0]),
                                             to_datetime(data['range'][1]))
            else:
                # Assume the search function will sort it out, add it to the query
                search_query[dim] = Range(*data['range'])
    try:
        search_coords = geospatial_warp_bounds(input_coords, input_crs, tolerance=FLOAT_TOLERANCE)
        search_query['lat'] = Range(search_coords['bottom'], search_coords['top'])
        search_query['lon'] = Range(search_coords['left'], search_coords['right'])
    except ValueError:
        _LOG.warning("Couldn't convert spatial dimension ranges")
    return search_query

github opendatacube / datacube-core / datacube / api / query.py
def _values_to_search(**kwargs):
    search = {}
    for key, value in kwargs.items():
        if key.lower() in ('time', 't'):
            search['time'] = _time_to_search_dims(value)
        elif key not in ['latitude', 'lat', 'y'] + ['longitude', 'lon', 'x']:
            if isinstance(value, collections.Sequence) and len(value) == 2:
                search[key] = Range(*value)
            else:
                search[key] = value
    return search
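This helper is why the high-level query API also accepts plain two-element pairs: an ordinary field given as a pair is wrapped in Range(*value), while time goes through its own handling that likewise ends up as a Range. In practice the two call styles below should behave the same; the product name and the cloud_cover field are assumptions for illustration:

from datacube import Datacube
from datacube.model import Range

dc = Datacube(app='tuple-vs-range')

# a plain pair is converted to a Range internally ...
datasets = dc.find_datasets(product='ls8_example', cloud_cover=(0, 10))

# ... which is the same as passing the Range yourself
datasets = dc.find_datasets(product='ls8_example', cloud_cover=Range(0, 10))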

github opendatacube / datacube-core / datacube / summary / datacube_summarygen.py
def _get_time_ranges(date_list):

    for i, date in enumerate(date_list):
        if i == 0:
            prev = date
            continue

        yield Range(prev, date)
        prev = date
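The generator above simply pairs consecutive dates into Ranges. The same result written as a list comprehension, for a quick check:

from datetime import datetime

from datacube.model import Range

dates = [datetime(2014, 1, 1), datetime(2014, 2, 1), datetime(2014, 3, 1)]
ranges = [Range(begin, end) for begin, end in zip(dates, dates[1:])]
# two ranges: 2014-01-01 to 2014-02-01, and 2014-02-01 to 2014-03-01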