How to use the gnocchi.carbonara.SplitKey class in gnocchi

To help you get started, we’ve selected a few examples based on how SplitKey is used in the gnocchi project itself.

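Before the excerpts, here is a minimal standalone sketch of the two ways SplitKey appears in them: direct construction from a timestamp plus a sampling period, and the SplitKey.from_timestamp_and_sampling classmethod. The timestamp and granularity values below are illustrative, and the numpy-based types are an assumption inferred from how the storage drivers build their keys.

import numpy

from gnocchi import carbonara

# A 60-second granularity, expressed as a numpy timedelta64 the way the
# storage drivers do (illustrative value).
granularity = numpy.timedelta64(60, 's')

# Direct construction, as the drivers do when decoding stored names back
# into keys.
key = carbonara.SplitKey(numpy.datetime64('2024-01-01T00:00:00'),
                         sampling=granularity)

# Derive the key that covers an arbitrary timestamp, as the range filtering
# in storage/__init__.py does.
start = carbonara.SplitKey.from_timestamp_and_sampling(
    numpy.datetime64('2024-01-01T12:00:00'), granularity)

# Keys are hashable and orderable, so they can be kept in sets, sorted and
# compared against range bounds.
keys = {key}
filtered = [k for k in sorted(keys) if k >= start]

Every excerpt below is a variation on this: a storage driver lists what it has persisted, rebuilds SplitKey objects from the stored names, and the common storage code filters those keys against a requested time range.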

github gnocchixyz / gnocchi / gnocchi / storage / s3.py (view on GitHub)
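# Fragment of the S3 driver's split-key listing (the top of the
# object-listing call is truncated): each object name found under the
# metric prefix is parsed and rebuilt into a carbonara.SplitKey.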
Bucket=bucket,
                    Prefix=self._prefix(metric) + '%s_%s' % (
                        aggregation.method,
                        utils.timespan_total_seconds(
                            aggregation.granularity),
                    ),
                    **kwargs)
                # If response is empty then check that the metric exists
                contents = response.get('Contents', ())
                if not contents and not self._metric_exists_p(metric, version):
                    raise storage.MetricDoesNotExist(metric)
                for f in contents:
                    try:
                        if (self._version_check(f['Key'], version)):
                            meta = f['Key'].split('_')
                            keys[aggregation].add(carbonara.SplitKey(
                                utils.to_timestamp(meta[2]),
                                sampling=aggregation.granularity))
                    except (ValueError, IndexError):
                        # Might be "none", or any other file. Be resilient.
                        continue
        return keys
github gnocchixyz / gnocchi / gnocchi / storage / ceph.py (view on GitHub)
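# Fragment of the Ceph driver (truncated at the top): omap entry names are
# split into timestamp/method/granularity columns, matched against the
# requested aggregations, and rebuilt into SplitKey objects.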
for name, value in omaps
                        if self._version_check(name, version)]
            keys = collections.defaultdict(set)
            if not raw_keys:
                return keys
            zipped = list(zip(*raw_keys))
            k_timestamps = utils.to_timestamps(zipped[2])
            k_methods = zipped[3]
            k_granularities = list(map(utils.to_timespan, zipped[4]))

            for timestamp, method, granularity in six.moves.zip(
                    k_timestamps, k_methods, k_granularities):
                for aggregation in aggregations:
                    if (aggregation.method == method
                       and aggregation.granularity == granularity):
                        keys[aggregation].add(carbonara.SplitKey(
                            timestamp,
                            sampling=granularity))
                        break
            return keys
github gnocchixyz / gnocchi / gnocchi / storage / __init__.py (view on GitHub)
retrieve in format
                                         {metric: [aggregation, …]}.
        :param from_timestamp: The timestamp to get the measure from.
        :param to_timestamp: The timestamp to get the measure to.
        """
        metrics_aggs_keys = self._list_split_keys(metrics_and_aggregations)

        for metric, aggregations_keys in six.iteritems(metrics_aggs_keys):
            for aggregation, keys in six.iteritems(aggregations_keys):
                start = (
                    carbonara.SplitKey.from_timestamp_and_sampling(
                        from_timestamp, aggregation.granularity)
                ) if from_timestamp else None

                stop = (
                    carbonara.SplitKey.from_timestamp_and_sampling(
                        to_timestamp, aggregation.granularity)
                ) if to_timestamp else None

                # Replace keys with filtered version
                metrics_aggs_keys[metric][aggregation] = [
                    key for key in sorted(keys)
                    if ((not start or key >= start)
                        and (not stop or key <= stop))
                ]

        metrics_aggregations_splits = self._get_splits_and_unserialize(
            metrics_aggs_keys)

        results = collections.defaultdict(dict)
        for metric, aggregations in six.iteritems(metrics_and_aggregations):
            for aggregation in aggregations:
github gnocchixyz / gnocchi / gnocchi / storage / __init__.py (view on GitHub)
from_timestamp=None, to_timestamp=None,
                                resample=None):
        """Get aggregated measures from a metric.

        :param metrics_and_aggregations: The metrics and aggregations to
                                         retrieve in format
                                         {metric: [aggregation, …]}.
        :param from_timestamp: The timestamp to get the measure from.
        :param to_timestamp: The timestamp to get the measure to.
        """
        metrics_aggs_keys = self._list_split_keys(metrics_and_aggregations)

        for metric, aggregations_keys in six.iteritems(metrics_aggs_keys):
            for aggregation, keys in six.iteritems(aggregations_keys):
                start = (
                    carbonara.SplitKey.from_timestamp_and_sampling(
                        from_timestamp, aggregation.granularity)
                ) if from_timestamp else None

                stop = (
                    carbonara.SplitKey.from_timestamp_and_sampling(
                        to_timestamp, aggregation.granularity)
                ) if to_timestamp else None

                # Replace keys with filtered version
                metrics_aggs_keys[metric][aggregation] = [
                    key for key in sorted(keys)
                    if ((not start or key >= start)
                        and (not stop or key <= stop))
                ]

        metrics_aggregations_splits = self._get_splits_and_unserialize(
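
Both storage/__init__.py excerpts above convert the requested range bounds into SplitKeys and compare keys rather than raw timestamps, so the comparison happens at split granularity. Here is a standalone sketch of just that filtering step, with illustrative values and the same assumed numpy types:

import numpy

from gnocchi import carbonara

granularity = numpy.timedelta64(300, 's')
stored = {
    carbonara.SplitKey(numpy.datetime64('2024-01-01T00:00:00'),
                       sampling=granularity),
    carbonara.SplitKey(numpy.datetime64('2024-03-01T00:00:00'),
                       sampling=granularity),
}

start = carbonara.SplitKey.from_timestamp_and_sampling(
    numpy.datetime64('2024-01-15T00:00:00'), granularity)
stop = carbonara.SplitKey.from_timestamp_and_sampling(
    numpy.datetime64('2024-02-15T00:00:00'), granularity)

# Same filter as the driver code: keep splits whose key lies between the
# bounds derived from from_timestamp and to_timestamp.
wanted = [k for k in sorted(stored) if k >= start and k <= stop]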
github gnocchixyz / gnocchi / gnocchi / storage / _carbonara.py (view on GitHub)
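# Fragment of the _carbonara storage base class: stored split names are float
# epoch timestamps, scaled to nanoseconds and cast to datetime64[ns] before
# being wrapped into SplitKey objects via functools.partial (dtype=numpy.float
# is the old NumPy alias for the builtin float, removed in NumPy 1.24).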
def _list_split_keys_for_metric(self, metric, aggregation, granularity,
                                    version=3):
        return set(map(
            functools.partial(carbonara.SplitKey, sampling=granularity),
            (numpy.array(
                list(self._list_split_keys(
                    metric, aggregation, granularity, version)),
                dtype=numpy.float) * 10e8).astype('datetime64[ns]')))
github gnocchixyz / gnocchi / gnocchi / carbonara.py (view on GitHub)
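# From carbonara.py itself: a timeseries helper that computes the key for a
# given timestamp by delegating to SplitKey.from_timestamp_and_sampling.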
def get_split_key(self, timestamp=None):
        """Return the split key for a particular timestamp.

        :param timestamp: If None, the first timestamp of the timeseries
                          is used.
        :return: A SplitKey object or None if the timeseries is empty.
        """
        if timestamp is None:
            timestamp = self.first
            if timestamp is None:
                return
        return SplitKey.from_timestamp_and_sampling(
            timestamp, self.aggregation.granularity)
github gnocchixyz / gnocchi / gnocchi / storage / swift.py (view on GitHub)
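# Fragment of the Swift driver (truncated at the top): container listings are
# filtered by version and the 'none' marker, then split into
# timestamp/method/granularity columns to rebuild the SplitKey set, mirroring
# the Ceph driver above.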
if self._version_check(f['name'], version)
             and not f['name'].startswith('none'))))
        keys = collections.defaultdict(set)
        if not raw_keys:
            return keys
        zipped = list(zip(*raw_keys))
        k_timestamps = utils.to_timestamps(zipped[0])
        k_methods = zipped[1]
        k_granularities = list(map(utils.to_timespan, zipped[2]))

        for timestamp, method, granularity in six.moves.zip(
                k_timestamps, k_methods, k_granularities):
            for aggregation in aggregations:
                if (aggregation.method == method
                   and aggregation.granularity == granularity):
                    keys[aggregation].add(carbonara.SplitKey(
                        timestamp,
                        sampling=granularity))
                    break
        return keys
github gnocchixyz / gnocchi / gnocchi / storage / redis.py (view on GitHub)
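# Fragment of the Redis driver (truncated at the top): pipeline results are
# sliced per metric, and each stored (timestamp, method, granularity) entry
# is parsed so a SplitKey can be built from its timestamp and granularity.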
aggregations = metrics_and_aggregations[metric]
            number_of_aggregations = len(aggregations)
            keys_for_aggregations = results[
                start + 1:start + 1 + number_of_aggregations
            ]
            start += 1 + number_of_aggregations  # 1 for metric_exists_p
            for aggregation, k in six.moves.zip(
                    aggregations, keys_for_aggregations):
                if not k:
                    keys[metric][aggregation] = set()
                    continue
                timestamps, methods, granularities = list(zip(*k))
                timestamps = utils.to_timestamps(timestamps)
                granularities = map(utils.to_timespan, granularities)
                keys[metric][aggregation] = {
                    carbonara.SplitKey(timestamp,
                                       sampling=granularity)
                    for timestamp, granularity
                    in six.moves.zip(timestamps, granularities)
                }
        return keys