How to use the gnocchi.storage.MetricDoesNotExist exception in gnocchi

To help you get started, we’ve selected a few gnocchi examples showing how MetricDoesNotExist is raised and handled in public projects.

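MetricDoesNotExist is defined in gnocchi's storage package; storage drivers raise it when a metric's backing data is absent, and callers catch it to report a 404 or fall back. Here is a minimal, self-contained sketch of that round trip, assuming only what the examples below show, namely that the exception takes the metric as its single constructor argument (the toy driver is illustrative, not part of gnocchi):

    from gnocchi import storage

    class ToyDriver(object):
        """Illustrative stand-in for a real storage driver."""
        def __init__(self):
            self._data = {}

        def get_raw(self, metric):
            # Raise the gnocchi exception when the metric has no data.
            if metric not in self._data:
                raise storage.MetricDoesNotExist(metric)
            return self._data[metric]

    try:
        ToyDriver().get_raw("4ba80a02-e7a3-4b7b-bf23-79e0f0f01d60")
    except storage.MetricDoesNotExist as e:
        print(e)  # the message names the missing metric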

From gnocchixyz/gnocchi, gnocchi/storage/s3.py (view on GitHub):
                if 'NextContinuationToken' in response:
                    kwargs = {
                        'ContinuationToken': response['NextContinuationToken']
                    }
                else:
                    kwargs = {}
                response = self.s3.list_objects_v2(
                    Bucket=bucket,
                    Prefix=self._prefix(metric) + '%s_%s' % (
                        aggregation.method,
                        utils.timespan_total_seconds(
                            aggregation.granularity),
                    ),
                    **kwargs)
                # If response is empty then check that the metric exists
                contents = response.get('Contents', ())
                if not contents and not self._metric_exists_p(metric, version):
                    raise storage.MetricDoesNotExist(metric)
                for f in contents:
                    try:
                        if self._version_check(f['Key'], version):
                            meta = f['Key'].split('_')
                            keys[aggregation].add(carbonara.SplitKey(
                                utils.to_timestamp(meta[2]),
                                sampling=aggregation.granularity))
                    except (ValueError, IndexError):
                        # Might be "none", or any other file. Be resilient.
                        continue
        return keys
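The S3 driver cannot tell "no aggregates yet" from "no such metric" by the listing alone, so an empty page triggers an explicit existence probe (_metric_exists_p) before the exception is raised. A hedged sketch of such a probe with boto3, assuming the driver keeps a small per-metric marker object (the bucket and key names are illustrative):

    import botocore.exceptions

    def metric_marker_exists(s3, bucket, marker_key):
        # HEAD the marker object; a 404 means the metric was never created.
        try:
            s3.head_object(Bucket=bucket, Key=marker_key)
            return True
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == '404':
                return False
            raise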

From gnocchixyz/gnocchi, gnocchi/storage/ceph.py (view on GitHub):
def _list_split_keys_unbatched(self, metric, aggregations, version=3):
        with rados.ReadOpCtx() as op:
            omaps, ret = self.ioctx.get_omap_vals(op, "", "", -1)
            try:
                self.ioctx.operate_read_op(
                    op, self._build_unaggregated_timeserie_path(metric, 3))
            except rados.ObjectNotFound:
                raise storage.MetricDoesNotExist(metric)

            # NOTE(sileht): after reading libradospy, I'm not sure that
            # ret will have the correct value: get_omap_vals converts
            # the C int to a Python int before operate_read_op is
            # called, and it is unclear whether the int's content is
            # copied during that conversion or whether it still points
            # at the C int. It is most likely copied.
            try:
                ceph.errno_to_exception(ret)
            except rados.ObjectNotFound:
                raise storage.MetricDoesNotExist(metric)

            raw_keys = [name.split("_")
                        for name, value in omaps
                        if self._version_check(name, version)]
            keys = collections.defaultdict(set)
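Both raise sites above funnel through rados.ObjectNotFound: one from reading the unaggregated timeserie object, one from errno_to_exception, gnocchi's helper for turning a negative librados return code back into the matching rados exception. A hedged sketch of that helper, assuming rados.make_ex from the python-rados bindings:

    import rados

    def errno_to_exception(ret):
        # Negative librados return values encode an errno; re-raise them
        # as the corresponding rados exception (e.g. ObjectNotFound).
        if ret < 0:
            raise rados.make_ex(ret, "Operation failed")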

From gnocchixyz/gnocchi, gnocchi/storage/swift.py (view on GitHub):
def _list_split_keys_unbatched(self, metric, aggregations, version=3):
        container = self._container_name(metric)
        try:
            headers, files = self.swift.get_container(
                container, full_listing=True)
        except swclient.ClientException as e:
            if e.http_status == 404:
                raise storage.MetricDoesNotExist(metric)
            raise

        raw_keys = list(map(
            lambda k: k.split("_"),
            (f['name'] for f in files
             if self._version_check(f['name'], version)
             and not f['name'].startswith('none'))))
        keys = collections.defaultdict(set)
        if not raw_keys:
            return keys
        zipped = list(zip(*raw_keys))
        k_timestamps = utils.to_timestamps(zipped[0])
        k_methods = zipped[1]
        k_granularities = list(map(utils.to_timespan, zipped[2]))

        for timestamp, method, granularity in six.moves.zip(
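The Swift driver keeps one container per metric, so a 404 from get_container reliably means the metric itself is gone. A hedged, self-contained sketch of that translation with python-swiftclient (the function name is illustrative):

    import swiftclient.client as swclient

    from gnocchi import storage

    def list_metric_container(swift, container, metric):
        # A missing container means a missing metric; other errors bubble up.
        try:
            return swift.get_container(container, full_listing=True)
        except swclient.ClientException as e:
            if e.http_status == 404:
                raise storage.MetricDoesNotExist(metric)
            raise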

From gnocchixyz/gnocchi, gnocchi/storage/redis.py (view on GitHub):
            pipe.exists(key)
            aggregations = metrics_and_aggregations[metric]
            for aggregation in aggregations:
                self._scripts["list_split_keys"](
                    keys=[key], args=[self._aggregated_field_for_split(
                        aggregation.method, "*",
                        version, aggregation.granularity)],
                    client=pipe,
                )
        results = pipe.execute()
        keys = collections.defaultdict(dict)
        start = 0
        for metric in metrics:
            metric_exists_p = results[start]
            if not metric_exists_p:
                raise storage.MetricDoesNotExist(metric)
            aggregations = metrics_and_aggregations[metric]
            number_of_aggregations = len(aggregations)
            keys_for_aggregations = results[
                start + 1:start + 1 + number_of_aggregations
            ]
            start += 1 + number_of_aggregations  # 1 for metric_exists_p
            for aggregation, k in six.moves.zip(
                    aggregations, keys_for_aggregations):
                if not k:
                    keys[metric][aggregation] = set()
                    continue
                timestamps, methods, granularities = list(zip(*k))
                timestamps = utils.to_timestamps(timestamps)
                granularities = map(utils.to_timespan, granularities)
                keys[metric][aggregation] = {
                    carbonara.SplitKey(timestamp,
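The Redis driver batches everything into a single pipeline: one EXISTS per metric key plus one script call per aggregation, read back positionally from execute(). A hedged sketch of just the existence part with redis-py (the key layout is an assumption):

    import redis

    def metrics_exist(client, keys):
        # Queue one EXISTS per metric key; answers come back in order.
        pipe = client.pipeline(transaction=False)
        for key in keys:
            pipe.exists(key)
        return [bool(r) for r in pipe.execute()]

    # client = redis.Redis()  # needs a reachable Redis server
    # print(metrics_exist(client, [b"gnocchi_<metric-id>"]))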

From gnocchixyz/gnocchi, gnocchi/storage/ceph.py, a longer excerpt of the same code (view on GitHub):
            try:
                self.ioctx.operate_read_op(
                    op, self._build_unaggregated_timeserie_path(metric, 3))
            except rados.ObjectNotFound:
                raise storage.MetricDoesNotExist(metric)

            # NOTE(sileht): after reading libradospy, I'm not sure that
            # ret will have the correct value: get_omap_vals converts
            # the C int to a Python int before operate_read_op is
            # called, and it is unclear whether the int's content is
            # copied during that conversion or whether it still points
            # at the C int. It is most likely copied.
            try:
                ceph.errno_to_exception(ret)
            except rados.ObjectNotFound:
                raise storage.MetricDoesNotExist(metric)

            raw_keys = [name.split("_")
                        for name, value in omaps
                        if self._version_check(name, version)]
            keys = collections.defaultdict(set)
            if not raw_keys:
                return keys
            zipped = list(zip(*raw_keys))
            k_timestamps = utils.to_timestamps(zipped[2])
            k_methods = zipped[3]
            k_granularities = list(map(utils.to_timespan, zipped[4]))

            for timestamp, method, granularity in six.moves.zip(
                    k_timestamps, k_methods, k_granularities):
                for aggregation in aggregations:
                    if (aggregation.method == method
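The object names parsed here use an underscore-separated layout in which indices 2, 3 and 4 carry the timestamp, aggregation method and granularity. A hedged parsing sketch (the leading fields are an assumption inferred from the indices above):

    def parse_split_key(name):
        # e.g. "gnocchi_<metric-id>_<timestamp>_<method>_<granularity>"
        parts = name.split("_")
        return parts[2], parts[3], parts[4]

    print(parse_split_key("gnocchi_abc123_1546300800.0_mean_300.0"))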

From gnocchixyz/gnocchi, gnocchi/rest/api.py (view on GitHub):
            try:
                metric_ids = voluptuous.Schema(
                    self.MetricIDsSchema, required=True)(arg_to_list(metric))
            except voluptuous.Error as e:
                abort(400, "Invalid input: %s" % e)
        else:
            self._workaround_pecan_issue_88()
            metric_ids = deserialize_and_validate(self.MetricIDsSchema)

        metric_ids = [six.text_type(m) for m in metric_ids]
        # Check RBAC policy
        metrics = pecan.request.indexer.list_metrics(
            attribute_filter={"in": {"id": metric_ids}})
        missing_metric_ids = (set(metric_ids)
                              - set(six.text_type(m.id) for m in metrics))
        if missing_metric_ids:
            # Return one of the missing one in the error
            abort(404, six.text_type(storage.MetricDoesNotExist(
                missing_metric_ids.pop())))
        return self.get_cross_metric_measures_from_objs(
            metrics, start, stop, aggregation, reaggregation,
            granularity, needed_overlap, fill, refresh, resample)
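Note that the REST layer never touches the storage backend here; it instantiates MetricDoesNotExist purely to reuse its message in the 404 body. A hedged sketch of that set difference, with abort and the pecan plumbing left out:

    from gnocchi import storage

    def first_missing_metric_error(requested_ids, found_ids):
        # Return the 404 text for one missing id, or None if all were found.
        missing = set(requested_ids) - set(found_ids)
        if missing:
            return str(storage.MetricDoesNotExist(missing.pop()))
        return None

    print(first_missing_metric_error({"a", "b"}, {"a"}))  # names metric "b"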

From gnocchixyz/gnocchi, gnocchi/rest/__init__.py (view on GitHub):
            try:
                metric_ids = voluptuous.Schema(
                    self.MetricIDsSchema, required=True)(arg_to_list(metric))
            except voluptuous.Error as e:
                abort(400, "Invalid input: %s" % e)
        else:
            self._workaround_pecan_issue_88()
            metric_ids = deserialize_and_validate(self.MetricIDsSchema)

        metric_ids = [six.text_type(m) for m in metric_ids]
        # Check RBAC policy
        metrics = pecan.request.indexer.list_metrics(ids=metric_ids)
        missing_metric_ids = (set(metric_ids)
                              - set(six.text_type(m.id) for m in metrics))
        if missing_metric_ids:
            # Return one of the missing one in the error
            abort(404, storage.MetricDoesNotExist(
                missing_metric_ids.pop()))
        return self.get_cross_metric_measures_from_objs(
            metrics, start, stop, aggregation, reaggregation,
            granularity, needed_overlap, fill, refresh, resample, transform)

From gnocchixyz/gnocchi, gnocchi/storage/file.py (view on GitHub):
def _get_measures(self, metric, aggregation):
        path = self._build_metric_path(metric, aggregation)
        try:
            with open(path, 'rb') as aggregation_file:
                return aggregation_file.read()
        except IOError as e:
            if e.errno == errno.ENOENT:
                if os.path.exists(self._build_metric_path(metric)):
                    raise storage.AggregationDoesNotExist(metric, aggregation)
                else:
                    raise storage.MetricDoesNotExist(metric)
            raise
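The file driver disambiguates ENOENT by checking whether the metric's own directory still exists: a missing file under an existing directory is a missing aggregation, while a missing directory is a missing metric. A hedged standalone sketch of the same decision (LookupError stands in for gnocchi's exception classes):

    import errno
    import os

    def read_aggregate(aggregate_path, metric_path):
        try:
            with open(aggregate_path, 'rb') as f:
                return f.read()
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            # Decide which "not found" case this ENOENT represents.
            if os.path.exists(metric_path):
                raise LookupError("aggregation does not exist")
            raise LookupError("metric does not exist")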