How to use the gnocchi.utils.to_timespan function in gnocchi

To help you get started, we've selected a few examples of gnocchi.utils.to_timespan in use, drawn from popular public projects.
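gnocchi.utils.to_timespan parses a duration given either as a number of seconds or as a human-readable string and returns a duration object (a numpy.timedelta64 in recent gnocchi releases, a datetime.timedelta in older ones); unparseable or non-positive input raises ValueError. A minimal sketch:

from gnocchi import utils

print(utils.to_timespan("300"))  # five minutes, given in seconds
print(utils.to_timespan("1h"))   # one hour, given with a unit suffix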


Example from gnocchixyz/gnocchi: tools/gnocchi-archive-policy-size.py
import sys

from gnocchi import utils

# Worst-case storage cost of a single point (8.04 bytes in the upstream tool).
WORST_CASE_BYTES_PER_POINT = 8.04

# Usage guard, reconstructed: the tool needs an aggregation-method count
# followed by at least one granularity/timespan pair.
if len(sys.argv) < 4:
    print("Usage: %s <agg methods> <granularity> <timespan>..." % sys.argv[0])
    sys.exit(1)


def sizeof_fmt(num, suffix='B'):
    for unit in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(num) < 1024.0:
            return "%3.1f%s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Yi', suffix)


size = 0
agg_methods = int(sys.argv[1])
for g, t in utils.grouper(sys.argv[2:], 2):
    granularity = utils.to_timespan(g)
    timespan = utils.to_timespan(t)
    points = timespan / granularity
    cursize = points * WORST_CASE_BYTES_PER_POINT
    size += cursize
    print("%s over %s = %d points = %s" % (g, t, points, sizeof_fmt(cursize)))

size *= agg_methods

print("Total: " + sizeof_fmt(size))

Example from gnocchixyz/gnocchi: gnocchi/rest/__init__.py
            try:
                pecan.request.storage.refresh_metric(
                    pecan.request.indexer, pecan.request.incoming, self.metric,
                    pecan.request.conf.api.refresh_timeout)
            except storage.SackLockTimeoutError as e:
                abort(503, e)
        try:
            if aggregation in self.custom_agg:
                warnings.warn("moving_average aggregation is deprecated.",
                              category=DeprecationWarning)
                return self.custom_agg[aggregation].compute(
                    pecan.request.storage, self.metric,
                    start, stop, **param)
            return pecan.request.storage.get_measures(
                self.metric, start, stop, aggregation,
                utils.to_timespan(granularity)
                if granularity is not None else None,
                transform)
        except (storage.MetricDoesNotExist,
                storage.GranularityDoesNotExist,
                storage.AggregationDoesNotExist) as e:
            abort(404, e)
        except aggregates.CustomAggFailure as e:
            abort(400, e)
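The inline conditional above is the pattern to reuse whenever granularity is optional: convert it only when the client actually supplied one. In isolation (raw is a hypothetical query-string value):

from gnocchi import utils

raw = "300"  # hypothetical query-string parameter; may also be None
granularity = utils.to_timespan(raw) if raw is not None else None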

Example from gnocchixyz/gnocchi: gnocchi/rest/__init__.py
        if (aggregation
                not in archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS):
            abort(
                400,
                'Invalid aggregation value %s, must be one of %s'
                % (aggregation,
                   archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS))

        for metric in metrics:
            enforce("get metric", metric)

        number_of_metrics = len(metrics)
        if number_of_metrics == 0:
            return []
        if granularity is not None:
            try:
                granularity = utils.to_timespan(granularity)
            except ValueError as e:
                abort(400, e)

        if transform is not None:
            transform = TransformSchema(transform)

        if resample:
            # TODO(sileht): This have to be deprecated at some point
            if transform:
                abort(400, 'transform and resample are exclusive')

            if not granularity:
                abort(400, 'A granularity must be specified to resample')
            try:
                resample = utils.to_timespan(resample)
            except ValueError as e:
                abort(400, e)
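Both conversions in this excerpt funnel the ValueError raised by to_timespan into an HTTP 400. Outside a web handler the same guard looks like this (inputs are illustrative; "0" is rejected because timespans must be positive):

from gnocchi import utils

for candidate in ("1h", "0", "bogus"):
    try:
        print(candidate, "->", utils.to_timespan(candidate))
    except ValueError as e:
        print(candidate, "rejected:", e)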

Example from gnocchixyz/gnocchi: gnocchi/rest/api.py
    def get_measures(self, start=None, stop=None, aggregation='mean',
                     granularity=None, resample=None, refresh=False,
                     **param):
        self.enforce_metric("get measures")

        if resample:
            if not granularity:
                abort(400, 'A granularity must be specified to resample')
            try:
                resample = (resample if calendar.GROUPINGS.get(resample) else
                            utils.to_timespan(resample))
            except ValueError as e:
                abort(400, six.text_type(e))

        if granularity is None:
            granularity = [d.granularity
                           for d in self.metric.archive_policy.definition]
            start, stop, _, _, _ = validate_qs(
                start=start, stop=stop)
        else:
            start, stop, granularity, _, _ = validate_qs(
                start=start, stop=stop, granularity=granularity)

        if aggregation not in self.metric.archive_policy.aggregation_methods:
            abort(404, {
                "cause": "Aggregation method does not exist for this metric",
                "detail": {

Example from gnocchixyz/gnocchi: gnocchi/rest/__init__.py
        if granularity is not None:
            try:
                granularity = utils.to_timespan(granularity)
            except ValueError as e:
                abort(400, e)

        if transform is not None:
            transform = TransformSchema(transform)

        if resample:
            # TODO(sileht): This have to be deprecated at some point
            if transform:
                abort(400, 'transform and resample are exclusive')

            if not granularity:
                abort(400, 'A granularity must be specified to resample')
            try:
                resample = utils.to_timespan(resample)
            except ValueError as e:
                abort(400, e)
            transform = [carbonara.Transformation("resample", (resample,))]

        if fill is not None:
            if granularity is None:
                abort(400, "Unable to fill without a granularity")
            try:
                fill = float(fill)
            except ValueError as e:
                if fill != 'null':
                    abort(400, "fill must be a float or \'null\': %s" % e)

        try:
            if strtobool("refresh", refresh):
                metrics_to_update = [

Example from gnocchixyz/gnocchi: gnocchi/rest/api.py
abort(400, "Invalid value for start")

        if stop is not None:
            try:
                stop = utils.to_timestamp(stop)
            except Exception:
                abort(400, "Invalid value for stop")

        try:
            predicate = self.MeasureQuery(query)
        except self.MeasureQuery.InvalidQuery as e:
            abort(400, six.text_type(e))

        if granularity is not None:
            granularity = sorted(
                map(utils.to_timespan, arg_to_list(granularity)),
                reverse=True)

        metrics_and_aggregations = collections.defaultdict(list)

        for metric in metrics:
            if granularity is None:
                granularity = sorted((
                    d.granularity
                    for d in metric.archive_policy.definition),
                    reverse=True)
            for gr in granularity:
                agg = metric.archive_policy.get_aggregation(
                    aggregation, gr)
                if agg is None:
                    abort(400,
                          storage.AggregationDoesNotExist(
                              metric, aggregation, gr))  # args reconstructed
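sorted() works on the converted values because the durations to_timespan returns compare naturally, so reverse=True puts the coarsest granularity first. A standalone sketch:

from gnocchi import utils

raw = ["60s", "1h", "5m"]
granularities = sorted(map(utils.to_timespan, raw), reverse=True)
print(granularities)  # 1h first, then 5m, then 60s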

Example from gnocchixyz/gnocchi: gnocchi/indexer/sqlalchemy.py
    def _handle_binary_op(cls, engine, table, op, nodes):
        try:
            field_name, value = list(nodes.items())[0]
        except Exception:
            raise indexer.QueryError()

        if field_name == "lifespan":
            attr = getattr(table, "ended_at") - getattr(table, "started_at")
            value = datetime.timedelta(
                seconds=utils.timespan_total_seconds(
                    utils.to_timespan(value)))
            if engine == "mysql":
                # NOTE(jd) So subtracting 2 timestamps in MySQL result in some
                # weird results based on string comparison. It's useless and it
                # does not work at all with seconds or anything. Just skip it.
                raise exceptions.NotImplementedError
        elif field_name == "created_by_user_id":
            creator = getattr(table, "creator")
            if op == operator.eq:
                return creator.like("%s:%%" % value)
            elif op == operator.ne:
                return sqlalchemy.not_(creator.like("%s:%%" % value))
            elif op == cls.binary_operators[u"like"]:
                return creator.like("%s:%%" % value)
            raise indexer.QueryValueError(value, field_name)
        elif field_name == "created_by_project_id":
            creator = getattr(table, "creator")
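The lifespan branch turns the parsed timespan into a plain datetime.timedelta so it can be compared against the ended_at - started_at column expression. The conversion in isolation (the input string is illustrative):

import datetime

from gnocchi import utils

delta = datetime.timedelta(
    seconds=utils.timespan_total_seconds(utils.to_timespan("1 day")))
print(delta)  # 1 day, 0:00:00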

Example from gnocchixyz/gnocchi: gnocchi/rest/__init__.py
def Timespan(value):
    try:
        return utils.to_timespan(value)
    except ValueError as e:
        raise voluptuous.Invalid(e)
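Wrapping to_timespan this way converts its ValueError into voluptuous.Invalid, which is what voluptuous expects from a validator. A hypothetical schema using it:

import voluptuous

schema = voluptuous.Schema({"granularity": Timespan})
print(schema({"granularity": "5m"}))  # value parsed via utils.to_timespan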

Example from gnocchixyz/gnocchi: gnocchi/rest/transformation.py
timespan = timespan.setParseAction(lambda t: utils.to_timespan(t[0]))
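Here to_timespan runs as a pyparsing parse action, so duration tokens are converted as soon as the grammar matches them. A minimal sketch with a made-up token pattern (the real grammar in transformation.py is larger):

import pyparsing

from gnocchi import utils

# hypothetical token: digits with an optional unit suffix
timespan = pyparsing.Regex(r"\d+[smhd]?")
timespan = timespan.setParseAction(lambda t: utils.to_timespan(t[0]))
print(timespan.parseString("30s"))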