def test_get_metrics(self):
"""Test that Stats converts recorded values into metrics."""
stats = stats_module.stats
# Check that metrics are empty before view registration
initial_metrics = list(stats.get_metrics())
self.assertEqual(initial_metrics, [])
mock_measure = Mock(spec=measure.MeasureFloat)
mock_md = Mock(spec=metric_descriptor.MetricDescriptor)
mock_md.type =\
metric_descriptor.MetricDescriptorType.CUMULATIVE_DISTRIBUTION
mock_view = Mock(spec=view.View)
mock_view.measure = mock_measure
mock_view.get_metric_descriptor.return_value = mock_md
mock_view.columns = ['k1']
stats.view_manager.measure_to_view_map.register_view(mock_view, Mock())
# Check that metrics are still empty until we record
empty_metrics = list(stats.get_metrics())
self.assertEqual(empty_metrics, [])
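# Sketch of the step this excerpt builds toward (not part of the snippet; it
# assumes the standard opencensus stats_recorder API): recording a value
# against the registered measure should make get_metrics() yield one metric.
mm = stats.stats_recorder.new_measurement_map()
mm.measure_float_put(mock_measure, 1.0)
mm.record()
self.assertEqual(len(list(stats.get_metrics())), 1)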
def test_record_with_missing_key_in_context(self):
measure = mock.Mock(spec=measure_module.MeasureInt)
sum_aggregation = aggregation_module.SumAggregation()
view = view_module.View("test_view", "description", ['key1', 'key2'],
measure, sum_aggregation)
start_time = datetime.utcnow()
end_time = datetime.utcnow()
view_data = view_data_module.ViewData(
view=view, start_time=start_time, end_time=end_time)
context = mock.Mock()
context.map = {
'key1': 'val1',
'key3': 'val3'
} # key2 is not in the context.
time = utils.to_iso_str()
value = 4
view_data.record(
context=context, value=value, timestamp=time, attachments=None)
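# Not shown in the excerpt: the behaviour under test is that a tag key missing
# from the context ('key2' here) is recorded as None in the tag-value tuple,
# so the aggregated row is keyed by ('val1', None) in
# view_data.tag_value_aggregation_data_map (attribute name assumed from
# opencensus-python).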
def test_record_with_attachment(self):
boundaries = [1, 2, 3]
distribution_aggregation = aggregation_module.DistributionAggregation(
boundaries=boundaries)
name = "testName"
description = "testMeasure"
unit = "testUnit"
measure = measure_module.MeasureInt(
name=name, description=description, unit=unit)
description = "testMeasure"
columns = ["key1", "key2"]
view = view_module.View(
name=name,
description=description,
columns=columns,
measure=measure,
aggregation=distribution_aggregation)
start_time = datetime.utcnow()
attachments = {"One": "one", "Two": "two"}
end_time = datetime.utcnow()
view_data = view_data_module.ViewData(
    view=view, start_time=start_time, end_time=end_time)
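# The excerpt is truncated here; the step the test name refers to would pass
# the attachments dict through record(), mirroring the call shown above, e.g.
# (context, value and time assumed):
# view_data.record(context=context, value=value, timestamp=time,
#                  attachments=attachments)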
def test_new_aggregation_data_float(self):
measure = mock.Mock(spec=measure_module.MeasureFloat)
last_value_aggregation = aggregation_module.LastValueAggregation()
agg_data = last_value_aggregation.new_aggregation_data(measure)
self.assertEqual(0, agg_data.value)
self.assertEqual(value.ValueDouble, agg_data.value_type)
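# Counterpart sketch (not in the original test): for an integer measure the
# same aggregation is expected to produce a ValueLong-typed data point.
measure_int = mock.Mock(spec=measure_module.MeasureInt)
agg_data_int = last_value_aggregation.new_aggregation_data(measure_int)
self.assertEqual(value.ValueLong, agg_data_int.value_type)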
def test_constructor_defaults(self):
name = "testName"
description = "testMeasure"
measure = measure_module.MeasureInt(name=name, description=description)
self.assertEqual(None, measure.unit)
def test_convert_view_without_labels(self):
mock_measure = mock.Mock(spec=measure.MeasureFloat)
mock_aggregation = mock.Mock(spec=aggregation.DistributionAggregation)
mock_aggregation.get_metric_type.return_value = \
metric_descriptor.MetricDescriptorType.CUMULATIVE_DISTRIBUTION
vd = mock.Mock(spec=view_data.ViewData)
vd.view = view.View(
name=mock.Mock(),
description=mock.Mock(),
columns=[],
measure=mock_measure,
aggregation=mock_aggregation)
vd.start_time = '2019-04-11T22:33:44.555555Z'
mock_point = mock.Mock(spec=point.Point)
mock_point.value = mock.Mock(spec=value.ValueDistribution)
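# The excerpt stops before the conversion itself; the point of this setup is
# that a view built with an empty columns list should convert into a metric
# whose descriptor carries no label keys.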
def test_view_to_metric_descriptor(self):
mock_measure = mock.Mock(spec=measure.MeasureFloat)
mock_agg = mock.Mock(spec=aggregation.SumAggregation)
mock_agg.get_metric_type.return_value = \
metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE
test_view = view_module.View("name", "description", ["tk1", "tk2"],
mock_measure, mock_agg)
self.assertIsNone(test_view._metric_descriptor)
md = test_view.get_metric_descriptor()
self.assertTrue(isinstance(md, metric_descriptor.MetricDescriptor))
self.assertEqual(md.name, test_view.name)
self.assertEqual(md.description, test_view.description)
self.assertEqual(md.unit, test_view.measure.unit)
self.assertEqual(
md.type, metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE)
self.assertTrue(
    all(lk.key == col for lk, col
        in zip(md.label_keys, test_view.columns)))
def test_constructor_defaults(self):
name = "testName"
description = "testMeasure"
measure = measure_module.BaseMeasure(
name=name, description=description)
self.assertEqual(None, measure.unit)
from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.stats import view_data as view_data_module
from opencensus.stats.exporters import stackdriver_exporter as stackdriver
from opencensus.stats.exporters.stackdriver_exporter import set_metric_labels
from opencensus.tags import tag_key as tag_key_module
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module
MiB = 1 << 20
FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
FRONTEND_KEY_FLOAT = tag_key_module.TagKey("my.org/keys/frontend-FLOAT")
FRONTEND_KEY_INT = tag_key_module.TagKey("my.org/keys/frontend-INT")
FRONTEND_KEY_STR = tag_key_module.TagKey("my.org/keys/frontend-STR")
VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
"my.org/measure/video_size_test2", "size of processed videos", "By")
VIDEO_SIZE_MEASURE_2 = measure_module.MeasureInt(
"my.org/measure/video_size_test_2", "size of processed videos", "By")
VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat(
"my.org/measure/video_size_test-float", "size of processed videos-float",
"By")
VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
[16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(
VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY],
VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION)
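# Minimal usage sketch for the fixtures above (assumes the standard opencensus
# view_manager/stats_recorder API; this helper is not in the original file):
def _record_example_video_size(size_bytes=25 * MiB):
    view_manager = stats_module.stats.view_manager
    recorder = stats_module.stats.stats_recorder
    view_manager.register_view(VIDEO_SIZE_VIEW)
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("mobile"))
    mmap = recorder.new_measurement_map()
    mmap.measure_int_put(VIDEO_SIZE_MEASURE, size_bytes)
    mmap.record(tag_map)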
def get_metric_type(measure):
"""Get the MetricDescriptorType for the metric produced by this
aggregation and measure.
"""
if isinstance(measure, measure_module.MeasureInt):
return MetricDescriptorType.CUMULATIVE_INT64
if isinstance(measure, measure_module.MeasureFloat):
return MetricDescriptorType.CUMULATIVE_DOUBLE
raise ValueError("unsupported measure type: {}".format(type(measure)))
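# Example of the mapping above (measure names and units are arbitrary): an
# integer measure maps to CUMULATIVE_INT64, a float measure to
# CUMULATIVE_DOUBLE, and any other type raises ValueError.
assert get_metric_type(measure_module.MeasureInt("size", "desc", "By")) == \
    MetricDescriptorType.CUMULATIVE_INT64
assert get_metric_type(measure_module.MeasureFloat("latency", "desc", "ms")) == \
    MetricDescriptorType.CUMULATIVE_DOUBLE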