Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): this body reads like several distinct test methods pasted
# together with indentation stripped. From the mock_view lines onward it
# references mock_view / mock_measure / Mock fixtures that are never
# created here — verify against the original opencensus test suite.
def test_emit(self):
# Wire a Prometheus exporter into a fresh stats pipeline on a fixed port.
options = prometheus.Options(namespace="opencensus", port=9005)
stats = stats_module.Stats()
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder
exporter = prometheus.new_stats_exporter(options)
view_manager.register_exporter(exporter)
view_manager.register_view(VIDEO_SIZE_VIEW)
# Record a single 25 MiB measurement tagged with FRONTEND_KEY.
tag_value = tag_value_module.TagValue(str(1000))
tag_map = tag_map_module.TagMap()
tag_map.insert(FRONTEND_KEY, tag_value)
measure_map = stats_recorder.new_measurement_map()
measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
measure_map.record(tag_map)
# Export the collected view data and check the collector registered it.
exporter.export([
exporter.collector.view_name_to_data_map[
'opencensus_myorg_views_video_size_test2']])
self.assertIsInstance(
exporter.collector.view_name_to_data_map[
'opencensus_myorg_views_video_size_test2'],
view_data_module.ViewData)
self.assertEqual(REGISTERED_VIEW2, exporter.collector.registered_views)
self.assertEqual(options, exporter.options)
self.assertEqual(options.registry, exporter.gatherer)
self.assertIsNotNone(exporter.collector)
# Swap the registered exporter out and back in, then register a
# LastValueAggregation view for a float measure.
view_manager.unregister_exporter(
view_manager.measure_to_view_map.exporters[0])
view_manager.register_exporter(exporter)
agg_2 = aggregation_module.LastValueAggregation(value=2)
view_name2 = "view-name2"
new_view2 = view_module.View(
view_name2, "processed video size over time", [FRONTEND_KEY_FLOAT],
VIDEO_SIZE_MEASURE_FLOAT, agg_2)
view_manager.register_view(new_view2)
# NOTE(review): the TagValue is the string "Abc" inserted under the
# *_INT key — presumably intentional for this fixture; confirm.
tag_value_int = tag_value_module.TagValue("Abc")
tag_map = tag_map_module.TagMap()
tag_map.insert(FRONTEND_KEY_INT, tag_value_int)
measure_map = stats_recorder.new_measurement_map()
measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
measure_map.record(tag_map)
# One recorded point should yield exactly one time series.
v_data = measure_map.measure_to_view_map.get_view(view_name2, None)
time_series_list = exporter.create_time_series_list(
v_data, "global", "kubernetes.io/myorg")
# NOTE(review): assertEquals is a deprecated alias of assertEqual.
self.assertEquals(len(time_series_list), 1)
# NOTE(review): fragment boundary — mock_view / mock_measure / Mock are
# undefined in this body; the lines below belong to a different test.
mock_view.columns = ['k1']
stats.view_manager.measure_to_view_map.register_view(mock_view, Mock())
# Check that metrics are still empty until we record
empty_metrics = list(stats.get_metrics())
self.assertEqual(empty_metrics, [])
mm = stats.stats_recorder.new_measurement_map()
mm._measurement_map = {mock_measure: 1.0}
mock_view.aggregation = aggregation.DistributionAggregation()
mock_view.new_aggregation_data.return_value = \
mock_view.aggregation.new_aggregation_data()
tm = tag_map.TagMap()
tm.insert('k1', 'v1')
mm.record(tm)
# After recording, exactly one metric with one point should exist,
# and the point's value should be a distribution.
metrics = list(stats.get_metrics())
self.assertEqual(len(metrics), 1)
[metric] = metrics
self.assertEqual(len(metric.time_series), 1)
[ts] = metric.time_series
self.assertEqual(len(ts.points), 1)
[point] = ts.points
self.assertTrue(isinstance(point.value, value.ValueDistribution))
def test_to_byte_array(self):
    """A four-entry TagMap must serialize to the expected binary layout."""
    from opencensus.tags.tag_map import TagMap

    # key1..key4 mapped to val1..val4, in insertion order.
    tag_list = [
        Tag(TagKey('key%d' % i), TagValue('val%d' % i))
        for i in range(1, 5)
    ]
    context = TagMap(tags=tag_list)

    serializer = binary_serializer.BinarySerializer()
    encoded = serializer.to_byte_array(context)

    expected_binary = b'\x00\x00\x04key1\x04val1\x00\x04key2\x04val2\x00' \
                      b'\x04key3\x04val3\x00\x04key4\x04val4'
    self.assertEqual(encoded, expected_binary)
# NOTE(review): orphaned fragment — the enclosing def is not visible here
# and `view_manager` / `exporter` / `stats_recorder` come from elsewhere.
# Replace the current exporter registration before adding the Sum view.
view_manager.unregister_exporter(
view_manager.measure_to_view_map.exporters[0])
view_manager.register_exporter(exporter)
# SumAggregation view over the float video-size measure.
agg_3 = aggregation_module.SumAggregation(sum=2.2)
view_name3 = "view-name3"
new_view3 = view_module.View(
view_name3, "processed video size over time", [FRONTEND_KEY_FLOAT],
VIDEO_SIZE_MEASURE_FLOAT, agg_3)
view_manager.register_view(new_view3)
# Record one tagged 25 MiB float measurement.
tag_value_float = tag_value_module.TagValue("1200")
tag_map = tag_map_module.TagMap()
tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)
measure_map = stats_recorder.new_measurement_map()
measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
measure_map.record(tag_map)
# A single recorded point should produce one time series whose metric
# type is derived from the view name.
v_data = measure_map.measure_to_view_map.get_view(view_name3, None)
time_series_list = exporter.create_time_series_list(
v_data, "global", "")
# NOTE(review): assertEquals is a deprecated alias of assertEqual.
self.assertEquals(len(time_series_list), 1)
self.assertEquals(time_series_list[0].metric.type,
"custom.googleapis.com/opencensus/view-name3")
self.assertIsNotNone(time_series_list)
def from_byte_array(self, binary):
    """Deserialize a binary-encoded tag context into a TagMap.

    :param binary: serialized tag-context bytes; byte 0 must carry the
        supported VERSION_ID.
    :return: the parsed TagMap, or an empty TagMap when *binary* is empty.
    :raises ValueError: if the version byte does not match VERSION_ID.
    """
    # Guard clause: an empty payload yields an empty tag context.
    if len(binary) <= 0:
        # Fix: the original message ended with a stray "/" typo.
        logging.warning("Input byte[] cannot be empty.")
        return tag_map_module.TagMap()

    # Zero-copy view over the payload; byte 0 is the format version.
    buffer = memoryview(binary)
    version_id = buffer[0]
    if six.PY2:
        # On Python 2 a memoryview item is a 1-char str, not an int.
        version_id = ord(version_id)
    if version_id != VERSION_ID:
        raise ValueError("Invalid version id.")
    return self._parse_tags(buffer)
def main():
    """Record one carrot measurement and give the exporter time to flush."""
    # Enable metrics
    # Set the interval in seconds in which you want to send metrics
    exporter = metrics_exporter.new_metrics_exporter()
    view_manager.register_exporter(exporter)
    view_manager.register_view(CARROTS_VIEW)

    # Record a single untagged measurement of 1000 carrots.
    measurement_map = stats_recorder.new_measurement_map()
    empty_tags = tag_map_module.TagMap()
    measurement_map.measure_int_put(CARROTS_MEASURE, 1000)
    measurement_map.record(empty_tags)

    # Keep the process alive long enough for the exporter to ship the point.
    time.sleep(60)
    print("Done recording metrics")
def log_metric(
    self, name="", value="", description="", log_parent=False,
):
    """Send one custom metric value to appInsights.

    :param name: name of the metric
    :param value: value of the metric
    :param description: description of the metric
    :param log_parent: unused by this logger
    :return: None
    """
    # Fresh recording context for this single metric sample.
    recorder = stats_module.stats.stats_recorder
    measurement_map = recorder.new_measurement_map()
    empty_tags = tag_map_module.TagMap()

    # Register (or refresh) a view for this metric, then record the value.
    measure = measure_module.MeasureFloat(name, description)
    self.set_view(name, description, measure)
    measurement_map.measure_float_put(measure, value)
    measurement_map.record(empty_tags)

    # Default export interval is every 15.0s
    # Your application should run for at least this amount
    # of time so the exporter will meet this interval
    # Sleep can fulfill this https://pypi.org/project/opencensus-ext-azure/
    time.sleep(self.export_interval)
def main():
    """Record 100 request measurements tagged with a fixed URL."""
    # Enable metrics
    # Set the interval in seconds in which you want to send metrics
    exporter = metrics_exporter.new_metrics_exporter()
    view_manager.register_exporter(exporter)
    view_manager.register_view(NUM_REQUESTS_VIEW)

    measurement_map = stats_recorder.new_measurement_map()
    tags = tag_map_module.TagMap()
    tags.insert("url", "http://example.com")

    # One measurement per second so the exporter interval elapses mid-run.
    for count in range(100):
        print(count)
        measurement_map.measure_int_put(REQUEST_MEASURE, count)
        measurement_map.record(tags)
        time.sleep(1)
    print("Done recording metrics")