diff --git a/CHANGELOG.md b/CHANGELOG.md
index 614f240d4ee..6c9f425faad 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -36,6 +36,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
([#5120](https://github.com/open-telemetry/opentelemetry-python/pull/5120))
- Add WeaverLiveCheck test util
([#5088](https://github.com/open-telemetry/opentelemetry-python/pull/5088))
+- `opentelemetry-exporter-prometheus`: add instrumentation scope labels (`otel_scope_*`) to exported metrics, with a `without_scope_info` option to omit them
+ ([#5123](https://github.com/open-telemetry/opentelemetry-python/pull/5123))
## Version 1.41.0/0.62b0 (2026-04-09)
diff --git a/docs/exporter/prometheus/prometheus.rst b/docs/exporter/prometheus/prometheus.rst
index d7a46793312..84dbccbda9d 100644
--- a/docs/exporter/prometheus/prometheus.rst
+++ b/docs/exporter/prometheus/prometheus.rst
@@ -39,6 +39,46 @@ Prometheus text format on request::
provider = MeterProvider(resource=resource, metric_readers=[reader])
metrics.set_meter_provider(provider)
+Scope labels
+------------
+
+By default, the Prometheus exporter adds instrumentation scope information as
+labels on every exported metric. These labels include ``otel_scope_name``,
+``otel_scope_version``, and ``otel_scope_schema_url``. Instrumentation scope
+attributes are exported with the ``otel_scope_`` prefix::
+
+ from prometheus_client import start_http_server
+
+ from opentelemetry import metrics
+ from opentelemetry.exporter.prometheus import PrometheusMetricReader
+ from opentelemetry.sdk.metrics import MeterProvider
+
+ start_http_server(port=9464, addr="localhost")
+ reader = PrometheusMetricReader()
+ provider = MeterProvider(metric_readers=[reader])
+ metrics.set_meter_provider(provider)
+
+ meter = metrics.get_meter(
+ "checkout",
+ "1.2.3",
+ schema_url="https://opentelemetry.io/schemas/1.21.0",
+ attributes={"region": "us-east-1"},
+ )
+ counter = meter.create_counter("orders")
+ counter.add(1, {"environment": "production"})
+
+The exported metric includes labels such as
+``otel_scope_name="checkout"``,
+``otel_scope_version="1.2.3"``,
+``otel_scope_schema_url="https://opentelemetry.io/schemas/1.21.0"``,
+``otel_scope_region="us-east-1"``, and
+``environment="production"``.
+
+To omit instrumentation scope labels from exported metrics, set
+``without_scope_info`` to ``True``::
+
+ reader = PrometheusMetricReader(without_scope_info=True)
+
Configuration
-------------
@@ -56,4 +96,4 @@ References
----------
* `Prometheus <https://prometheus.io/>`_
-* `OpenTelemetry Project <https://opentelemetry.io/>`_
\ No newline at end of file
+* `OpenTelemetry Project <https://opentelemetry.io/>`_
diff --git a/exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py b/exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py
index 608d8f6d302..cf26a972ef2 100644
--- a/exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py
+++ b/exporter/opentelemetry-exporter-prometheus/src/opentelemetry/exporter/prometheus/__init__.py
@@ -67,7 +67,17 @@
from json import dumps
from logging import getLogger
from os import environ
-from typing import Deque, Dict, Iterable, Sequence, Tuple, Union
+from typing import (
+ Any,
+ Callable,
+ Deque,
+ Dict,
+ Iterable,
+ Sequence,
+ Tuple,
+ TypeVar,
+ Union,
+)
from prometheus_client import start_http_server
from prometheus_client.core import (
@@ -101,20 +111,27 @@
Gauge,
Histogram,
HistogramDataPoint,
+ Metric,
MetricReader,
MetricsData,
Sum,
)
+from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.semconv._incubating.attributes.otel_attributes import (
OtelComponentTypeValues,
)
-from opentelemetry.util.types import Attributes
+from opentelemetry.util.types import Attributes, AttributeValue
_logger = getLogger(__name__)
_TARGET_INFO_NAME = "target"
_TARGET_INFO_DESCRIPTION = "Target metadata"
+_OTEL_SCOPE_NAME_LABEL = "otel_scope_name"
+_OTEL_SCOPE_VERSION_LABEL = "otel_scope_version"
+_OTEL_SCOPE_SCHEMA_URL_LABEL = "otel_scope_schema_url"
+_OTEL_SCOPE_ATTR_PREFIX = "otel_scope_"
+
def _convert_buckets(
bucket_counts: Sequence[int], explicit_bounds: Sequence[float]
@@ -131,11 +148,136 @@ def _convert_buckets(
return buckets
+def _should_convert_sum_to_gauge(metric: Metric) -> bool:
+ # The Prometheus compatibility spec requires cumulative non-monotonic Sums
+ # to be exported as Gauges.
+ if not isinstance(metric.data, Sum):
+ return False
+ return (
+ not metric.data.is_monotonic
+ and metric.data.aggregation_temporality
+ == AggregationTemporality.CUMULATIVE
+ )
+
+
+_FamilyT = TypeVar("_FamilyT", bound=PrometheusMetric)
+
+
+def _get_or_create_family(
+ registry: dict[str, PrometheusMetric],
+ family_id: str,
+ factory: Callable[..., _FamilyT],
+ *,
+ name: str,
+ documentation: str,
+ labels: Sequence[str],
+ unit: str,
+) -> _FamilyT:
+ if family_id not in registry:
+ registry[family_id] = factory(
+ name=name,
+ documentation=documentation,
+ labels=labels,
+ unit=unit,
+ )
+ return registry[family_id]
+
+
+def _populate_counter_family(
+ registry: dict[str, PrometheusMetric],
+ per_metric_family_id: str,
+ metric_name: str,
+ description: str,
+ unit: str,
+ label_keys: Sequence[str],
+ label_rows: Sequence[Sequence[str]],
+ values: Sequence[float],
+) -> None:
+ family_id = "|".join([per_metric_family_id, CounterMetricFamily.__name__])
+ family = _get_or_create_family(
+ registry,
+ family_id,
+ CounterMetricFamily,
+ name=metric_name,
+ documentation=description,
+ labels=label_keys,
+ unit=unit,
+ )
+ for label_values, value in zip(label_rows, values):
+ family.add_metric(labels=label_values, value=value)
+
+
+def _populate_gauge_family(
+ registry: dict[str, PrometheusMetric],
+ per_metric_family_id: str,
+ metric_name: str,
+ description: str,
+ unit: str,
+ label_keys: Sequence[str],
+ label_rows: Sequence[Sequence[str]],
+ values: Sequence[float],
+) -> None:
+ family_id = "|".join([per_metric_family_id, GaugeMetricFamily.__name__])
+ family = _get_or_create_family(
+ registry,
+ family_id,
+ GaugeMetricFamily,
+ name=metric_name,
+ documentation=description,
+ labels=label_keys,
+ unit=unit,
+ )
+ for label_values, value in zip(label_rows, values):
+ family.add_metric(labels=label_values, value=value)
+
+
+def _populate_histogram_family(
+ registry: dict[str, PrometheusMetric],
+ per_metric_family_id: str,
+ metric_name: str,
+ description: str,
+ unit: str,
+ label_keys: Sequence[str],
+ label_rows: Sequence[Sequence[str]],
+ values: Sequence[dict[str, Any]],
+) -> None:
+ family_id = "|".join(
+ [per_metric_family_id, HistogramMetricFamily.__name__]
+ )
+ family = _get_or_create_family(
+ registry,
+ family_id,
+ HistogramMetricFamily,
+ name=metric_name,
+ documentation=description,
+ labels=label_keys,
+ unit=unit,
+ )
+ for label_values, value in zip(label_rows, values):
+ family.add_metric(
+ labels=label_values,
+ buckets=_convert_buckets(
+ value["bucket_counts"], value["explicit_bounds"]
+ ),
+ sum_value=value["sum"],
+ )
+
+
class PrometheusMetricReader(MetricReader):
- """Prometheus metric exporter for OpenTelemetry."""
+ """Prometheus metric exporter for OpenTelemetry.
+
+ Args:
+ disable_target_info: Whether to disable the ``target_info`` metric.
+ without_scope_info: Whether to omit instrumentation scope labels from
+ exported metrics. Scope labels are exported by default.
+ prefix: Prefix added to exported Prometheus metric names.
+ """
def __init__(
- self, disable_target_info: bool = False, prefix: str = ""
+ self,
+ disable_target_info: bool = False,
+ without_scope_info: bool = False,
+ prefix: str = "",
) -> None:
super().__init__(
preferred_temporality={
@@ -149,7 +291,9 @@ def __init__(
otel_component_type=OtelComponentTypeValues.PROMETHEUS_HTTP_TEXT_METRIC_EXPORTER,
)
self._collector = _CustomCollector(
- disable_target_info=disable_target_info, prefix=prefix
+ disable_target_info=disable_target_info,
+ without_scope_info=without_scope_info,
+ prefix=prefix,
)
REGISTRY.register(self._collector)
self._collector._callback = self.collect
@@ -176,10 +320,16 @@ class _CustomCollector:
https://github.com/prometheus/client_python#custom-collectors
"""
- def __init__(self, disable_target_info: bool = False, prefix: str = ""):
+ def __init__(
+ self,
+ disable_target_info: bool = False,
+ without_scope_info: bool = False,
+ prefix: str = "",
+ ):
self._callback = None
self._metrics_datas: Deque[MetricsData] = deque()
self._disable_target_info = disable_target_info
+ self._without_scope_info = without_scope_info
self._target_info = None
self._prefix = prefix
@@ -220,167 +370,130 @@ def collect(self) -> Iterable[PrometheusMetric]:
if metric_family_id_metric_family:
yield from metric_family_id_metric_family.values()
- # pylint: disable=too-many-locals,too-many-branches
def _translate_to_prometheus(
self,
metrics_data: MetricsData,
- metric_family_id_metric_family: Dict[str, PrometheusMetric],
+ metric_family_id_metric_family: dict[str, PrometheusMetric],
):
- metrics = []
-
- for resource_metrics in metrics_data.resource_metrics:
- for scope_metrics in resource_metrics.scope_metrics:
- for metric in scope_metrics.metrics:
- metrics.append(metric)
-
- for metric in metrics:
- label_values_data_points = []
- values = []
-
- metric_name = metric.name
- if self._prefix:
- metric_name = self._prefix + "_" + metric_name
- metric_name = sanitize_full_name(metric_name)
- metric_description = metric.description or ""
- metric_unit = map_unit(metric.unit)
-
- # First pass: collect all unique label keys across all data points
- all_label_keys_set = set()
- data_point_attributes = []
- for number_data_point in metric.data.data_points:
- attrs = {}
- for key, value in number_data_point.attributes.items():
- sanitized_key = sanitize_attribute(key)
- all_label_keys_set.add(sanitized_key)
- attrs[sanitized_key] = self._check_value(value)
- data_point_attributes.append(attrs)
-
- if isinstance(number_data_point, HistogramDataPoint):
- values.append(
- {
- "bucket_counts": number_data_point.bucket_counts,
- "explicit_bounds": (
- number_data_point.explicit_bounds
- ),
- "sum": number_data_point.sum,
- }
+ for rm in metrics_data.resource_metrics:
+ for sm in rm.scope_metrics:
+ scope_attrs = self._build_scope_attrs(sm.scope)
+ for metric in sm.metrics:
+ self._translate_metric(
+ metric,
+ scope_attrs,
+ metric_family_id_metric_family,
)
- else:
- values.append(number_data_point.value)
-
- # Sort label keys for consistent ordering
- all_label_keys = sorted(all_label_keys_set)
-
- # Second pass: build label values with empty strings for missing labels
- for attrs in data_point_attributes:
- label_values = []
- for key in all_label_keys:
- label_values.append(attrs.get(key, ""))
- label_values_data_points.append(label_values)
-
- # Create metric family ID without label keys
- per_metric_family_id = "|".join(
- [
- metric_name,
- metric_description,
- metric_unit,
- ]
- )
- is_non_monotonic_sum = (
- isinstance(metric.data, Sum)
- and metric.data.is_monotonic is False
+ def _translate_metric(
+ self,
+ metric: Metric,
+ scope_attrs: dict[str, Any],
+ metric_family_id_metric_family: dict[str, PrometheusMetric],
+ ) -> None:
+ metric_name = self._resolve_metric_name(metric.name)
+ description = metric.description or ""
+ unit = map_unit(metric.unit or "")
+ label_keys, label_rows, values = self._collect_data_points(
+ metric, scope_attrs
+ )
+ per_metric_family_id = "|".join((metric_name, description, unit))
+
+ convert_sum_to_gauge = _should_convert_sum_to_gauge(metric)
+
+ if isinstance(metric.data, Sum) and not convert_sum_to_gauge:
+ _populate_counter_family(
+ metric_family_id_metric_family,
+ per_metric_family_id,
+ metric_name,
+ description,
+ unit,
+ label_keys,
+ label_rows,
+ values,
)
- is_cumulative = (
- isinstance(metric.data, Sum)
- and metric.data.aggregation_temporality
- == AggregationTemporality.CUMULATIVE
+ elif isinstance(metric.data, Gauge) or convert_sum_to_gauge:
+ _populate_gauge_family(
+ metric_family_id_metric_family,
+ per_metric_family_id,
+ metric_name,
+ description,
+ unit,
+ label_keys,
+ label_rows,
+ values,
)
-
- # The prometheus compatibility spec for sums says: If the aggregation temporality is cumulative and the sum is non-monotonic, it MUST be converted to a Prometheus Gauge.
- should_convert_sum_to_gauge = (
- is_non_monotonic_sum and is_cumulative
+ elif isinstance(metric.data, Histogram):
+ _populate_histogram_family(
+ metric_family_id_metric_family,
+ per_metric_family_id,
+ metric_name,
+ description,
+ unit,
+ label_keys,
+ label_rows,
+ values,
)
-
- if (
- isinstance(metric.data, Sum)
- and not should_convert_sum_to_gauge
+ else:
+ _logger.warning("Unsupported metric data. %s", type(metric.data))
+
+ def _build_scope_attrs(
+ self, scope: InstrumentationScope
+ ) -> dict[str, AttributeValue]:
+ if self._without_scope_info:
+ return {}
+ attrs: dict[str, AttributeValue] = {}
+ if scope.attributes:
+ for key, value in scope.attributes.items():
+ attrs[_OTEL_SCOPE_ATTR_PREFIX + key] = value
+ attrs[_OTEL_SCOPE_NAME_LABEL] = scope.name or ""
+ attrs[_OTEL_SCOPE_VERSION_LABEL] = scope.version or ""
+ attrs[_OTEL_SCOPE_SCHEMA_URL_LABEL] = scope.schema_url or ""
+ return attrs
+
+ def _resolve_metric_name(self, name: str) -> str:
+ if self._prefix:
+ name = self._prefix + "_" + name
+ return sanitize_full_name(name)
+
+ def _collect_data_points(
+ self,
+ metric: Metric,
+ scope_attrs: dict[str, AttributeValue],
+ ) -> tuple[list[str], list[list[str]], list[Any]]:
+ keys: set[str] = set()
+ rows: list[dict[str, str]] = []
+ values: list = []
+
+ for point in metric.data.data_points:
+ labels: dict[str, str] = {}
+ for key, value in chain(
+ scope_attrs.items(),
+ point.attributes.items(),
):
- metric_family_id = "|".join(
- [per_metric_family_id, CounterMetricFamily.__name__]
- )
-
- if metric_family_id not in metric_family_id_metric_family:
- metric_family_id_metric_family[metric_family_id] = (
- CounterMetricFamily(
- name=metric_name,
- documentation=metric_description,
- labels=all_label_keys,
- unit=metric_unit,
- )
- )
- for label_values, value in zip(
- label_values_data_points, values
- ):
- metric_family_id_metric_family[
- metric_family_id
- ].add_metric(labels=label_values, value=value)
- elif isinstance(metric.data, Gauge) or should_convert_sum_to_gauge:
- metric_family_id = "|".join(
- [per_metric_family_id, GaugeMetricFamily.__name__]
- )
-
- if (
- metric_family_id
- not in metric_family_id_metric_family.keys()
- ):
- metric_family_id_metric_family[metric_family_id] = (
- GaugeMetricFamily(
- name=metric_name,
- documentation=metric_description,
- labels=all_label_keys,
- unit=metric_unit,
- )
- )
- for label_values, value in zip(
- label_values_data_points, values
- ):
- metric_family_id_metric_family[
- metric_family_id
- ].add_metric(labels=label_values, value=value)
- elif isinstance(metric.data, Histogram):
- metric_family_id = "|".join(
- [per_metric_family_id, HistogramMetricFamily.__name__]
+ label = sanitize_attribute(key)
+ keys.add(label)
+ labels[label] = self._check_value(value)
+ rows.append(labels)
+
+ if isinstance(point, HistogramDataPoint):
+ values.append(
+ {
+ "bucket_counts": point.bucket_counts,
+ "explicit_bounds": point.explicit_bounds,
+ "sum": point.sum,
+ }
)
-
- if (
- metric_family_id
- not in metric_family_id_metric_family.keys()
- ):
- metric_family_id_metric_family[metric_family_id] = (
- HistogramMetricFamily(
- name=metric_name,
- documentation=metric_description,
- labels=all_label_keys,
- unit=metric_unit,
- )
- )
- for label_values, value in zip(
- label_values_data_points, values
- ):
- metric_family_id_metric_family[
- metric_family_id
- ].add_metric(
- labels=label_values,
- buckets=_convert_buckets(
- value["bucket_counts"], value["explicit_bounds"]
- ),
- sum_value=value["sum"],
- )
else:
- _logger.warning(
- "Unsupported metric data. %s", type(metric.data)
- )
+ values.append(point.value)
+
+ label_keys = sorted(keys)
+ # Backfill missing labels with "" so every data point exposes the
+ # full label set expected by the Prometheus family.
+ label_rows = [
+ [labels.get(k, "") for k in label_keys] for labels in rows
+ ]
+ return label_keys, label_rows, values
# pylint: disable=no-self-use
def _check_value(self, value: Union[int, float, str, Sequence]) -> str:
diff --git a/exporter/opentelemetry-exporter-prometheus/tests/test_prometheus_exporter.py b/exporter/opentelemetry-exporter-prometheus/tests/test_prometheus_exporter.py
index 26770c9e1f4..014d1c3789d 100644
--- a/exporter/opentelemetry-exporter-prometheus/tests/test_prometheus_exporter.py
+++ b/exporter/opentelemetry-exporter-prometheus/tests/test_prometheus_exporter.py
@@ -24,6 +24,10 @@
)
from opentelemetry.exporter.prometheus import (
+ _OTEL_SCOPE_ATTR_PREFIX,
+ _OTEL_SCOPE_NAME_LABEL,
+ _OTEL_SCOPE_SCHEMA_URL_LABEL,
+ _OTEL_SCOPE_VERSION_LABEL,
PrometheusMetricReader,
_CustomCollector,
)
@@ -39,6 +43,7 @@
ScopeMetrics,
)
from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.test.metrictestutil import (
_generate_gauge,
_generate_histogram,
@@ -47,6 +52,7 @@
)
+# pylint: disable=too-many-public-methods
class TestPrometheusMetricReader(TestCase):
def setUp(self):
self._mock_registry_register = Mock()
@@ -74,7 +80,9 @@ def verify_text_format(
]
)
- collector = _CustomCollector(disable_target_info=True, prefix=prefix)
+ collector = _CustomCollector(
+ disable_target_info=True, without_scope_info=True, prefix=prefix
+ )
collector.add_metrics_data(metrics_data)
result_bytes = generate_latest(collector)
result = result_bytes.decode("utf-8")
@@ -158,7 +166,9 @@ def test_monotonic_sum_to_prometheus(self):
]
)
- collector = _CustomCollector(disable_target_info=True)
+ collector = _CustomCollector(
+ disable_target_info=True, without_scope_info=True
+ )
collector.add_metrics_data(metrics_data)
for prometheus_metric in collector.collect():
@@ -204,7 +214,9 @@ def test_non_monotonic_sum_to_prometheus(self):
]
)
- collector = _CustomCollector(disable_target_info=True)
+ collector = _CustomCollector(
+ disable_target_info=True, without_scope_info=True
+ )
collector.add_metrics_data(metrics_data)
for prometheus_metric in collector.collect():
@@ -249,7 +261,9 @@ def test_gauge_to_prometheus(self):
]
)
- collector = _CustomCollector(disable_target_info=True)
+ collector = _CustomCollector(
+ disable_target_info=True, without_scope_info=True
+ )
collector.add_metrics_data(metrics_data)
for prometheus_metric in collector.collect():
@@ -301,7 +315,9 @@ def test_list_labels(self):
)
]
)
- collector = _CustomCollector(disable_target_info=True)
+ collector = _CustomCollector(
+ disable_target_info=True, without_scope_info=True
+ )
collector.add_metrics_data(metrics_data)
for prometheus_metric in collector.collect():
@@ -666,6 +682,131 @@ def test_semconv(self):
),
)
+ def test_scope_info_labels_default(self):
+ scope = InstrumentationScope(
+ name="library.test",
+ version="1.2.3",
+ schema_url="schema_url",
+ )
+ metric = _generate_gauge(
+ "test_gauge",
+ 42,
+ attributes={"env": "prod"},
+ description="testdesc",
+ unit="",
+ )
+ metrics_data = MetricsData(
+ resource_metrics=[
+ ResourceMetrics(
+ resource=Mock(),
+ scope_metrics=[
+ ScopeMetrics(
+ scope=scope,
+ metrics=[metric],
+ schema_url="schema_url",
+ )
+ ],
+ schema_url="schema_url",
+ )
+ ]
+ )
+ collector = _CustomCollector(disable_target_info=True)
+ collector.add_metrics_data(metrics_data)
+
+ for prometheus_metric in collector.collect():
+ labels = prometheus_metric.samples[0].labels
+ self.assertEqual(labels[_OTEL_SCOPE_NAME_LABEL], "library.test")
+ self.assertEqual(labels[_OTEL_SCOPE_VERSION_LABEL], "1.2.3")
+ self.assertEqual(
+ labels[_OTEL_SCOPE_SCHEMA_URL_LABEL],
+ "schema_url",
+ )
+ self.assertEqual(labels["env"], "prod")
+
+ def test_scope_info_disabled(self):
+ scope = InstrumentationScope(name="library.test", version="1.2.3")
+ metric = _generate_gauge(
+ "test_gauge",
+ 42,
+ attributes={"env": "prod"},
+ description="testdesc",
+ unit="",
+ )
+ metrics_data = MetricsData(
+ resource_metrics=[
+ ResourceMetrics(
+ resource=Mock(),
+ scope_metrics=[
+ ScopeMetrics(
+ scope=scope,
+ metrics=[metric],
+ schema_url="schema_url",
+ )
+ ],
+ schema_url="schema_url",
+ )
+ ]
+ )
+ collector = _CustomCollector(
+ disable_target_info=True, without_scope_info=True
+ )
+ collector.add_metrics_data(metrics_data)
+
+ for prometheus_metric in collector.collect():
+ labels = prometheus_metric.samples[0].labels
+ self.assertNotIn(_OTEL_SCOPE_NAME_LABEL, labels)
+ self.assertNotIn(_OTEL_SCOPE_VERSION_LABEL, labels)
+ self.assertNotIn(_OTEL_SCOPE_SCHEMA_URL_LABEL, labels)
+ self.assertNotIn(_OTEL_SCOPE_ATTR_PREFIX + "region", labels)
+
+ def test_scope_attributes_labels(self):
+ scope = InstrumentationScope(
+ name="library.test",
+ version="1.0",
+ schema_url="schema_url",
+ attributes={
+ "region": "us-east-1",
+ "name": "should-be-dropped",
+ "version": "should-be-dropped",
+ "schema_url": "should-be-dropped",
+ },
+ )
+ metric = _generate_gauge(
+ "test_gauge",
+ 7,
+ attributes={},
+ description="testdesc",
+ unit="",
+ )
+ metrics_data = MetricsData(
+ resource_metrics=[
+ ResourceMetrics(
+ resource=Mock(),
+ scope_metrics=[
+ ScopeMetrics(
+ scope=scope,
+ metrics=[metric],
+ schema_url="schema_url",
+ )
+ ],
+ schema_url="schema_url",
+ )
+ ]
+ )
+ collector = _CustomCollector(disable_target_info=True)
+ collector.add_metrics_data(metrics_data)
+
+ for prometheus_metric in collector.collect():
+ labels = prometheus_metric.samples[0].labels
+ self.assertEqual(
+ labels[_OTEL_SCOPE_ATTR_PREFIX + "region"], "us-east-1"
+ )
+ self.assertEqual(labels[_OTEL_SCOPE_NAME_LABEL], "library.test")
+ self.assertEqual(labels[_OTEL_SCOPE_VERSION_LABEL], "1.0")
+ self.assertEqual(
+ labels[_OTEL_SCOPE_SCHEMA_URL_LABEL], "schema_url"
+ )
+
def test_multiple_data_points_with_different_label_sets(self):
hist_point_1 = HistogramDataPoint(
attributes={"http_target": "/foobar", "net_host_port": 8080},