diff --git a/NOTICE.txt b/NOTICE.txt index 3382561..6ff4a3b 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -6,7 +6,7 @@ The following notices are required by licensors of software used in the Snowflak -------------------------------------------------------------------------------- This library includes software which is copied from or derived from the OpenTelemetry Python API and SDK. -OpenTelemetry Python v1.26.0 +OpenTelemetry Python v1.35.0 https://github.com/open-telemetry/opentelemetry-python Apache License diff --git a/anaconda/meta.yaml b/anaconda/meta.yaml index 73f7b35..f1a06af 100644 --- a/anaconda/meta.yaml +++ b/anaconda/meta.yaml @@ -14,8 +14,8 @@ requirements: - setuptools >=40.0.0 run: - python - - opentelemetry-api ==1.26.0 - - opentelemetry-sdk ==1.26.0 + - opentelemetry-api ==1.35.0 + - opentelemetry-sdk ==1.35.0 about: home: https://www.snowflake.com/ diff --git a/scripts/vendor_otlp_proto_common.sh b/scripts/vendor_otlp_proto_common.sh index eae16a2..74269af 100755 --- a/scripts/vendor_otlp_proto_common.sh +++ b/scripts/vendor_otlp_proto_common.sh @@ -9,7 +9,7 @@ # fixes needed in the OTLP exporter. # Pinned commit/branch/tag for the current version used in opentelemetry-proto python package. -REPO_BRANCH_OR_COMMIT="v1.26.0" +REPO_BRANCH_OR_COMMIT="v1.35.0" set -e diff --git a/setup.py b/setup.py index b85694e..54d740c 100644 --- a/setup.py +++ b/setup.py @@ -30,8 +30,8 @@ long_description=LONG_DESCRIPTION, python_requires=REQUIRED_PYTHON_VERSION, install_requires=[ - "opentelemetry-api == 1.26.0", - "opentelemetry-sdk == 1.26.0", + "opentelemetry-api == 1.35.0", + "opentelemetry-sdk == 1.35.0", ], packages=find_namespace_packages( where='src' diff --git a/src/snowflake/telemetry/_internal/exporter/otlp/proto/logs/__init__.py b/src/snowflake/telemetry/_internal/exporter/otlp/proto/logs/__init__.py index 34ec155..5ec2c0c 100644 --- a/src/snowflake/telemetry/_internal/exporter/otlp/proto/logs/__init__.py +++ b/src/snowflake/telemetry/_internal/exporter/otlp/proto/logs/__init__.py @@ -15,11 +15,7 @@ import abc import logging -import logging.config -import threading import typing -import opentelemetry.sdk.util.instrumentation as otel_instrumentation -import opentelemetry.sdk._logs._internal as _logs_internal from snowflake.telemetry._internal.opentelemetry.exporter.otlp.proto.common._log_encoder import ( encode_logs, @@ -84,16 +80,22 @@ class SnowflakeLoggingHandler(_logs.LoggingHandler): discarded by the original implementation. """ - LOGGER_NAME_TEMP_ATTRIBUTE = "__snow.logging.temp.logger_name" + CODE_FILEPATH: typing.Final = "code.filepath" + CODE_FILE_PATH: typing.Final = "code.file.path" + CODE_FUNCTION: typing.Final = "code.function" + CODE_FUNCTION_NAME: typing.Final = "code.function.name" + CODE_LINENO: typing.Final = "code.lineno" + CODE_LINE_NUMBER: typing.Final = "code.line.number" def __init__( self, log_writer: LogWriter, ): exporter = _ProtoLogExporter(log_writer) - provider = _SnowflakeTelemetryLoggerProvider() - provider.add_log_record_processor( - export.SimpleLogRecordProcessor(exporter) + processor = export.SimpleLogRecordProcessor(exporter) + provider = _logs.LoggerProvider( + resource=Resource.get_empty(), + multi_log_record_processor=processor ) super().__init__(logger_provider=provider) @@ -101,12 +103,17 @@ def __init__( def _get_attributes(record: logging.LogRecord) -> types.Attributes: attributes = _logs.LoggingHandler._get_attributes(record) # pylint: disable=protected-access - # Temporarily storing logger's name in record's attributes. 
- # This attribute will be removed by the logger. - # - # TODO (SNOW-1235374): opentelemetry-python issue #2485: Record logger - # name as the instrumentation scope name - attributes[SnowflakeLoggingHandler.LOGGER_NAME_TEMP_ATTRIBUTE] = record.name + # Preserving old naming conventions for code attributes that were changed as part of + # https://github.com/open-telemetry/opentelemetry-python/commit/1b1e8d80c764ad3aa76abfb56a7002ddea11fdb5 in + # order to avoid a behavior change for Snowflake customers. + if SnowflakeLoggingHandler.CODE_FILE_PATH in attributes: + attributes[SnowflakeLoggingHandler.CODE_FILEPATH] = attributes.pop(SnowflakeLoggingHandler.CODE_FILE_PATH) + if SnowflakeLoggingHandler.CODE_FUNCTION_NAME in attributes: + attributes[SnowflakeLoggingHandler.CODE_FUNCTION] = attributes.pop( + SnowflakeLoggingHandler.CODE_FUNCTION_NAME) + if SnowflakeLoggingHandler.CODE_LINE_NUMBER in attributes: + attributes[SnowflakeLoggingHandler.CODE_LINENO] = attributes.pop(SnowflakeLoggingHandler.CODE_LINE_NUMBER) + return attributes def _translate(self, record: logging.LogRecord) -> _logs.LogRecord: @@ -115,75 +122,6 @@ def _translate(self, record: logging.LogRecord) -> _logs.LogRecord: return otel_record -class _SnowflakeTelemetryLogger(_logs.Logger): - """ - An Open Telemetry Logger which creates an InstrumentationScope for each - logger name it encounters. - """ - - def __init__( - self, - resource: Resource, - multi_log_record_processor: typing.Union[ - _logs_internal.SynchronousMultiLogRecordProcessor, - _logs_internal.ConcurrentMultiLogRecordProcessor, - ], - instrumentation_scope: otel_instrumentation.InstrumentationScope, - ): - super().__init__(resource, multi_log_record_processor, instrumentation_scope) - self._lock = threading.Lock() - self.cached_scopes = {} - - def emit(self, record: _logs.LogRecord): - if SnowflakeLoggingHandler.LOGGER_NAME_TEMP_ATTRIBUTE not in record.attributes: - # The record doesn't contain our custom attribute with a logger name, - # so we can call the superclass's `emit` method. It will emit a log - # record with the default instrumentation scope. - super().emit(record) - return - - # Creating an InstrumentationScope for each logger name, - # and caching those scopes. - logger_name = record.attributes[SnowflakeLoggingHandler.LOGGER_NAME_TEMP_ATTRIBUTE] - del record.attributes[SnowflakeLoggingHandler.LOGGER_NAME_TEMP_ATTRIBUTE] - with self._lock: - if logger_name in self.cached_scopes: - current_scope = self.cached_scopes[logger_name] - else: - current_scope = otel_instrumentation.InstrumentationScope(logger_name) - self.cached_scopes[logger_name] = current_scope - - # Emitting a record with a scope that corresponds to the logger - # that logged it. NOT calling the superclass here for two reasons: - # 1. Logger.emit takes a LogRecord, not LogData. - # 2. It would emit a log record with the default instrumentation scope, - # not with the scope we want. 
- log_data = _logs.LogData(record, current_scope) - self._multi_log_record_processor.emit(log_data) - - -class _SnowflakeTelemetryLoggerProvider(_logs.LoggerProvider): - """ - A LoggerProvider that creates SnowflakeTelemetryLoggers - """ - - def get_logger( - self, name: str, - version: types.Optional[str] = None, - schema_url: types.Optional[str] = None, - attributes: types.Optional[types.Attributes] = None, - ) -> _logs.Logger: - return _SnowflakeTelemetryLogger( - Resource.get_empty(), - self._multi_log_record_processor, - otel_instrumentation.InstrumentationScope( - name, - version, - schema_url, - ), - ) - - __all__ = [ "LogWriter", "SnowflakeLoggingHandler", diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/__init__.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/__init__.py index c1250a0..e2eda90 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/__init__.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/__init__.py @@ -14,7 +14,7 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py index 530528f..6f6d15e 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py @@ -14,42 +14,42 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. 
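Note on the logging refactor above: newer OpenTelemetry Python records the Python logger's name as the instrumentation scope itself (the upstream resolution of opentelemetry-python issue #2485 referenced in the removed TODO), so the custom _SnowflakeTelemetryLogger and _SnowflakeTelemetryLoggerProvider, which existed only to emulate that behavior, can be deleted. What remains is a back-compat shim: OpenTelemetry renamed the code.* log attributes, and SnowflakeLoggingHandler maps them back so existing Snowflake consumers keep seeing the old keys. A minimal sketch of that renaming, assuming a plain attribute dict (the real handler applies it to the attributes returned by the upstream _get_attributes):

    # Map the renamed OTel code attributes back to their pre-upgrade keys.
    NEW_TO_OLD = {
        "code.file.path": "code.filepath",
        "code.function.name": "code.function",
        "code.line.number": "code.lineno",
    }

    def restore_legacy_code_attributes(attributes: dict) -> dict:
        for new_key, old_key in NEW_TO_OLD.items():
            if new_key in attributes:
                attributes[old_key] = attributes.pop(new_key)
        return attributes

    # {"code.file.path": "app.py"} -> {"code.filepath": "app.py"}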
+from __future__ import annotations + import logging from collections.abc import Sequence -from itertools import count from typing import ( Any, + Callable, + Dict, + List, Mapping, Optional, - List, - Callable, TypeVar, - Dict, - Iterator, ) -from opentelemetry.sdk.util.instrumentation import InstrumentationScope +from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common_marshaler import AnyValue as PB2AnyValue from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common_marshaler import ( - InstrumentationScope as PB2InstrumentationScope, + ArrayValue as PB2ArrayValue, ) -from snowflake.telemetry._internal.opentelemetry.proto.resource.v1.resource_marshaler import ( - Resource as PB2Resource, +from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common_marshaler import ( + InstrumentationScope as PB2InstrumentationScope, ) -from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common_marshaler import AnyValue as PB2AnyValue from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common_marshaler import KeyValue as PB2KeyValue from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common_marshaler import ( KeyValueList as PB2KeyValueList, ) -from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common_marshaler import ( - ArrayValue as PB2ArrayValue, +from snowflake.telemetry._internal.opentelemetry.proto.resource.v1.resource_marshaler import ( + Resource as PB2Resource, ) from opentelemetry.sdk.trace import Resource -from opentelemetry.util.types import Attributes +from opentelemetry.sdk.util.instrumentation import InstrumentationScope +from opentelemetry.util.types import _ExtendedAttributes _logger = logging.getLogger(__name__) @@ -65,6 +65,7 @@ def _encode_instrumentation_scope( return PB2InstrumentationScope( name=instrumentation_scope.name, version=instrumentation_scope.version, + attributes=_encode_attributes(instrumentation_scope.attributes), ) @@ -72,7 +73,11 @@ def _encode_resource(resource: Resource) -> PB2Resource: return PB2Resource(attributes=_encode_attributes(resource.attributes)) -def _encode_value(value: Any) -> PB2AnyValue: +def _encode_value( + value: Any, allow_null: bool = False +) -> Optional[PB2AnyValue]: + if allow_null is True and value is None: + return None if isinstance(value, bool): return PB2AnyValue(bool_value=value) if isinstance(value, str): @@ -81,21 +86,49 @@ def _encode_value(value: Any) -> PB2AnyValue: return PB2AnyValue(int_value=value) if isinstance(value, float): return PB2AnyValue(double_value=value) + if isinstance(value, bytes): + return PB2AnyValue(bytes_value=value) if isinstance(value, Sequence): return PB2AnyValue( - array_value=PB2ArrayValue(values=[_encode_value(v) for v in value]) + array_value=PB2ArrayValue( + values=_encode_array(value, allow_null=allow_null) + ) ) elif isinstance(value, Mapping): return PB2AnyValue( kvlist_value=PB2KeyValueList( - values=[_encode_key_value(str(k), v) for k, v in value.items()] + values=[ + _encode_key_value(str(k), v, allow_null=allow_null) + for k, v in value.items() + ] ) ) raise Exception(f"Invalid type {type(value)} of value {value}") -def _encode_key_value(key: str, value: Any) -> PB2KeyValue: - return PB2KeyValue(key=key, value=_encode_value(value)) +def _encode_key_value( + key: str, value: Any, allow_null: bool = False +) -> PB2KeyValue: + return PB2KeyValue( + key=key, value=_encode_value(value, allow_null=allow_null) + ) + + +def _encode_array( + array: Sequence[Any], allow_null: bool = False +) -> 
Sequence[PB2AnyValue]: + if not allow_null: + # Let the exception get raised by _encode_value() + return [_encode_value(v, allow_null=allow_null) for v in array] + + return [ + _encode_value(v, allow_null=allow_null) + if v is not None + # Use an empty AnyValue to represent None in an array. Behavior may change pending + # https://github.com/open-telemetry/opentelemetry-specification/issues/4392 + else PB2AnyValue() + for v in array + ] def _encode_span_id(span_id: int) -> bytes: @@ -107,14 +140,17 @@ def _encode_trace_id(trace_id: int) -> bytes: def _encode_attributes( - attributes: Attributes, + attributes: _ExtendedAttributes, + allow_null: bool = False, ) -> Optional[List[PB2KeyValue]]: if attributes: pb2_attributes = [] for key, value in attributes.items(): # pylint: disable=broad-exception-caught try: - pb2_attributes.append(_encode_key_value(key, value)) + pb2_attributes.append( + _encode_key_value(key, value, allow_null=allow_null) + ) except Exception as error: _logger.exception("Failed to encode key %s: %s", key, error) else: @@ -145,38 +181,3 @@ def _get_resource_data( ) ) return resource_data - - -def _create_exp_backoff_generator(max_value: int = 0) -> Iterator[int]: - """ - Generates an infinite sequence of exponential backoff values. The sequence starts - from 1 (2^0) and doubles each time (2^1, 2^2, 2^3, ...). If a max_value is specified - and non-zero, the generated values will not exceed this maximum, capping at max_value - instead of growing indefinitely. - - Parameters: - - max_value (int, optional): The maximum value to yield. If 0 or not provided, the - sequence grows without bound. - - Returns: - Iterator[int]: An iterator that yields the exponential backoff values, either uncapped or - capped at max_value. - - Example: - ``` - gen = _create_exp_backoff_generator(max_value=10) - for _ in range(5): - print(next(gen)) - ``` - This will print: - 1 - 2 - 4 - 8 - 10 - - Note: this functionality used to be handled by the 'backoff' package. - """ - for i in count(0): - out = 2**i - yield min(out, max_value) if max_value else out diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py index 2f71a17..9033c59 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py @@ -14,29 +14,28 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. 
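The allow_null flag threaded through the encoders above changes two things: a top-level None (for example an unset log body) is omitted entirely, and a None inside an array is encoded as an empty AnyValue so the array keeps its length. Separately, _create_exp_backoff_generator is deleted; nothing in the vendored common package references it anymore. A self-contained sketch of the null-handling policy, with a stand-in AnyValue class in place of the vendored marshaler (the real encoder picks a concrete field per type: bool_value, string_value, int_value, double_value, bytes_value; the Mapping branch is elided here):

    from typing import Any, Optional, Sequence

    class AnyValue:
        # Stand-in for the vendored PB2AnyValue marshaler.
        def __init__(self, **fields):
            self.fields = fields

    def encode_value(value: Any, allow_null: bool = False) -> Optional[AnyValue]:
        if allow_null and value is None:
            return None  # omitted from the serialized output
        if isinstance(value, (bool, str, int, float, bytes)):
            return AnyValue(value=value)
        if isinstance(value, Sequence):
            # None inside an array cannot simply be dropped without shifting
            # indices, so it becomes an empty AnyValue placeholder (behavior
            # may change pending opentelemetry-specification issue #4392).
            return AnyValue(array=[
                encode_value(v, allow_null) if v is not None else AnyValue()
                for v in value
            ])
        raise Exception(f"Invalid type {type(value)} of value {value}")

    assert encode_value(None, allow_null=True) is None
    assert len(encode_value([1, None], allow_null=True).fields["array"]) == 2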
from collections import defaultdict -from typing import Sequence, List +from typing import List, Sequence from snowflake.telemetry._internal.opentelemetry.exporter.otlp.proto.common._internal import ( + _encode_attributes, _encode_instrumentation_scope, _encode_resource, _encode_span_id, _encode_trace_id, _encode_value, - _encode_attributes, ) from snowflake.telemetry._internal.opentelemetry.proto.collector.logs.v1.logs_service_marshaler import ( ExportLogsServiceRequest, ) +from snowflake.telemetry._internal.opentelemetry.proto.logs.v1.logs_marshaler import LogRecord as PB2LogRecord from snowflake.telemetry._internal.opentelemetry.proto.logs.v1.logs_marshaler import ( - ScopeLogs, ResourceLogs, + ScopeLogs, ) -from snowflake.telemetry._internal.opentelemetry.proto.logs.v1.logs_marshaler import LogRecord as PB2LogRecord - from opentelemetry.sdk._logs import LogData @@ -55,17 +54,21 @@ def _encode_log(log_data: LogData) -> PB2LogRecord: if log_data.log_record.trace_id == 0 else _encode_trace_id(log_data.log_record.trace_id) ) + body = log_data.log_record.body return PB2LogRecord( time_unix_nano=log_data.log_record.timestamp, observed_time_unix_nano=log_data.log_record.observed_timestamp, span_id=span_id, trace_id=trace_id, flags=int(log_data.log_record.trace_flags), - body=_encode_value(log_data.log_record.body), + body=_encode_value(body, allow_null=True), severity_text=log_data.log_record.severity_text, - attributes=_encode_attributes(log_data.log_record.attributes), + attributes=_encode_attributes( + log_data.log_record.attributes, allow_null=True + ), dropped_attributes_count=log_data.log_record.dropped_attributes, severity_number=log_data.log_record.severity_number.value, + event_name=log_data.log_record.event_name, ) @@ -88,6 +91,9 @@ def _encode_resource_logs(batch: Sequence[LogData]) -> List[ResourceLogs]: ScopeLogs( scope=(_encode_instrumentation_scope(sdk_instrumentation)), log_records=pb2_logs, + schema_url=sdk_instrumentation.schema_url + if sdk_instrumentation + else None, ) ) pb2_resource_logs.append( diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/metrics_encoder/__init__.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/metrics_encoder/__init__.py index 9d729cd..1a3487b 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/metrics_encoder/__init__.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/metrics_encoder/__init__.py @@ -14,55 +14,58 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. 
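The log encoder above now serializes the new event_name field, tolerates a null body via allow_null=True, and copies the scope's schema_url onto each ScopeLogs. Structurally, _encode_resource_logs batches LogData first by resource and then by instrumentation scope before building the OTLP message. A minimal sketch of that two-level grouping, with generic tuples standing in for SDK LogData:

    from collections import defaultdict

    def group_by_resource_and_scope(batch):
        # batch: iterable of (resource, scope, record) tuples; illustrative
        # shape only. The real code reads these off LogData.log_record and
        # LogData.instrumentation_scope.
        grouped = defaultdict(lambda: defaultdict(list))
        for resource, scope, record in batch:
            grouped[resource][scope].append(record)
        return grouped

    # The nesting mirrors OTLP: ResourceLogs -> ScopeLogs -> LogRecord.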
+from __future__ import annotations + import logging +from os import environ +from typing import Dict, List -from opentelemetry.sdk.metrics.export import ( - MetricExporter, +from snowflake.telemetry._internal.opentelemetry.exporter.otlp.proto.common._internal import ( + _encode_attributes, + _encode_instrumentation_scope, + _encode_span_id, + _encode_trace_id, +) +from snowflake.telemetry._internal.opentelemetry.proto.collector.metrics.v1.metrics_service_marshaler import ( + ExportMetricsServiceRequest, +) +from snowflake.telemetry._internal.opentelemetry.proto.metrics.v1 import metrics_marshaler as pb2 +from snowflake.telemetry._internal.opentelemetry.proto.resource.v1.resource_marshaler import ( + Resource as PB2Resource, +) +from opentelemetry.sdk.environment_variables import ( + OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION, + OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE, ) -from opentelemetry.sdk.metrics.view import Aggregation -from os import environ from opentelemetry.sdk.metrics import ( Counter, + Exemplar, Histogram, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter, ) -from snowflake.telemetry._internal.opentelemetry.exporter.otlp.proto.common._internal import ( - _encode_attributes, -) -from opentelemetry.sdk.environment_variables import ( - OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE, -) from opentelemetry.sdk.metrics.export import ( AggregationTemporality, -) -from snowflake.telemetry._internal.opentelemetry.proto.collector.metrics.v1.metrics_service_marshaler import ( - ExportMetricsServiceRequest, -) -from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common_marshaler import InstrumentationScope -from snowflake.telemetry._internal.opentelemetry.proto.metrics.v1 import metrics_marshaler as pb2 -from opentelemetry.sdk.metrics.export import ( - MetricsData, Gauge, - Histogram as HistogramType, + MetricExporter, + MetricsData, Sum, - ExponentialHistogram as ExponentialHistogramType, ) -from typing import Dict -from snowflake.telemetry._internal.opentelemetry.proto.resource.v1.resource_marshaler import ( - Resource as PB2Resource, +from opentelemetry.sdk.metrics.export import ( + ExponentialHistogram as ExponentialHistogramType, ) -from opentelemetry.sdk.environment_variables import ( - OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION, +from opentelemetry.sdk.metrics.export import ( + Histogram as HistogramType, ) from opentelemetry.sdk.metrics.view import ( - ExponentialBucketHistogramAggregation, + Aggregation, ExplicitBucketHistogramAggregation, + ExponentialBucketHistogramAggregation, ) _logger = logging.getLogger(__name__) @@ -71,10 +74,10 @@ class OTLPMetricExporterMixin: def _common_configuration( self, - preferred_temporality: Dict[type, AggregationTemporality] = None, - preferred_aggregation: Dict[type, Aggregation] = None, + preferred_temporality: dict[type, AggregationTemporality] + | None = None, + preferred_aggregation: dict[type, Aggregation] | None = None, ) -> None: - MetricExporter.__init__( self, preferred_temporality=self._get_temporality(preferred_temporality), @@ -84,7 +87,6 @@ def _common_configuration( def _get_temporality( self, preferred_temporality: Dict[type, AggregationTemporality] ) -> Dict[type, AggregationTemporality]: - otel_exporter_otlp_metrics_temporality_preference = ( environ.get( OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE, @@ -121,8 +123,9 @@ def _get_temporality( _logger.warning( "Unrecognized OTEL_EXPORTER_METRICS_TEMPORALITY_PREFERENCE" " value found: " - 
f"{otel_exporter_otlp_metrics_temporality_preference}, " - "using CUMULATIVE" + "%s, " + "using CUMULATIVE", + otel_exporter_otlp_metrics_temporality_preference, ) instrument_class_temporality = { Counter: AggregationTemporality.CUMULATIVE, @@ -141,7 +144,6 @@ def _get_aggregation( self, preferred_aggregation: Dict[type, Aggregation], ) -> Dict[type, Aggregation]: - otel_exporter_otlp_metrics_default_histogram_aggregation = environ.get( OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION, "explicit_bucket_histogram", @@ -150,17 +152,14 @@ def _get_aggregation( if otel_exporter_otlp_metrics_default_histogram_aggregation == ( "base2_exponential_bucket_histogram" ): - instrument_class_aggregation = { Histogram: ExponentialBucketHistogramAggregation(), } else: - if otel_exporter_otlp_metrics_default_histogram_aggregation != ( "explicit_bucket_histogram" ): - _logger.warning( ( "Invalid value for %s: %s, using explicit bucket " @@ -179,152 +178,26 @@ def _get_aggregation( return instrument_class_aggregation -def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: - resource_metrics_dict = {} - - for resource_metrics in data.resource_metrics: - - resource = resource_metrics.resource +class EncodingException(Exception): + """ + Raised by encode_metrics() when an exception is caught during encoding. Contains the problematic metric so + the misbehaving metric name and details can be logged during exception handling. + """ - # It is safe to assume that each entry in data.resource_metrics is - # associated with an unique resource. - scope_metrics_dict = {} + def __init__(self, original_exception, metric): + super().__init__() + self.original_exception = original_exception + self.metric = metric - resource_metrics_dict[resource] = scope_metrics_dict + def __str__(self): + return f"{self.metric}\n{self.original_exception}" - for scope_metrics in resource_metrics.scope_metrics: - instrumentation_scope = scope_metrics.scope - - # The SDK groups metrics in instrumentation scopes already so - # there is no need to check for existing instrumentation scopes - # here. 
- pb2_scope_metrics = pb2.ScopeMetrics( - scope=InstrumentationScope( - name=instrumentation_scope.name, - version=instrumentation_scope.version, - ) - ) - - scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics - - for metric in scope_metrics.metrics: - pb2_metric = pb2.Metric( - name=metric.name, - description=metric.description, - unit=metric.unit, - ) +def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: + resource_metrics_dict = {} - if isinstance(metric.data, Gauge): - for data_point in metric.data.data_points: - pt = pb2.NumberDataPoint( - attributes=_encode_attributes( - data_point.attributes - ), - time_unix_nano=data_point.time_unix_nano, - ) - if isinstance(data_point.value, int): - pt.as_int = data_point.value - else: - pt.as_double = data_point.value - pb2_metric.gauge.data_points.append(pt) - - elif isinstance(metric.data, HistogramType): - for data_point in metric.data.data_points: - pt = pb2.HistogramDataPoint( - attributes=_encode_attributes( - data_point.attributes - ), - time_unix_nano=data_point.time_unix_nano, - start_time_unix_nano=( - data_point.start_time_unix_nano - ), - count=data_point.count, - sum=data_point.sum, - bucket_counts=data_point.bucket_counts, - explicit_bounds=data_point.explicit_bounds, - max=data_point.max, - min=data_point.min, - ) - pb2_metric.histogram.aggregation_temporality = ( - metric.data.aggregation_temporality - ) - pb2_metric.histogram.data_points.append(pt) - - elif isinstance(metric.data, Sum): - for data_point in metric.data.data_points: - pt = pb2.NumberDataPoint( - attributes=_encode_attributes( - data_point.attributes - ), - start_time_unix_nano=( - data_point.start_time_unix_nano - ), - time_unix_nano=data_point.time_unix_nano, - ) - if isinstance(data_point.value, int): - pt.as_int = data_point.value - else: - pt.as_double = data_point.value - # note that because sum is a message type, the - # fields must be set individually rather than - # instantiating a pb2.Sum and setting it once - pb2_metric.sum.aggregation_temporality = ( - metric.data.aggregation_temporality - ) - pb2_metric.sum.is_monotonic = metric.data.is_monotonic - pb2_metric.sum.data_points.append(pt) - - elif isinstance(metric.data, ExponentialHistogramType): - for data_point in metric.data.data_points: - - if data_point.positive.bucket_counts: - positive = pb2.ExponentialHistogramDataPoint.Buckets( - offset=data_point.positive.offset, - bucket_counts=data_point.positive.bucket_counts, - ) - else: - positive = None - - if data_point.negative.bucket_counts: - negative = pb2.ExponentialHistogramDataPoint.Buckets( - offset=data_point.negative.offset, - bucket_counts=data_point.negative.bucket_counts, - ) - else: - negative = None - - pt = pb2.ExponentialHistogramDataPoint( - attributes=_encode_attributes( - data_point.attributes - ), - time_unix_nano=data_point.time_unix_nano, - start_time_unix_nano=( - data_point.start_time_unix_nano - ), - count=data_point.count, - sum=data_point.sum, - scale=data_point.scale, - zero_count=data_point.zero_count, - positive=positive, - negative=negative, - flags=data_point.flags, - max=data_point.max, - min=data_point.min, - ) - pb2_metric.exponential_histogram.aggregation_temporality = ( - metric.data.aggregation_temporality - ) - pb2_metric.exponential_histogram.data_points.append(pt) - - else: - _logger.warning( - "unsupported data type %s", - metric.data.__class__.__name__, - ) - continue - - pb2_scope_metrics.metrics.append(pb2_metric) + for resource_metrics in data.resource_metrics: + 
_encode_resource_metrics(resource_metrics, resource_metrics_dict) resource_data = [] for ( @@ -340,5 +213,182 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: schema_url=sdk_resource.schema_url, ) ) - resource_metrics = resource_data - return ExportMetricsServiceRequest(resource_metrics=resource_metrics) + return ExportMetricsServiceRequest(resource_metrics=resource_data) + + +def _encode_resource_metrics(resource_metrics, resource_metrics_dict): + resource = resource_metrics.resource + # It is safe to assume that each entry in data.resource_metrics is + # associated with an unique resource. + scope_metrics_dict = {} + resource_metrics_dict[resource] = scope_metrics_dict + for scope_metrics in resource_metrics.scope_metrics: + instrumentation_scope = scope_metrics.scope + + # The SDK groups metrics in instrumentation scopes already so + # there is no need to check for existing instrumentation scopes + # here. + pb2_scope_metrics = pb2.ScopeMetrics( + scope=_encode_instrumentation_scope(instrumentation_scope), + schema_url=instrumentation_scope.schema_url, + ) + + scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics + + for metric in scope_metrics.metrics: + pb2_metric = pb2.Metric( + name=metric.name, + description=metric.description, + unit=metric.unit, + ) + + try: + _encode_metric(metric, pb2_metric) + except Exception as ex: + # `from None` so we don't get "During handling of the above exception, another exception occurred:" + raise EncodingException(ex, metric) from None + + pb2_scope_metrics.metrics.append(pb2_metric) + + +def _encode_metric(metric, pb2_metric): + if isinstance(metric.data, Gauge): + for data_point in metric.data.data_points: + pt = pb2.NumberDataPoint( + attributes=_encode_attributes(data_point.attributes), + time_unix_nano=data_point.time_unix_nano, + exemplars=_encode_exemplars(data_point.exemplars), + ) + if isinstance(data_point.value, int): + pt.as_int = data_point.value + else: + pt.as_double = data_point.value + pb2_metric.gauge.data_points.append(pt) + + elif isinstance(metric.data, HistogramType): + for data_point in metric.data.data_points: + pt = pb2.HistogramDataPoint( + attributes=_encode_attributes(data_point.attributes), + time_unix_nano=data_point.time_unix_nano, + start_time_unix_nano=data_point.start_time_unix_nano, + exemplars=_encode_exemplars(data_point.exemplars), + count=data_point.count, + sum=data_point.sum, + bucket_counts=data_point.bucket_counts, + explicit_bounds=data_point.explicit_bounds, + max=data_point.max, + min=data_point.min, + ) + pb2_metric.histogram.aggregation_temporality = ( + metric.data.aggregation_temporality + ) + pb2_metric.histogram.data_points.append(pt) + + elif isinstance(metric.data, Sum): + for data_point in metric.data.data_points: + pt = pb2.NumberDataPoint( + attributes=_encode_attributes(data_point.attributes), + start_time_unix_nano=data_point.start_time_unix_nano, + time_unix_nano=data_point.time_unix_nano, + exemplars=_encode_exemplars(data_point.exemplars), + ) + if isinstance(data_point.value, int): + pt.as_int = data_point.value + else: + pt.as_double = data_point.value + # note that because sum is a message type, the + # fields must be set individually rather than + # instantiating a pb2.Sum and setting it once + pb2_metric.sum.aggregation_temporality = ( + metric.data.aggregation_temporality + ) + pb2_metric.sum.is_monotonic = metric.data.is_monotonic + pb2_metric.sum.data_points.append(pt) + + elif isinstance(metric.data, ExponentialHistogramType): + for data_point in 
metric.data.data_points: + if data_point.positive.bucket_counts: + positive = pb2.ExponentialHistogramDataPoint.Buckets( + offset=data_point.positive.offset, + bucket_counts=data_point.positive.bucket_counts, + ) + else: + positive = None + + if data_point.negative.bucket_counts: + negative = pb2.ExponentialHistogramDataPoint.Buckets( + offset=data_point.negative.offset, + bucket_counts=data_point.negative.bucket_counts, + ) + else: + negative = None + + pt = pb2.ExponentialHistogramDataPoint( + attributes=_encode_attributes(data_point.attributes), + time_unix_nano=data_point.time_unix_nano, + start_time_unix_nano=data_point.start_time_unix_nano, + exemplars=_encode_exemplars(data_point.exemplars), + count=data_point.count, + sum=data_point.sum, + scale=data_point.scale, + zero_count=data_point.zero_count, + positive=positive, + negative=negative, + flags=data_point.flags, + max=data_point.max, + min=data_point.min, + ) + pb2_metric.exponential_histogram.aggregation_temporality = ( + metric.data.aggregation_temporality + ) + pb2_metric.exponential_histogram.data_points.append(pt) + + else: + _logger.warning( + "unsupported data type %s", + metric.data.__class__.__name__, + ) + + +def _encode_exemplars(sdk_exemplars: List[Exemplar]) -> List[pb2.Exemplar]: + """ + Converts a list of SDK Exemplars into a list of protobuf Exemplars. + + Args: + sdk_exemplars (list): The list of exemplars from the OpenTelemetry SDK. + + Returns: + list: A list of protobuf exemplars. + """ + pb_exemplars = [] + for sdk_exemplar in sdk_exemplars: + if ( + sdk_exemplar.span_id is not None + and sdk_exemplar.trace_id is not None + ): + pb_exemplar = pb2.Exemplar( + time_unix_nano=sdk_exemplar.time_unix_nano, + span_id=_encode_span_id(sdk_exemplar.span_id), + trace_id=_encode_trace_id(sdk_exemplar.trace_id), + filtered_attributes=_encode_attributes( + sdk_exemplar.filtered_attributes + ), + ) + else: + pb_exemplar = pb2.Exemplar( + time_unix_nano=sdk_exemplar.time_unix_nano, + filtered_attributes=_encode_attributes( + sdk_exemplar.filtered_attributes + ), + ) + + # Assign the value based on its type in the SDK exemplar + if isinstance(sdk_exemplar.value, float): + pb_exemplar.as_double = sdk_exemplar.value + elif isinstance(sdk_exemplar.value, int): + pb_exemplar.as_int = sdk_exemplar.value + else: + raise ValueError("Exemplar value must be an int or float") + pb_exemplars.append(pb_exemplar) + + return pb_exemplars diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/trace_encoder/__init__.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/trace_encoder/__init__.py index e269641..32140fd 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/trace_encoder/__init__.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/trace_encoder/__init__.py @@ -14,7 +14,7 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. 
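With the restructuring above, encode_metrics() now fails informatively: any exception raised while encoding a single metric is re-raised as EncodingException carrying the offending metric, so a caller can log which metric broke before deciding whether to drop the batch. A usage sketch (the export wiring and send() callback are hypothetical; the imports are the vendored module shown in this diff):

    import logging

    from snowflake.telemetry._internal.opentelemetry.exporter.otlp.proto.common._internal.metrics_encoder import (
        EncodingException,
        encode_metrics,
    )

    logger = logging.getLogger(__name__)

    def export_batch(metrics_data, send):
        try:
            request = encode_metrics(metrics_data)
        except EncodingException as err:
            # err.metric is the misbehaving SDK metric; err.original_exception
            # is the root cause raised inside _encode_metric().
            logger.error("failed to encode metric %r: %s",
                         err.metric.name, err.original_exception)
            return
        send(request)  # hypothetical transport callback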
@@ -95,6 +95,9 @@ def _encode_resource_spans( PB2ScopeSpans( scope=(_encode_instrumentation_scope(sdk_instrumentation)), spans=pb2_spans, + schema_url=sdk_instrumentation.schema_url + if sdk_instrumentation + else None, ) ) pb2_resource_spans.append( @@ -163,7 +166,7 @@ def _encode_links(links: Sequence[Link]) -> Sequence[PB2SPan.Link]: trace_id=_encode_trace_id(link.context.trace_id), span_id=_encode_span_id(link.context.span_id), attributes=_encode_attributes(link.attributes), - dropped_attributes_count=link.attributes.dropped, + dropped_attributes_count=link.dropped_attributes, flags=_span_flags(link.context), ) pb2_links.append(encoded_link) diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_log_encoder.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_log_encoder.py index 481a853..885ebe1 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_log_encoder.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_log_encoder.py @@ -14,7 +14,7 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/metrics_encoder.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/metrics_encoder.py index 4d82926..daac819 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/metrics_encoder.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/metrics_encoder.py @@ -14,7 +14,7 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/trace_encoder.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/trace_encoder.py index bc37212..2cd631d 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/trace_encoder.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/trace_encoder.py @@ -14,7 +14,7 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. 
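The one behavioral fix in the trace encoder above: dropped_attributes_count is now read from link.dropped_attributes rather than link.attributes.dropped. The old expression only worked when the attribute container happened to be a BoundedAttributes; the newer API exposes the count on the Link itself and returns 0 for plain mappings. A sketch, assuming the 1.35 opentelemetry-api surface:

    from opentelemetry.trace import Link, SpanContext, TraceFlags

    ctx = SpanContext(trace_id=0x1, span_id=0x2, is_remote=False,
                      trace_flags=TraceFlags(TraceFlags.DEFAULT))
    link = Link(ctx, attributes={"peer.service": "db"})  # plain dict, nothing dropped

    assert link.dropped_attributes == 0
    # link.attributes.dropped would raise AttributeError here: a plain dict
    # has no 'dropped' attribute.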
diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/version.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/version/__init__.py similarity index 95% rename from src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/version.py rename to src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/version/__init__.py index 8ddc9b6..ecbec74 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/version.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/version/__init__.py @@ -14,8 +14,8 @@ # # This file has been modified from the original source code at # -# https://github.com/open-telemetry/opentelemetry-python/tree/v1.26.0 +# https://github.com/open-telemetry/opentelemetry-python/tree/v1.35.0 # # by Snowflake Inc. -__version__ = "1.26.0" +__version__ = "1.35.0" diff --git a/tests/snowflake-telemetry-test-utils/setup.py b/tests/snowflake-telemetry-test-utils/setup.py index a724e76..8dc94e0 100644 --- a/tests/snowflake-telemetry-test-utils/setup.py +++ b/tests/snowflake-telemetry-test-utils/setup.py @@ -15,7 +15,7 @@ description=DESCRIPTION, long_description=LONG_DESCRIPTION, install_requires=[ - "opentelemetry-exporter-otlp-proto-common == 1.26.0", + "opentelemetry-exporter-otlp-proto-common == 1.35.0", "pytest >= 7.0.0", "snowflake-telemetry-python == 0.7.2.dev", "Jinja2 == 3.1.4", diff --git a/tests/test_metrics_encoder.py b/tests/test_metrics_encoder.py index 7a473ed..b8d614b 100644 --- a/tests/test_metrics_encoder.py +++ b/tests/test_metrics_encoder.py @@ -95,7 +95,7 @@ def test_encode_sum_int(self): scope=SDKInstrumentationScope( name="first_name", version="first_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[_generate_sum("sum_int", 33)], schema_url="instrumentation_scope_schema_url", @@ -122,6 +122,7 @@ def test_encode_sum_int(self): scope=InstrumentationScope( name="first_name", version="first_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="sum_int", @@ -181,7 +182,7 @@ def test_encode_sum_double(self): scope=SDKInstrumentationScope( name="first_name", version="first_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[_generate_sum("sum_double", 2.98)], schema_url="instrumentation_scope_schema_url", @@ -208,6 +209,7 @@ def test_encode_sum_double(self): scope=InstrumentationScope( name="first_name", version="first_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="sum_double", @@ -267,7 +269,7 @@ def test_encode_gauge_int(self): scope=SDKInstrumentationScope( name="first_name", version="first_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[_generate_gauge("gauge_int", 9000)], schema_url="instrumentation_scope_schema_url", @@ -294,6 +296,7 @@ def test_encode_gauge_int(self): scope=InstrumentationScope( name="first_name", version="first_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="gauge_int", @@ -350,7 +353,7 @@ def test_encode_gauge_double(self): scope=SDKInstrumentationScope( name="first_name", version="first_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[_generate_gauge("gauge_double", 52.028)], 
schema_url="instrumentation_scope_schema_url", @@ -377,6 +380,7 @@ def test_encode_gauge_double(self): scope=InstrumentationScope( name="first_name", version="first_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="gauge_double", @@ -433,7 +437,7 @@ def test_encode_histogram(self): scope=SDKInstrumentationScope( name="first_name", version="first_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[self.histogram], schema_url="instrumentation_scope_schema_url", @@ -460,6 +464,7 @@ def test_encode_histogram(self): scope=InstrumentationScope( name="first_name", version="first_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="histogram", @@ -524,7 +529,7 @@ def test_encode_multiple_scope_histogram(self): scope=SDKInstrumentationScope( name="first_name", version="first_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[self.histogram, self.histogram], schema_url="instrumentation_scope_schema_url", @@ -533,7 +538,7 @@ def test_encode_multiple_scope_histogram(self): scope=SDKInstrumentationScope( name="second_name", version="second_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[self.histogram], schema_url="instrumentation_scope_schema_url", @@ -542,7 +547,7 @@ def test_encode_multiple_scope_histogram(self): scope=SDKInstrumentationScope( name="third_name", version="third_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[self.histogram], schema_url="instrumentation_scope_schema_url", @@ -569,6 +574,7 @@ def test_encode_multiple_scope_histogram(self): scope=InstrumentationScope( name="first_name", version="first_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="histogram", @@ -646,6 +652,7 @@ def test_encode_multiple_scope_histogram(self): scope=InstrumentationScope( name="second_name", version="second_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="histogram", @@ -688,6 +695,7 @@ def test_encode_multiple_scope_histogram(self): scope=InstrumentationScope( name="third_name", version="third_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="histogram", @@ -777,7 +785,7 @@ def test_encode_exponential_histogram(self): scope=SDKInstrumentationScope( name="first_name", version="first_version", - schema_url="insrumentation_scope_schema_url", + schema_url="instrumentation_scope_schema_url", ), metrics=[exponential_histogram], schema_url="instrumentation_scope_schema_url", @@ -804,6 +812,7 @@ def test_encode_exponential_histogram(self): scope=InstrumentationScope( name="first_name", version="first_version" ), + schema_url="instrumentation_scope_schema_url", metrics=[ pb2.Metric( name="exponential_histogram",
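The test updates close the loop on the schema_url change: the SDK scope's schema_url was previously dropped during metrics encoding, so every expected pb2.ScopeMetrics now carries it, matching the propagation the log and trace encoders gained above (the fixture typo "insrumentation_scope_schema_url" is corrected along the way). A sketch of the shape being asserted, using the real SDK scope type with an illustrative expectation:

    from opentelemetry.sdk.util.instrumentation import InstrumentationScope

    scope = InstrumentationScope(
        name="first_name",
        version="first_version",
        schema_url="instrumentation_scope_schema_url",
    )
    # After encode_metrics(...), the matching pb2.ScopeMetrics entry is now
    # expected to carry schema_url == "instrumentation_scope_schema_url"
    # instead of leaving the field unset.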