diff --git a/CHANGELOG.md b/CHANGELOG.md index 19a3434190e..c8097b699f2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased +- Update OTLP proto to v1.7 [#4645](https://github.com/open-telemetry/opentelemetry-python/pull/4645). - Update OTLP gRPC/HTTP exporters: the export timeout is now inclusive of all retries and backoffs. A +/-20% jitter was added to all backoffs. A pointless 32 second sleep that occurred after all retries had completed/failed was removed. diff --git a/opentelemetry-proto/README.rst b/opentelemetry-proto/README.rst index 7ee31de7d88..aa70bc7bb91 100644 --- a/opentelemetry-proto/README.rst +++ b/opentelemetry-proto/README.rst @@ -7,9 +7,9 @@ OpenTelemetry Python Proto :target: https://pypi.org/project/opentelemetry-proto/ This library contains the generated code for OpenTelemetry protobuf data model. The code in the current -package was generated using the v1.2.0 release_ of opentelemetry-proto. +package was generated using the v1.7.0 release_ of opentelemetry-proto. -.. _release: https://github.com/open-telemetry/opentelemetry-proto/releases/tag/v1.2.0 +.. _release: https://github.com/open-telemetry/opentelemetry-proto/releases/tag/v1.7.0 Installation ------------ diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py index 17f7196eee6..81f124f6303 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py @@ -7,39 +7,28 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.logs.v1 import ( - logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2, -) +from opentelemetry.proto.logs.v1 import logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs"u\n\x19\x45xportLogsServiceResponse\x12X\n\x0fpartial_success\x18\x01 \x01(\x0b\x32?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess"O\n\x18\x45xportLogsPartialSuccess\x12\x1c\n\x14rejected_log_records\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse"\x00\x42\x98\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 
\x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"u\n\x19\x45xportLogsServiceResponse\x12X\n\x0fpartial_success\x18\x01 \x01(\x0b\x32?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess\"O\n\x18\x45xportLogsPartialSuccess\x12\x1c\n\x14rejected_log_records\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse\"\x00\x42\x98\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, - "opentelemetry.proto.collector.logs.v1.logs_service_pb2", - _globals, -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.logs.v1.logs_service_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = ( - b"\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\252\002%OpenTelemetry.Proto.Collector.Logs.V1" - ) - _globals["_EXPORTLOGSSERVICEREQUEST"]._serialized_start = 139 - _globals["_EXPORTLOGSSERVICEREQUEST"]._serialized_end = 231 - _globals["_EXPORTLOGSSERVICERESPONSE"]._serialized_start = 233 - _globals["_EXPORTLOGSSERVICERESPONSE"]._serialized_end = 350 - _globals["_EXPORTLOGSPARTIALSUCCESS"]._serialized_start = 352 - _globals["_EXPORTLOGSPARTIALSUCCESS"]._serialized_end = 431 - _globals["_LOGSSERVICE"]._serialized_start = 434 - _globals["_LOGSSERVICE"]._serialized_end = 591 + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\252\002%OpenTelemetry.Proto.Collector.Logs.V1' + _globals['_EXPORTLOGSSERVICEREQUEST']._serialized_start=139 + _globals['_EXPORTLOGSSERVICEREQUEST']._serialized_end=231 + _globals['_EXPORTLOGSSERVICERESPONSE']._serialized_start=233 + _globals['_EXPORTLOGSSERVICERESPONSE']._serialized_end=350 + _globals['_EXPORTLOGSPARTIALSUCCESS']._serialized_start=352 + _globals['_EXPORTLOGSPARTIALSUCCESS']._serialized_end=431 + _globals['_LOGSSERVICE']._serialized_start=434 + _globals['_LOGSSERVICE']._serialized_end=591 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi index f4c7489b31d..99e2a0ac101 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" - import builtins import collections.abc import google.protobuf.descriptor @@ -24,7 +23,10 @@ import google.protobuf.message import opentelemetry.proto.logs.v1.logs_pb2 import sys -import typing as typing_extensions +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions DESCRIPTOR: google.protobuf.descriptor.FileDescriptor @@ -34,34 +36,19 @@ class ExportLogsServiceRequest(google.protobuf.message.Message): RESOURCE_LOGS_FIELD_NUMBER: builtins.int @property - def resource_logs( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs - ]: + def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs]: """An array of ResourceLogs. For data coming from a single resource this array will typically contain one element. Intermediary nodes (such as OpenTelemetry Collector) that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - def __init__( self, *, - resource_logs: ( - collections.abc.Iterable[ - opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs - ] - | None - ) = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource_logs", b"resource_logs" - ], + resource_logs: collections.abc.Iterable[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource_logs", b"resource_logs"]) -> None: ... global___ExportLogsServiceRequest = ExportLogsServiceRequest @@ -88,24 +75,13 @@ class ExportLogsServiceResponse(google.protobuf.message.Message): `error_message` = "") is equivalent to it not being set/present. Senders SHOULD interpret it the same way as in the full success case. """ - def __init__( self, *, partial_success: global___ExportLogsPartialSuccess | None = ..., ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "partial_success", b"partial_success" - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "partial_success", b"partial_success" - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ... global___ExportLogsServiceResponse = ExportLogsServiceResponse @@ -136,14 +112,6 @@ class ExportLogsPartialSuccess(google.protobuf.message.Message): rejected_log_records: builtins.int = ..., error_message: builtins.str = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "error_message", - b"error_message", - "rejected_log_records", - b"rejected_log_records", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_log_records", b"rejected_log_records"]) -> None: ... 
global___ExportLogsPartialSuccess = ExportLogsPartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py index 99c720386b5..bb64c98fa25 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py @@ -3,35 +3,30 @@ import grpc import warnings -from opentelemetry.proto.collector.logs.v1 import ( - logs_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2, -) +from opentelemetry.proto.collector.logs.v1 import logs_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2 -GRPC_GENERATED_VERSION = "1.63.2" +GRPC_GENERATED_VERSION = '1.63.2' GRPC_VERSION = grpc.__version__ -EXPECTED_ERROR_RELEASE = "1.65.0" -SCHEDULED_RELEASE_DATE = "June 25, 2024" +EXPECTED_ERROR_RELEASE = '1.65.0' +SCHEDULED_RELEASE_DATE = 'June 25, 2024' _version_not_supported = False try: from grpc._utilities import first_version_is_lower - - _version_not_supported = first_version_is_lower( - GRPC_VERSION, GRPC_GENERATED_VERSION - ) + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) except ImportError: _version_not_supported = True if _version_not_supported: warnings.warn( - f"The grpc package installed is at version {GRPC_VERSION}," - + f" but the generated code in opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py depends on" - + f" grpcio>={GRPC_GENERATED_VERSION}." - + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" - + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." - + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," - + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", - RuntimeWarning, + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' + + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', + RuntimeWarning ) @@ -48,11 +43,10 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Export = channel.unary_unary( - "/opentelemetry.proto.collector.logs.v1.LogsService/Export", - request_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString, - response_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString, - _registered_method=True, - ) + '/opentelemetry.proto.collector.logs.v1.LogsService/Export', + request_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString, + _registered_method=True) class LogsServiceServicer(object): @@ -62,30 +56,26 @@ class LogsServiceServicer(object): """ def Export(self, request, context): - """For performance reasons, it is recommended to keep this RPC - alive for the entire life of the application. - """ + """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def add_LogsServiceServicer_to_server(servicer, server): rpc_method_handlers = { - "Export": grpc.unary_unary_rpc_method_handler( - servicer.Export, - request_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.FromString, - response_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.SerializeToString, - ), + 'Export': grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - "opentelemetry.proto.collector.logs.v1.LogsService", - rpc_method_handlers, - ) + 'opentelemetry.proto.collector.logs.v1.LogsService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class LogsService(object): """Service that can be used to push logs between one Application instrumented with OpenTelemetry and an collector, or between an collector and a central collector (in this @@ -93,22 +83,20 @@ class LogsService(object): """ @staticmethod - def Export( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): + def Export(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary( request, target, - "/opentelemetry.proto.collector.logs.v1.LogsService/Export", + '/opentelemetry.proto.collector.logs.v1.LogsService/Export', opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString, opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString, options, @@ -119,5 +107,4 @@ def Export( wait_for_ready, timeout, metadata, - _registered_method=True, - ) + _registered_method=True) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py index f25b8801525..6083655c882 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py @@ -7,39 +7,28 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.metrics.v1 import ( - metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2, -) +from opentelemetry.proto.metrics.v1 import metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics"~\n\x1c\x45xportMetricsServiceResponse\x12^\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess"R\n\x1b\x45xportMetricsPartialSuccess\x12\x1c\n\x14rejected_data_points\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse"\x00\x42\xa4\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\xaa\x02(OpenTelemetry.Proto.Collector.Metrics.V1b\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto\"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 
\x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"~\n\x1c\x45xportMetricsServiceResponse\x12^\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess\"R\n\x1b\x45xportMetricsPartialSuccess\x12\x1c\n\x14rejected_data_points\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse\"\x00\x42\xa4\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\xaa\x02(OpenTelemetry.Proto.Collector.Metrics.V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, - "opentelemetry.proto.collector.metrics.v1.metrics_service_pb2", - _globals, -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.metrics.v1.metrics_service_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = ( - b"\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\252\002(OpenTelemetry.Proto.Collector.Metrics.V1" - ) - _globals["_EXPORTMETRICSSERVICEREQUEST"]._serialized_start = 154 - _globals["_EXPORTMETRICSSERVICEREQUEST"]._serialized_end = 258 - _globals["_EXPORTMETRICSSERVICERESPONSE"]._serialized_start = 260 - _globals["_EXPORTMETRICSSERVICERESPONSE"]._serialized_end = 386 - _globals["_EXPORTMETRICSPARTIALSUCCESS"]._serialized_start = 388 - _globals["_EXPORTMETRICSPARTIALSUCCESS"]._serialized_end = 470 - _globals["_METRICSSERVICE"]._serialized_start = 473 - _globals["_METRICSSERVICE"]._serialized_end = 645 + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\252\002(OpenTelemetry.Proto.Collector.Metrics.V1' + _globals['_EXPORTMETRICSSERVICEREQUEST']._serialized_start=154 + _globals['_EXPORTMETRICSSERVICEREQUEST']._serialized_end=258 + _globals['_EXPORTMETRICSSERVICERESPONSE']._serialized_start=260 + _globals['_EXPORTMETRICSSERVICERESPONSE']._serialized_end=386 + _globals['_EXPORTMETRICSPARTIALSUCCESS']._serialized_start=388 + _globals['_EXPORTMETRICSPARTIALSUCCESS']._serialized_end=470 + _globals['_METRICSSERVICE']._serialized_start=473 + _globals['_METRICSSERVICE']._serialized_end=645 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi index 1915f8a619e..fe3c44f3c37 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" - import builtins import collections.abc import google.protobuf.descriptor @@ -24,7 +23,10 @@ import google.protobuf.message import opentelemetry.proto.metrics.v1.metrics_pb2 import sys -import typing as typing_extensions +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions DESCRIPTOR: google.protobuf.descriptor.FileDescriptor @@ -34,34 +36,19 @@ class ExportMetricsServiceRequest(google.protobuf.message.Message): RESOURCE_METRICS_FIELD_NUMBER: builtins.int @property - def resource_metrics( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics - ]: + def resource_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics]: """An array of ResourceMetrics. For data coming from a single resource this array will typically contain one element. Intermediary nodes (such as OpenTelemetry Collector) that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - def __init__( self, *, - resource_metrics: ( - collections.abc.Iterable[ - opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics - ] - | None - ) = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource_metrics", b"resource_metrics" - ], + resource_metrics: collections.abc.Iterable[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource_metrics", b"resource_metrics"]) -> None: ... global___ExportMetricsServiceRequest = ExportMetricsServiceRequest @@ -88,24 +75,13 @@ class ExportMetricsServiceResponse(google.protobuf.message.Message): `error_message` = "") is equivalent to it not being set/present. Senders SHOULD interpret it the same way as in the full success case. """ - def __init__( self, *, partial_success: global___ExportMetricsPartialSuccess | None = ..., ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "partial_success", b"partial_success" - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "partial_success", b"partial_success" - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ... global___ExportMetricsServiceResponse = ExportMetricsServiceResponse @@ -136,14 +112,6 @@ class ExportMetricsPartialSuccess(google.protobuf.message.Message): rejected_data_points: builtins.int = ..., error_message: builtins.str = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "error_message", - b"error_message", - "rejected_data_points", - b"rejected_data_points", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_data_points", b"rejected_data_points"]) -> None: ... 
global___ExportMetricsPartialSuccess = ExportMetricsPartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py index fcf72343f8b..f124bfe4adc 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py @@ -3,35 +3,30 @@ import grpc import warnings -from opentelemetry.proto.collector.metrics.v1 import ( - metrics_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2, -) +from opentelemetry.proto.collector.metrics.v1 import metrics_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2 -GRPC_GENERATED_VERSION = "1.63.2" +GRPC_GENERATED_VERSION = '1.63.2' GRPC_VERSION = grpc.__version__ -EXPECTED_ERROR_RELEASE = "1.65.0" -SCHEDULED_RELEASE_DATE = "June 25, 2024" +EXPECTED_ERROR_RELEASE = '1.65.0' +SCHEDULED_RELEASE_DATE = 'June 25, 2024' _version_not_supported = False try: from grpc._utilities import first_version_is_lower - - _version_not_supported = first_version_is_lower( - GRPC_VERSION, GRPC_GENERATED_VERSION - ) + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) except ImportError: _version_not_supported = True if _version_not_supported: warnings.warn( - f"The grpc package installed is at version {GRPC_VERSION}," - + f" but the generated code in opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py depends on" - + f" grpcio>={GRPC_GENERATED_VERSION}." - + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" - + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." - + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," - + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", - RuntimeWarning, + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' + + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', + RuntimeWarning ) @@ -48,11 +43,10 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Export = channel.unary_unary( - "/opentelemetry.proto.collector.metrics.v1.MetricsService/Export", - request_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString, - response_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString, - _registered_method=True, - ) + '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export', + request_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString, + _registered_method=True) class MetricsServiceServicer(object): @@ -62,30 +56,26 @@ class MetricsServiceServicer(object): """ def Export(self, request, context): - """For performance reasons, it is recommended to keep this RPC - alive for the entire life of the application. - """ + """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def add_MetricsServiceServicer_to_server(servicer, server): rpc_method_handlers = { - "Export": grpc.unary_unary_rpc_method_handler( - servicer.Export, - request_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.FromString, - response_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.SerializeToString, - ), + 'Export': grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - "opentelemetry.proto.collector.metrics.v1.MetricsService", - rpc_method_handlers, - ) + 'opentelemetry.proto.collector.metrics.v1.MetricsService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class MetricsService(object): """Service that can be used to push metrics between one Application instrumented with OpenTelemetry and a collector, or between a collector and a @@ -93,22 +83,20 @@ class MetricsService(object): """ @staticmethod - def Export( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): + def Export(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary( request, target, - "/opentelemetry.proto.collector.metrics.v1.MetricsService/Export", + '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export', opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString, opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString, options, @@ -119,5 +107,4 @@ def Export( wait_for_ready, timeout, metadata, - _registered_method=True, - ) + _registered_method=True) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2.py new file mode 100644 index 00000000000..9e2f6198299 --- /dev/null +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: opentelemetry/proto/collector/profiles/v1development/profiles_service.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.profiles.v1development import profiles_pb2 as opentelemetry_dot_proto_dot_profiles_dot_v1development_dot_profiles__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nKopentelemetry/proto/collector/profiles/v1development/profiles_service.proto\x12\x34opentelemetry.proto.collector.profiles.v1development\x1a\x39opentelemetry/proto/profiles/v1development/profiles.proto\"\xcb\x01\n\x1c\x45xportProfilesServiceRequest\x12W\n\x11resource_profiles\x18\x01 \x03(\x0b\x32<.opentelemetry.proto.profiles.v1development.ResourceProfiles\x12R\n\ndictionary\x18\x02 \x01(\x0b\x32>.opentelemetry.proto.profiles.v1development.ProfilesDictionary\"\x8c\x01\n\x1d\x45xportProfilesServiceResponse\x12k\n\x0fpartial_success\x18\x01 \x01(\x0b\x32R.opentelemetry.proto.collector.profiles.v1development.ExportProfilesPartialSuccess\"P\n\x1c\x45xportProfilesPartialSuccess\x12\x19\n\x11rejected_profiles\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 
\x01(\t2\xc7\x01\n\x0fProfilesService\x12\xb3\x01\n\x06\x45xport\x12R.opentelemetry.proto.collector.profiles.v1development.ExportProfilesServiceRequest\x1aS.opentelemetry.proto.collector.profiles.v1development.ExportProfilesServiceResponse\"\x00\x42\xc9\x01\n7io.opentelemetry.proto.collector.profiles.v1developmentB\x14ProfilesServiceProtoP\x01Z?go.opentelemetry.io/proto/otlp/collector/profiles/v1development\xaa\x02\x34OpenTelemetry.Proto.Collector.Profiles.V1Developmentb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.profiles.v1development.profiles_service_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n7io.opentelemetry.proto.collector.profiles.v1developmentB\024ProfilesServiceProtoP\001Z?go.opentelemetry.io/proto/otlp/collector/profiles/v1development\252\0024OpenTelemetry.Proto.Collector.Profiles.V1Development' + _globals['_EXPORTPROFILESSERVICEREQUEST']._serialized_start=193 + _globals['_EXPORTPROFILESSERVICEREQUEST']._serialized_end=396 + _globals['_EXPORTPROFILESSERVICERESPONSE']._serialized_start=399 + _globals['_EXPORTPROFILESSERVICERESPONSE']._serialized_end=539 + _globals['_EXPORTPROFILESPARTIALSUCCESS']._serialized_start=541 + _globals['_EXPORTPROFILESPARTIALSUCCESS']._serialized_end=621 + _globals['_PROFILESSERVICE']._serialized_start=624 + _globals['_PROFILESSERVICE']._serialized_end=823 +# @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2.pyi new file mode 100644 index 00000000000..e8b7a82095c --- /dev/null +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2.pyi @@ -0,0 +1,123 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2023, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import opentelemetry.proto.profiles.v1development.profiles_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class ExportProfilesServiceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESOURCE_PROFILES_FIELD_NUMBER: builtins.int + DICTIONARY_FIELD_NUMBER: builtins.int + @property + def resource_profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.profiles.v1development.profiles_pb2.ResourceProfiles]: + """An array of ResourceProfiles. 
+ For data coming from a single resource this array will typically contain one + element. Intermediary nodes (such as OpenTelemetry Collector) that receive + data from multiple origins typically batch the data before forwarding further and + in that case this array will contain multiple elements. + """ + @property + def dictionary(self) -> opentelemetry.proto.profiles.v1development.profiles_pb2.ProfilesDictionary: + """The reference table containing all data shared by profiles across the message being sent.""" + def __init__( + self, + *, + resource_profiles: collections.abc.Iterable[opentelemetry.proto.profiles.v1development.profiles_pb2.ResourceProfiles] | None = ..., + dictionary: opentelemetry.proto.profiles.v1development.profiles_pb2.ProfilesDictionary | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary", "resource_profiles", b"resource_profiles"]) -> None: ... + +global___ExportProfilesServiceRequest = ExportProfilesServiceRequest + +@typing_extensions.final +class ExportProfilesServiceResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int + @property + def partial_success(self) -> global___ExportProfilesPartialSuccess: + """The details of a partially successful export request. + + If the request is only partially accepted + (i.e. when the server accepts only parts of the data and rejects the rest) + the server MUST initialize the `partial_success` field and MUST + set the `rejected_` with the number of items it rejected. + + Servers MAY also make use of the `partial_success` field to convey + warnings/suggestions to senders even when the request was fully accepted. + In such cases, the `rejected_` MUST have a value of `0` and + the `error_message` MUST be non-empty. + + A `partial_success` message with an empty value (rejected_ = 0 and + `error_message` = "") is equivalent to it not being set/present. Senders + SHOULD interpret it the same way as in the full success case. + """ + def __init__( + self, + *, + partial_success: global___ExportProfilesPartialSuccess | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ... + +global___ExportProfilesServiceResponse = ExportProfilesServiceResponse + +@typing_extensions.final +class ExportProfilesPartialSuccess(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REJECTED_PROFILES_FIELD_NUMBER: builtins.int + ERROR_MESSAGE_FIELD_NUMBER: builtins.int + rejected_profiles: builtins.int + """The number of rejected profiles. + + A `rejected_` field holding a `0` value indicates that the + request was fully accepted. + """ + error_message: builtins.str + """A developer-facing human-readable message in English. It should be used + either to explain why the server rejected parts of the data during a partial + success or to convey warnings/suggestions during a full success. The message + should offer guidance on how users can address such issues. + + error_message is an optional field. An error_message with an empty value + is equivalent to it not being set. 
+ """ + def __init__( + self, + *, + rejected_profiles: builtins.int = ..., + error_message: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_profiles", b"rejected_profiles"]) -> None: ... + +global___ExportProfilesPartialSuccess = ExportProfilesPartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2_grpc.py new file mode 100644 index 00000000000..3742ae591e3 --- /dev/null +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2_grpc.py @@ -0,0 +1,107 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + +from opentelemetry.proto.collector.profiles.v1development import profiles_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2 + +GRPC_GENERATED_VERSION = '1.63.2' +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = '1.65.0' +SCHEDULED_RELEASE_DATE = 'June 25, 2024' +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' + + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', + RuntimeWarning + ) + + +class ProfilesServiceStub(object): + """Service that can be used to push profiles between one Application instrumented with + OpenTelemetry and a collector, or between a collector and a central collector. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Export = channel.unary_unary( + '/opentelemetry.proto.collector.profiles.v1development.ProfilesService/Export', + request_serializer=opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceResponse.FromString, + _registered_method=True) + + +class ProfilesServiceServicer(object): + """Service that can be used to push profiles between one Application instrumented with + OpenTelemetry and a collector, or between a collector and a central collector. 
+ """ + + def Export(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ProfilesServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Export': grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'opentelemetry.proto.collector.profiles.v1development.ProfilesService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class ProfilesService(object): + """Service that can be used to push profiles between one Application instrumented with + OpenTelemetry and a collector, or between a collector and a central collector. + """ + + @staticmethod + def Export(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/opentelemetry.proto.collector.profiles.v1development.ProfilesService/Export', + opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceRequest.SerializeToString, + opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py index 8ced74fb4c2..c0ad62bfdbd 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py @@ -7,39 +7,28 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.trace.v1 import ( - trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2, -) +from opentelemetry.proto.trace.v1 import trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans"x\n\x1a\x45xportTraceServiceResponse\x12Z\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x41.opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess"J\n\x19\x45xportTracePartialSuccess\x12\x16\n\x0erejected_spans\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 
\x01(\t2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse"\x00\x42\x9c\x01\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\xaa\x02&OpenTelemetry.Proto.Collector.Trace.V1b\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto\"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"x\n\x1a\x45xportTraceServiceResponse\x12Z\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x41.opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess\"J\n\x19\x45xportTracePartialSuccess\x12\x16\n\x0erejected_spans\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse\"\x00\x42\x9c\x01\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\xaa\x02&OpenTelemetry.Proto.Collector.Trace.V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, - "opentelemetry.proto.collector.trace.v1.trace_service_pb2", - _globals, -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.trace.v1.trace_service_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = ( - b"\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\252\002&OpenTelemetry.Proto.Collector.Trace.V1" - ) - _globals["_EXPORTTRACESERVICEREQUEST"]._serialized_start = 144 - _globals["_EXPORTTRACESERVICEREQUEST"]._serialized_end = 240 - _globals["_EXPORTTRACESERVICERESPONSE"]._serialized_start = 242 - _globals["_EXPORTTRACESERVICERESPONSE"]._serialized_end = 362 - _globals["_EXPORTTRACEPARTIALSUCCESS"]._serialized_start = 364 - _globals["_EXPORTTRACEPARTIALSUCCESS"]._serialized_end = 438 - _globals["_TRACESERVICE"]._serialized_start = 441 - _globals["_TRACESERVICE"]._serialized_end = 603 + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\252\002&OpenTelemetry.Proto.Collector.Trace.V1' + _globals['_EXPORTTRACESERVICEREQUEST']._serialized_start=144 + _globals['_EXPORTTRACESERVICEREQUEST']._serialized_end=240 + _globals['_EXPORTTRACESERVICERESPONSE']._serialized_start=242 + _globals['_EXPORTTRACESERVICERESPONSE']._serialized_end=362 + _globals['_EXPORTTRACEPARTIALSUCCESS']._serialized_start=364 + _globals['_EXPORTTRACEPARTIALSUCCESS']._serialized_end=438 + _globals['_TRACESERVICE']._serialized_start=441 + _globals['_TRACESERVICE']._serialized_end=603 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi index 
18772275891..ceb4db5213f 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ - import builtins import collections.abc import google.protobuf.descriptor @@ -24,7 +23,10 @@ import google.protobuf.message import opentelemetry.proto.trace.v1.trace_pb2 import sys -import typing as typing_extensions +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions DESCRIPTOR: google.protobuf.descriptor.FileDescriptor @@ -34,34 +36,19 @@ class ExportTraceServiceRequest(google.protobuf.message.Message): RESOURCE_SPANS_FIELD_NUMBER: builtins.int @property - def resource_spans( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans - ]: + def resource_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans]: """An array of ResourceSpans. For data coming from a single resource this array will typically contain one element. Intermediary nodes (such as OpenTelemetry Collector) that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - def __init__( self, *, - resource_spans: ( - collections.abc.Iterable[ - opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans - ] - | None - ) = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource_spans", b"resource_spans" - ], + resource_spans: collections.abc.Iterable[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource_spans", b"resource_spans"]) -> None: ... global___ExportTraceServiceRequest = ExportTraceServiceRequest @@ -88,24 +75,13 @@ class ExportTraceServiceResponse(google.protobuf.message.Message): `error_message` = "") is equivalent to it not being set/present. Senders SHOULD interpret it the same way as in the full success case. """ - def __init__( self, *, partial_success: global___ExportTracePartialSuccess | None = ..., ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "partial_success", b"partial_success" - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "partial_success", b"partial_success" - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ... global___ExportTraceServiceResponse = ExportTraceServiceResponse @@ -136,14 +112,6 @@ class ExportTracePartialSuccess(google.protobuf.message.Message): rejected_spans: builtins.int = ..., error_message: builtins.str = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "error_message", - b"error_message", - "rejected_spans", - b"rejected_spans", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_spans", b"rejected_spans"]) -> None: ... 
global___ExportTracePartialSuccess = ExportTracePartialSuccess diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py index f58f7ef6408..f1cdf0355b4 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py +++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py @@ -3,35 +3,30 @@ import grpc import warnings -from opentelemetry.proto.collector.trace.v1 import ( - trace_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2, -) +from opentelemetry.proto.collector.trace.v1 import trace_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2 -GRPC_GENERATED_VERSION = "1.63.2" +GRPC_GENERATED_VERSION = '1.63.2' GRPC_VERSION = grpc.__version__ -EXPECTED_ERROR_RELEASE = "1.65.0" -SCHEDULED_RELEASE_DATE = "June 25, 2024" +EXPECTED_ERROR_RELEASE = '1.65.0' +SCHEDULED_RELEASE_DATE = 'June 25, 2024' _version_not_supported = False try: from grpc._utilities import first_version_is_lower - - _version_not_supported = first_version_is_lower( - GRPC_VERSION, GRPC_GENERATED_VERSION - ) + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) except ImportError: _version_not_supported = True if _version_not_supported: warnings.warn( - f"The grpc package installed is at version {GRPC_VERSION}," - + f" but the generated code in opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py depends on" - + f" grpcio>={GRPC_GENERATED_VERSION}." - + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" - + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." - + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," - + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", - RuntimeWarning, + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' + + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', + RuntimeWarning ) @@ -48,11 +43,10 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Export = channel.unary_unary( - "/opentelemetry.proto.collector.trace.v1.TraceService/Export", - request_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString, - response_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString, - _registered_method=True, - ) + '/opentelemetry.proto.collector.trace.v1.TraceService/Export', + request_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString, + _registered_method=True) class TraceServiceServicer(object): @@ -62,30 +56,26 @@ class TraceServiceServicer(object): """ def Export(self, request, context): - """For performance reasons, it is recommended to keep this RPC - alive for the entire life of the application. - """ + """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def add_TraceServiceServicer_to_server(servicer, server): rpc_method_handlers = { - "Export": grpc.unary_unary_rpc_method_handler( - servicer.Export, - request_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.FromString, - response_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.SerializeToString, - ), + 'Export': grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - "opentelemetry.proto.collector.trace.v1.TraceService", - rpc_method_handlers, - ) + 'opentelemetry.proto.collector.trace.v1.TraceService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class TraceService(object): """Service that can be used to push spans between one Application instrumented with OpenTelemetry and a collector, or between a collector and a central collector (in this @@ -93,22 +83,20 @@ class TraceService(object): """ @staticmethod - def Export( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): + def Export(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary( request, target, - "/opentelemetry.proto.collector.trace.v1.TraceService/Export", + '/opentelemetry.proto.collector.trace.v1.TraceService/Export', opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString, opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString, options, @@ -119,5 +107,4 @@ def Export( wait_for_ready, timeout, metadata, - _registered_method=True, - ) + _registered_method=True) diff --git a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py index f55ae932065..0ea36443bcc 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py @@ -7,34 +7,31 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 \x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue"\x94\x01\n\x14InstrumentationScope\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\rB{\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z(go.opentelemetry.io/proto/otlp/common/v1\xaa\x02\x1dOpenTelemetry.Proto.Common.V1b\x06proto3' -) + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1\"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 
\x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 \x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value\"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"\x94\x01\n\x14InstrumentationScope\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\"X\n\tEntityRef\x12\x12\n\nschema_url\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07id_keys\x18\x03 \x03(\t\x12\x18\n\x10\x64\x65scription_keys\x18\x04 \x03(\tB{\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z(go.opentelemetry.io/proto/otlp/common/v1\xaa\x02\x1dOpenTelemetry.Proto.Common.V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, "opentelemetry.proto.common.v1.common_pb2", _globals -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.common.v1.common_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = ( - b"\n io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z(go.opentelemetry.io/proto/otlp/common/v1\252\002\035OpenTelemetry.Proto.Common.V1" - ) - _globals["_ANYVALUE"]._serialized_start = 78 - _globals["_ANYVALUE"]._serialized_end = 346 - _globals["_ARRAYVALUE"]._serialized_start = 348 - _globals["_ARRAYVALUE"]._serialized_end = 417 - _globals["_KEYVALUELIST"]._serialized_start = 419 - _globals["_KEYVALUELIST"]._serialized_end = 490 - _globals["_KEYVALUE"]._serialized_start = 492 - _globals["_KEYVALUE"]._serialized_end = 571 - _globals["_INSTRUMENTATIONSCOPE"]._serialized_start = 574 - _globals["_INSTRUMENTATIONSCOPE"]._serialized_end = 722 + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z(go.opentelemetry.io/proto/otlp/common/v1\252\002\035OpenTelemetry.Proto.Common.V1' + _globals['_ANYVALUE']._serialized_start=78 + _globals['_ANYVALUE']._serialized_end=346 + _globals['_ARRAYVALUE']._serialized_start=348 + _globals['_ARRAYVALUE']._serialized_end=417 + _globals['_KEYVALUELIST']._serialized_start=419 + _globals['_KEYVALUELIST']._serialized_end=490 + _globals['_KEYVALUE']._serialized_start=492 + _globals['_KEYVALUE']._serialized_end=571 + _globals['_INSTRUMENTATIONSCOPE']._serialized_start=574 + _globals['_INSTRUMENTATIONSCOPE']._serialized_end=722 + _globals['_ENTITYREF']._serialized_start=724 + _globals['_ENTITYREF']._serialized_end=812 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi index e4dd2e4e8d7..1f79b5b253c 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied. See the License for the specific language governing permissions and limitations under the License. """ - import builtins import collections.abc import google.protobuf.descriptor @@ -23,7 +22,10 @@ import google.protobuf.internal.containers import google.protobuf.message import sys -import typing as typing_extensions +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions DESCRIPTOR: google.protobuf.descriptor.FileDescriptor @@ -63,62 +65,9 @@ class AnyValue(google.protobuf.message.Message): kvlist_value: global___KeyValueList | None = ..., bytes_value: builtins.bytes = ..., ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "array_value", - b"array_value", - "bool_value", - b"bool_value", - "bytes_value", - b"bytes_value", - "double_value", - b"double_value", - "int_value", - b"int_value", - "kvlist_value", - b"kvlist_value", - "string_value", - b"string_value", - "value", - b"value", - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "array_value", - b"array_value", - "bool_value", - b"bool_value", - "bytes_value", - b"bytes_value", - "double_value", - b"double_value", - "int_value", - b"int_value", - "kvlist_value", - b"kvlist_value", - "string_value", - b"string_value", - "value", - b"value", - ], - ) -> None: ... - def WhichOneof( - self, oneof_group: typing_extensions.Literal["value", b"value"] - ) -> ( - typing_extensions.Literal[ - "string_value", - "bool_value", - "int_value", - "double_value", - "array_value", - "kvlist_value", - "bytes_value", - ] - | None - ): ... + def HasField(self, field_name: typing_extensions.Literal["array_value", b"array_value", "bool_value", b"bool_value", "bytes_value", b"bytes_value", "double_value", b"double_value", "int_value", b"int_value", "kvlist_value", b"kvlist_value", "string_value", b"string_value", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["array_value", b"array_value", "bool_value", b"bool_value", "bytes_value", b"bytes_value", "double_value", b"double_value", "int_value", b"int_value", "kvlist_value", b"kvlist_value", "string_value", b"string_value", "value", b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["string_value", "bool_value", "int_value", "double_value", "array_value", "kvlist_value", "bytes_value"] | None: ... global___AnyValue = AnyValue @@ -132,21 +81,14 @@ class ArrayValue(google.protobuf.message.Message): VALUES_FIELD_NUMBER: builtins.int @property - def values( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___AnyValue - ]: + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AnyValue]: """Array of values. The array may be empty (contain 0 elements).""" - def __init__( self, *, values: collections.abc.Iterable[global___AnyValue] | None = ..., ) -> None: ... - def ClearField( - self, field_name: typing_extensions.Literal["values", b"values"] - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ... 
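
# A quick sketch of the oneof behavior that the collapsed AnyValue stubs above
# type-check; the attribute key and values below are illustrative only.
from opentelemetry.proto.common.v1 import common_pb2

value = common_pb2.AnyValue(string_value="GET")
assert value.WhichOneof("value") == "string_value"
value.int_value = 42  # assigning another member switches the active oneof arm
assert value.HasField("int_value") and not value.HasField("string_value")

# KeyValue pairs a key with an AnyValue, the shape used for all attributes.
kv = common_pb2.KeyValue(
    key="http.request.method", value=common_pb2.AnyValue(string_value="GET")
)
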
global___ArrayValue = ArrayValue @@ -163,25 +105,18 @@ class KeyValueList(google.protobuf.message.Message): VALUES_FIELD_NUMBER: builtins.int @property - def values( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___KeyValue - ]: + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: """A collection of key/value pairs of key-value pairs. The list may be empty (may contain 0 elements). The keys MUST be unique (it is not allowed to have more than one value with the same key). """ - def __init__( self, *, values: collections.abc.Iterable[global___KeyValue] | None = ..., ) -> None: ... - def ClearField( - self, field_name: typing_extensions.Literal["values", b"values"] - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ... global___KeyValueList = KeyValueList @@ -204,15 +139,8 @@ class KeyValue(google.protobuf.message.Message): key: builtins.str = ..., value: global___AnyValue | None = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["value", b"value"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "key", b"key", "value", b"value" - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... global___KeyValue = KeyValue @@ -232,11 +160,7 @@ class InstrumentationScope(google.protobuf.message.Message): """An empty instrumentation scope name means the name is unknown.""" version: builtins.str @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: """Additional attributes that describe the scope. [Optional]. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). @@ -250,18 +174,62 @@ class InstrumentationScope(google.protobuf.message.Message): attributes: collections.abc.Iterable[global___KeyValue] | None = ..., dropped_attributes_count: builtins.int = ..., ) -> None: ... - def ClearField( + def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "name", b"name", "version", b"version"]) -> None: ... + +global___InstrumentationScope = InstrumentationScope + +@typing_extensions.final +class EntityRef(google.protobuf.message.Message): + """A reference to an Entity. + Entity represents an object of interest associated with produced telemetry: e.g spans, metrics, profiles, or logs. + + Status: [Development] + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SCHEMA_URL_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + ID_KEYS_FIELD_NUMBER: builtins.int + DESCRIPTION_KEYS_FIELD_NUMBER: builtins.int + schema_url: builtins.str + """The Schema URL, if known. This is the identifier of the Schema that the entity data + is recorded in. To learn more about Schema URL see + https://opentelemetry.io/docs/specs/otel/schemas/#schema-url + + This schema_url applies to the data in this message and to the Resource attributes + referenced by id_keys and description_keys. 
+ TODO: discuss if we are happy with this somewhat complicated definition of what + the schema_url applies to. + + This field obsoletes the schema_url field in ResourceMetrics/ResourceSpans/ResourceLogs. + """ + type: builtins.str + """Defines the type of the entity. MUST not change during the lifetime of the entity. + For example: "service" or "host". This field is required and MUST not be empty + for valid entities. + """ + @property + def id_keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Attribute Keys that identify the entity. + MUST not change during the lifetime of the entity. The Id must contain at least one attribute. + These keys MUST exist in the containing {message}.attributes. + """ + @property + def description_keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Descriptive (non-identifying) attribute keys of the entity. + MAY change over the lifetime of the entity. MAY be empty. + These attribute keys are not part of entity's identity. + These keys MUST exist in the containing {message}.attributes. + """ + def __init__( self, - field_name: typing_extensions.Literal[ - "attributes", - b"attributes", - "dropped_attributes_count", - b"dropped_attributes_count", - "name", - b"name", - "version", - b"version", - ], + *, + schema_url: builtins.str = ..., + type: builtins.str = ..., + id_keys: collections.abc.Iterable[builtins.str] | None = ..., + description_keys: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["description_keys", b"description_keys", "id_keys", b"id_keys", "schema_url", b"schema_url", "type", b"type"]) -> None: ... -global___InstrumentationScope = InstrumentationScope +global___EntityRef = EntityRef diff --git a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py index 715a741a2c6..3fe64e28961 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py @@ -7,44 +7,33 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.common.v1 import ( - common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, -) -from opentelemetry.proto.resource.v1 import ( - resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, -) +from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 +from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.logs.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"L\n\x08LogsData\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs"\xa3\x01\n\x0cResourceLogs\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12:\n\nscope_logs\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.ScopeLogs\x12\x12\n\nschema_url\x18\x03 
\x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\xa0\x01\n\tScopeLogs\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t"\xef\x02\n\tLogRecord\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x1f\n\x17observed_time_unix_nano\x18\x0b \x01(\x06\x12\x44\n\x0fseverity_number\x18\x02 \x01(\x0e\x32+.opentelemetry.proto.logs.v1.SeverityNumber\x12\x15\n\rseverity_text\x18\x03 \x01(\t\x12\x35\n\x04\x62ody\x18\x05 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\x12;\n\nattributes\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x07 \x01(\r\x12\r\n\x05\x66lags\x18\x08 \x01(\x07\x12\x10\n\x08trace_id\x18\t \x01(\x0c\x12\x0f\n\x07span_id\x18\n \x01(\x0cJ\x04\x08\x04\x10\x05*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\x00\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*Y\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAGS_DO_NOT_USE\x10\x00\x12&\n!LOG_RECORD_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x42s\n\x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1\xaa\x02\x1bOpenTelemetry.Proto.Logs.V1b\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.logs.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"L\n\x08LogsData\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"\xa3\x01\n\x0cResourceLogs\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12:\n\nscope_logs\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.ScopeLogs\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\xa0\x01\n\tScopeLogs\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x83\x03\n\tLogRecord\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x1f\n\x17observed_time_unix_nano\x18\x0b \x01(\x06\x12\x44\n\x0fseverity_number\x18\x02 \x01(\x0e\x32+.opentelemetry.proto.logs.v1.SeverityNumber\x12\x15\n\rseverity_text\x18\x03 \x01(\t\x12\x35\n\x04\x62ody\x18\x05 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\x12;\n\nattributes\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 
\n\x18\x64ropped_attributes_count\x18\x07 \x01(\r\x12\r\n\x05\x66lags\x18\x08 \x01(\x07\x12\x10\n\x08trace_id\x18\t \x01(\x0c\x12\x0f\n\x07span_id\x18\n \x01(\x0c\x12\x12\n\nevent_name\x18\x0c \x01(\tJ\x04\x08\x04\x10\x05*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\x00\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*Y\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAGS_DO_NOT_USE\x10\x00\x12&\n!LOG_RECORD_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x42s\n\x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1\xaa\x02\x1bOpenTelemetry.Proto.Logs.V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, "opentelemetry.proto.logs.v1.logs_pb2", _globals -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.logs.v1.logs_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = ( - b"\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z&go.opentelemetry.io/proto/otlp/logs/v1\252\002\033OpenTelemetry.Proto.Logs.V1" - ) - _globals["_SEVERITYNUMBER"]._serialized_start = 941 - _globals["_SEVERITYNUMBER"]._serialized_end = 1648 - _globals["_LOGRECORDFLAGS"]._serialized_start = 1650 - _globals["_LOGRECORDFLAGS"]._serialized_end = 1739 - _globals["_LOGSDATA"]._serialized_start = 163 - _globals["_LOGSDATA"]._serialized_end = 239 - _globals["_RESOURCELOGS"]._serialized_start = 242 - _globals["_RESOURCELOGS"]._serialized_end = 405 - _globals["_SCOPELOGS"]._serialized_start = 408 - _globals["_SCOPELOGS"]._serialized_end = 568 - _globals["_LOGRECORD"]._serialized_start = 571 - _globals["_LOGRECORD"]._serialized_end = 938 + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z&go.opentelemetry.io/proto/otlp/logs/v1\252\002\033OpenTelemetry.Proto.Logs.V1' + _globals['_SEVERITYNUMBER']._serialized_start=961 + _globals['_SEVERITYNUMBER']._serialized_end=1668 + _globals['_LOGRECORDFLAGS']._serialized_start=1670 + _globals['_LOGRECORDFLAGS']._serialized_end=1759 + _globals['_LOGSDATA']._serialized_start=163 + _globals['_LOGSDATA']._serialized_end=239 + _globals['_RESOURCELOGS']._serialized_start=242 + _globals['_RESOURCELOGS']._serialized_end=405 + _globals['_SCOPELOGS']._serialized_start=408 + _globals['_SCOPELOGS']._serialized_end=568 + _globals['_LOGRECORD']._serialized_start=571 + 
_globals['_LOGRECORD']._serialized_end=958 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi index b0232c2b6b3..0fa9cc363e9 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.pyi @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ - import builtins import collections.abc import google.protobuf.descriptor @@ -38,12 +37,7 @@ class _SeverityNumber: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _SeverityNumberEnumTypeWrapper( - google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ - _SeverityNumber.ValueType - ], - builtins.type, -): +class _SeverityNumberEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SeverityNumber.ValueType], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SEVERITY_NUMBER_UNSPECIFIED: _SeverityNumber.ValueType # 0 """UNSPECIFIED is the default SeverityNumber, it MUST NOT be used.""" @@ -72,9 +66,7 @@ class _SeverityNumberEnumTypeWrapper( SEVERITY_NUMBER_FATAL3: _SeverityNumber.ValueType # 23 SEVERITY_NUMBER_FATAL4: _SeverityNumber.ValueType # 24 -class SeverityNumber( - _SeverityNumber, metaclass=_SeverityNumberEnumTypeWrapper -): +class SeverityNumber(_SeverityNumber, metaclass=_SeverityNumberEnumTypeWrapper): """Possible values for LogRecord.SeverityNumber.""" SEVERITY_NUMBER_UNSPECIFIED: SeverityNumber.ValueType # 0 @@ -109,12 +101,7 @@ class _LogRecordFlags: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _LogRecordFlagsEnumTypeWrapper( - google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ - _LogRecordFlags.ValueType - ], - builtins.type, -): +class _LogRecordFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogRecordFlags.ValueType], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor LOG_RECORD_FLAGS_DO_NOT_USE: _LogRecordFlags.ValueType # 0 """The zero value for the enum. Should not be used for comparisons. @@ -123,9 +110,7 @@ class _LogRecordFlagsEnumTypeWrapper( LOG_RECORD_FLAGS_TRACE_FLAGS_MASK: _LogRecordFlags.ValueType # 255 """Bits 0-7 are used for trace flags.""" -class LogRecordFlags( - _LogRecordFlags, metaclass=_LogRecordFlagsEnumTypeWrapper -): +class LogRecordFlags(_LogRecordFlags, metaclass=_LogRecordFlagsEnumTypeWrapper): """LogRecordFlags represents constants used to interpret the LogRecord.flags field, which is protobuf 'fixed32' type and is to be used as bit-fields. Each non-zero value defined in this enum is @@ -161,31 +146,19 @@ class LogsData(google.protobuf.message.Message): RESOURCE_LOGS_FIELD_NUMBER: builtins.int @property - def resource_logs( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___ResourceLogs - ]: + def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceLogs]: """An array of ResourceLogs. For data coming from a single resource this array will typically contain one element. Intermediary nodes that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. 
""" - def __init__( self, *, - resource_logs: ( - collections.abc.Iterable[global___ResourceLogs] | None - ) = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource_logs", b"resource_logs" - ], + resource_logs: collections.abc.Iterable[global___ResourceLogs] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource_logs", b"resource_logs"]) -> None: ... global___LogsData = LogsData @@ -199,23 +172,17 @@ class ResourceLogs(google.protobuf.message.Message): SCOPE_LOGS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def resource( - self, - ) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: + def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: """The resource for the logs in this message. If this field is not set then resource info is unknown. """ - @property - def scope_logs( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___ScopeLogs - ]: + def scope_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeLogs]: """A list of ScopeLogs that originate from a resource.""" schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the resource data - is recorded in. To learn more about Schema URL see + is recorded in. Notably, the last part of the URL path is the version number of the + schema: http[s]://server[:port]/path/. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to the data in the "resource" field. It does not apply to the data in the "scope_logs" field which have their own schema_url field. @@ -223,26 +190,12 @@ class ResourceLogs(google.protobuf.message.Message): def __init__( self, *, - resource: ( - opentelemetry.proto.resource.v1.resource_pb2.Resource | None - ) = ..., + resource: opentelemetry.proto.resource.v1.resource_pb2.Resource | None = ..., scope_logs: collections.abc.Iterable[global___ScopeLogs] | None = ..., schema_url: builtins.str = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["resource", b"resource"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource", - b"resource", - "schema_url", - b"schema_url", - "scope_logs", - b"scope_logs", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["resource", b"resource"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["resource", b"resource", "schema_url", b"schema_url", "scope_logs", b"scope_logs"]) -> None: ... global___ResourceLogs = ResourceLogs @@ -256,51 +209,30 @@ class ScopeLogs(google.protobuf.message.Message): LOG_RECORDS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def scope( - self, - ) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: + def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: """The instrumentation scope information for the logs in this message. Semantically when InstrumentationScope isn't set, it is equivalent with an empty instrumentation scope name (unknown). 
""" - @property - def log_records( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___LogRecord - ]: + def log_records(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LogRecord]: """A list of log records.""" schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the log data - is recorded in. To learn more about Schema URL see + is recorded in. Notably, the last part of the URL path is the version number of the + schema: http[s]://server[:port]/path/. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to all logs in the "logs" field. """ def __init__( self, *, - scope: ( - opentelemetry.proto.common.v1.common_pb2.InstrumentationScope - | None - ) = ..., + scope: opentelemetry.proto.common.v1.common_pb2.InstrumentationScope | None = ..., log_records: collections.abc.Iterable[global___LogRecord] | None = ..., schema_url: builtins.str = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["scope", b"scope"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "log_records", - b"log_records", - "schema_url", - b"schema_url", - "scope", - b"scope", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["scope", b"scope"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["log_records", b"log_records", "schema_url", b"schema_url", "scope", b"scope"]) -> None: ... global___ScopeLogs = ScopeLogs @@ -322,6 +254,7 @@ class LogRecord(google.protobuf.message.Message): FLAGS_FIELD_NUMBER: builtins.int TRACE_ID_FIELD_NUMBER: builtins.int SPAN_ID_FIELD_NUMBER: builtins.int + EVENT_NAME_FIELD_NUMBER: builtins.int time_unix_nano: builtins.int """time_unix_nano is the time when the event occurred. Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970. @@ -358,13 +291,8 @@ class LogRecord(google.protobuf.message.Message): string message (including multi-line) describing the event in a free form or it can be a structured data composed of arrays and maps of other values. [Optional]. """ - @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """Additional attributes that describe the specific event occurrence. [Optional]. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). @@ -404,6 +332,18 @@ class LogRecord(google.protobuf.message.Message): - the field is not present, - the field contains an invalid value. """ + event_name: builtins.str + """A unique identifier of event category/type. + All events with the same event_name are expected to conform to the same + schema for both their attributes and their body. + + Recommended to be fully qualified and short (no longer than 256 characters). + + Presence of event_name on the log record identifies this record + as an event. + + [Optional]. 
+ """ def __init__( self, *, @@ -412,44 +352,14 @@ class LogRecord(google.protobuf.message.Message): severity_number: global___SeverityNumber.ValueType = ..., severity_text: builtins.str = ..., body: opentelemetry.proto.common.v1.common_pb2.AnyValue | None = ..., - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., dropped_attributes_count: builtins.int = ..., flags: builtins.int = ..., trace_id: builtins.bytes = ..., span_id: builtins.bytes = ..., + event_name: builtins.str = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["body", b"body"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "attributes", - b"attributes", - "body", - b"body", - "dropped_attributes_count", - b"dropped_attributes_count", - "flags", - b"flags", - "observed_time_unix_nano", - b"observed_time_unix_nano", - "severity_number", - b"severity_number", - "severity_text", - b"severity_text", - "span_id", - b"span_id", - "time_unix_nano", - b"time_unix_nano", - "trace_id", - b"trace_id", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["body", b"body"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "body", b"body", "dropped_attributes_count", b"dropped_attributes_count", "event_name", b"event_name", "flags", b"flags", "observed_time_unix_nano", b"observed_time_unix_nano", "severity_number", b"severity_number", "severity_text", b"severity_text", "span_id", b"span_id", "time_unix_nano", b"time_unix_nano", "trace_id", b"trace_id"]) -> None: ... 
global___LogRecord = LogRecord diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py index 98ed5c33f59..a337a58476b 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py @@ -7,68 +7,57 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.common.v1 import ( - common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, -) -from opentelemetry.proto.resource.v1 import ( - resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, -) +from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 +from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n,opentelemetry/proto/metrics/v1/metrics.proto\x12\x1eopentelemetry.proto.metrics.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"X\n\x0bMetricsData\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics"\xaf\x01\n\x0fResourceMetrics\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x43\n\rscope_metrics\x18\x02 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.ScopeMetrics\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\x9f\x01\n\x0cScopeMetrics\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t"\xcd\x03\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04unit\x18\x03 \x01(\t\x12\x36\n\x05gauge\x18\x05 \x01(\x0b\x32%.opentelemetry.proto.metrics.v1.GaugeH\x00\x12\x32\n\x03sum\x18\x07 \x01(\x0b\x32#.opentelemetry.proto.metrics.v1.SumH\x00\x12>\n\thistogram\x18\t \x01(\x0b\x32).opentelemetry.proto.metrics.v1.HistogramH\x00\x12U\n\x15\x65xponential_histogram\x18\n \x01(\x0b\x32\x34.opentelemetry.proto.metrics.v1.ExponentialHistogramH\x00\x12:\n\x07summary\x18\x0b \x01(\x0b\x32\'.opentelemetry.proto.metrics.v1.SummaryH\x00\x12\x39\n\x08metadata\x18\x0c \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValueB\x06\n\x04\x64\x61taJ\x04\x08\x04\x10\x05J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\t"M\n\x05Gauge\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint"\xba\x01\n\x03Sum\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08"\xad\x01\n\tHistogram\x12G\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x32.opentelemetry.proto.metrics.v1.HistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality"\xc3\x01\n\x14\x45xponentialHistogram\x12R\n\x0b\x64\x61ta_points\x18\x01 
\x03(\x0b\x32=.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality"P\n\x07Summary\x12\x45\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x30.opentelemetry.proto.metrics.v1.SummaryDataPoint"\x86\x02\n\x0fNumberDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\x13\n\tas_double\x18\x04 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12;\n\texemplars\x18\x05 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\x08 \x01(\rB\x07\n\x05valueJ\x04\x08\x01\x10\x02"\xe6\x02\n\x12HistogramDataPoint\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12;\n\texemplars\x18\x08 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\n \x01(\r\x12\x10\n\x03min\x18\x0b \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\x0c \x01(\x01H\x02\x88\x01\x01\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_maxJ\x04\x08\x01\x10\x02"\xda\x04\n\x1d\x45xponentialHistogramDataPoint\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\r\n\x05scale\x18\x06 \x01(\x11\x12\x12\n\nzero_count\x18\x07 \x01(\x06\x12W\n\x08positive\x18\x08 \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12W\n\x08negative\x18\t \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12\r\n\x05\x66lags\x18\n \x01(\r\x12;\n\texemplars\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\x10\n\x03min\x18\x0c \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\r \x01(\x01H\x02\x88\x01\x01\x12\x16\n\x0ezero_threshold\x18\x0e \x01(\x01\x1a\x30\n\x07\x42uckets\x12\x0e\n\x06offset\x18\x01 \x01(\x11\x12\x15\n\rbucket_counts\x18\x02 \x03(\x04\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_max"\xc5\x02\n\x10SummaryDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12Y\n\x0fquantile_values\x18\x06 \x03(\x0b\x32@.opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile\x12\r\n\x05\x66lags\x18\x08 \x01(\r\x1a\x32\n\x0fValueAtQuantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01J\x04\x08\x01\x10\x02"\xc1\x01\n\x08\x45xemplar\x12\x44\n\x13\x66iltered_attributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\x13\n\tas_double\x18\x03 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 
\x01(\x0c\x42\x07\n\x05valueJ\x04\x08\x01\x10\x02*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02*^\n\x0e\x44\x61taPointFlags\x12\x1f\n\x1b\x44\x41TA_POINT_FLAGS_DO_NOT_USE\x10\x00\x12+\n\'DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK\x10\x01\x42\x7f\n!io.opentelemetry.proto.metrics.v1B\x0cMetricsProtoP\x01Z)go.opentelemetry.io/proto/otlp/metrics/v1\xaa\x02\x1eOpenTelemetry.Proto.Metrics.V1b\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,opentelemetry/proto/metrics/v1/metrics.proto\x12\x1eopentelemetry.proto.metrics.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"X\n\x0bMetricsData\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"\xaf\x01\n\x0fResourceMetrics\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x43\n\rscope_metrics\x18\x02 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.ScopeMetrics\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\x9f\x01\n\x0cScopeMetrics\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xcd\x03\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04unit\x18\x03 \x01(\t\x12\x36\n\x05gauge\x18\x05 \x01(\x0b\x32%.opentelemetry.proto.metrics.v1.GaugeH\x00\x12\x32\n\x03sum\x18\x07 \x01(\x0b\x32#.opentelemetry.proto.metrics.v1.SumH\x00\x12>\n\thistogram\x18\t \x01(\x0b\x32).opentelemetry.proto.metrics.v1.HistogramH\x00\x12U\n\x15\x65xponential_histogram\x18\n \x01(\x0b\x32\x34.opentelemetry.proto.metrics.v1.ExponentialHistogramH\x00\x12:\n\x07summary\x18\x0b \x01(\x0b\x32\'.opentelemetry.proto.metrics.v1.SummaryH\x00\x12\x39\n\x08metadata\x18\x0c \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValueB\x06\n\x04\x64\x61taJ\x04\x08\x04\x10\x05J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\t\"M\n\x05Gauge\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\"\xba\x01\n\x03Sum\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08\"\xad\x01\n\tHistogram\x12G\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x32.opentelemetry.proto.metrics.v1.HistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"\xc3\x01\n\x14\x45xponentialHistogram\x12R\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32=.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"P\n\x07Summary\x12\x45\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x30.opentelemetry.proto.metrics.v1.SummaryDataPoint\"\x86\x02\n\x0fNumberDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\x13\n\tas_double\x18\x04 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12;\n\texemplars\x18\x05 
\x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\x08 \x01(\rB\x07\n\x05valueJ\x04\x08\x01\x10\x02\"\xe6\x02\n\x12HistogramDataPoint\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12;\n\texemplars\x18\x08 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\n \x01(\r\x12\x10\n\x03min\x18\x0b \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\x0c \x01(\x01H\x02\x88\x01\x01\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_maxJ\x04\x08\x01\x10\x02\"\xda\x04\n\x1d\x45xponentialHistogramDataPoint\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\r\n\x05scale\x18\x06 \x01(\x11\x12\x12\n\nzero_count\x18\x07 \x01(\x06\x12W\n\x08positive\x18\x08 \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12W\n\x08negative\x18\t \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12\r\n\x05\x66lags\x18\n \x01(\r\x12;\n\texemplars\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\x10\n\x03min\x18\x0c \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\r \x01(\x01H\x02\x88\x01\x01\x12\x16\n\x0ezero_threshold\x18\x0e \x01(\x01\x1a\x30\n\x07\x42uckets\x12\x0e\n\x06offset\x18\x01 \x01(\x11\x12\x15\n\rbucket_counts\x18\x02 \x03(\x04\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_max\"\xc5\x02\n\x10SummaryDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12Y\n\x0fquantile_values\x18\x06 \x03(\x0b\x32@.opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile\x12\r\n\x05\x66lags\x18\x08 \x01(\r\x1a\x32\n\x0fValueAtQuantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01J\x04\x08\x01\x10\x02\"\xc1\x01\n\x08\x45xemplar\x12\x44\n\x13\x66iltered_attributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\x13\n\tas_double\x18\x03 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 \x01(\x0c\x42\x07\n\x05valueJ\x04\x08\x01\x10\x02*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n\"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02*^\n\x0e\x44\x61taPointFlags\x12\x1f\n\x1b\x44\x41TA_POINT_FLAGS_DO_NOT_USE\x10\x00\x12+\n\'DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK\x10\x01\x42\x7f\n!io.opentelemetry.proto.metrics.v1B\x0cMetricsProtoP\x01Z)go.opentelemetry.io/proto/otlp/metrics/v1\xaa\x02\x1eOpenTelemetry.Proto.Metrics.V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, "opentelemetry.proto.metrics.v1.metrics_pb2", _globals -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 
'opentelemetry.proto.metrics.v1.metrics_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = ( - b"\n!io.opentelemetry.proto.metrics.v1B\014MetricsProtoP\001Z)go.opentelemetry.io/proto/otlp/metrics/v1\252\002\036OpenTelemetry.Proto.Metrics.V1" - ) - _globals["_AGGREGATIONTEMPORALITY"]._serialized_start = 3546 - _globals["_AGGREGATIONTEMPORALITY"]._serialized_end = 3686 - _globals["_DATAPOINTFLAGS"]._serialized_start = 3688 - _globals["_DATAPOINTFLAGS"]._serialized_end = 3782 - _globals["_METRICSDATA"]._serialized_start = 172 - _globals["_METRICSDATA"]._serialized_end = 260 - _globals["_RESOURCEMETRICS"]._serialized_start = 263 - _globals["_RESOURCEMETRICS"]._serialized_end = 438 - _globals["_SCOPEMETRICS"]._serialized_start = 441 - _globals["_SCOPEMETRICS"]._serialized_end = 600 - _globals["_METRIC"]._serialized_start = 603 - _globals["_METRIC"]._serialized_end = 1064 - _globals["_GAUGE"]._serialized_start = 1066 - _globals["_GAUGE"]._serialized_end = 1143 - _globals["_SUM"]._serialized_start = 1146 - _globals["_SUM"]._serialized_end = 1332 - _globals["_HISTOGRAM"]._serialized_start = 1335 - _globals["_HISTOGRAM"]._serialized_end = 1508 - _globals["_EXPONENTIALHISTOGRAM"]._serialized_start = 1511 - _globals["_EXPONENTIALHISTOGRAM"]._serialized_end = 1706 - _globals["_SUMMARY"]._serialized_start = 1708 - _globals["_SUMMARY"]._serialized_end = 1788 - _globals["_NUMBERDATAPOINT"]._serialized_start = 1791 - _globals["_NUMBERDATAPOINT"]._serialized_end = 2053 - _globals["_HISTOGRAMDATAPOINT"]._serialized_start = 2056 - _globals["_HISTOGRAMDATAPOINT"]._serialized_end = 2414 - _globals["_EXPONENTIALHISTOGRAMDATAPOINT"]._serialized_start = 2417 - _globals["_EXPONENTIALHISTOGRAMDATAPOINT"]._serialized_end = 3019 - _globals["_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS"]._serialized_start = 2947 - _globals["_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS"]._serialized_end = 2995 - _globals["_SUMMARYDATAPOINT"]._serialized_start = 3022 - _globals["_SUMMARYDATAPOINT"]._serialized_end = 3347 - _globals["_SUMMARYDATAPOINT_VALUEATQUANTILE"]._serialized_start = 3291 - _globals["_SUMMARYDATAPOINT_VALUEATQUANTILE"]._serialized_end = 3341 - _globals["_EXEMPLAR"]._serialized_start = 3350 - _globals["_EXEMPLAR"]._serialized_end = 3543 + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n!io.opentelemetry.proto.metrics.v1B\014MetricsProtoP\001Z)go.opentelemetry.io/proto/otlp/metrics/v1\252\002\036OpenTelemetry.Proto.Metrics.V1' + _globals['_AGGREGATIONTEMPORALITY']._serialized_start=3546 + _globals['_AGGREGATIONTEMPORALITY']._serialized_end=3686 + _globals['_DATAPOINTFLAGS']._serialized_start=3688 + _globals['_DATAPOINTFLAGS']._serialized_end=3782 + _globals['_METRICSDATA']._serialized_start=172 + _globals['_METRICSDATA']._serialized_end=260 + _globals['_RESOURCEMETRICS']._serialized_start=263 + _globals['_RESOURCEMETRICS']._serialized_end=438 + _globals['_SCOPEMETRICS']._serialized_start=441 + _globals['_SCOPEMETRICS']._serialized_end=600 + _globals['_METRIC']._serialized_start=603 + _globals['_METRIC']._serialized_end=1064 + _globals['_GAUGE']._serialized_start=1066 + _globals['_GAUGE']._serialized_end=1143 + _globals['_SUM']._serialized_start=1146 + _globals['_SUM']._serialized_end=1332 + _globals['_HISTOGRAM']._serialized_start=1335 + _globals['_HISTOGRAM']._serialized_end=1508 + _globals['_EXPONENTIALHISTOGRAM']._serialized_start=1511 + 
_globals['_EXPONENTIALHISTOGRAM']._serialized_end=1706 + _globals['_SUMMARY']._serialized_start=1708 + _globals['_SUMMARY']._serialized_end=1788 + _globals['_NUMBERDATAPOINT']._serialized_start=1791 + _globals['_NUMBERDATAPOINT']._serialized_end=2053 + _globals['_HISTOGRAMDATAPOINT']._serialized_start=2056 + _globals['_HISTOGRAMDATAPOINT']._serialized_end=2414 + _globals['_EXPONENTIALHISTOGRAMDATAPOINT']._serialized_start=2417 + _globals['_EXPONENTIALHISTOGRAMDATAPOINT']._serialized_end=3019 + _globals['_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS']._serialized_start=2947 + _globals['_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS']._serialized_end=2995 + _globals['_SUMMARYDATAPOINT']._serialized_start=3022 + _globals['_SUMMARYDATAPOINT']._serialized_end=3347 + _globals['_SUMMARYDATAPOINT_VALUEATQUANTILE']._serialized_start=3291 + _globals['_SUMMARYDATAPOINT_VALUEATQUANTILE']._serialized_end=3341 + _globals['_EXEMPLAR']._serialized_start=3350 + _globals['_EXEMPLAR']._serialized_end=3543 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi index b3e3e4d04d9..5b547446933 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ - import builtins import collections.abc import google.protobuf.descriptor @@ -38,12 +37,7 @@ class _AggregationTemporality: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _AggregationTemporalityEnumTypeWrapper( - google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ - _AggregationTemporality.ValueType - ], - builtins.type, -): +class _AggregationTemporalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AggregationTemporality.ValueType], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor AGGREGATION_TEMPORALITY_UNSPECIFIED: _AggregationTemporality.ValueType # 0 """UNSPECIFIED is the default AggregationTemporality, it MUST not be used.""" @@ -111,9 +105,7 @@ class _AggregationTemporalityEnumTypeWrapper( value was reset (e.g. Prometheus). """ -class AggregationTemporality( - _AggregationTemporality, metaclass=_AggregationTemporalityEnumTypeWrapper -): +class AggregationTemporality(_AggregationTemporality, metaclass=_AggregationTemporalityEnumTypeWrapper): """AggregationTemporality defines how a metric aggregator reports aggregated values. It describes how those values relate to the time interval over which they are aggregated. @@ -190,12 +182,7 @@ class _DataPointFlags: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _DataPointFlagsEnumTypeWrapper( - google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ - _DataPointFlags.ValueType - ], - builtins.type, -): +class _DataPointFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataPointFlags.ValueType], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DATA_POINT_FLAGS_DO_NOT_USE: _DataPointFlags.ValueType # 0 """The zero value for the enum. Should not be used for comparisons. 
@@ -207,9 +194,7 @@ class _DataPointFlagsEnumTypeWrapper( for an equivalent to the Prometheus "staleness marker". """ -class DataPointFlags( - _DataPointFlags, metaclass=_DataPointFlagsEnumTypeWrapper -): +class DataPointFlags(_DataPointFlags, metaclass=_DataPointFlagsEnumTypeWrapper): """DataPointFlags is defined as a protobuf 'uint32' type and is to be used as a bit-field representing 32 distinct boolean flags. Each flag defined in this enum is a bit-mask. To test the presence of a single flag in the flags of @@ -235,6 +220,24 @@ class MetricsData(google.protobuf.message.Message): storage, OR can be embedded by other protocols that transfer OTLP metrics data but do not implement the OTLP protocol. + MetricsData + └─── ResourceMetrics + ├── Resource + ├── SchemaURL + └── ScopeMetrics + ├── Scope + ├── SchemaURL + └── Metric + ├── Name + ├── Description + ├── Unit + └── data + ├── Gauge + ├── Sum + ├── Histogram + ├── ExponentialHistogram + └── Summary + The main difference between this message and collector protocol is that in this message there will not be any "control" or "metadata" specific to OTLP protocol. @@ -247,31 +250,19 @@ class MetricsData(google.protobuf.message.Message): RESOURCE_METRICS_FIELD_NUMBER: builtins.int @property - def resource_metrics( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___ResourceMetrics - ]: + def resource_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceMetrics]: """An array of ResourceMetrics. For data coming from a single resource this array will typically contain one element. Intermediary nodes that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - def __init__( self, *, - resource_metrics: ( - collections.abc.Iterable[global___ResourceMetrics] | None - ) = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource_metrics", b"resource_metrics" - ], + resource_metrics: collections.abc.Iterable[global___ResourceMetrics] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource_metrics", b"resource_metrics"]) -> None: ... global___MetricsData = MetricsData @@ -285,23 +276,17 @@ class ResourceMetrics(google.protobuf.message.Message): SCOPE_METRICS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def resource( - self, - ) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: + def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: """The resource for the metrics in this message. If this field is not set then no resource info is known. """ - @property - def scope_metrics( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___ScopeMetrics - ]: + def scope_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeMetrics]: """A list of metrics that originate from a resource.""" schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the resource data - is recorded in. To learn more about Schema URL see + is recorded in. Notably, the last part of the URL path is the version number of the + schema: http[s]://server[:port]/path/. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to the data in the "resource" field. 
It does not apply to the data in the "scope_metrics" field which have their own schema_url field. @@ -309,28 +294,12 @@ class ResourceMetrics(google.protobuf.message.Message): def __init__( self, *, - resource: ( - opentelemetry.proto.resource.v1.resource_pb2.Resource | None - ) = ..., - scope_metrics: ( - collections.abc.Iterable[global___ScopeMetrics] | None - ) = ..., + resource: opentelemetry.proto.resource.v1.resource_pb2.Resource | None = ..., + scope_metrics: collections.abc.Iterable[global___ScopeMetrics] | None = ..., schema_url: builtins.str = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["resource", b"resource"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource", - b"resource", - "schema_url", - b"schema_url", - "scope_metrics", - b"scope_metrics", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["resource", b"resource"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["resource", b"resource", "schema_url", b"schema_url", "scope_metrics", b"scope_metrics"]) -> None: ... global___ResourceMetrics = ResourceMetrics @@ -344,51 +313,30 @@ class ScopeMetrics(google.protobuf.message.Message): METRICS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def scope( - self, - ) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: + def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: """The instrumentation scope information for the metrics in this message. Semantically when InstrumentationScope isn't set, it is equivalent with an empty instrumentation scope name (unknown). """ - @property - def metrics( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Metric - ]: + def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Metric]: """A list of metrics that originate from an instrumentation library.""" schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the metric data - is recorded in. To learn more about Schema URL see + is recorded in. Notably, the last part of the URL path is the version number of the + schema: http[s]://server[:port]/path/. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to all metrics in the "metrics" field. """ def __init__( self, *, - scope: ( - opentelemetry.proto.common.v1.common_pb2.InstrumentationScope - | None - ) = ..., + scope: opentelemetry.proto.common.v1.common_pb2.InstrumentationScope | None = ..., metrics: collections.abc.Iterable[global___Metric] | None = ..., schema_url: builtins.str = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["scope", b"scope"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "metrics", - b"metrics", - "schema_url", - b"schema_url", - "scope", - b"scope", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["scope", b"scope"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["metrics", b"metrics", "schema_url", b"schema_url", "scope", b"scope"]) -> None: ... 
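The tree added to the MetricsData docstring above shows how the generated classes nest. A minimal sketch, not part of this diff, assembling that hierarchy bottom-up (the metric, scope, and resource names are hypothetical):

from opentelemetry.proto.common.v1 import common_pb2
from opentelemetry.proto.metrics.v1 import metrics_pb2
from opentelemetry.proto.resource.v1 import resource_pb2

# Innermost unit: a Metric whose "data" oneof holds a Gauge.
metric = metrics_pb2.Metric(
    name="queue.length",  # hypothetical
    unit="{items}",
    gauge=metrics_pb2.Gauge(
        data_points=[
            metrics_pb2.NumberDataPoint(
                time_unix_nano=1_700_000_000_000_000_000, as_int=7
            )
        ],
    ),
)
# ScopeMetrics groups metrics from one instrumentation scope...
scope = metrics_pb2.ScopeMetrics(
    scope=common_pb2.InstrumentationScope(name="example.scope", version="0.1.0"),  # hypothetical
    metrics=[metric],
)
# ...and ResourceMetrics groups scopes from one resource.
resource = metrics_pb2.ResourceMetrics(
    resource=resource_pb2.Resource(
        attributes=[
            common_pb2.KeyValue(
                key="service.name",
                value=common_pb2.AnyValue(string_value="demo"),  # hypothetical
            )
        ],
    ),
    scope_metrics=[scope],
)
data = metrics_pb2.MetricsData(resource_metrics=[resource])
assert data.resource_metrics[0].scope_metrics[0].metrics[0].name == "queue.length"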
global___ScopeMetrics = ScopeMetrics @@ -399,7 +347,6 @@ class Metric(google.protobuf.message.Message): https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/data-model.md - The data model and relation between entities is shown in the diagram below. Here, "DataPoint" is the term used to refer to any one of the specific data point value types, and "points" is the term used @@ -411,7 +358,7 @@ class Metric(google.protobuf.message.Message): - DataPoint contains timestamps, attributes, and one of the possible value type fields. - Metric + Metric +------------+ |name | |description | @@ -498,7 +445,7 @@ class Metric(google.protobuf.message.Message): """description of the metric, which can be used in documentation.""" unit: builtins.str """unit in which the metric value is reported. Follows the format - described by http://unitsofmeasure.org/ucum.html. + described by https://unitsofmeasure.org/ucum.html. """ @property def gauge(self) -> global___Gauge: ... @@ -511,11 +458,7 @@ class Metric(google.protobuf.message.Message): @property def summary(self) -> global___Summary: ... @property - def metadata( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """Additional metadata attributes that describe the metric. [Optional]. Attributes are non-identifying. Consumers SHOULD NOT need to be aware of these attributes. @@ -524,7 +467,6 @@ class Metric(google.protobuf.message.Message): Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). """ - def __init__( self, *, @@ -536,63 +478,11 @@ class Metric(google.protobuf.message.Message): histogram: global___Histogram | None = ..., exponential_histogram: global___ExponentialHistogram | None = ..., summary: global___Summary | None = ..., - metadata: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + metadata: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "data", - b"data", - "exponential_histogram", - b"exponential_histogram", - "gauge", - b"gauge", - "histogram", - b"histogram", - "sum", - b"sum", - "summary", - b"summary", - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "data", - b"data", - "description", - b"description", - "exponential_histogram", - b"exponential_histogram", - "gauge", - b"gauge", - "histogram", - b"histogram", - "metadata", - b"metadata", - "name", - b"name", - "sum", - b"sum", - "summary", - b"summary", - "unit", - b"unit", - ], - ) -> None: ... - def WhichOneof( - self, oneof_group: typing_extensions.Literal["data", b"data"] - ) -> ( - typing_extensions.Literal[ - "gauge", "sum", "histogram", "exponential_histogram", "summary" - ] - | None - ): ... + def HasField(self, field_name: typing_extensions.Literal["data", b"data", "exponential_histogram", b"exponential_histogram", "gauge", b"gauge", "histogram", b"histogram", "sum", b"sum", "summary", b"summary"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "description", b"description", "exponential_histogram", b"exponential_histogram", "gauge", b"gauge", "histogram", b"histogram", "metadata", b"metadata", "name", b"name", "sum", b"sum", "summary", b"summary", "unit", b"unit"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["data", b"data"]) -> typing_extensions.Literal["gauge", "sum", "histogram", "exponential_histogram", "summary"] | None: ... global___Metric = Metric @@ -613,22 +503,13 @@ class Gauge(google.protobuf.message.Message): DATA_POINTS_FIELD_NUMBER: builtins.int @property - def data_points( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___NumberDataPoint - ]: ... + def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NumberDataPoint]: ... def __init__( self, *, - data_points: ( - collections.abc.Iterable[global___NumberDataPoint] | None - ) = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal["data_points", b"data_points"], + data_points: collections.abc.Iterable[global___NumberDataPoint] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data_points", b"data_points"]) -> None: ... global___Gauge = Gauge @@ -644,11 +525,7 @@ class Sum(google.protobuf.message.Message): AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int IS_MONOTONIC_FIELD_NUMBER: builtins.int @property - def data_points( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___NumberDataPoint - ]: ... + def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NumberDataPoint]: ... aggregation_temporality: global___AggregationTemporality.ValueType """aggregation_temporality describes if the aggregator reports delta changes since last report time, or cumulative changes since a fixed start time. @@ -658,23 +535,11 @@ class Sum(google.protobuf.message.Message): def __init__( self, *, - data_points: ( - collections.abc.Iterable[global___NumberDataPoint] | None - ) = ..., + data_points: collections.abc.Iterable[global___NumberDataPoint] | None = ..., aggregation_temporality: global___AggregationTemporality.ValueType = ..., is_monotonic: builtins.bool = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "aggregation_temporality", - b"aggregation_temporality", - "data_points", - b"data_points", - "is_monotonic", - b"is_monotonic", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality", b"aggregation_temporality", "data_points", b"data_points", "is_monotonic", b"is_monotonic"]) -> None: ... global___Sum = Sum @@ -689,11 +554,7 @@ class Histogram(google.protobuf.message.Message): DATA_POINTS_FIELD_NUMBER: builtins.int AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int @property - def data_points( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___HistogramDataPoint - ]: ... + def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HistogramDataPoint]: ... aggregation_temporality: global___AggregationTemporality.ValueType """aggregation_temporality describes if the aggregator reports delta changes since last report time, or cumulative changes since a fixed start time. 
@@ -701,20 +562,10 @@ class Histogram(google.protobuf.message.Message): def __init__( self, *, - data_points: ( - collections.abc.Iterable[global___HistogramDataPoint] | None - ) = ..., + data_points: collections.abc.Iterable[global___HistogramDataPoint] | None = ..., aggregation_temporality: global___AggregationTemporality.ValueType = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "aggregation_temporality", - b"aggregation_temporality", - "data_points", - b"data_points", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality", b"aggregation_temporality", "data_points", b"data_points"]) -> None: ... global___Histogram = Histogram @@ -729,11 +580,7 @@ class ExponentialHistogram(google.protobuf.message.Message): DATA_POINTS_FIELD_NUMBER: builtins.int AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int @property - def data_points( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___ExponentialHistogramDataPoint - ]: ... + def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ExponentialHistogramDataPoint]: ... aggregation_temporality: global___AggregationTemporality.ValueType """aggregation_temporality describes if the aggregator reports delta changes since last report time, or cumulative changes since a fixed start time. @@ -741,21 +588,10 @@ class ExponentialHistogram(google.protobuf.message.Message): def __init__( self, *, - data_points: ( - collections.abc.Iterable[global___ExponentialHistogramDataPoint] - | None - ) = ..., + data_points: collections.abc.Iterable[global___ExponentialHistogramDataPoint] | None = ..., aggregation_temporality: global___AggregationTemporality.ValueType = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "aggregation_temporality", - b"aggregation_temporality", - "data_points", - b"data_points", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality", b"aggregation_temporality", "data_points", b"data_points"]) -> None: ... global___ExponentialHistogram = ExponentialHistogram @@ -763,32 +599,26 @@ global___ExponentialHistogram = ExponentialHistogram class Summary(google.protobuf.message.Message): """Summary metric data are used to convey quantile summaries, a Prometheus (see: https://prometheus.io/docs/concepts/metric_types/#summary) - and OpenMetrics (see: https://github.com/OpenObservability/OpenMetrics/blob/4dbf6075567ab43296eed941037c12951faafb92/protos/prometheus.proto#L45) + and OpenMetrics (see: https://github.com/prometheus/OpenMetrics/blob/4dbf6075567ab43296eed941037c12951faafb92/protos/prometheus.proto#L45) data type. These data points cannot always be merged in a meaningful way. While they can be useful in some applications, histogram data points are recommended for new applications. + Summary metrics do not have an aggregation temporality field. This is + because the count and sum fields of a SummaryDataPoint are assumed to be + cumulative values. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor DATA_POINTS_FIELD_NUMBER: builtins.int @property - def data_points( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___SummaryDataPoint - ]: ... + def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SummaryDataPoint]: ... 
def __init__( self, *, - data_points: ( - collections.abc.Iterable[global___SummaryDataPoint] | None - ) = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal["data_points", b"data_points"], + data_points: collections.abc.Iterable[global___SummaryDataPoint] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data_points", b"data_points"]) -> None: ... global___Summary = Summary @@ -808,11 +638,7 @@ class NumberDataPoint(google.protobuf.message.Message): EXEMPLARS_FIELD_NUMBER: builtins.int FLAGS_FIELD_NUMBER: builtins.int @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """The set of key/value pairs that uniquely identify the timeseries from where this point belongs. The list may be empty (may contain 0 elements). Attribute keys MUST be unique (it is not allowed to have more than one @@ -834,11 +660,7 @@ class NumberDataPoint(google.protobuf.message.Message): as_double: builtins.float as_int: builtins.int @property - def exemplars( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Exemplar - ]: + def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]: """(Optional) List of exemplars collected from measurements that were used to form the data point """ @@ -849,12 +671,7 @@ class NumberDataPoint(google.protobuf.message.Message): def __init__( self, *, - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., start_time_unix_nano: builtins.int = ..., time_unix_nano: builtins.int = ..., as_double: builtins.float = ..., @@ -862,36 +679,9 @@ class NumberDataPoint(google.protobuf.message.Message): exemplars: collections.abc.Iterable[global___Exemplar] | None = ..., flags: builtins.int = ..., ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "as_double", b"as_double", "as_int", b"as_int", "value", b"value" - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "as_double", - b"as_double", - "as_int", - b"as_int", - "attributes", - b"attributes", - "exemplars", - b"exemplars", - "flags", - b"flags", - "start_time_unix_nano", - b"start_time_unix_nano", - "time_unix_nano", - b"time_unix_nano", - "value", - b"value", - ], - ) -> None: ... - def WhichOneof( - self, oneof_group: typing_extensions.Literal["value", b"value"] - ) -> typing_extensions.Literal["as_double", "as_int"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["as_double", b"as_double", "as_int", b"as_int", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["as_double", b"as_double", "as_int", b"as_int", "attributes", b"attributes", "exemplars", b"exemplars", "flags", b"flags", "start_time_unix_nano", b"start_time_unix_nano", "time_unix_nano", b"time_unix_nano", "value", b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["as_double", "as_int"] | None: ... 
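NumberDataPoint keeps its measurement in a "value" oneof, which is why the stub above exposes WhichOneof alongside as_double and as_int. A minimal sketch, not part of this diff, of how that oneof behaves:

from opentelemetry.proto.metrics.v1 import metrics_pb2

int_point = metrics_pb2.NumberDataPoint(as_int=42)
dbl_point = metrics_pb2.NumberDataPoint(as_double=0.5)

# WhichOneof names the populated member of the oneof, or returns None.
assert int_point.WhichOneof("value") == "as_int"
assert dbl_point.WhichOneof("value") == "as_double"

# A oneof holds a single value: setting as_int clears as_double.
dbl_point.as_int = 7
assert dbl_point.WhichOneof("value") == "as_int"
assert not dbl_point.HasField("as_double")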
global___NumberDataPoint = NumberDataPoint @@ -923,11 +713,7 @@ class HistogramDataPoint(google.protobuf.message.Message): MIN_FIELD_NUMBER: builtins.int MAX_FIELD_NUMBER: builtins.int @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """The set of key/value pairs that uniquely identify the timeseries from where this point belongs. The list may be empty (may contain 0 elements). Attribute keys MUST be unique (it is not allowed to have more than one @@ -962,26 +748,19 @@ class HistogramDataPoint(google.protobuf.message.Message): see: https://github.com/prometheus/OpenMetrics/blob/v1.0.0/specification/OpenMetrics.md#histogram """ @property - def bucket_counts( - self, - ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ - builtins.int - ]: + def bucket_counts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: """bucket_counts is an optional field contains the count values of histogram for each bucket. The sum of the bucket_counts must equal the value in the count field. The number of elements in bucket_counts array must be by one greater than - the number of elements in explicit_bounds array. + the number of elements in explicit_bounds array. The exception to this rule + is when the length of bucket_counts is 0, then the length of explicit_bounds + must also be 0. """ - @property - def explicit_bounds( - self, - ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ - builtins.float - ]: + def explicit_bounds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: """explicit_bounds specifies buckets with explicitly defined bounds for values. The boundaries for bucket at index i are: @@ -995,14 +774,12 @@ class HistogramDataPoint(google.protobuf.message.Message): Histogram buckets are inclusive of their upper boundary, except the last bucket where the boundary is at infinity. This format is intentionally compatible with the OpenMetrics histogram definition. - """ + If bucket_counts length is 0 then explicit_bounds length must also be 0, + otherwise the data point is invalid. + """ @property - def exemplars( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Exemplar - ]: + def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]: """(Optional) List of exemplars collected from measurements that were used to form the data point """ @@ -1017,12 +794,7 @@ class HistogramDataPoint(google.protobuf.message.Message): def __init__( self, *, - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., start_time_unix_nano: builtins.int = ..., time_unix_nano: builtins.int = ..., count: builtins.int = ..., @@ -1034,68 +806,14 @@ class HistogramDataPoint(google.protobuf.message.Message): min: builtins.float | None = ..., max: builtins.float | None = ..., ) -> None: ... 
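The clarified comments above pin down the shape invariant for explicit-bucket histograms: len(bucket_counts) == len(explicit_bounds) + 1, or both are empty. A minimal sketch, not part of this diff, with hypothetical values:

from opentelemetry.proto.metrics.v1 import metrics_pb2

# Three bounds define four buckets: (-inf, 1], (1, 5], (5, 10], (10, +inf).
point = metrics_pb2.HistogramDataPoint(
    count=6,
    sum=23.0,
    explicit_bounds=[1.0, 5.0, 10.0],
    bucket_counts=[1, 2, 2, 1],
)
assert len(point.bucket_counts) == len(point.explicit_bounds) + 1
assert sum(point.bucket_counts) == point.count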
- def HasField( - self, - field_name: typing_extensions.Literal[ - "_max", - b"_max", - "_min", - b"_min", - "_sum", - b"_sum", - "max", - b"max", - "min", - b"min", - "sum", - b"sum", - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "_max", - b"_max", - "_min", - b"_min", - "_sum", - b"_sum", - "attributes", - b"attributes", - "bucket_counts", - b"bucket_counts", - "count", - b"count", - "exemplars", - b"exemplars", - "explicit_bounds", - b"explicit_bounds", - "flags", - b"flags", - "max", - b"max", - "min", - b"min", - "start_time_unix_nano", - b"start_time_unix_nano", - "sum", - b"sum", - "time_unix_nano", - b"time_unix_nano", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_max", b"_max", "_min", b"_min", "_sum", b"_sum", "max", b"max", "min", b"min", "sum", b"sum"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_max", b"_max", "_min", b"_min", "_sum", b"_sum", "attributes", b"attributes", "bucket_counts", b"bucket_counts", "count", b"count", "exemplars", b"exemplars", "explicit_bounds", b"explicit_bounds", "flags", b"flags", "max", b"max", "min", b"min", "start_time_unix_nano", b"start_time_unix_nano", "sum", b"sum", "time_unix_nano", b"time_unix_nano"]) -> None: ... @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_max", b"_max"] - ) -> typing_extensions.Literal["max"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_max", b"_max"]) -> typing_extensions.Literal["max"] | None: ... @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_min", b"_min"] - ) -> typing_extensions.Literal["min"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_min", b"_min"]) -> typing_extensions.Literal["min"] | None: ... @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_sum", b"_sum"] - ) -> typing_extensions.Literal["sum"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_sum", b"_sum"]) -> typing_extensions.Literal["sum"] | None: ... global___HistogramDataPoint = HistogramDataPoint @@ -1125,11 +843,7 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): Note: This uses a varint encoding as a simple form of compression. """ @property - def bucket_counts( - self, - ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ - builtins.int - ]: + def bucket_counts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: """bucket_counts is an array of count values, where bucket_counts[i] carries the count of the bucket at index (offset+i). bucket_counts[i] is the count of values greater than base^(offset+i) and less than or equal to @@ -1140,19 +854,13 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): especially zeros, so uint64 has been selected to ensure varint encoding. """ - def __init__( self, *, offset: builtins.int = ..., bucket_counts: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "bucket_counts", b"bucket_counts", "offset", b"offset" - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["bucket_counts", b"bucket_counts", "offset", b"offset"]) -> None: ... 
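The Buckets comment above defines bucket_counts[i] as the count of values in (base**(offset+i), base**(offset+i+1)]; the relation base = 2**(2**-scale) comes from the OTLP data model's description of the scale field, not from the hunks shown here. A minimal sketch, not part of this diff, computing bucket boundaries under that assumption:

# base = 2**(2**-scale); positive bucket i covers (base**(offset+i), base**(offset+i+1)].
def bucket_bounds(scale: int, offset: int, index: int) -> tuple[float, float]:
    base = 2.0 ** (2.0 ** -scale)
    return base ** (offset + index), base ** (offset + index + 1)

# At scale=0 the base is 2, so buckets fall on successive powers of two.
assert bucket_bounds(scale=0, offset=0, index=0) == (1.0, 2.0)
assert bucket_bounds(scale=0, offset=0, index=2) == (4.0, 8.0)
# Raising the scale narrows the buckets: at scale=1 the base is sqrt(2).
assert bucket_bounds(scale=1, offset=0, index=0) == (1.0, 2 ** 0.5)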
ATTRIBUTES_FIELD_NUMBER: builtins.int START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int @@ -1169,11 +877,7 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): MAX_FIELD_NUMBER: builtins.int ZERO_THRESHOLD_FIELD_NUMBER: builtins.int @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """The set of key/value pairs that uniquely identify the timeseries from where this point belongs. The list may be empty (may contain 0 elements). Attribute keys MUST be unique (it is not allowed to have more than one @@ -1237,7 +941,6 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): @property def positive(self) -> global___ExponentialHistogramDataPoint.Buckets: """positive carries the positive range of exponential bucket counts.""" - @property def negative(self) -> global___ExponentialHistogramDataPoint.Buckets: """negative carries the negative range of exponential bucket counts.""" @@ -1246,11 +949,7 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): for the available flags and their meaning. """ @property - def exemplars( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Exemplar - ]: + def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]: """(Optional) List of exemplars collected from measurements that were used to form the data point """ @@ -1269,12 +968,7 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): def __init__( self, *, - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., start_time_unix_nano: builtins.int = ..., time_unix_nano: builtins.int = ..., count: builtins.int = ..., @@ -1289,85 +983,22 @@ class ExponentialHistogramDataPoint(google.protobuf.message.Message): max: builtins.float | None = ..., zero_threshold: builtins.float = ..., ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "_max", - b"_max", - "_min", - b"_min", - "_sum", - b"_sum", - "max", - b"max", - "min", - b"min", - "negative", - b"negative", - "positive", - b"positive", - "sum", - b"sum", - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "_max", - b"_max", - "_min", - b"_min", - "_sum", - b"_sum", - "attributes", - b"attributes", - "count", - b"count", - "exemplars", - b"exemplars", - "flags", - b"flags", - "max", - b"max", - "min", - b"min", - "negative", - b"negative", - "positive", - b"positive", - "scale", - b"scale", - "start_time_unix_nano", - b"start_time_unix_nano", - "sum", - b"sum", - "time_unix_nano", - b"time_unix_nano", - "zero_count", - b"zero_count", - "zero_threshold", - b"zero_threshold", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_max", b"_max", "_min", b"_min", "_sum", b"_sum", "max", b"max", "min", b"min", "negative", b"negative", "positive", b"positive", "sum", b"sum"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["_max", b"_max", "_min", b"_min", "_sum", b"_sum", "attributes", b"attributes", "count", b"count", "exemplars", b"exemplars", "flags", b"flags", "max", b"max", "min", b"min", "negative", b"negative", "positive", b"positive", "scale", b"scale", "start_time_unix_nano", b"start_time_unix_nano", "sum", b"sum", "time_unix_nano", b"time_unix_nano", "zero_count", b"zero_count", "zero_threshold", b"zero_threshold"]) -> None: ... @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_max", b"_max"] - ) -> typing_extensions.Literal["max"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_max", b"_max"]) -> typing_extensions.Literal["max"] | None: ... @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_min", b"_min"] - ) -> typing_extensions.Literal["min"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_min", b"_min"]) -> typing_extensions.Literal["min"] | None: ... @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_sum", b"_sum"] - ) -> typing_extensions.Literal["sum"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_sum", b"_sum"]) -> typing_extensions.Literal["sum"] | None: ... global___ExponentialHistogramDataPoint = ExponentialHistogramDataPoint @typing_extensions.final class SummaryDataPoint(google.protobuf.message.Message): """SummaryDataPoint is a single data point in a timeseries that describes the - time-varying values of a Summary metric. + time-varying values of a Summary metric. The count and sum fields represent + cumulative values. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1403,12 +1034,7 @@ class SummaryDataPoint(google.protobuf.message.Message): quantile: builtins.float = ..., value: builtins.float = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "quantile", b"quantile", "value", b"value" - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["quantile", b"quantile", "value", b"value"]) -> None: ... ATTRIBUTES_FIELD_NUMBER: builtins.int START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int @@ -1418,11 +1044,7 @@ class SummaryDataPoint(google.protobuf.message.Message): QUANTILE_VALUES_FIELD_NUMBER: builtins.int FLAGS_FIELD_NUMBER: builtins.int @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """The set of key/value pairs that uniquely identify the timeseries from where this point belongs. The list may be empty (may contain 0 elements). Attribute keys MUST be unique (it is not allowed to have more than one @@ -1454,11 +1076,7 @@ class SummaryDataPoint(google.protobuf.message.Message): see: https://github.com/prometheus/OpenMetrics/blob/v1.0.0/specification/OpenMetrics.md#summary """ @property - def quantile_values( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___SummaryDataPoint.ValueAtQuantile - ]: + def quantile_values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SummaryDataPoint.ValueAtQuantile]: """(Optional) list of values at different quantiles of the distribution calculated from the current snapshot. 
The quantiles must be strictly increasing. """ @@ -1469,41 +1087,15 @@ class SummaryDataPoint(google.protobuf.message.Message): def __init__( self, *, - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., start_time_unix_nano: builtins.int = ..., time_unix_nano: builtins.int = ..., count: builtins.int = ..., sum: builtins.float = ..., - quantile_values: ( - collections.abc.Iterable[global___SummaryDataPoint.ValueAtQuantile] - | None - ) = ..., + quantile_values: collections.abc.Iterable[global___SummaryDataPoint.ValueAtQuantile] | None = ..., flags: builtins.int = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "attributes", - b"attributes", - "count", - b"count", - "flags", - b"flags", - "quantile_values", - b"quantile_values", - "start_time_unix_nano", - b"start_time_unix_nano", - "sum", - b"sum", - "time_unix_nano", - b"time_unix_nano", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "count", b"count", "flags", b"flags", "quantile_values", b"quantile_values", "start_time_unix_nano", b"start_time_unix_nano", "sum", b"sum", "time_unix_nano", b"time_unix_nano"]) -> None: ... global___SummaryDataPoint = SummaryDataPoint @@ -1524,11 +1116,7 @@ class Exemplar(google.protobuf.message.Message): SPAN_ID_FIELD_NUMBER: builtins.int TRACE_ID_FIELD_NUMBER: builtins.int @property - def filtered_attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def filtered_attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """The set of key/value pairs that were filtered out by the aggregator, but recorded alongside the original measurement. Only key/value pairs that were filtered out by the aggregator should be included @@ -1554,45 +1142,15 @@ class Exemplar(google.protobuf.message.Message): def __init__( self, *, - filtered_attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + filtered_attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., time_unix_nano: builtins.int = ..., as_double: builtins.float = ..., as_int: builtins.int = ..., span_id: builtins.bytes = ..., trace_id: builtins.bytes = ..., ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "as_double", b"as_double", "as_int", b"as_int", "value", b"value" - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "as_double", - b"as_double", - "as_int", - b"as_int", - "filtered_attributes", - b"filtered_attributes", - "span_id", - b"span_id", - "time_unix_nano", - b"time_unix_nano", - "trace_id", - b"trace_id", - "value", - b"value", - ], - ) -> None: ... - def WhichOneof( - self, oneof_group: typing_extensions.Literal["value", b"value"] - ) -> typing_extensions.Literal["as_double", "as_int"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["as_double", b"as_double", "as_int", b"as_int", "value", b"value"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["as_double", b"as_double", "as_int", b"as_int", "filtered_attributes", b"filtered_attributes", "span_id", b"span_id", "time_unix_nano", b"time_unix_nano", "trace_id", b"trace_id", "value", b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["as_double", "as_int"] | None: ... global___Exemplar = Exemplar diff --git a/opentelemetry-proto/src/opentelemetry/proto/profiles/v1development/profiles_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/profiles/v1development/profiles_pb2.py new file mode 100644 index 00000000000..70e6b239a1f --- /dev/null +++ b/opentelemetry-proto/src/opentelemetry/proto/profiles/v1development/profiles_pb2.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: opentelemetry/proto/profiles/v1development/profiles.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 +from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n9opentelemetry/proto/profiles/v1development/profiles.proto\x12*opentelemetry.proto.profiles.v1development\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"\xee\x03\n\x12ProfilesDictionary\x12J\n\rmapping_table\x18\x01 \x03(\x0b\x32\x33.opentelemetry.proto.profiles.v1development.Mapping\x12L\n\x0elocation_table\x18\x02 \x03(\x0b\x32\x34.opentelemetry.proto.profiles.v1development.Location\x12L\n\x0e\x66unction_table\x18\x03 \x03(\x0b\x32\x34.opentelemetry.proto.profiles.v1development.Function\x12\x44\n\nlink_table\x18\x04 \x03(\x0b\x32\x30.opentelemetry.proto.profiles.v1development.Link\x12\x14\n\x0cstring_table\x18\x05 \x03(\t\x12@\n\x0f\x61ttribute_table\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12R\n\x0f\x61ttribute_units\x18\x07 \x03(\x0b\x32\x39.opentelemetry.proto.profiles.v1development.AttributeUnit\"\xbb\x01\n\x0cProfilesData\x12W\n\x11resource_profiles\x18\x01 \x03(\x0b\x32<.opentelemetry.proto.profiles.v1development.ResourceProfiles\x12R\n\ndictionary\x18\x02 \x01(\x0b\x32>.opentelemetry.proto.profiles.v1development.ProfilesDictionary\"\xbe\x01\n\x10ResourceProfiles\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12Q\n\x0escope_profiles\x18\x02 \x03(\x0b\x32\x39.opentelemetry.proto.profiles.v1development.ScopeProfiles\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\xae\x01\n\rScopeProfiles\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x45\n\x08profiles\x18\x02 \x03(\x0b\x32\x33.opentelemetry.proto.profiles.v1development.Profile\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x86\x04\n\x07Profile\x12J\n\x0bsample_type\x18\x01 \x03(\x0b\x32\x35.opentelemetry.proto.profiles.v1development.ValueType\x12\x42\n\x06sample\x18\x02 
\x03(\x0b\x32\x32.opentelemetry.proto.profiles.v1development.Sample\x12\x18\n\x10location_indices\x18\x03 \x03(\x05\x12\x12\n\ntime_nanos\x18\x04 \x01(\x03\x12\x16\n\x0e\x64uration_nanos\x18\x05 \x01(\x03\x12J\n\x0bperiod_type\x18\x06 \x01(\x0b\x32\x35.opentelemetry.proto.profiles.v1development.ValueType\x12\x0e\n\x06period\x18\x07 \x01(\x03\x12\x1a\n\x12\x63omment_strindices\x18\x08 \x03(\x05\x12!\n\x19\x64\x65\x66\x61ult_sample_type_index\x18\t \x01(\x05\x12\x12\n\nprofile_id\x18\n \x01(\x0c\x12 \n\x18\x64ropped_attributes_count\x18\x0b \x01(\r\x12\x1f\n\x17original_payload_format\x18\x0c \x01(\t\x12\x18\n\x10original_payload\x18\r \x01(\x0c\x12\x19\n\x11\x61ttribute_indices\x18\x0e \x03(\x05\"F\n\rAttributeUnit\x12\x1e\n\x16\x61ttribute_key_strindex\x18\x01 \x01(\x05\x12\x15\n\runit_strindex\x18\x02 \x01(\x05\")\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\"\x9e\x01\n\tValueType\x12\x15\n\rtype_strindex\x18\x01 \x01(\x05\x12\x15\n\runit_strindex\x18\x02 \x01(\x05\x12\x63\n\x17\x61ggregation_temporality\x18\x03 \x01(\x0e\x32\x42.opentelemetry.proto.profiles.v1development.AggregationTemporality\"\xb1\x01\n\x06Sample\x12\x1d\n\x15locations_start_index\x18\x01 \x01(\x05\x12\x18\n\x10locations_length\x18\x02 \x01(\x05\x12\r\n\x05value\x18\x03 \x03(\x03\x12\x19\n\x11\x61ttribute_indices\x18\x04 \x03(\x05\x12\x17\n\nlink_index\x18\x05 \x01(\x05H\x00\x88\x01\x01\x12\x1c\n\x14timestamps_unix_nano\x18\x06 \x03(\x04\x42\r\n\x0b_link_index\"\xe3\x01\n\x07Mapping\x12\x14\n\x0cmemory_start\x18\x01 \x01(\x04\x12\x14\n\x0cmemory_limit\x18\x02 \x01(\x04\x12\x13\n\x0b\x66ile_offset\x18\x03 \x01(\x04\x12\x19\n\x11\x66ilename_strindex\x18\x04 \x01(\x05\x12\x19\n\x11\x61ttribute_indices\x18\x05 \x03(\x05\x12\x15\n\rhas_functions\x18\x06 \x01(\x08\x12\x15\n\rhas_filenames\x18\x07 \x01(\x08\x12\x18\n\x10has_line_numbers\x18\x08 \x01(\x08\x12\x19\n\x11has_inline_frames\x18\t \x01(\x08\"\xb7\x01\n\x08Location\x12\x1a\n\rmapping_index\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x0f\n\x07\x61\x64\x64ress\x18\x02 \x01(\x04\x12>\n\x04line\x18\x03 \x03(\x0b\x32\x30.opentelemetry.proto.profiles.v1development.Line\x12\x11\n\tis_folded\x18\x04 \x01(\x08\x12\x19\n\x11\x61ttribute_indices\x18\x05 \x03(\x05\x42\x10\n\x0e_mapping_index\"<\n\x04Line\x12\x16\n\x0e\x66unction_index\x18\x01 \x01(\x05\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x0e\n\x06\x63olumn\x18\x03 \x01(\x03\"n\n\x08\x46unction\x12\x15\n\rname_strindex\x18\x01 \x01(\x05\x12\x1c\n\x14system_name_strindex\x18\x02 \x01(\x05\x12\x19\n\x11\x66ilename_strindex\x18\x03 \x01(\x05\x12\x12\n\nstart_line\x18\x04 \x01(\x03*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n\"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02\x42\xa4\x01\n-io.opentelemetry.proto.profiles.v1developmentB\rProfilesProtoP\x01Z5go.opentelemetry.io/proto/otlp/profiles/v1development\xaa\x02*OpenTelemetry.Proto.Profiles.V1Developmentb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.profiles.v1development.profiles_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n-io.opentelemetry.proto.profiles.v1developmentB\rProfilesProtoP\001Z5go.opentelemetry.io/proto/otlp/profiles/v1development\252\002*OpenTelemetry.Proto.Profiles.V1Development' + 
_globals['_AGGREGATIONTEMPORALITY']._serialized_start=2822 + _globals['_AGGREGATIONTEMPORALITY']._serialized_end=2962 + _globals['_PROFILESDICTIONARY']._serialized_start=198 + _globals['_PROFILESDICTIONARY']._serialized_end=692 + _globals['_PROFILESDATA']._serialized_start=695 + _globals['_PROFILESDATA']._serialized_end=882 + _globals['_RESOURCEPROFILES']._serialized_start=885 + _globals['_RESOURCEPROFILES']._serialized_end=1075 + _globals['_SCOPEPROFILES']._serialized_start=1078 + _globals['_SCOPEPROFILES']._serialized_end=1252 + _globals['_PROFILE']._serialized_start=1255 + _globals['_PROFILE']._serialized_end=1773 + _globals['_ATTRIBUTEUNIT']._serialized_start=1775 + _globals['_ATTRIBUTEUNIT']._serialized_end=1845 + _globals['_LINK']._serialized_start=1847 + _globals['_LINK']._serialized_end=1888 + _globals['_VALUETYPE']._serialized_start=1891 + _globals['_VALUETYPE']._serialized_end=2049 + _globals['_SAMPLE']._serialized_start=2052 + _globals['_SAMPLE']._serialized_end=2229 + _globals['_MAPPING']._serialized_start=2232 + _globals['_MAPPING']._serialized_end=2459 + _globals['_LOCATION']._serialized_start=2462 + _globals['_LOCATION']._serialized_end=2645 + _globals['_LINE']._serialized_start=2647 + _globals['_LINE']._serialized_end=2707 + _globals['_FUNCTION']._serialized_start=2709 + _globals['_FUNCTION']._serialized_end=2819 +# @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/profiles/v1development/profiles_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/profiles/v1development/profiles_pb2.pyi new file mode 100644 index 00000000000..91cc416c262 --- /dev/null +++ b/opentelemetry-proto/src/opentelemetry/proto/profiles/v1development/profiles_pb2.pyi @@ -0,0 +1,865 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2023, OpenTelemetry Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +This file includes work covered by the following copyright and permission notices: + +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import opentelemetry.proto.common.v1.common_pb2 +import opentelemetry.proto.resource.v1.resource_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _AggregationTemporality: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _AggregationTemporalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AggregationTemporality.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + AGGREGATION_TEMPORALITY_UNSPECIFIED: _AggregationTemporality.ValueType # 0 + """UNSPECIFIED is the default AggregationTemporality, it MUST not be used.""" + AGGREGATION_TEMPORALITY_DELTA: _AggregationTemporality.ValueType # 1 + """* DELTA is an AggregationTemporality for a profiler which reports + changes since last report time. Successive metrics contain aggregation of + values from continuous and non-overlapping intervals. + + The values for a DELTA metric are based only on the time interval + associated with one measurement cycle. There is no dependency on + previous measurements like is the case for CUMULATIVE metrics. + + For example, consider a system measuring the number of requests that + it receives and reports the sum of these requests every second as a + DELTA metric: + + 1. The system starts receiving at time=t_0. + 2. A request is received, the system measures 1 request. + 3. A request is received, the system measures 1 request. + 4. A request is received, the system measures 1 request. + 5. The 1 second collection cycle ends. A metric is exported for the + number of requests received over the interval of time t_0 to + t_0+1 with a value of 3. + 6. A request is received, the system measures 1 request. + 7. A request is received, the system measures 1 request. + 8. The 1 second collection cycle ends. A metric is exported for the + number of requests received over the interval of time t_0+1 to + t_0+2 with a value of 2. + """ + AGGREGATION_TEMPORALITY_CUMULATIVE: _AggregationTemporality.ValueType # 2 + """* CUMULATIVE is an AggregationTemporality for a profiler which + reports changes since a fixed start time. This means that current values + of a CUMULATIVE metric depend on all previous measurements since the + start time. Because of this, the sender is required to retain this state + in some form. If this state is lost or invalidated, the CUMULATIVE metric + values MUST be reset and a new fixed start time following the last + reported measurement time sent MUST be used. + + For example, consider a system measuring the number of requests that + it receives and reports the sum of these requests every second as a + CUMULATIVE metric: + + 1. The system starts receiving at time=t_0. + 2. A request is received, the system measures 1 request. + 3. A request is received, the system measures 1 request. + 4. A request is received, the system measures 1 request. + 5. The 1 second collection cycle ends. A metric is exported for the + number of requests received over the interval of time t_0 to + t_0+1 with a value of 3. + 6. A request is received, the system measures 1 request. + 7. A request is received, the system measures 1 request. + 8. 
The 1 second collection cycle ends. A metric is exported for the + number of requests received over the interval of time t_0 to + t_0+2 with a value of 5. + 9. The system experiences a fault and loses state. + 10. The system recovers and resumes receiving at time=t_1. + 11. A request is received, the system measures 1 request. + 12. The 1 second collection cycle ends. A metric is exported for the + number of requests received over the interval of time t_1 to + t_1+1 with a value of 1. + + Note: Even though, when reporting changes since last report time, using + CUMULATIVE is valid, it is not recommended. + """ + +class AggregationTemporality(_AggregationTemporality, metaclass=_AggregationTemporalityEnumTypeWrapper): + """Specifies the method of aggregating metric values, either DELTA (change since last report) + or CUMULATIVE (total since a fixed start time). + """ + +AGGREGATION_TEMPORALITY_UNSPECIFIED: AggregationTemporality.ValueType # 0 +"""UNSPECIFIED is the default AggregationTemporality, it MUST not be used.""" +AGGREGATION_TEMPORALITY_DELTA: AggregationTemporality.ValueType # 1 +"""* DELTA is an AggregationTemporality for a profiler which reports +changes since last report time. Successive metrics contain aggregation of +values from continuous and non-overlapping intervals. + +The values for a DELTA metric are based only on the time interval +associated with one measurement cycle. There is no dependency on +previous measurements like is the case for CUMULATIVE metrics. + +For example, consider a system measuring the number of requests that +it receives and reports the sum of these requests every second as a +DELTA metric: + +1. The system starts receiving at time=t_0. +2. A request is received, the system measures 1 request. +3. A request is received, the system measures 1 request. +4. A request is received, the system measures 1 request. +5. The 1 second collection cycle ends. A metric is exported for the +number of requests received over the interval of time t_0 to +t_0+1 with a value of 3. +6. A request is received, the system measures 1 request. +7. A request is received, the system measures 1 request. +8. The 1 second collection cycle ends. A metric is exported for the +number of requests received over the interval of time t_0+1 to +t_0+2 with a value of 2. +""" +AGGREGATION_TEMPORALITY_CUMULATIVE: AggregationTemporality.ValueType # 2 +"""* CUMULATIVE is an AggregationTemporality for a profiler which +reports changes since a fixed start time. This means that current values +of a CUMULATIVE metric depend on all previous measurements since the +start time. Because of this, the sender is required to retain this state +in some form. If this state is lost or invalidated, the CUMULATIVE metric +values MUST be reset and a new fixed start time following the last +reported measurement time sent MUST be used. + +For example, consider a system measuring the number of requests that +it receives and reports the sum of these requests every second as a +CUMULATIVE metric: + +1. The system starts receiving at time=t_0. +2. A request is received, the system measures 1 request. +3. A request is received, the system measures 1 request. +4. A request is received, the system measures 1 request. +5. The 1 second collection cycle ends. A metric is exported for the +number of requests received over the interval of time t_0 to +t_0+1 with a value of 3. +6. A request is received, the system measures 1 request. +7. A request is received, the system measures 1 request. +8. 
The 1 second collection cycle ends. A metric is exported for the +number of requests received over the interval of time t_0 to +t_0+2 with a value of 5. +9. The system experiences a fault and loses state. +10. The system recovers and resumes receiving at time=t_1. +11. A request is received, the system measures 1 request. +12. The 1 second collection cycle ends. A metric is exported for the +number of requests received over the interval of time t_1 to +t_1+1 with a value of 1. + +Note: Even though, when reporting changes since last report time, using +CUMULATIVE is valid, it is not recommended. +""" +global___AggregationTemporality = AggregationTemporality + +@typing_extensions.final +class ProfilesDictionary(google.protobuf.message.Message): + """ Relationships Diagram + + ┌──────────────────┐ LEGEND + │ ProfilesData │ ─────┐ + └──────────────────┘ │ ─────▶ embedded + │ │ + │ 1-n │ ─────▷ referenced by index + ▼ ▼ + ┌──────────────────┐ ┌────────────────────┐ + │ ResourceProfiles │ │ ProfilesDictionary │ + └──────────────────┘ └────────────────────┘ + │ + │ 1-n + ▼ + ┌──────────────────┐ + │ ScopeProfiles │ + └──────────────────┘ + │ + │ 1-1 + ▼ + ┌──────────────────┐ + │ Profile │ + └──────────────────┘ + │ n-1 + │ 1-n ┌───────────────────────────────────────┐ + ▼ │ ▽ + ┌──────────────────┐ 1-n ┌──────────────┐ ┌──────────┐ + │ Sample │ ──────▷ │ KeyValue │ │ Link │ + └──────────────────┘ └──────────────┘ └──────────┘ + │ 1-n △ △ + │ 1-n ┌─────────────────┘ │ 1-n + ▽ │ │ + ┌──────────────────┐ n-1 ┌──────────────┐ + │ Location │ ──────▷ │ Mapping │ + └──────────────────┘ └──────────────┘ + │ + │ 1-n + ▼ + ┌──────────────────┐ + │ Line │ + └──────────────────┘ + │ + │ 1-1 + ▽ + ┌──────────────────┐ + │ Function │ + └──────────────────┘ + + ProfilesDictionary represents the profiles data shared across the + entire message being sent. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MAPPING_TABLE_FIELD_NUMBER: builtins.int + LOCATION_TABLE_FIELD_NUMBER: builtins.int + FUNCTION_TABLE_FIELD_NUMBER: builtins.int + LINK_TABLE_FIELD_NUMBER: builtins.int + STRING_TABLE_FIELD_NUMBER: builtins.int + ATTRIBUTE_TABLE_FIELD_NUMBER: builtins.int + ATTRIBUTE_UNITS_FIELD_NUMBER: builtins.int + @property + def mapping_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Mapping]: + """Mappings from address ranges to the image/binary/library mapped + into that address range referenced by locations via Location.mapping_index. + """ + @property + def location_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Location]: + """Locations referenced by samples via Profile.location_indices.""" + @property + def function_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Function]: + """Functions referenced by locations via Line.function_index.""" + @property + def link_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Link]: + """Links referenced by samples via Sample.link_index.""" + @property + def string_table(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """A common table for strings referenced by various messages. + string_table[0] must always be "". 
+ """ + @property + def attribute_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: + """A common table for attributes referenced by various messages.""" + @property + def attribute_units(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AttributeUnit]: + """Represents a mapping between Attribute Keys and Units.""" + def __init__( + self, + *, + mapping_table: collections.abc.Iterable[global___Mapping] | None = ..., + location_table: collections.abc.Iterable[global___Location] | None = ..., + function_table: collections.abc.Iterable[global___Function] | None = ..., + link_table: collections.abc.Iterable[global___Link] | None = ..., + string_table: collections.abc.Iterable[builtins.str] | None = ..., + attribute_table: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., + attribute_units: collections.abc.Iterable[global___AttributeUnit] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attribute_table", b"attribute_table", "attribute_units", b"attribute_units", "function_table", b"function_table", "link_table", b"link_table", "location_table", b"location_table", "mapping_table", b"mapping_table", "string_table", b"string_table"]) -> None: ... + +global___ProfilesDictionary = ProfilesDictionary + +@typing_extensions.final +class ProfilesData(google.protobuf.message.Message): + """ProfilesData represents the profiles data that can be stored in persistent storage, + OR can be embedded by other protocols that transfer OTLP profiles data but do not + implement the OTLP protocol. + + The main difference between this message and collector protocol is that + in this message there will not be any "control" or "metadata" specific to + OTLP protocol. + + When new fields are added into this message, the OTLP request MUST be updated + as well. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESOURCE_PROFILES_FIELD_NUMBER: builtins.int + DICTIONARY_FIELD_NUMBER: builtins.int + @property + def resource_profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceProfiles]: + """An array of ResourceProfiles. + For data coming from an SDK profiler, this array will typically contain one + element. Host-level profilers will usually create one ResourceProfile per + container, as well as one additional ResourceProfile grouping all samples + from non-containerized processes. + Other resource groupings are possible as well and clarified via + Resource.attributes and semantic conventions. + """ + @property + def dictionary(self) -> global___ProfilesDictionary: + """One instance of ProfilesDictionary""" + def __init__( + self, + *, + resource_profiles: collections.abc.Iterable[global___ResourceProfiles] | None = ..., + dictionary: global___ProfilesDictionary | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary", "resource_profiles", b"resource_profiles"]) -> None: ... 
+ +global___ProfilesData = ProfilesData + +@typing_extensions.final +class ResourceProfiles(google.protobuf.message.Message): + """A collection of ScopeProfiles from a Resource.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESOURCE_FIELD_NUMBER: builtins.int + SCOPE_PROFILES_FIELD_NUMBER: builtins.int + SCHEMA_URL_FIELD_NUMBER: builtins.int + @property + def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: + """The resource for the profiles in this message. + If this field is not set then no resource info is known. + """ + @property + def scope_profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeProfiles]: + """A list of ScopeProfiles that originate from a resource.""" + schema_url: builtins.str + """The Schema URL, if known. This is the identifier of the Schema that the resource data + is recorded in. Notably, the last part of the URL path is the version number of the + schema: http[s]://server[:port]/path/. To learn more about Schema URL see + https://opentelemetry.io/docs/specs/otel/schemas/#schema-url + This schema_url applies to the data in the "resource" field. It does not apply + to the data in the "scope_profiles" field which have their own schema_url field. + """ + def __init__( + self, + *, + resource: opentelemetry.proto.resource.v1.resource_pb2.Resource | None = ..., + scope_profiles: collections.abc.Iterable[global___ScopeProfiles] | None = ..., + schema_url: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["resource", b"resource"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["resource", b"resource", "schema_url", b"schema_url", "scope_profiles", b"scope_profiles"]) -> None: ... + +global___ResourceProfiles = ResourceProfiles + +@typing_extensions.final +class ScopeProfiles(google.protobuf.message.Message): + """A collection of Profiles produced by an InstrumentationScope.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SCOPE_FIELD_NUMBER: builtins.int + PROFILES_FIELD_NUMBER: builtins.int + SCHEMA_URL_FIELD_NUMBER: builtins.int + @property + def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: + """The instrumentation scope information for the profiles in this message. + Semantically when InstrumentationScope isn't set, it is equivalent with + an empty instrumentation scope name (unknown). + """ + @property + def profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Profile]: + """A list of Profiles that originate from an instrumentation scope.""" + schema_url: builtins.str + """The Schema URL, if known. This is the identifier of the Schema that the profile data + is recorded in. Notably, the last part of the URL path is the version number of the + schema: http[s]://server[:port]/path/. To learn more about Schema URL see + https://opentelemetry.io/docs/specs/otel/schemas/#schema-url + This schema_url applies to all profiles in the "profiles" field. + """ + def __init__( + self, + *, + scope: opentelemetry.proto.common.v1.common_pb2.InstrumentationScope | None = ..., + profiles: collections.abc.Iterable[global___Profile] | None = ..., + schema_url: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["scope", b"scope"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["profiles", b"profiles", "schema_url", b"schema_url", "scope", b"scope"]) -> None: ... + +global___ScopeProfiles = ScopeProfiles + +@typing_extensions.final +class Profile(google.protobuf.message.Message): + """Profile is a common stacktrace profile format. + + Measurements represented with this format should follow the + following conventions: + + - Consumers should treat unset optional fields as if they had been + set with their default value. + + - When possible, measurements should be stored in "unsampled" form + that is most useful to humans. There should be enough + information present to determine the original sampled values. + + - On-disk, the serialized proto must be gzip-compressed. + + - The profile is represented as a set of samples, where each sample + references a sequence of locations, and where each location belongs + to a mapping. + - There is a N->1 relationship from sample.location_id entries to + locations. For every sample.location_id entry there must be a + unique Location with that index. + - There is an optional N->1 relationship from locations to + mappings. For every nonzero Location.mapping_id there must be a + unique Mapping with that index. + + Represents a complete profile, including sample types, samples, + mappings to binaries, locations, functions, string table, and additional metadata. + It modifies and annotates pprof Profile with OpenTelemetry specific fields. + + Note that whilst fields in this message retain the name and field id from pprof in most cases + for ease of understanding data migration, it is not intended that pprof:Profile and + OpenTelemetry:Profile encoding be wire compatible. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SAMPLE_TYPE_FIELD_NUMBER: builtins.int + SAMPLE_FIELD_NUMBER: builtins.int + LOCATION_INDICES_FIELD_NUMBER: builtins.int + TIME_NANOS_FIELD_NUMBER: builtins.int + DURATION_NANOS_FIELD_NUMBER: builtins.int + PERIOD_TYPE_FIELD_NUMBER: builtins.int + PERIOD_FIELD_NUMBER: builtins.int + COMMENT_STRINDICES_FIELD_NUMBER: builtins.int + DEFAULT_SAMPLE_TYPE_INDEX_FIELD_NUMBER: builtins.int + PROFILE_ID_FIELD_NUMBER: builtins.int + DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int + ORIGINAL_PAYLOAD_FORMAT_FIELD_NUMBER: builtins.int + ORIGINAL_PAYLOAD_FIELD_NUMBER: builtins.int + ATTRIBUTE_INDICES_FIELD_NUMBER: builtins.int + @property + def sample_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ValueType]: + """A description of the samples associated with each Sample.value. + For a cpu profile this might be: + [["cpu","nanoseconds"]] or [["wall","seconds"]] or [["syscall","count"]] + For a heap profile, this might be: + [["allocations","count"], ["space","bytes"]], + If one of the values represents the number of events represented + by the sample, by convention it should be at index 0 and use + sample_type.unit == "count". + """ + @property + def sample(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Sample]: + """The set of samples recorded in this profile.""" + @property + def location_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """References to locations in ProfilesDictionary.location_table.""" + time_nanos: builtins.int + """The following fields 4-14 are informational, do not affect + interpretation of results. + + Time of collection (UTC) represented as nanoseconds past the epoch. 
+ """ + duration_nanos: builtins.int + """Duration of the profile, if a duration makes sense.""" + @property + def period_type(self) -> global___ValueType: + """The kind of events between sampled occurrences. + e.g [ "cpu","cycles" ] or [ "heap","bytes" ] + """ + period: builtins.int + """The number of events between sampled occurrences.""" + @property + def comment_strindices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Free-form text associated with the profile. The text is displayed as is + to the user by the tools that read profiles (e.g. by pprof). This field + should not be used to store any machine-readable information, it is only + for human-friendly content. The profile must stay functional if this field + is cleaned. + Indices into ProfilesDictionary.string_table. + """ + default_sample_type_index: builtins.int + """Index into the sample_type array to the default sample type.""" + profile_id: builtins.bytes + """A globally unique identifier for a profile. The ID is a 16-byte array. An ID with + all zeroes is considered invalid. + + This field is required. + """ + dropped_attributes_count: builtins.int + """dropped_attributes_count is the number of attributes that were discarded. Attributes + can be discarded because their keys are too long or because there are too many + attributes. If this value is 0, then no attributes were dropped. + """ + original_payload_format: builtins.str + """Specifies format of the original payload. Common values are defined in semantic conventions. [required if original_payload is present]""" + original_payload: builtins.bytes + """Original payload can be stored in this field. This can be useful for users who want to get the original payload. + Formats such as JFR are highly extensible and can contain more information than what is defined in this spec. + Inclusion of original payload should be configurable by the user. Default behavior should be to not include the original payload. + If the original payload is in pprof format, it SHOULD not be included in this field. + The field is optional, however if it is present then equivalent converted data should be populated in other fields + of this message as far as is practicable. + """ + @property + def attribute_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """References to attributes in attribute_table. [optional] + It is a collection of key/value pairs. Note, global attributes + like server name can be set using the resource API. Examples of attributes: + + "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36" + "/http/server_latency": 300 + "abc.com/myattribute": true + "abc.com/score": 10.239 + + The OpenTelemetry API specification further restricts the allowed value types: + https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/common/README.md#attribute + Attribute keys MUST be unique (it is not allowed to have more than one + attribute with the same key). 
+ """ + def __init__( + self, + *, + sample_type: collections.abc.Iterable[global___ValueType] | None = ..., + sample: collections.abc.Iterable[global___Sample] | None = ..., + location_indices: collections.abc.Iterable[builtins.int] | None = ..., + time_nanos: builtins.int = ..., + duration_nanos: builtins.int = ..., + period_type: global___ValueType | None = ..., + period: builtins.int = ..., + comment_strindices: collections.abc.Iterable[builtins.int] | None = ..., + default_sample_type_index: builtins.int = ..., + profile_id: builtins.bytes = ..., + dropped_attributes_count: builtins.int = ..., + original_payload_format: builtins.str = ..., + original_payload: builtins.bytes = ..., + attribute_indices: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["period_type", b"period_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["attribute_indices", b"attribute_indices", "comment_strindices", b"comment_strindices", "default_sample_type_index", b"default_sample_type_index", "dropped_attributes_count", b"dropped_attributes_count", "duration_nanos", b"duration_nanos", "location_indices", b"location_indices", "original_payload", b"original_payload", "original_payload_format", b"original_payload_format", "period", b"period", "period_type", b"period_type", "profile_id", b"profile_id", "sample", b"sample", "sample_type", b"sample_type", "time_nanos", b"time_nanos"]) -> None: ... + +global___Profile = Profile + +@typing_extensions.final +class AttributeUnit(google.protobuf.message.Message): + """Represents a mapping between Attribute Keys and Units.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ATTRIBUTE_KEY_STRINDEX_FIELD_NUMBER: builtins.int + UNIT_STRINDEX_FIELD_NUMBER: builtins.int + attribute_key_strindex: builtins.int + """Index into string table.""" + unit_strindex: builtins.int + """Index into string table.""" + def __init__( + self, + *, + attribute_key_strindex: builtins.int = ..., + unit_strindex: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attribute_key_strindex", b"attribute_key_strindex", "unit_strindex", b"unit_strindex"]) -> None: ... + +global___AttributeUnit = AttributeUnit + +@typing_extensions.final +class Link(google.protobuf.message.Message): + """A pointer from a profile Sample to a trace Span. + Connects a profile sample to a trace span, identified by unique trace and span IDs. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TRACE_ID_FIELD_NUMBER: builtins.int + SPAN_ID_FIELD_NUMBER: builtins.int + trace_id: builtins.bytes + """A unique identifier of a trace that this linked span is part of. The ID is a + 16-byte array. + """ + span_id: builtins.bytes + """A unique identifier for the linked span. The ID is an 8-byte array.""" + def __init__( + self, + *, + trace_id: builtins.bytes = ..., + span_id: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["span_id", b"span_id", "trace_id", b"trace_id"]) -> None: ... 
+ +global___Link = Link + +@typing_extensions.final +class ValueType(google.protobuf.message.Message): + """ValueType describes the type and units of a value, with an optional aggregation temporality.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_STRINDEX_FIELD_NUMBER: builtins.int + UNIT_STRINDEX_FIELD_NUMBER: builtins.int + AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int + type_strindex: builtins.int + """Index into ProfilesDictionary.string_table.""" + unit_strindex: builtins.int + """Index into ProfilesDictionary.string_table.""" + aggregation_temporality: global___AggregationTemporality.ValueType + def __init__( + self, + *, + type_strindex: builtins.int = ..., + unit_strindex: builtins.int = ..., + aggregation_temporality: global___AggregationTemporality.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality", b"aggregation_temporality", "type_strindex", b"type_strindex", "unit_strindex", b"unit_strindex"]) -> None: ... + +global___ValueType = ValueType + +@typing_extensions.final +class Sample(google.protobuf.message.Message): + """Each Sample records values encountered in some program + context. The program context is typically a stack trace, perhaps + augmented with auxiliary information like the thread-id, some + indicator of a higher level request being handled etc. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LOCATIONS_START_INDEX_FIELD_NUMBER: builtins.int + LOCATIONS_LENGTH_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + ATTRIBUTE_INDICES_FIELD_NUMBER: builtins.int + LINK_INDEX_FIELD_NUMBER: builtins.int + TIMESTAMPS_UNIX_NANO_FIELD_NUMBER: builtins.int + locations_start_index: builtins.int + """locations_start_index along with locations_length refers to to a slice of locations in Profile.location_indices.""" + locations_length: builtins.int + """locations_length along with locations_start_index refers to a slice of locations in Profile.location_indices. + Supersedes location_index. + """ + @property + def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """The type and unit of each value is defined by the corresponding + entry in Profile.sample_type. All samples must have the same + number of values, the same as the length of Profile.sample_type. + When aggregating multiple samples into a single sample, the + result has a list of values that is the element-wise sum of the + lists of the originals. + """ + @property + def attribute_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """References to attributes in ProfilesDictionary.attribute_table. [optional]""" + link_index: builtins.int + """Reference to link in ProfilesDictionary.link_table. [optional]""" + @property + def timestamps_unix_nano(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Timestamps associated with Sample represented in nanoseconds. These timestamps are expected + to fall within the Profile's time range. [optional] + """ + def __init__( + self, + *, + locations_start_index: builtins.int = ..., + locations_length: builtins.int = ..., + value: collections.abc.Iterable[builtins.int] | None = ..., + attribute_indices: collections.abc.Iterable[builtins.int] | None = ..., + link_index: builtins.int | None = ..., + timestamps_unix_nano: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["_link_index", b"_link_index", "link_index", b"link_index"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_link_index", b"_link_index", "attribute_indices", b"attribute_indices", "link_index", b"link_index", "locations_length", b"locations_length", "locations_start_index", b"locations_start_index", "timestamps_unix_nano", b"timestamps_unix_nano", "value", b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_link_index", b"_link_index"]) -> typing_extensions.Literal["link_index"] | None: ... + +global___Sample = Sample + +@typing_extensions.final +class Mapping(google.protobuf.message.Message): + """Describes the mapping of a binary in memory, including its address range, + file offset, and metadata like build ID + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MEMORY_START_FIELD_NUMBER: builtins.int + MEMORY_LIMIT_FIELD_NUMBER: builtins.int + FILE_OFFSET_FIELD_NUMBER: builtins.int + FILENAME_STRINDEX_FIELD_NUMBER: builtins.int + ATTRIBUTE_INDICES_FIELD_NUMBER: builtins.int + HAS_FUNCTIONS_FIELD_NUMBER: builtins.int + HAS_FILENAMES_FIELD_NUMBER: builtins.int + HAS_LINE_NUMBERS_FIELD_NUMBER: builtins.int + HAS_INLINE_FRAMES_FIELD_NUMBER: builtins.int + memory_start: builtins.int + """Address at which the binary (or DLL) is loaded into memory.""" + memory_limit: builtins.int + """The limit of the address range occupied by this mapping.""" + file_offset: builtins.int + """Offset in the binary that corresponds to the first mapped address.""" + filename_strindex: builtins.int + """The object this entry is loaded from. This can be a filename on + disk for the main binary and shared libraries, or virtual + abstractions like "[vdso]". + Index into ProfilesDictionary.string_table. + """ + @property + def attribute_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """References to attributes in ProfilesDictionary.attribute_table. [optional]""" + has_functions: builtins.bool + """The following fields indicate the resolution of symbolic info.""" + has_filenames: builtins.bool + has_line_numbers: builtins.bool + has_inline_frames: builtins.bool + def __init__( + self, + *, + memory_start: builtins.int = ..., + memory_limit: builtins.int = ..., + file_offset: builtins.int = ..., + filename_strindex: builtins.int = ..., + attribute_indices: collections.abc.Iterable[builtins.int] | None = ..., + has_functions: builtins.bool = ..., + has_filenames: builtins.bool = ..., + has_line_numbers: builtins.bool = ..., + has_inline_frames: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attribute_indices", b"attribute_indices", "file_offset", b"file_offset", "filename_strindex", b"filename_strindex", "has_filenames", b"has_filenames", "has_functions", b"has_functions", "has_inline_frames", b"has_inline_frames", "has_line_numbers", b"has_line_numbers", "memory_limit", b"memory_limit", "memory_start", b"memory_start"]) -> None: ... 
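# Illustrative sketch (not part of the generated stubs): describing where a
# shared library is mapped. The addresses and path are invented; the filename
# travels as an index into the shared string table, and the has_* flags record
# how much symbolic information was resolved for this mapping.
from opentelemetry.proto.profiles.v1development import profiles_pb2

dictionary = profiles_pb2.ProfilesDictionary(
    string_table=["", "/usr/lib/libc.so.6"],
    mapping_table=[
        profiles_pb2.Mapping(
            memory_start=0x7F0000000000,  # load address of the mapping
            memory_limit=0x7F0000200000,  # end of the occupied address range
            file_offset=0,                # file offset of the first mapped byte
            filename_strindex=1,          # -> "/usr/lib/libc.so.6"
            has_functions=True,
            has_line_numbers=False,
        )
    ],
)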
+ +global___Mapping = Mapping + +@typing_extensions.final +class Location(google.protobuf.message.Message): + """Describes function and line table debug information.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MAPPING_INDEX_FIELD_NUMBER: builtins.int + ADDRESS_FIELD_NUMBER: builtins.int + LINE_FIELD_NUMBER: builtins.int + IS_FOLDED_FIELD_NUMBER: builtins.int + ATTRIBUTE_INDICES_FIELD_NUMBER: builtins.int + mapping_index: builtins.int + """Reference to mapping in ProfilesDictionary.mapping_table. + It can be unset if the mapping is unknown or not applicable for + this profile type. + """ + address: builtins.int + """The instruction address for this location, if available. It + should be within [Mapping.memory_start...Mapping.memory_limit] + for the corresponding mapping. A non-leaf address may be in the + middle of a call instruction. It is up to display tools to find + the beginning of the instruction if necessary. + """ + @property + def line(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Line]: + """Multiple line indicates this location has inlined functions, + where the last entry represents the caller into which the + preceding entries were inlined. + + E.g., if memcpy() is inlined into printf: + line[0].function_name == "memcpy" + line[1].function_name == "printf" + """ + is_folded: builtins.bool + """Provides an indication that multiple symbols map to this location's + address, for example due to identical code folding by the linker. In that + case the line information above represents one of the multiple + symbols. This field must be recomputed when the symbolization state of the + profile changes. + """ + @property + def attribute_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """References to attributes in ProfilesDictionary.attribute_table. [optional]""" + def __init__( + self, + *, + mapping_index: builtins.int | None = ..., + address: builtins.int = ..., + line: collections.abc.Iterable[global___Line] | None = ..., + is_folded: builtins.bool = ..., + attribute_indices: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_mapping_index", b"_mapping_index", "mapping_index", b"mapping_index"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_mapping_index", b"_mapping_index", "address", b"address", "attribute_indices", b"attribute_indices", "is_folded", b"is_folded", "line", b"line", "mapping_index", b"mapping_index"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_mapping_index", b"_mapping_index"]) -> typing_extensions.Literal["mapping_index"] | None: ... + +global___Location = Location + +@typing_extensions.final +class Line(google.protobuf.message.Message): + """Details a specific line in a source code, linked to a function.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FUNCTION_INDEX_FIELD_NUMBER: builtins.int + LINE_FIELD_NUMBER: builtins.int + COLUMN_FIELD_NUMBER: builtins.int + function_index: builtins.int + """Reference to function in ProfilesDictionary.function_table.""" + line: builtins.int + """Line number in source code. 0 means unset.""" + column: builtins.int + """Column number in source code. 0 means unset.""" + def __init__( + self, + *, + function_index: builtins.int = ..., + line: builtins.int = ..., + column: builtins.int = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["column", b"column", "function_index", b"function_index", "line", b"line"]) -> None: ... + +global___Line = Line + +@typing_extensions.final +class Function(google.protobuf.message.Message): + """Describes a function, including its human-readable name, system name, + source file, and starting line number in the source. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_STRINDEX_FIELD_NUMBER: builtins.int + SYSTEM_NAME_STRINDEX_FIELD_NUMBER: builtins.int + FILENAME_STRINDEX_FIELD_NUMBER: builtins.int + START_LINE_FIELD_NUMBER: builtins.int + name_strindex: builtins.int + """Function name. Empty string if not available.""" + system_name_strindex: builtins.int + """Function name, as identified by the system. For instance, + it can be a C++ mangled name. Empty string if not available. + """ + filename_strindex: builtins.int + """Source file containing the function. Empty string if not available.""" + start_line: builtins.int + """Line number in source file. 0 means unset.""" + def __init__( + self, + *, + name_strindex: builtins.int = ..., + system_name_strindex: builtins.int = ..., + filename_strindex: builtins.int = ..., + start_line: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["filename_strindex", b"filename_strindex", "name_strindex", b"name_strindex", "start_line", b"start_line", "system_name_strindex", b"system_name_strindex"]) -> None: ... + +global___Function = Function diff --git a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py index ffc7a5d3af1..f7066fcf7ac 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py @@ -7,31 +7,22 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.common.v1 import ( - common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, -) +from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto"i\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 \x01(\rB\x83\x01\n"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z*go.opentelemetry.io/proto/otlp/resource/v1\xaa\x02\x1fOpenTelemetry.Proto.Resource.V1b\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto\"\xa8\x01\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 \x01(\r\x12=\n\x0b\x65ntity_refs\x18\x03 \x03(\x0b\x32(.opentelemetry.proto.common.v1.EntityRefB\x83\x01\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z*go.opentelemetry.io/proto/otlp/resource/v1\xaa\x02\x1fOpenTelemetry.Proto.Resource.V1b\x06proto3') _globals = globals() 
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, "opentelemetry.proto.resource.v1.resource_pb2", _globals -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.resource.v1.resource_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = ( - b'\n"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z*go.opentelemetry.io/proto/otlp/resource/v1\252\002\037OpenTelemetry.Proto.Resource.V1' - ) - _globals["_RESOURCE"]._serialized_start = 127 - _globals["_RESOURCE"]._serialized_end = 232 + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z*go.opentelemetry.io/proto/otlp/resource/v1\252\002\037OpenTelemetry.Proto.Resource.V1' + _globals['_RESOURCE']._serialized_start=128 + _globals['_RESOURCE']._serialized_end=296 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi index 3e1f0805395..b1b0f194981 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ - import builtins import collections.abc import google.protobuf.descriptor @@ -24,7 +23,10 @@ import google.protobuf.message import opentelemetry.proto.common.v1.common_pb2 import sys -import typing as typing_extensions +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions DESCRIPTOR: google.protobuf.descriptor.FileDescriptor @@ -36,12 +38,9 @@ class Resource(google.protobuf.message.Message): ATTRIBUTES_FIELD_NUMBER: builtins.int DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int + ENTITY_REFS_FIELD_NUMBER: builtins.int @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """Set of attributes that describe the resource. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). @@ -50,25 +49,21 @@ class Resource(google.protobuf.message.Message): """dropped_attributes_count is the number of dropped attributes. If the value is 0, then no attributes were dropped. """ + @property + def entity_refs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.EntityRef]: + """Set of entities that participate in this Resource. + + Note: keys in the references MUST exist in attributes of this message. 
+ + Status: [Development] + """ def __init__( self, *, - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., dropped_attributes_count: builtins.int = ..., + entity_refs: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.EntityRef] | None = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "attributes", - b"attributes", - "dropped_attributes_count", - b"dropped_attributes_count", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "entity_refs", b"entity_refs"]) -> None: ... global___Resource = Resource diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py index f573faa6ff3..61a2d0fadd1 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py +++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py @@ -7,52 +7,41 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from opentelemetry.proto.common.v1 import ( - common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, -) -from opentelemetry.proto.resource.v1 import ( - resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, -) +from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2 +from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"Q\n\nTracesData\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans"\xa7\x01\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12=\n\x0bscope_spans\x18\x02 \x03(\x0b\x32(.opentelemetry.proto.trace.v1.ScopeSpans\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\x97\x01\n\nScopeSpans\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t"\x84\x08\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\r\n\x05\x66lags\x18\x10 \x01(\x07\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 \x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r 
\x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\xac\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\x12\r\n\x05\x66lags\x18\x06 \x01(\x07"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05"\xae\x01\n\x06Status\x12\x0f\n\x07message\x18\x02 \x01(\t\x12=\n\x04\x63ode\x18\x03 \x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02J\x04\x08\x01\x10\x02*\x9c\x01\n\tSpanFlags\x12\x19\n\x15SPAN_FLAGS_DO_NOT_USE\x10\x00\x12 \n\x1bSPAN_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x12*\n%SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK\x10\x80\x02\x12&\n!SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK\x10\x80\x04\x42w\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z\'go.opentelemetry.io/proto/otlp/trace/v1\xaa\x02\x1cOpenTelemetry.Proto.Trace.V1b\x06proto3' -) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"Q\n\nTracesData\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"\xa7\x01\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12=\n\x0bscope_spans\x18\x02 \x03(\x0b\x32(.opentelemetry.proto.trace.v1.ScopeSpans\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\x97\x01\n\nScopeSpans\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32\".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x84\x08\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\r\n\x05\x66lags\x18\x10 \x01(\x07\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 \x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r \x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 
\x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\xac\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\x12\r\n\x05\x66lags\x18\x06 \x01(\x07\"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05\"\xae\x01\n\x06Status\x12\x0f\n\x07message\x18\x02 \x01(\t\x12=\n\x04\x63ode\x18\x03 \x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode\"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02J\x04\x08\x01\x10\x02*\x9c\x01\n\tSpanFlags\x12\x19\n\x15SPAN_FLAGS_DO_NOT_USE\x10\x00\x12 \n\x1bSPAN_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x12*\n%SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK\x10\x80\x02\x12&\n!SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK\x10\x80\x04\x42w\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z\'go.opentelemetry.io/proto/otlp/trace/v1\xaa\x02\x1cOpenTelemetry.Proto.Trace.V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, "opentelemetry.proto.trace.v1.trace_pb2", _globals -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.trace.v1.trace_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = ( - b"\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z'go.opentelemetry.io/proto/otlp/trace/v1\252\002\034OpenTelemetry.Proto.Trace.V1" - ) - _globals["_SPANFLAGS"]._serialized_start = 1782 - _globals["_SPANFLAGS"]._serialized_end = 1938 - _globals["_TRACESDATA"]._serialized_start = 166 - _globals["_TRACESDATA"]._serialized_end = 247 - _globals["_RESOURCESPANS"]._serialized_start = 250 - _globals["_RESOURCESPANS"]._serialized_end = 417 - _globals["_SCOPESPANS"]._serialized_start = 420 - _globals["_SCOPESPANS"]._serialized_end = 571 - _globals["_SPAN"]._serialized_start = 574 - _globals["_SPAN"]._serialized_end = 1602 - _globals["_SPAN_EVENT"]._serialized_start = 1131 - _globals["_SPAN_EVENT"]._serialized_end = 1271 - _globals["_SPAN_LINK"]._serialized_start = 1274 - _globals["_SPAN_LINK"]._serialized_end = 1446 - _globals["_SPAN_SPANKIND"]._serialized_start = 1449 - _globals["_SPAN_SPANKIND"]._serialized_end = 1602 - _globals["_STATUS"]._serialized_start = 1605 - _globals["_STATUS"]._serialized_end = 1779 - _globals["_STATUS_STATUSCODE"]._serialized_start = 1695 - _globals["_STATUS_STATUSCODE"]._serialized_end = 1773 + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z\'go.opentelemetry.io/proto/otlp/trace/v1\252\002\034OpenTelemetry.Proto.Trace.V1' + _globals['_SPANFLAGS']._serialized_start=1782 + _globals['_SPANFLAGS']._serialized_end=1938 + _globals['_TRACESDATA']._serialized_start=166 + _globals['_TRACESDATA']._serialized_end=247 + _globals['_RESOURCESPANS']._serialized_start=250 + _globals['_RESOURCESPANS']._serialized_end=417 + 
_globals['_SCOPESPANS']._serialized_start=420 + _globals['_SCOPESPANS']._serialized_end=571 + _globals['_SPAN']._serialized_start=574 + _globals['_SPAN']._serialized_end=1602 + _globals['_SPAN_EVENT']._serialized_start=1131 + _globals['_SPAN_EVENT']._serialized_end=1271 + _globals['_SPAN_LINK']._serialized_start=1274 + _globals['_SPAN_LINK']._serialized_end=1446 + _globals['_SPAN_SPANKIND']._serialized_start=1449 + _globals['_SPAN_SPANKIND']._serialized_end=1602 + _globals['_STATUS']._serialized_start=1605 + _globals['_STATUS']._serialized_end=1779 + _globals['_STATUS_STATUSCODE']._serialized_start=1695 + _globals['_STATUS_STATUSCODE']._serialized_end=1773 # @@protoc_insertion_point(module_scope) diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi index d75e7e03181..598c1ee6da4 100644 --- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi +++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi @@ -15,7 +15,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ - import builtins import collections.abc import google.protobuf.descriptor @@ -38,12 +37,7 @@ class _SpanFlags: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _SpanFlagsEnumTypeWrapper( - google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ - _SpanFlags.ValueType - ], - builtins.type, -): +class _SpanFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SpanFlags.ValueType], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SPAN_FLAGS_DO_NOT_USE: _SpanFlags.ValueType # 0 """The zero value for the enum. Should not be used for comparisons. @@ -107,31 +101,19 @@ class TracesData(google.protobuf.message.Message): RESOURCE_SPANS_FIELD_NUMBER: builtins.int @property - def resource_spans( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___ResourceSpans - ]: + def resource_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceSpans]: """An array of ResourceSpans. For data coming from a single resource this array will typically contain one element. Intermediary nodes that receive data from multiple origins typically batch the data before forwarding further and in that case this array will contain multiple elements. """ - def __init__( self, *, - resource_spans: ( - collections.abc.Iterable[global___ResourceSpans] | None - ) = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource_spans", b"resource_spans" - ], + resource_spans: collections.abc.Iterable[global___ResourceSpans] | None = ..., ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["resource_spans", b"resource_spans"]) -> None: ... global___TracesData = TracesData @@ -145,23 +127,17 @@ class ResourceSpans(google.protobuf.message.Message): SCOPE_SPANS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def resource( - self, - ) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: + def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: """The resource for the spans in this message. If this field is not set then no resource info is known. 
""" - @property - def scope_spans( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___ScopeSpans - ]: + def scope_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeSpans]: """A list of ScopeSpans that originate from a resource.""" schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the resource data - is recorded in. To learn more about Schema URL see + is recorded in. Notably, the last part of the URL path is the version number of the + schema: http[s]://server[:port]/path/. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to the data in the "resource" field. It does not apply to the data in the "scope_spans" field which have their own schema_url field. @@ -169,28 +145,12 @@ class ResourceSpans(google.protobuf.message.Message): def __init__( self, *, - resource: ( - opentelemetry.proto.resource.v1.resource_pb2.Resource | None - ) = ..., - scope_spans: ( - collections.abc.Iterable[global___ScopeSpans] | None - ) = ..., + resource: opentelemetry.proto.resource.v1.resource_pb2.Resource | None = ..., + scope_spans: collections.abc.Iterable[global___ScopeSpans] | None = ..., schema_url: builtins.str = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["resource", b"resource"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "resource", - b"resource", - "schema_url", - b"schema_url", - "scope_spans", - b"scope_spans", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["resource", b"resource"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["resource", b"resource", "schema_url", b"schema_url", "scope_spans", b"scope_spans"]) -> None: ... global___ResourceSpans = ResourceSpans @@ -204,46 +164,30 @@ class ScopeSpans(google.protobuf.message.Message): SPANS_FIELD_NUMBER: builtins.int SCHEMA_URL_FIELD_NUMBER: builtins.int @property - def scope( - self, - ) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: + def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope: """The instrumentation scope information for the spans in this message. Semantically when InstrumentationScope isn't set, it is equivalent with an empty instrumentation scope name (unknown). """ - @property - def spans( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Span - ]: + def spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span]: """A list of Spans that originate from an instrumentation scope.""" schema_url: builtins.str """The Schema URL, if known. This is the identifier of the Schema that the span data - is recorded in. To learn more about Schema URL see + is recorded in. Notably, the last part of the URL path is the version number of the + schema: http[s]://server[:port]/path/. To learn more about Schema URL see https://opentelemetry.io/docs/specs/otel/schemas/#schema-url This schema_url applies to all spans and span events in the "spans" field. """ def __init__( self, *, - scope: ( - opentelemetry.proto.common.v1.common_pb2.InstrumentationScope - | None - ) = ..., + scope: opentelemetry.proto.common.v1.common_pb2.InstrumentationScope | None = ..., spans: collections.abc.Iterable[global___Span] | None = ..., schema_url: builtins.str = ..., ) -> None: ... 
- def HasField( - self, field_name: typing_extensions.Literal["scope", b"scope"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "schema_url", b"schema_url", "scope", b"scope", "spans", b"spans" - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["scope", b"scope"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["schema_url", b"schema_url", "scope", b"scope", "spans", b"spans"]) -> None: ... global___ScopeSpans = ScopeSpans @@ -260,12 +204,7 @@ class Span(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _SpanKindEnumTypeWrapper( - google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ - Span._SpanKind.ValueType - ], - builtins.type, - ): + class _SpanKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Span._SpanKind.ValueType], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SPAN_KIND_UNSPECIFIED: Span._SpanKind.ValueType # 0 """Unspecified. Do NOT use as default. @@ -343,11 +282,7 @@ class Span(google.protobuf.message.Message): This field is semantically required to be set to non-empty string. """ @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """attributes is a collection of attribute key/value pairs on the event. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). @@ -361,27 +296,10 @@ class Span(google.protobuf.message.Message): *, time_unix_nano: builtins.int = ..., name: builtins.str = ..., - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., dropped_attributes_count: builtins.int = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "attributes", - b"attributes", - "dropped_attributes_count", - b"dropped_attributes_count", - "name", - b"name", - "time_unix_nano", - b"time_unix_nano", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "name", b"name", "time_unix_nano", b"time_unix_nano"]) -> None: ... @typing_extensions.final class Link(google.protobuf.message.Message): @@ -408,11 +326,7 @@ class Span(google.protobuf.message.Message): trace_state: builtins.str """The trace_state associated with the link.""" @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """attributes is a collection of attribute key/value pairs on the link. Attribute keys MUST be unique (it is not allowed to have more than one attribute with the same key). 
@@ -446,32 +360,11 @@ class Span(google.protobuf.message.Message): trace_id: builtins.bytes = ..., span_id: builtins.bytes = ..., trace_state: builtins.str = ..., - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., dropped_attributes_count: builtins.int = ..., flags: builtins.int = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "attributes", - b"attributes", - "dropped_attributes_count", - b"dropped_attributes_count", - "flags", - b"flags", - "span_id", - b"span_id", - "trace_id", - b"trace_id", - "trace_state", - b"trace_state", - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "flags", b"flags", "span_id", b"span_id", "trace_id", b"trace_id", "trace_state", b"trace_state"]) -> None: ... TRACE_ID_FIELD_NUMBER: builtins.int SPAN_ID_FIELD_NUMBER: builtins.int @@ -572,11 +465,7 @@ class Span(google.protobuf.message.Message): This field is semantically required and it is expected that end_time >= start_time. """ @property - def attributes( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ]: + def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: """attributes is a collection of key/value pairs. Note, global attributes like server name can be set using the resource API. Examples of attributes: @@ -596,22 +485,14 @@ class Span(google.protobuf.message.Message): attributes. If this value is 0, then no attributes were dropped. """ @property - def events( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Span.Event - ]: + def events(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Event]: """events is a collection of Event items.""" dropped_events_count: builtins.int """dropped_events_count is the number of dropped events. If the value is 0, then no events were dropped. """ @property - def links( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Span.Link - ]: + def links(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Link]: """links is a collection of Links, which are references from this span to a span in the same or different trace. """ @@ -624,7 +505,6 @@ class Span(google.protobuf.message.Message): """An optional final status for this span. Semantically when Status isn't set, it means span's status code is unset, i.e. assume STATUS_CODE_UNSET (code = 0). 
""" - def __init__( self, *, @@ -637,12 +517,7 @@ class Span(google.protobuf.message.Message): kind: global___Span.SpanKind.ValueType = ..., start_time_unix_nano: builtins.int = ..., end_time_unix_nano: builtins.int = ..., - attributes: ( - collections.abc.Iterable[ - opentelemetry.proto.common.v1.common_pb2.KeyValue - ] - | None - ) = ..., + attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ..., dropped_attributes_count: builtins.int = ..., events: collections.abc.Iterable[global___Span.Event] | None = ..., dropped_events_count: builtins.int = ..., @@ -650,46 +525,8 @@ class Span(google.protobuf.message.Message): dropped_links_count: builtins.int = ..., status: global___Status | None = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["status", b"status"] - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "attributes", - b"attributes", - "dropped_attributes_count", - b"dropped_attributes_count", - "dropped_events_count", - b"dropped_events_count", - "dropped_links_count", - b"dropped_links_count", - "end_time_unix_nano", - b"end_time_unix_nano", - "events", - b"events", - "flags", - b"flags", - "kind", - b"kind", - "links", - b"links", - "name", - b"name", - "parent_span_id", - b"parent_span_id", - "span_id", - b"span_id", - "start_time_unix_nano", - b"start_time_unix_nano", - "status", - b"status", - "trace_id", - b"trace_id", - "trace_state", - b"trace_state", - ], - ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["status", b"status"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "dropped_events_count", b"dropped_events_count", "dropped_links_count", b"dropped_links_count", "end_time_unix_nano", b"end_time_unix_nano", "events", b"events", "flags", b"flags", "kind", b"kind", "links", b"links", "name", b"name", "parent_span_id", b"parent_span_id", "span_id", b"span_id", "start_time_unix_nano", b"start_time_unix_nano", "status", b"status", "trace_id", b"trace_id", "trace_state", b"trace_state"]) -> None: ... global___Span = Span @@ -705,12 +542,7 @@ class Status(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusCodeEnumTypeWrapper( - google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ - Status._StatusCode.ValueType - ], - builtins.type, - ): + class _StatusCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Status._StatusCode.ValueType], builtins.type): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor STATUS_CODE_UNSET: Status._StatusCode.ValueType # 0 """The default status.""" @@ -747,11 +579,6 @@ class Status(google.protobuf.message.Message): message: builtins.str = ..., code: global___Status.StatusCode.ValueType = ..., ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "code", b"code", "message", b"message" - ], - ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["code", b"code", "message", b"message"]) -> None: ... 
global___Status = Status diff --git a/scripts/proto_codegen.sh b/scripts/proto_codegen.sh index f445d3f42a0..8597c4b9729 100755 --- a/scripts/proto_codegen.sh +++ b/scripts/proto_codegen.sh @@ -12,7 +12,7 @@ # PROTO_REPO_DIR - the path to an existing checkout of the opentelemetry-proto repo # Pinned commit/branch/tag for the current version used in opentelemetry-proto python package. -PROTO_REPO_BRANCH_OR_COMMIT="v1.2.0" +PROTO_REPO_BRANCH_OR_COMMIT="v1.7.0" set -e
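For reference, a minimal sketch (not part of this diff) of how the regenerated trace_pb2 stubs are exercised. Span, Status, KeyValue, AnyValue, the nested Span.Event type, the SPAN_KIND_*/STATUS_CODE_* enum values, and the HasField/ClearField signatures all appear in the stub hunks above; the concrete field values below are placeholder data, not anything this PR prescribes.

    # Sketch only: exercises the typed API surface described by the
    # regenerated trace_pb2.pyi stubs. All identifiers come from the
    # generated modules touched by this diff; the values are illustrative.
    from opentelemetry.proto.common.v1.common_pb2 import AnyValue, KeyValue
    from opentelemetry.proto.trace.v1.trace_pb2 import Span, Status

    span = Span(
        trace_id=bytes(16),                 # 16-byte trace id (zeroed placeholder)
        span_id=bytes(8),                   # 8-byte span id (zeroed placeholder)
        name="example-span",
        kind=Span.SPAN_KIND_INTERNAL,
        start_time_unix_nano=1,
        end_time_unix_nano=2,
        # attributes is an Iterable[KeyValue] | None per the stub above
        attributes=[KeyValue(key="http.method", value=AnyValue(string_value="GET"))],
        events=[Span.Event(time_unix_nano=1, name="example-event")],
        status=Status(code=Status.STATUS_CODE_OK),
    )

    # HasField/ClearField accept only the literal field names declared
    # in the stubs, so typos are caught by the type checker.
    assert span.HasField("status")
    span.ClearField("status")
    assert not span.HasField("status")

    payload = span.SerializeToString()  # wire bytes, as sent by the OTLP exporters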