Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

- Update ConsoleLogExporter.export to handle LogRecords containing a bytes-type
  value in the body ([#4614](https://github.com/open-telemetry/opentelemetry-python/pull/4614/)).
- opentelemetry-sdk: Fix invalid `type: ignore` that causes mypy to ignore the whole file
([#4618](https://github.com/open-telemetry/opentelemetry-python/pull/4618))
- Add `span_exporter` property back to `BatchSpanProcessor` class
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

import abc
import atexit
import base64
import concurrent.futures
import json
import logging
Expand Down Expand Up @@ -61,6 +62,13 @@
_ENV_VALUE_UNSET = ""


class BytesEncoder(json.JSONEncoder):
    """JSON encoder that serializes ``bytes`` values as base64 text.

    ``json.dumps`` cannot handle raw ``bytes``; this encoder renders them
    as their base64 ASCII representation and defers every other
    unsupported type to the standard ``JSONEncoder`` behavior (which
    raises ``TypeError``).
    """

    def default(self, o):
        # Non-bytes objects keep the stock behavior.
        if not isinstance(o, bytes):
            return super().default(o)
        return base64.b64encode(o).decode()


class LogDroppedAttributesWarning(UserWarning):
"""Custom warning to indicate dropped log attributes due to limits.

Expand Down Expand Up @@ -248,6 +256,7 @@ def to_json(self, indent: int | None = 4) -> str:
"resource": json.loads(self.resource.to_json()),
},
indent=indent,
cls=BytesEncoder,
)

@property
Expand Down
33 changes: 4 additions & 29 deletions opentelemetry-sdk/tests/logs/test_log_record.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,34 +28,10 @@

class TestLogRecord(unittest.TestCase):
def test_log_record_to_json(self):
expected = json.dumps(
{
"body": "a log line",
"severity_number": None,
"severity_text": None,
"attributes": {
"mapping": {"key": "value"},
"none": None,
"sequence": [1, 2],
"str": "string",
},
"dropped_attributes": 0,
"timestamp": "1970-01-01T00:00:00.000000Z",
"observed_timestamp": "1970-01-01T00:00:00.000000Z",
"trace_id": "",
"span_id": "",
"trace_flags": None,
"resource": {
"attributes": {"service.name": "foo"},
"schema_url": "",
},
},
indent=4,
)
actual = LogRecord(
log_record = LogRecord(
timestamp=0,
observed_timestamp=0,
body="a log line",
body={"key": "logLine", "bytes": b"123"},
resource=Resource({"service.name": "foo"}),
attributes={
"mapping": {"key": "value"},
Expand All @@ -65,10 +41,9 @@ def test_log_record_to_json(self):
},
)

self.assertEqual(expected, actual.to_json(indent=4))
self.assertEqual(
actual.to_json(indent=None),
'{"body": "a log line", "severity_number": null, "severity_text": null, "attributes": {"mapping": {"key": "value"}, "none": null, "sequence": [1, 2], "str": "string"}, "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "observed_timestamp": "1970-01-01T00:00:00.000000Z", "trace_id": "", "span_id": "", "trace_flags": null, "resource": {"attributes": {"service.name": "foo"}, "schema_url": ""}}',
log_record.to_json(indent=None),
'{"body": {"key": "logLine", "bytes": "MTIz"}, "severity_number": null, "severity_text": null, "attributes": {"mapping": {"key": "value"}, "none": null, "sequence": [1, 2], "str": "string"}, "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "observed_timestamp": "1970-01-01T00:00:00.000000Z", "trace_id": "", "span_id": "", "trace_flags": null, "resource": {"attributes": {"service.name": "foo"}, "schema_url": ""}}',
)

def test_log_record_to_json_serializes_severity_number_as_int(self):
Expand Down