Skip to content

fix(langchain): Ensure no duplicate SentryLangchainCallback #4485

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 1 commit into
base: szokeasaurusrex/refactor-langchain-args
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 9 additions & 1 deletion sentry_sdk/integrations/langchain.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import itertools
from collections import OrderedDict
from functools import wraps

Expand Down Expand Up @@ -451,7 +452,14 @@ def new_configure(
**kwargs,
)

if not any(isinstance(cb, SentryLangchainCallback) for cb in callbacks_list):
inheritable_callbacks_list = (
inheritable_callbacks if isinstance(inheritable_callbacks, list) else []
)

if not any(
isinstance(cb, SentryLangchainCallback)
for cb in itertools.chain(callbacks_list, inheritable_callbacks_list)
):
# Avoid mutating the existing callbacks list
callbacks_list = [
*callbacks_list,
Expand Down
78 changes: 76 additions & 2 deletions tests/integrations/langchain/test_langchain.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,15 @@

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.messages import BaseMessage, AIMessageChunk
from langchain_core.outputs import ChatGenerationChunk
from langchain_core.outputs import ChatGenerationChunk, ChatResult
from langchain_core.runnables import RunnableConfig
from langchain_core.language_models.chat_models import BaseChatModel

from sentry_sdk import start_transaction
from sentry_sdk.integrations.langchain import LangchainIntegration
from sentry_sdk.integrations.langchain import (
LangchainIntegration,
SentryLangchainCallback,
)
from langchain.agents import tool, AgentExecutor, create_openai_tools_agent
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

Expand Down Expand Up @@ -342,3 +347,72 @@ def test_span_origin(sentry_init, capture_events):
assert event["contexts"]["trace"]["origin"] == "manual"
for span in event["spans"]:
assert span["origin"] == "auto.ai.langchain"


def test_manual_callback_no_duplication(sentry_init):
    """
    Test that when a user manually provides a SentryLangchainCallback,
    the integration doesn't create a duplicate callback.
    """

    # Collect the id() of every SentryLangchainCallback the model observes.
    seen_sentry_callback_ids = set()

    class CallbackRecordingModel(BaseChatModel):
        """Mock model that records Sentry callback instances for testing."""

        def _generate(
            self,
            messages,
            stop=None,
            run_manager=None,
            **kwargs,
        ):
            # Record every SentryLangchainCallback across both the direct
            # and the inheritable handler lists.
            if run_manager:
                combined = list(run_manager.handlers) + list(
                    run_manager.inheritable_handlers
                )
                seen_sentry_callback_ids.update(
                    id(handler)
                    for handler in combined
                    if isinstance(handler, SentryLangchainCallback)
                )

            return ChatResult(
                generations=[
                    ChatGenerationChunk(message=AIMessageChunk(content="Hello!"))
                ],
                llm_output={},
            )

        @property
        def _llm_type(self):
            return "test_model"

        @property
        def _identifying_params(self):
            return {}

    sentry_init(integrations=[LangchainIntegration()])

    # User-supplied callback that the integration must not duplicate.
    manual_callback = SentryLangchainCallback(
        max_span_map_size=100, include_prompts=False
    )

    # Pass the manual callback through the runnable config.
    run_config = RunnableConfig(callbacks=[manual_callback])

    model = CallbackRecordingModel()
    model.invoke("Hello", run_config)

    # Exactly one SentryLangchainCallback instance should have been active.
    assert len(seen_sentry_callback_ids) == 1, (
        f"Expected exactly 1 SentryLangchainCallback instance, "
        f"but found {len(seen_sentry_callback_ids)}. "
        f"This indicates callback duplication occurred."
    )

    # And it must be the very instance the user supplied.
    assert id(manual_callback) in seen_sentry_callback_ids
Loading