
Commit a9c8a14

style(ruff): make files compliant.
1 parent eba8bf9

8 files changed: +20 -24 lines changed

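Every diff below applies the same kind of fix: third-party imports that sat in their own late group (typically `from langchain.chat_models import init_chat_model` and `from typing_extensions import TypedDict`) are folded into a single alphabetized import block, plus a couple of whitespace and line-length cleanups. This is the layout ruff's isort-compatible rules (the `I` group) enforce, and a run of `ruff check --fix` followed by `ruff format` would typically produce it. A minimal sketch of the target ordering, using module names that appear in the diffs below rather than any actual file from the repo:

    import os  # standard-library imports first
    from typing import Annotated

    # then one alphabetized block of third-party imports
    from langchain.chat_models import init_chat_model
    from langchain_tavily import TavilySearch
    from langgraph.graph import START, StateGraph
    from typing_extensions import TypedDict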

langchain/data_extractor.py

Lines changed: 1 addition & 2 deletions

@@ -1,12 +1,11 @@
 import os
 from typing import List, Optional
 
+from langchain.chat_models import init_chat_model
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.utils.function_calling import tool_example_to_messages
 from pydantic import BaseModel, Field
 
-from langchain.chat_models import init_chat_model
-
 os.environ["OPENAI_API_KEY"] = input("OpenAI API key: ")
 
 llm = init_chat_model("gpt-4o-mini", model_provider="openai")

langchain/simple_llm_application.py

Lines changed: 1 addition & 2 deletions

@@ -1,11 +1,10 @@
 import os
 
+from langchain.chat_models import init_chat_model
 from langchain_core.messages import HumanMessage, SystemMessage
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate
 
-from langchain.chat_models import init_chat_model
-
 os.environ["OPENAI_API_KEY"] = input("OpenAI API key: ")
 
 
langchain/text_classifier.py

Lines changed: 1 addition & 2 deletions

@@ -1,11 +1,10 @@
 import os
 
+from langchain.chat_models import init_chat_model
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_openai import ChatOpenAI
 from pydantic import BaseModel, Field
 
-from langchain.chat_models import init_chat_model
-
 os.environ["OPENAI_API_KEY"] = input("OpenAI API key: ")
 
 llm = init_chat_model("gpt-4o-mini", model_provider="openai")

langgraph/agents/agent.py

Lines changed: 3 additions & 4 deletions

@@ -1,14 +1,13 @@
 import os
 from typing import Annotated
 
-from langchain_tavily import TavilySearch
-from typing_extensions import TypedDict
-
 from langchain.chat_models import init_chat_model
+from langchain_tavily import TavilySearch
 from langgraph.checkpoint.memory import MemorySaver
 from langgraph.graph import START, StateGraph
 from langgraph.graph.message import add_messages
 from langgraph.prebuilt import ToolNode, tools_condition
+from typing_extensions import TypedDict
 
 os.environ["OPENAI_API_KEY"] = input("OpenAI API key: ")
 os.environ["TAVILY_API_KEY"] = input("Tavily API key: ")
@@ -68,7 +67,7 @@ def chatbot(state: State):
 def stream_graph_updates(user_input: str):
     for event in graph.stream(
         {"messages": [{"role": "user", "content": user_input}]},
-        config, # setup the memory thread to use
+        config,  # setup the memory thread to use
     ):
         for value in event.values():
             print("Assistant:", value["messages"][-1].content)

langgraph/agents/agent_human_assistance.py

Lines changed: 2 additions & 3 deletions

@@ -1,17 +1,16 @@
 import os
 from typing import Annotated
 
+from langchain.chat_models import init_chat_model
 from langchain_core.messages import ToolMessage
 from langchain_core.tools import InjectedToolCallId, tool
 from langchain_tavily import TavilySearch
-from typing_extensions import TypedDict
-
-from langchain.chat_models import init_chat_model
 from langgraph.checkpoint.memory import MemorySaver
 from langgraph.graph import START, StateGraph
 from langgraph.graph.message import add_messages
 from langgraph.prebuilt import ToolNode, tools_condition
 from langgraph.types import Command, interrupt
+from typing_extensions import TypedDict
 
 os.environ["OPENAI_API_KEY"] = input("OpenAI API key: ")
 os.environ["TAVILY_API_KEY"] = input("Tavily API key: ")

langgraph/agents/agent_time_travel.py

Lines changed: 2 additions & 3 deletions

@@ -1,14 +1,13 @@
 import os
 from typing import Annotated
 
-from langchain_tavily import TavilySearch
-from typing_extensions import TypedDict
-
 from langchain.chat_models import init_chat_model
+from langchain_tavily import TavilySearch
 from langgraph.checkpoint.memory import MemorySaver
 from langgraph.graph import START, StateGraph
 from langgraph.graph.message import add_messages
 from langgraph.prebuilt import ToolNode, tools_condition
+from typing_extensions import TypedDict
 
 os.environ["OPENAI_API_KEY"] = input("OpenAI API key: ")
 os.environ["TAVILY_API_KEY"] = input("Tavily API key: ")

langgraph/rag/rag.py

Lines changed: 2 additions & 2 deletions

@@ -2,20 +2,20 @@
 from typing import Annotated, List, Literal
 
 import bs4  # BeautifulSoup to parse HTML
+from langchain.chat_models import init_chat_model
 from langchain_community.document_loaders import WebBaseLoader
 from langchain_core.documents import Document
 from langchain_core.vectorstores import InMemoryVectorStore
 from langchain_openai import OpenAIEmbeddings
 from langchain_text_splitters import RecursiveCharacterTextSplitter
+from langgraph.graph import START, StateGraph
 from typing_extensions import TypedDict
 
 # LangChain Hub is a centralized platform for uploading,
 # browsing, pulling, and managing prompts to help developers
 # discover and share polished prompt templates for various large
 # language models (LLMs)
 from langchain import hub
-from langchain.chat_models import init_chat_model
-from langgraph.graph import START, StateGraph
 
 os.environ["OPENAI_API_KEY"] = input("OpenAI API key: ")
 
langgraph/rag/rag_delegation.py

Lines changed: 8 additions & 6 deletions

@@ -1,20 +1,20 @@
 import os
 from typing import List
 
-import bs4 # BeautifulSoup to parse HTML
+import bs4  # BeautifulSoup to parse HTML
+from langchain.chat_models import init_chat_model
 from langchain_community.document_loaders import WebBaseLoader
 from langchain_core.documents import Document
 from langchain_core.messages import SystemMessage
 from langchain_core.tools import tool
 from langchain_openai import OpenAIEmbeddings
 from langchain_text_splitters import RecursiveCharacterTextSplitter
-from typing_extensions import TypedDict
-
-from langchain import hub
-from langchain.chat_models import init_chat_model
 from langgraph.checkpoint.memory import MemorySaver
 from langgraph.graph import END, MessagesState, StateGraph
 from langgraph.prebuilt import ToolNode, create_react_agent, tools_condition
+from typing_extensions import TypedDict
+
+from langchain import hub
 
 os.environ["OPENAI_API_KEY"] = input("OpenAI API key: ")
 
@@ -56,7 +56,9 @@
 
 
 # Turn the retrieve step into a tool call option for the LLM
-@tool(response_format="content_and_artifact") # make the model see only the first output
+@tool(
+    response_format="content_and_artifact"
+)  # make the model see only the first output
 def retrieve(query: str):
     """Retrieve information related to a query."""
     retrieved_docs = vector_store.similarity_search(query, k=2)
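The second hunk here is a line-length fix: the original `@tool(...)` decorator plus its trailing comment runs past the 88-character limit that ruff shares with Black by default, so the call is wrapped and the comment ends up after the closing parenthesis. The same pattern applies to any over-long call; a small sketch with a hypothetical function name:

    result = process_documents_with_a_long_name(
        response_format="content_and_artifact"
    )  # the trailing comment now sits on a short line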
