
Commit d865edd

Add langchain standard integration tests
1 parent 9b7c35c commit d865edd

5 files changed: 1,152 additions, 203 deletions


langchain/pyproject.toml

Lines changed: 4 additions & 0 deletions
@@ -33,7 +33,9 @@ Issues = "https://github.com/vectorize-io/integrations-python/issues"
 dev = [
     "mypy>=1.13.0",
     "pytest>=8.3.3",
+    "pytest-asyncio>=0.26.0",
     "ruff>=0.9.0,<0.10",
+    "langchain-tests>=0.3.20",
 ]

 [tool.ruff.lint]
@@ -71,3 +73,5 @@ packages = ["langchain_vectorize"]
 requires = ["hatchling"]
 build-backend = "hatchling.build"

+[tool.pytest.ini_options]
+asyncio_mode = "auto"
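Note: with asyncio_mode = "auto", pytest-asyncio collects async test functions without a per-test @pytest.mark.asyncio marker, which the async variants of the langchain-tests suite rely on. A minimal sketch (the test name below is illustrative and not part of this commit):

import asyncio


async def test_runs_without_marker() -> None:  # illustrative example, not in this commit
    # In auto mode pytest-asyncio wraps this coroutine in an event loop automatically.
    await asyncio.sleep(0)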

langchain/tests/conftest.py

Lines changed: 214 additions & 0 deletions
import json
import logging
import os
import time
from collections.abc import Iterator
from pathlib import Path
from typing import Literal

import pytest
import urllib3
from vectorize_client.api.ai_platform_connectors_api import AIPlatformConnectorsApi
from vectorize_client.api.destination_connectors_api import DestinationConnectorsApi
from vectorize_client.api.pipelines_api import PipelinesApi
from vectorize_client.api.source_connectors_api import SourceConnectorsApi
from vectorize_client.api.uploads_api import UploadsApi
from vectorize_client.api_client import ApiClient
from vectorize_client.configuration import Configuration
from vectorize_client.exceptions import ApiException
from vectorize_client.models.ai_platform_config_schema import AIPlatformConfigSchema
from vectorize_client.models.ai_platform_type_for_pipeline import (
    AIPlatformTypeForPipeline,
)
from vectorize_client.models.create_source_connector_request import (
    CreateSourceConnectorRequest,
)
from vectorize_client.models.destination_connector_type_for_pipeline import (
    DestinationConnectorTypeForPipeline,
)
from vectorize_client.models.file_upload import FileUpload
from vectorize_client.models.pipeline_ai_platform_connector_schema import (
    PipelineAIPlatformConnectorSchema,
)
from vectorize_client.models.pipeline_configuration_schema import (
    PipelineConfigurationSchema,
)
from vectorize_client.models.pipeline_destination_connector_schema import (
    PipelineDestinationConnectorSchema,
)
from vectorize_client.models.pipeline_source_connector_schema import (
    PipelineSourceConnectorSchema,
)
from vectorize_client.models.retrieve_documents_request import RetrieveDocumentsRequest
from vectorize_client.models.schedule_schema import ScheduleSchema
from vectorize_client.models.schedule_schema_type import ScheduleSchemaType
from vectorize_client.models.source_connector_type import SourceConnectorType
from vectorize_client.models.start_file_upload_to_connector_request import (
    StartFileUploadToConnectorRequest,
)


@pytest.fixture(scope="session")
def api_token() -> str:
    token = os.getenv("VECTORIZE_TOKEN")
    if not token:
        msg = "Please set the VECTORIZE_TOKEN environment variable"
        raise ValueError(msg)
    return token


@pytest.fixture(scope="session")
def org_id() -> str:
    org = os.getenv("VECTORIZE_ORG")
    if not org:
        msg = "Please set the VECTORIZE_ORG environment variable"
        raise ValueError(msg)
    return org


@pytest.fixture(scope="session")
def environment() -> Literal["prod", "dev", "local", "staging"]:
    env = os.getenv("VECTORIZE_ENV", "prod")
    if env not in ["prod", "dev", "local", "staging"]:
        msg = "Invalid VECTORIZE_ENV environment variable."
        raise ValueError(msg)
    return env  # type: ignore[return-value]


@pytest.fixture(scope="session")
def api_client(api_token: str, environment: str) -> Iterator[ApiClient]:
    header_name = None
    header_value = None
    if environment == "prod":
        host = "https://api.vectorize.io/v1"
    elif environment == "dev":
        host = "https://api-dev.vectorize.io/v1"
    elif environment == "local":
        host = "http://localhost:3000/api"
        header_name = "x-lambda-api-key"
        header_value = api_token
    else:
        host = "https://api-staging.vectorize.io/v1"

    with ApiClient(
        Configuration(host=host, access_token=api_token, debug=True),
        header_name,
        header_value,
    ) as api:
        yield api


@pytest.fixture(scope="session")
def pipeline_id(api_client: ApiClient, org_id: str) -> Iterator[str]:
    pipelines = PipelinesApi(api_client)

    # Create a File Upload source connector and push research.pdf into it.
    connectors_api = SourceConnectorsApi(api_client)
    response = connectors_api.create_source_connector(
        org_id,
        CreateSourceConnectorRequest(FileUpload(name="from api", type="FILE_UPLOAD")),
    )
    source_connector_id = response.connector.id
    logging.info("Created source connector %s", source_connector_id)

    uploads_api = UploadsApi(api_client)
    upload_response = uploads_api.start_file_upload_to_connector(
        org_id,
        source_connector_id,
        StartFileUploadToConnectorRequest(  # type: ignore[call-arg]
            name="research.pdf",
            content_type="application/pdf",
            metadata=json.dumps({"created-from-api": True}),
        ),
    )

    http = urllib3.PoolManager()
    this_dir = Path(__file__).parent
    file_path = this_dir / "research.pdf"

    with file_path.open("rb") as f:
        http_response = http.request(
            "PUT",
            upload_response.upload_url,
            body=f,
            headers={
                "Content-Type": "application/pdf",
                "Content-Length": str(file_path.stat().st_size),
            },
        )
    if http_response.status != 200:
        msg = f"Upload failed with status {http_response.status}"
        raise ValueError(msg)
    else:
        logging.info("Upload successful")

    # Use the built-in Vectorize AI platform and vector database connectors.
    ai_platforms = AIPlatformConnectorsApi(api_client).get_ai_platform_connectors(
        org_id
    )
    builtin_ai_platform = next(
        c.id for c in ai_platforms.ai_platform_connectors if c.type == "VECTORIZE"
    )
    logging.info("Using AI platform %s", builtin_ai_platform)

    vector_databases = DestinationConnectorsApi(api_client).get_destination_connectors(
        org_id
    )
    builtin_vector_db = next(
        c.id for c in vector_databases.destination_connectors if c.type == "VECTORIZE"
    )
    logging.info("Using destination connector %s", builtin_vector_db)

    pipeline_response = pipelines.create_pipeline(
        org_id,
        PipelineConfigurationSchema(  # type: ignore[call-arg]
            source_connectors=[
                PipelineSourceConnectorSchema(
                    id=source_connector_id,
                    type=SourceConnectorType.FILE_UPLOAD,
                    config={},
                )
            ],
            destination_connector=PipelineDestinationConnectorSchema(
                id=builtin_vector_db,
                type=DestinationConnectorTypeForPipeline.VECTORIZE,
                config={},
            ),
            ai_platform_connector=PipelineAIPlatformConnectorSchema(
                id=builtin_ai_platform,
                type=AIPlatformTypeForPipeline.VECTORIZE,
                config=AIPlatformConfigSchema(),
            ),
            pipeline_name="Test pipeline",
            schedule=ScheduleSchema(type=ScheduleSchemaType.MANUAL),
        ),
    )
    pipeline_id = pipeline_response.data.id

    # Wait until the pipeline has processed the upload and documents are retrievable.
    request = RetrieveDocumentsRequest(
        question="query",
        num_results=2,
    )
    start = time.time()
    while True:
        try:
            response = pipelines.retrieve_documents(org_id, pipeline_id, request)
        except ApiException as e:
            if "503" not in str(e):
                raise
        else:
            docs = response.documents
            if len(docs) == 2:
                break
        if time.time() - start > 180:
            msg = "Docs not retrieved in time"
            raise RuntimeError(msg)
        time.sleep(1)

    logging.info("Created pipeline %s", pipeline_id)

    yield pipeline_id

    # Teardown: remove the pipeline created for the test session.
    try:
        pipelines.delete_pipeline(org_id, pipeline_id)
    except Exception:
        logging.exception("Failed to delete pipeline %s", pipeline_id)
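The session-scoped fixtures above chain together: api_client builds a vectorize_client ApiClient for the selected environment, and pipeline_id uploads research.pdf to a File Upload source connector, wires it to the built-in Vectorize AI platform and vector database, polls retrieve_documents until two documents come back, yields the pipeline id, and deletes the pipeline on teardown. A test outside the standard suite could reuse them directly; the sketch below is illustrative and not part of this commit:

from vectorize_client.api.pipelines_api import PipelinesApi
from vectorize_client.api_client import ApiClient
from vectorize_client.models.retrieve_documents_request import RetrieveDocumentsRequest


def test_pipeline_returns_documents(  # illustrative example, not in this commit
    api_client: ApiClient, org_id: str, pipeline_id: str
) -> None:
    # Query the pipeline created by the conftest fixture through the raw client.
    pipelines = PipelinesApi(api_client)
    response = pipelines.retrieve_documents(
        org_id,
        pipeline_id,
        RetrieveDocumentsRequest(question="What are you?", num_results=2),
    )
    assert len(response.documents) == 2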
Lines changed: 50 additions & 0 deletions
from typing import Literal

import pytest
from langchain_tests.integration_tests import RetrieversIntegrationTests

from langchain_vectorize import VectorizeRetriever


class TestVectorizeRetrieverIntegration(RetrieversIntegrationTests):
    @pytest.fixture(autouse=True)
    def setup(
        self,
        environment: Literal["prod", "dev", "local", "staging"],
        api_token: str,
        org_id: str,
        pipeline_id: str,
    ) -> None:
        self._environment = environment
        self._api_token = api_token
        self._org_id = org_id
        self._pipeline_id = pipeline_id

    @property
    def retriever_constructor(self) -> type[VectorizeRetriever]:
        return VectorizeRetriever

    @property
    def retriever_constructor_params(self) -> dict:
        return {
            "environment": self._environment,
            "api_token": self._api_token,
            "organization": self._org_id,
            "pipeline_id": self._pipeline_id,
        }

    @property
    def retriever_query_example(self) -> str:
        return "What are you?"

    @pytest.mark.xfail(
        reason="VectorizeRetriever does not support k parameter in constructor"
    )
    def test_k_constructor_param(self) -> None:
        raise NotImplementedError

    @pytest.mark.xfail(
        reason="VectorizeRetriever does not support k parameter in invoke"
    )
    def test_invoke_with_k_kwarg(self) -> None:
        raise NotImplementedError
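The standard suite builds the retriever from retriever_constructor and retriever_constructor_params, so running it requires VECTORIZE_TOKEN and VECTORIZE_ORG (and optionally VECTORIZE_ENV) to be set. The equivalent direct construction, as a sketch assuming those same environment variables and an existing pipeline id:

import os

from langchain_vectorize import VectorizeRetriever

retriever = VectorizeRetriever(
    environment="prod",
    api_token=os.environ["VECTORIZE_TOKEN"],
    organization=os.environ["VECTORIZE_ORG"],
    pipeline_id="<pipeline-id>",  # e.g. the id yielded by the pipeline_id fixture
)
docs = retriever.invoke("What are you?")  # standard LangChain retriever entry point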
