Skip to content

Commit c5d3247

Browse files
committed
feat: add long context Ollama through LCOllama
1 parent c2c3292 commit c5d3247

File tree

7 files changed

+58
-5
lines changed

7 files changed

+58
-5
lines changed

flowsettings.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -181,6 +181,16 @@
181181
},
182182
"default": False,
183183
}
184+
# Register a long-context Ollama chat model backed by the LangChain-based
# LCOllamaChat wrapper.  Disabled by default; users opt in from the UI.
KH_LLMS["ollama-long-context"] = {
    "spec": {
        "__type__": "kotaemon.llms.LCOllamaChat",
        # LCOllamaChat talks to the native Ollama API, so the OpenAI-compat
        # "v1/" suffix is stripped from the configured base URL.
        "base_url": KH_OLLAMA_URL.replace("v1/", ""),
        "model": config("LOCAL_MODEL", default="qwen2.5:7b"),
        # Context-window size passed through to Ollama (num_ctx option).
        "num_ctx": 8192,
    },
    "default": False,
}
193+
184194
KH_EMBEDDINGS["ollama"] = {
185195
"spec": {
186196
"__type__": "kotaemon.embeddings.OpenAIEmbeddings",
@@ -190,7 +200,6 @@
190200
},
191201
"default": False,
192202
}
193-
194203
KH_EMBEDDINGS["fast_embed"] = {
195204
"spec": {
196205
"__type__": "kotaemon.embeddings.FastEmbedEmbeddings",

libs/kotaemon/kotaemon/llms/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
LCChatOpenAI,
1313
LCCohereChat,
1414
LCGeminiChat,
15+
LCOllamaChat,
1516
LlamaCppChat,
1617
)
1718
from .completions import LLM, AzureOpenAI, LlamaCpp, OpenAI
@@ -33,6 +34,7 @@
3334
"LCAnthropicChat",
3435
"LCGeminiChat",
3536
"LCCohereChat",
37+
"LCOllamaChat",
3638
"LCAzureChatOpenAI",
3739
"LCChatOpenAI",
3840
"LlamaCppChat",

libs/kotaemon/kotaemon/llms/chats/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
LCChatOpenAI,
88
LCCohereChat,
99
LCGeminiChat,
10+
LCOllamaChat,
1011
)
1112
from .llamacpp import LlamaCppChat
1213
from .openai import AzureChatOpenAI, ChatOpenAI
@@ -20,6 +21,7 @@
2021
"LCAnthropicChat",
2122
"LCGeminiChat",
2223
"LCCohereChat",
24+
"LCOllamaChat",
2325
"LCChatOpenAI",
2426
"LCAzureChatOpenAI",
2527
"LCChatMixin",

libs/kotaemon/kotaemon/llms/chats/langchain_based.py

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -358,3 +358,40 @@ def _get_lc_class(self):
358358
raise ImportError("Please install langchain-cohere")
359359

360360
return ChatCohere
361+
362+
363+
class LCOllamaChat(LCChatMixin, ChatLLM):  # type: ignore
    """Chat model served by a local Ollama instance via LangChain's ChatOllama.

    Unlike the OpenAI-compatible Ollama entry, this wrapper uses the native
    Ollama API and exposes ``num_ctx`` so the context-window size can be
    configured per model.
    """

    base_url: str = Param(
        help="Base Ollama URL. (default: http://localhost:11434/api/)",  # noqa
        required=True,
    )
    model: str = Param(
        help="Model name to use (https://ollama.com/library)",
        required=True,
    )
    num_ctx: int = Param(
        help="The size of the context window (default: 8192)",
        required=True,
    )

    def __init__(
        self,
        model: str | None = None,
        base_url: str | None = None,
        num_ctx: int | None = None,
        **params,
    ):
        # Everything is forwarded to LCChatMixin, which builds the underlying
        # LangChain client from _get_lc_class().
        super().__init__(
            base_url=base_url,
            model=model,
            num_ctx=num_ctx,
            **params,
        )

    def _get_lc_class(self):
        # Imported lazily so kotaemon loads without the optional dependency;
        # the error surfaces only when this vendor is actually selected.
        try:
            from langchain_ollama import ChatOllama
        except ImportError:
            raise ImportError("Please install langchain-ollama")

        return ChatOllama

libs/kotaemon/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ dependencies = [
3535
"langchain-openai>=0.1.4,<0.2.0",
3636
"langchain-google-genai>=1.0.3,<2.0.0",
3737
"langchain-anthropic",
38+
"langchain-ollama",
3839
"langchain-cohere>=0.2.4,<0.3.0",
3940
"llama-hub>=0.0.79,<0.1.0",
4041
"llama-index>=0.10.40,<0.11.0",

libs/ktem/ktem/llms/manager.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@ def load_vendors(self):
6060
LCAnthropicChat,
6161
LCCohereChat,
6262
LCGeminiChat,
63+
LCOllamaChat,
6364
LlamaCppChat,
6465
)
6566

@@ -69,6 +70,7 @@ def load_vendors(self):
6970
LCAnthropicChat,
7071
LCGeminiChat,
7172
LCCohereChat,
73+
LCOllamaChat,
7274
LlamaCppChat,
7375
]
7476

libs/ktem/ktem/pages/setup.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -265,12 +265,12 @@ def update_model(
265265
)
266266
elif radio_model_value == "ollama":
267267
llms.update(
268-
name="ollama",
268+
name="ollama-long-context",
269269
spec={
270-
"__type__": "kotaemon.llms.ChatOpenAI",
271-
"base_url": KH_OLLAMA_URL,
270+
"__type__": "kotaemon.llms.LCOllamaChat",
271+
"base_url": KH_OLLAMA_URL.replace("v1/", ""),
272272
"model": config("LOCAL_MODEL", default="qwen2.5:7b"),
273-
"api_key": "ollama",
273+
"num_ctx": 8192,
274274
},
275275
default=True,
276276
)

0 commit comments

Comments
 (0)