Commit c7443d3

New script ollama.py: AI auto-responder using Ollama
Update ollama.py: translate comments to English
Update ollama_bot.py: translate comments to English
Update ollamabot.py: translate comments to English
1 parent 0fe640a commit c7443d3

python/ollama.py

Lines changed: 132 additions & 0 deletions
@@ -0,0 +1,132 @@
import weechat
import requests
import json

"""
Ollama Bot for WeeChat

This script automatically responds to mentions in channels and private messages using an Ollama LLM running locally.

Features:
- Responds to mentions in channels.
- Can respond to private messages if enabled.
- Allows manual queries using the /ollama command.
- Configurable via WeeChat /set commands.

Usage:
- To ask a question manually:
  /ollama What is Python?

- To enable or disable automatic responses in channels:
  /set plugins.var.python.ollama.highlight_response on   # Enable responses in channels
  /set plugins.var.python.ollama.highlight_response off  # Disable responses in channels

- To enable or disable automatic responses in private messages:
  /set plugins.var.python.ollama.pm_response on   # Enable PM responses
  /set plugins.var.python.ollama.pm_response off  # Disable PM responses

Dependencies:
- Requires an Ollama server running locally at http://localhost:11434/api/generate
"""

# Script metadata
SCRIPT_NAME = "ollama"
SCRIPT_AUTHOR = "teraflops"
SCRIPT_VERSION = "2.1"
SCRIPT_LICENSE = "MIT"
SCRIPT_DESC = "Automatically responds to mentions using Ollama and allows manual queries, including PMs"
OLLAMA_API_URL = "http://localhost:11434/api/generate"

# Register the script
weechat.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION, SCRIPT_LICENSE, SCRIPT_DESC, "", "")

# Script configuration in Weechat
def setup_config():
    if not weechat.config_is_set_plugin("highlight_response"):
        weechat.config_set_plugin("highlight_response", "on")  # Enable auto-responses by default
    if not weechat.config_is_set_plugin("pm_response"):
        weechat.config_set_plugin("pm_response", "off")  # Disable PM responses by default
setup_config()

def ask_ollama(message):
    """Send a query to Ollama and return the complete response."""
    try:
        data = {"model": "gemma", "prompt": message, "stream": False}
        headers = {"Content-Type": "application/json"}

        response = requests.post(
            OLLAMA_API_URL,
            json=data,
            headers=headers,
            verify=False  # Change to True if you use a valid certificate
        )

        if response.status_code != 200:
            return f"Error {response.status_code}: {response.text}"

        response_json = response.json()
        return response_json.get("response", "No response received from Ollama.")

    except requests.exceptions.RequestException as e:
        return f"Error connecting to Ollama: {str(e)}"

def command_ollama(data, buffer, args):
    """Command /ollama to manually ask Ollama a question."""
    if not args:
        weechat.prnt(buffer, "[Ollama] Usage: /ollama <question>")
        return weechat.WEECHAT_RC_OK

    response = ask_ollama(args)
    weechat.prnt(buffer, f"[Ollama] {response}")
    return weechat.WEECHAT_RC_OK

def message_callback(data, buffer, date, tags, displayed, highlight, prefix, message):
    """Detect mentions in channels or private messages and respond automatically with Ollama."""

    if weechat.config_get_plugin("highlight_response") == "off":
        return weechat.WEECHAT_RC_OK

    buffer_type = weechat.buffer_get_string(buffer, "localvar_type")
    is_private = buffer_type == "private"
    username = weechat.info_get("irc_nick", "")  # Get the current IRC username
    is_mentioned = f"@{username.lower()}" in message.lower()  # Ensure @username is explicitly mentioned

    # Ignore private messages if pm_response is off
    if is_private and weechat.config_get_plugin("pm_response") == "off":
        return weechat.WEECHAT_RC_OK

    # Only respond in private messages if it's a direct question
    if is_private and not message.strip().endswith("?"):
        return weechat.WEECHAT_RC_OK

    # Only respond in channels if explicitly mentioned or highlighted
    if not is_private and not is_mentioned and not int(highlight):
        return weechat.WEECHAT_RC_OK

    response = ask_ollama(message)

    if is_private:
        weechat.command(buffer, f"/msg {prefix} {response}")  # Reply to private message
    else:
        weechat.command(buffer, f"/say {response}")  # Reply in the channel

    return weechat.WEECHAT_RC_OK


def config_callback(data, option, value):
    """Callback for Weechat configuration changes."""
    weechat.prnt("", f"[Ollama] Configuration changed: {option} = {value}")
    return weechat.WEECHAT_RC_OK

# Register configuration with /set
weechat.config_set_desc_plugin("highlight_response", "Automatically respond to mentions in channels (on/off)")
weechat.config_set_desc_plugin("pm_response", "Automatically respond to private messages (on/off)")
weechat.hook_config("plugins.var.python.ollama.highlight_response", "config_callback", "")
weechat.hook_config("plugins.var.python.ollama.pm_response", "config_callback", "")

# Register commands and hooks
weechat.hook_command("ollama", "Ask something to Ollama", "<question>", "Example: /ollama What is Python?", "", "command_ollama", "")
weechat.hook_print("", "notify_highlight", "", 1, "message_callback", "")
weechat.hook_print("", "notify_message", "", 1, "message_callback", "")
weechat.hook_print("", "notify_private", "", 1, "message_callback", "")
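The script's only external dependency is an Ollama server answering at http://localhost:11434/api/generate with the configured model (here "gemma") already pulled. A minimal standalone sketch like the one below, run outside WeeChat, can confirm that endpoint responds before loading the script; the prompt text and the timeout value are illustrative assumptions, not part of this commit.

import requests

# Hypothetical standalone check of the endpoint ollama.py relies on.
payload = {"model": "gemma", "prompt": "Reply with one short sentence.", "stream": False}
try:
    r = requests.post("http://localhost:11434/api/generate", json=payload, timeout=30)  # timeout is an assumption
    r.raise_for_status()
    print(r.json().get("response", "No 'response' field in the reply."))
except requests.exceptions.RequestException as e:
    print(f"Could not reach Ollama: {e}")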

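To try the commit, the file has to be loaded by WeeChat's Python plugin. A typical sequence is sketched below; the exact path depends on the WeeChat version and data directory layout, so treat it as an assumption rather than something stated in the commit:

/python load ~/.local/share/weechat/python/ollama.py
/ollama What is Python?

Placing the file (or a symlink to it) in the python/autoload/ directory instead loads it automatically at startup. The requests module must be importable by the Python interpreter that WeeChat's Python plugin uses.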