Commit be50962

feat: ai bot example
1 parent 7f35675 commit be50962

File tree

3 files changed: +132 −64 lines changed

README.md

+3 −3

@@ -10,15 +10,15 @@ This repository provides an experimental boilerplate for building bots compatibl
 ## 🛠️ Getting Started with Examples
 This repository includes an `/examples` folder containing runnable example bots that demonstrate basic functionality.

-To run an example, execute the corresponding module using the `-m` flag in Python. For example, to run the `ollama` example:
+To run an example, execute the corresponding module using the `-m` flag in Python. For example, to run the `ai` example:

 ```bash
-python -m examples.ollama
+python -m examples.ai
 ```

 > **Note**: Ensure that your current working directory (PWD) is the root of this repository when running examples, as this is required for proper execution.

-Replace `ollama` in the command above with the specific example you’d like to execute from the `/examples` folder.
+Replace `ai` in the command above with the specific example you’d like to execute from the `/examples` folder.

 ## 🚧 Disclaimer
 This project is an early-stage proof of concept. **APIs will break** and existing functionality may change as Open WebUI evolves to include native bot support. This repository is not production-ready and primarily serves experimental and exploratory purposes.
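The new `ai` example imports `WEBUI_URL` and `TOKEN` from a local `env` module that is not touched by this commit. A minimal sketch of what that module might contain is shown below; the URL and token values are placeholders to be replaced with your own Open WebUI instance and API key.

```python
# env.py — hypothetical sketch, not part of this commit.
# Replace both values with your own Open WebUI details.
WEBUI_URL = "http://localhost:8080"  # base URL of your Open WebUI server (placeholder)
TOKEN = "your-api-key"               # API token for the bot's user account (placeholder)
```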

examples/ai.py

+129 (new file)

```python
import asyncio
import socketio
from env import WEBUI_URL, TOKEN
from utils import send_message, send_typing


MODEL_ID = "llama3.2:latest"

# Create an asynchronous Socket.IO client instance
sio = socketio.AsyncClient(logger=False, engineio_logger=False)


# Event handlers
@sio.event
async def connect():
    print("Connected!")


@sio.event
async def disconnect():
    print("Disconnected from the server!")


import aiohttp
import asyncio


async def openai_chat_completion(messages):
    payload = {
        "model": MODEL_ID,
        "messages": messages,
        "stream": False,
    }

    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{WEBUI_URL}/api/chat/completions",
            headers={"Authorization": f"Bearer {TOKEN}"},
            json=payload,
        ) as response:
            if response.status == 200:
                return await response.json()
            else:
                # Optional: Handle errors or return raw response text
                return {"error": await response.text(), "status": response.status}


# Define a function to handle channel events
def events(user_id):
    @sio.on("channel-events")
    async def channel_events(data):
        if data["user"]["id"] == user_id:
            # Ignore events from the bot itself
            return

        if data["data"]["type"] == "message":
            print(f'{data["user"]["name"]}: {data["data"]["data"]["content"]}')
            await send_typing(sio, data["channel_id"])

            async def send_typing_until_complete(channel_id, coro):
                """
                Sends typing indicators every second until the provided coroutine completes.
                """
                task = asyncio.create_task(coro)  # Begin the provided coroutine task
                try:
                    # While the task is running, send typing indicators every second
                    while not task.done():
                        await send_typing(sio, channel_id)
                        await asyncio.sleep(1)
                    # Await the actual result of the coroutine
                    return await task
                except Exception as e:
                    task.cancel()
                    raise e  # Propagate any exceptions that occurred in the coroutine

            # OpenAI API coroutine
            openai_task = openai_chat_completion(
                [
                    {"role": "system", "content": "You are a friendly AI."},
                    {"role": "user", "content": data["data"]["data"]["content"]},
                ]
            )

            try:
                # Run OpenAI coroutine while showing typing indicators
                response = await send_typing_until_complete(
                    data["channel_id"], openai_task
                )

                if response.get("choices"):
                    completion = response["choices"][0]["message"]["content"]
                    await send_message(data["channel_id"], completion)
                else:
                    await send_message(
                        data["channel_id"], "I'm sorry, I don't understand."
                    )
            except Exception:
                await send_message(
                    data["channel_id"],
                    "Something went wrong while processing your request.",
                )


# Define an async function for the main workflow
async def main():
    try:
        print(f"Connecting to {WEBUI_URL}...")
        await sio.connect(
            WEBUI_URL, socketio_path="/ws/socket.io", transports=["websocket"]
        )
        print("Connection established!")
    except Exception as e:
        print(f"Failed to connect: {e}")
        return

    # Callback function for user-join
    async def join_callback(data):
        events(data["id"])  # Attach the event handlers dynamically

    # Authenticate with the server
    await sio.emit("user-join", {"auth": {"token": TOKEN}}, callback=join_callback)

    # Wait indefinitely to keep the connection open
    await sio.wait()


# Actually run the async `main` function using `asyncio`
if __name__ == "__main__":
    asyncio.run(main())
```
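The script also depends on `send_message` and `send_typing` from a `utils` module that is not included in this diff. Based only on how they are called above, a rough, hypothetical sketch of compatible helpers could look like the following; the REST endpoint path and the typing-event name and payload are assumptions, not confirmed by this commit.

```python
# utils.py — hypothetical sketch of the helpers imported by examples/ai.py.
# The endpoint path and typing-event payload are assumptions; check the
# repository's real utils module before relying on them.
import aiohttp

from env import WEBUI_URL, TOKEN


async def send_message(channel_id, message):
    # Post a message into the channel over Open WebUI's REST API
    # (the endpoint path here is an assumption).
    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{WEBUI_URL}/api/v1/channels/{channel_id}/messages/post",
            headers={"Authorization": f"Bearer {TOKEN}"},
            json={"content": message},
        ) as response:
            return await response.json()


async def send_typing(sio, channel_id):
    # Emit a typing indicator over the existing Socket.IO connection
    # (event name and payload shape are assumptions).
    await sio.emit(
        "channel-events",
        {
            "channel_id": channel_id,
            "data": {"type": "typing", "data": {"typing": True}},
        },
    )
```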

examples/ollama.py

−61 (this file was deleted)
