Commit
Merge pull request #1 from StanleyOneG/docker_implementation
ruecat authored Dec 11, 2023
2 parents dc10f77 + 36d40f1 commit b00a5e2
Showing 10 changed files with 107 additions and 55 deletions.
8 changes: 8 additions & 0 deletions .env.example
@@ -0,0 +1,8 @@
TOKEN=0123
ADMIN_IDS=000,111
USER_IDS=000,111
INITMODEL=llama-2

# COMMENT OUT ONE OF THE FOLLOWING LINES:
OLLAMA_BASE_URL=ollama-server # to run ollama in a docker container
OLLAMA_BASE_URL=host.docker.internal # to run ollama locally
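
For context, these values are consumed at startup by bot/func/controller.py (shown later in this diff). A minimal sketch of how they are parsed, mirroring that module:

```
# Sketch of how the .env values above are read (mirrors bot/func/controller.py).
import os
from dotenv import load_dotenv

load_dotenv()  # loads .env from the working directory

token = os.getenv("TOKEN")
ollama_base_url = os.getenv("OLLAMA_BASE_URL")  # "ollama-server" or "host.docker.internal"
allowed_ids = list(map(int, os.getenv("USER_IDS", "").split(",")))   # "000,111" -> [0, 111]
admin_ids = list(map(int, os.getenv("ADMIN_IDS", "").split(",")))

# The base URL decides which host the bot dials for the Ollama API:
tags_url = f"http://{ollama_base_url}:11434/api/tags"
```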
2 changes: 1 addition & 1 deletion .gitignore
@@ -124,7 +124,7 @@ celerybeat.pid
*.sage.py

# Environments
bot/.env
*.env
.venv
env/
venv/
22 changes: 20 additions & 2 deletions README.md
@@ -50,10 +50,28 @@ pip install -r requirements.txt
+ Launch bot

```
python3 ollama-run.py
python3 run.py
```
## Installation (Docker)
Soon..
+ Clone Repository
```
git clone https://github.com/ruecat/ollama-telegram
```

+ Fill in all values in .env.example

+ Rename .env.example -> .env

+ Run ONE of the following docker compose commands to start:
1. To run Ollama in a Docker container (optionally: uncomment the GPU section of docker-compose.yml to enable NVIDIA GPU support)
```
docker compose up --build -d
```

2. To run Ollama from a locally installed instance (mainly for macOS, since the Docker image doesn't support Apple GPU acceleration yet):
```
docker compose up --build -d ollama-telegram
```

## Environment Configuration
| Parameter | Description | Required? | Default Value | Example |
4 changes: 0 additions & 4 deletions bot/.env.example

This file was deleted.

7 changes: 0 additions & 7 deletions bot/dockerfile

This file was deleted.

53 changes: 29 additions & 24 deletions bot/func/controller.py
@@ -2,17 +2,20 @@
import json
from dotenv import load_dotenv
import os

load_dotenv()
system_info = os.uname()
token = os.getenv("TOKEN")
ollama_base_url = os.getenv("OLLAMA_BASE_URL")
allowed_ids = list(map(int, os.getenv('USER_IDS', '').split(',')))
admin_ids = list(map(int, os.getenv('ADMIN_IDS', '').split(',')))
# Will be implemented soon
#content = []
# content = []


async def fetcher():
async with aiohttp.ClientSession() as session:
url = 'http://localhost:11434/api/tags'
url = f'http://{ollama_base_url}:11434/api/tags'
async with session.get(url) as response:
if response.status == 200:
data = await response.json()
@@ -22,25 +25,27 @@ async def fetcher():


async def streamer(prompt: str, modelname: str):
    #try:
    async with aiohttp.ClientSession() as session:
        print("Api triggered")
        url = 'http://localhost:11434/api/generate'
        #content.append(prompt)
        #print(f'Content updated: {content}')
        data = {
            "model": modelname,
            "prompt": prompt,
            "stream": True,
            #"context": content
        }
        print(f"DEBUG\n{modelname}\n{prompt}")
        # Stream from API
        async with session.post(url, json=data) as response:
            async for chunk in response.content:
                if chunk:
                    decoded_chunk = chunk.decode()
                    if decoded_chunk.strip():
                        yield json.loads(decoded_chunk)
# except:
# print("---------\n[Ollama-API ERROR]\nNON_DOCKER: Make sure your Ollama API server is running ('ollama serve' command)\nDOCKER: Check Ollama container and try again\n---------")
# try:
async with aiohttp.ClientSession() as session:
print("Api triggered")
url = f'http://{ollama_base_url}:11434/api/generate'
# content.append(prompt)
# print(f'Content updated: {content}')
data = {
"model": modelname,
"prompt": prompt,
"stream": True,
# "context": content
}
print(f"DEBUG\n{modelname}\n{prompt}")
# Stream from API
async with session.post(url, json=data) as response:
async for chunk in response.content:
if chunk:
decoded_chunk = chunk.decode()
if decoded_chunk.strip():
yield json.loads(decoded_chunk)


# except:
# print("---------\n[Ollama-API ERROR]\nNON_DOCKER: Make sure your Ollama API server is running ('ollama serve' command)\nDOCKER: Check Ollama container and try again\n---------")
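
The reworked streamer() is an async generator that yields one parsed JSON object per line of Ollama's /api/generate stream. A hypothetical consumer might look like the sketch below, assuming each chunk carries the usual "response" fragment and "done" flag from the Ollama API, and that the function is importable from bot/func/controller.py:

```
# Hypothetical consumer of streamer(); assembles streamed chunks into one reply.
import asyncio

from func.controller import streamer  # assumed import path, relative to bot/

async def collect_reply(prompt: str, modelname: str) -> str:
    parts = []
    async for chunk in streamer(prompt, modelname):
        parts.append(chunk.get("response", ""))  # each chunk holds a text fragment
        if chunk.get("done"):                    # final chunk signals completion
            break
    return "".join(parts)

if __name__ == "__main__":
    print(asyncio.run(collect_reply("Why is the sky blue?", "llama-2")))
```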
2 changes: 0 additions & 2 deletions bot/requirements.txt

This file was deleted.

37 changes: 22 additions & 15 deletions docker-compose.yml
@@ -2,25 +2,32 @@
version: '3.8'
services:
ollama-telegram:
build: bot
build:
context: .
dockerfile: dockerfile
container_name: ollama-telegram
restart: no
volumes:
- ./ollama-telegram-dockerized:/app
hostname: azura-dockerized
environment:
# Necessary, your Bot token, you can get it from @BotFather
- TOKEN=''
# Get your Telegram ID
- ADMIN_IDS=''
- USER_IDS=''
# Set your default model [You can change it after, using /start -> Change Model]
- INITMODEL=''
depends_on:
- ollama-server
env_file:
- ./.env

ollama-server:
image: ollama:/root/.ollama:latest
container_name: azura-db
image: ollama/ollama:latest
container_name: ollama-server
volumes:
- ./ollama:/root/.ollama

# Uncomment to enable NVIDIA GPU
# Otherwise runs on CPU only:

# deploy:
# resources:
# reservations:
# devices:
# - driver: nvidia
# count: all
# capabilities: [gpu]

restart: always
ports:
- '11434:11434'
25 changes: 25 additions & 0 deletions dockerfile
@@ -0,0 +1,25 @@
FROM python:3.12

ARG APPHOMEDIR=code
ARG USERNAME=user
ARG USER_UID=1001
ARG USER_GID=1001
ARG PYTHONPATH_=${APPHOMEDIR}

WORKDIR /${APPHOMEDIR}

COPY requirements.txt requirements.txt

RUN \
apt update -y && apt upgrade -y \
&& python -m pip install --upgrade pip \
&& pip install --no-cache-dir -r requirements.txt \
&& groupadd --gid "$USER_GID" "$USERNAME" \
&& useradd --uid "$USER_UID" --gid "$USER_GID" -m "$USERNAME" -d /"$APPHOMEDIR" \
&& chown "$USERNAME:$USERNAME" -R /"$APPHOMEDIR"

COPY ./bot /${APPHOMEDIR}

USER ${USERNAME}

CMD [ "python3", "-u", "run.py"]
2 changes: 2 additions & 0 deletions requirements.txt
@@ -0,0 +1,2 @@
python-dotenv==1.0.0
aiogram==3.2.0
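
These two pins are the bot's entire runtime dependency list. For illustration, a minimal aiogram 3.x polling skeleton wiring them together — a sketch under assumed names, not the repository's actual run.py:

```
# Minimal aiogram 3.x polling sketch (not the repo's run.py; TOKEN must be a real bot token).
import asyncio
import os

from aiogram import Bot, Dispatcher
from aiogram.types import Message
from dotenv import load_dotenv

load_dotenv()
dp = Dispatcher()

@dp.message()
async def echo(message: Message) -> None:
    # Echo any incoming text back to the chat.
    await message.answer(message.text or "(non-text message)")

async def main() -> None:
    bot = Bot(token=os.getenv("TOKEN"))
    await dp.start_polling(bot)

if __name__ == "__main__":
    asyncio.run(main())
```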
