♻️ encapsulate common process logic into abstract service
ianhomer committed Oct 1, 2024
1 parent fa55e91 commit 5ec61bb
Showing 8 changed files with 30 additions and 33 deletions.
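
In outline, the commit pulls the shared render-send-render flow out of ask.py and into the abstract BotService, so each concrete service only implements send_message. A minimal sketch of the resulting shape, using the class and method names from the diff below; the renderer stub and the EchoService subclass are illustrative stand-ins, not code from the repository:

    from abc import abstractmethod

    class StubRenderer:
        # stand-in for ask's AbstractRenderer; only the two calls process() uses
        def print_processing(self) -> None:
            print("...")

        def print_response(self, text: str) -> None:
            print(text)

    class BotService:
        def __init__(self, prompt: str, renderer: StubRenderer, line_target: int) -> None:
            self.renderer = renderer

        def process(self, prompt: str) -> str:
            # shared flow: show progress, delegate to the backend, render the reply
            self.renderer.print_processing()
            response_text = self.send_message(prompt)
            self.renderer.print_response(response_text)
            return response_text

        @abstractmethod
        def send_message(self, prompt: str) -> str:
            ...

    class EchoService(BotService):
        # hypothetical backend standing in for Gemini, AnthropicService or Ollama
        def send_message(self, prompt: str) -> str:
            return "echo: " + prompt

With that split, ask.py calls service.process(prompt) and no longer needs its own local process() helper.
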
2 changes: 1 addition & 1 deletion DEVELOP.md
@@ -10,7 +10,7 @@ Run tests

Run tests with pytest watcher

-ptw
+ptw .

Install git hook scripts

20 changes: 7 additions & 13 deletions ask/ask.py
@@ -41,7 +41,7 @@ def signal_handler(sig: int, frame: Optional[object]) -> None:
config = load_config(parse_args, config_file_name)

renderer = Renderer(pretty_markdown=config.markdown)
-input_handler = PromptPreProcessor(renderer=renderer)
+prompt_pre_processor = PromptPreProcessor(renderer=renderer)

file_input = False

@@ -62,12 +62,6 @@ def signal_handler(sig: int, frame: Optional[object]) -> None:
Service = Gemini
service = Service(renderer=renderer, prompt=prompt, line_target=config.line_target)

-def process(user_input) -> Optional[str]:
-    renderer.print_processing()
-    response_text = service.process(user_input)
-    renderer.print_response(response_text)
-    return response_text

response_text: Optional[str] = None

if config.transcribe.enabled:
@@ -77,28 +71,28 @@ def process(user_input) -> Optional[str]:
loop_sleep=config.transcribe.loop_sleep,
)
if config.inputs or file_input:
-response_text = process(
+response_text = service.send_message(
"answer or do what I just asked. If you have no answer, "
+ "just say the word :'OK'",
)

while service.available:
try:
with patch_stdout():
-user_input = inputter.get_input()
+prompt = inputter.get_input()
except InputInterrupt:
quit(renderer)
break
-if user_input and len(user_input) > 0:
-    input_handler_response = input_handler.handle(user_input, response_text)
+if prompt and len(prompt) > 0:
+    input_handler_response = prompt_pre_processor.handle(prompt, response_text)
if input_handler_response.quit:
quit(renderer)
break
if input_handler_response.process:
try:
-response_text = process(user_input)
+response_text = service.process(prompt)
except Exception as e:
print(f"\nCannot process prompt \n{user_input}\n", e)
print(f"\nCannot process prompt \n{prompt}\n", e)

return renderer

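Because the rendering above loses indentation, the reworked input loop in ask.py reads roughly as follows; this is a reconstruction from the changed lines, with the transcription and configuration handling around it omitted:

    while service.available:
        try:
            with patch_stdout():
                prompt = inputter.get_input()
        except InputInterrupt:
            quit(renderer)
            break
        if prompt and len(prompt) > 0:
            input_handler_response = prompt_pre_processor.handle(prompt, response_text)
            if input_handler_response.quit:
                quit(renderer)
                break
            if input_handler_response.process:
                try:
                    # BotService.process now renders, sends and renders the reply
                    response_text = service.process(prompt)
                except Exception as e:
                    print(f"\nCannot process prompt \n{prompt}\n", e)
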
6 changes: 3 additions & 3 deletions ask/services/anthropic.py
@@ -11,7 +11,7 @@

class AnthropicService(BotService):
def __init__(self, prompt, renderer: AbstractRenderer, line_target=0) -> None:
-self.renderer = renderer
+super().__init__(prompt, renderer, line_target)
if ANTHROPIC_API_KEY_NAME not in os.environ:
self.renderer.print(
f"""
@@ -34,13 +34,13 @@ def __init__(self, prompt, renderer: AbstractRenderer, line_target=0) -> None:
def available(self) -> bool:
return self._available

-def process(self, user_input: str) -> str:
+def send_message(self, prompt: str) -> str:
message = self.client.messages.create(
max_tokens=4906,
messages=[
{
"role": "user",
"content": user_input,
"content": prompt,
}
],
model="claude-3-5-sonnet-20240620",
12 changes: 9 additions & 3 deletions ask/services/bot_service.py
@@ -6,12 +6,18 @@
class BotService:
@abstractmethod
def __init__(
-self, prompt: str, line_target: int, renderer: AbstractRenderer
+self, prompt: str, renderer: AbstractRenderer, line_target: int
) -> None:
-pass
+self.renderer = renderer

+def process(self, prompt) -> str:
+    self.renderer.print_processing()
+    response_text = self.send_message(prompt)
+    self.renderer.print_response(response_text)
+    return response_text

@abstractmethod
-def process(self, user_input: str) -> str:
+def send_message(self, prompt: str) -> str:
pass

@property
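One note on the new base class: @abstractmethod only enforces anything when the class uses the ABC machinery, which BotService here does not, so a subclass that forgets to override send_message would silently inherit the stub (and process() would then try to render None) rather than fail at construction. If construction-time enforcement were wanted, a sketch would look like the following; this is an assumption, not part of this commit:

    from abc import ABC, abstractmethod

    class BotService(ABC):
        def __init__(self, prompt: str, renderer, line_target: int) -> None:
            self.renderer = renderer

        @abstractmethod
        def send_message(self, prompt: str) -> str:
            """Send the prompt to the backend and return the reply text."""
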
9 changes: 4 additions & 5 deletions ask/services/gemini.py
@@ -1,6 +1,5 @@
import os
from collections.abc import Iterable
-from typing import Optional

import google.generativeai as genai
from google.generativeai.types import content_types
@@ -12,8 +11,8 @@


class Gemini(BotService):
-def __init__(self, prompt, renderer: AbstractRenderer, line_target=0) -> None:
-    self.renderer = renderer
+def __init__(self, prompt, renderer: AbstractRenderer, line_target) -> None:
+    super().__init__(prompt, renderer, line_target)
if API_KEY_NAME not in os.environ:
self.renderer.print(
f"""
@@ -51,6 +50,6 @@ def __init__(self, prompt, renderer: AbstractRenderer, line_target=0) -> None:
def available(self) -> bool:
return self._available

-def process(self, user_input: Optional[str]) -> str:
-    response = self.chat.send_message(user_input)
+def send_message(self, prompt: str) -> str:
+    response = self.chat.send_message(prompt)
return response.text
4 changes: 2 additions & 2 deletions ask/services/ollama.py
@@ -8,12 +8,12 @@ class Ollama(BotService):
def available(self) -> bool:
return True

-def process(self, user_input: str) -> str:
+def send_message(self, prompt: str) -> str:
response = requests.post(
"http://localhost:11434/api/generate",
json={
"model": "qwen2.5:1.5b",
"prompt": user_input,
"prompt": prompt,
"stream": False,
},
)
4 changes: 1 addition & 3 deletions ask/tests/test_ask.py
@@ -8,9 +8,7 @@


class MockBotService(BotService):
-def process(
-    self, user_input, previous_response_text: Optional[str] = None
-) -> Optional[str]:
+def send_message(self, prompt, previous_response_text: Optional[str] = None) -> str:
return "OK"

@property
6 changes: 3 additions & 3 deletions ask/tests/test_ask_transcribe.py
@@ -27,10 +27,10 @@ def parse_args():


class MockBotService(BotService):
-def process(
-    self, user_input, previous_response_text: Optional[str] = None
+def send_message(
+    self, prompt, previous_response_text: Optional[str] = None
) -> Optional[str]:
return "OK:" + user_input
return "OK:" + prompt

@property
def available(self):
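
As a usage illustration, the shared process() flow can be exercised through the refactored mock, assuming the mock keeps the inherited BotService constructor; the MagicMock renderer and the test body below are assumptions for illustration, not tests from the commit:

    from unittest.mock import MagicMock

    def test_process_delegates_to_send_message():
        renderer = MagicMock()
        service = MockBotService(prompt="", renderer=renderer, line_target=0)

        # BotService.process should render progress, delegate, then render the reply
        assert service.process("ping") == "OK:ping"
        renderer.print_processing.assert_called_once()
        renderer.print_response.assert_called_once_with("OK:ping")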
