Merge pull request #18 from atomiechen/dev
fix prompt converter issue; support lazy import
atomiechen committed Apr 3, 2024
2 parents 7219e8f + 78fd750 commit e07ad47
Showing 6 changed files with 32 additions and 17 deletions.
4 changes: 4 additions & 0 deletions README.md
@@ -246,6 +246,10 @@ new_chat = converter.chat_replace_variables(
 )
 ```
 
+> [!IMPORTANT]
+>
+> In the prompt format, each role key (e.g. `$system$` / `$user$` / `$assistant$`) should be placed on a separate line.
+
 ### Substitute
 
 `PromptConverter` can also substitute placeholder variables like `%output_format%` stored in text files to make multiple prompts modular. A substitute map `substitute.txt` looks like this:
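Regarding the new IMPORTANT note: a minimal raw prompt in the role-key format (content hypothetical) puts each role key on its own line, exactly as the note requires:

```
$system$
You are a helpful assistant.

$user$
Please tell me a joke.
```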
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -4,13 +4,13 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "HandyLLM"
-version = "0.6.0"
+version = "0.6.1"
 authors = [
   { name="Atomie CHEN", email="atomic_cwh@163.com" },
 ]
 description = "A handy toolkit for using LLM."
 readme = "README.md"
-requires-python = ">=3.6"
+requires-python = ">=3.7"
 classifiers = [
     "Programming Language :: Python :: 3",
     # "License :: OSI Approved :: MIT License",
18 changes: 10 additions & 8 deletions src/handyllm/openai_client.py
@@ -1,11 +1,10 @@
+from __future__ import annotations
+from typing import Union, TYPE_CHECKING
 import os
 import json
 import time
 from enum import Enum, auto
 import asyncio
-from typing import Union
-import requests
-import httpx
 
 from .endpoint_manager import Endpoint, EndpointManager
 from .requestor import Requestor
@@ -65,6 +64,9 @@ def __init__(
         api_version=None,
         model_engine_map=None,
     ) -> None:
+        self._sync_client = None
+        self._async_client = None
+
         # convert string to enum
         if isinstance(mode, str):
             mode = mode.upper()
@@ -73,16 +75,16 @@
                 mode = ClientMode[mode]
         elif not isinstance(mode, ClientMode):
             raise TypeError("Invalid client mode specified")
 
         if mode == ClientMode.SYNC or mode == ClientMode.BOTH:
+            # lazy import
+            import requests
             self._sync_client = requests.Session()
-        else:
-            self._sync_client = None
 
         if mode == ClientMode.ASYNC or mode == ClientMode.BOTH:
+            # lazy import
+            import httpx
             self._async_client = httpx.AsyncClient()
-        else:
-            self._async_client = None
 
         self.api_base = api_base
         self.api_key = api_key
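A condensed sketch of the lazy-import pattern this commit introduces, assuming only the `requests`/`httpx` dependencies visible in the diff (the class name here is hypothetical; the real one is `OpenAIClient`):

```python
from enum import Enum, auto


class ClientMode(Enum):
    SYNC = auto()
    ASYNC = auto()
    BOTH = auto()


class LazyClient:
    def __init__(self, mode: ClientMode = ClientMode.SYNC) -> None:
        # pre-initialize both handles so attribute access is always safe
        self._sync_client = None
        self._async_client = None
        if mode == ClientMode.SYNC or mode == ClientMode.BOTH:
            import requests  # lazy import: loaded only for sync usage
            self._sync_client = requests.Session()
        if mode == ClientMode.ASYNC or mode == ClientMode.BOTH:
            import httpx  # lazy import: loaded only for async usage
            self._async_client = httpx.AsyncClient()
```

Initializing both attributes to `None` up front (the other half of this diff) makes the old `else` branches unnecessary and keeps both attributes defined in every mode.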
11 changes: 9 additions & 2 deletions src/handyllm/prompt_converter.py
@@ -2,9 +2,16 @@
 
 class PromptConverter:
 
+    role_keys = ['system', 'user', 'assistant']
+
     def __init__(self):
         self.substitute_map = {}
 
+    @property
+    def split_pattern(self):
+        # build a regex pattern to split the prompt by role keys
+        return r'^\$(' + '|'.join(self.role_keys) + r')\$$'
+
     def read_substitute_content(self, path: str):
         # read all content that needs substitution in prompts from a text file
         with open(path, 'r', encoding='utf-8') as fin:
@@ -24,11 +31,11 @@ def raw2chat(self, raw_prompt: str):
 
         # convert plain text to chat format
         chat = []
-        blocks = re.split(r'(\$\w+\$)', raw_prompt)
+        blocks = re.split(self.split_pattern, raw_prompt, flags=re.MULTILINE)
         for idx in range(1, len(blocks), 2):
             key = blocks[idx]
             value = blocks[idx+1]
-            chat.append({"role": key[1:-1], "content": value.strip()})
+            chat.append({"role": key, "content": value.strip()})
 
         return chat
 
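To see what the converter fix changes: the old pattern `r'(\$\w+\$)'` split on any `$word$` token anywhere in the text, so inline placeholders could be mistaken for role keys; the new `split_pattern` with `re.MULTILINE` matches only a known role key occupying a whole line. A small standalone demonstration (prompt content hypothetical):

```python
import re

role_keys = ['system', 'user', 'assistant']
split_pattern = r'^\$(' + '|'.join(role_keys) + r')\$$'

raw_prompt = (
    "$system$\n"
    "You are a helpful assistant.\n"
    "$user$\n"
    "Replace $name$ in the greeting.\n"  # inline $name$ must not split
)

# with re.MULTILINE, ^ and $ match at line boundaries, so only
# whole-line role keys act as separators
blocks = re.split(split_pattern, raw_prompt, flags=re.MULTILINE)
chat = [
    {"role": blocks[idx], "content": blocks[idx + 1].strip()}
    for idx in range(1, len(blocks), 2)
]
print(chat)
# [{'role': 'system', 'content': 'You are a helpful assistant.'},
#  {'role': 'user', 'content': 'Replace $name$ in the greeting.'}]
```

Note that the new pattern's capture group returns the bare role name, which is why the `key[1:-1]` slicing is dropped in `raw2chat` above.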
8 changes: 5 additions & 3 deletions src/handyllm/requestor.py
@@ -1,10 +1,12 @@
+from __future__ import annotations
+from typing import Union, TYPE_CHECKING
 import asyncio
 import logging
 import json
-from typing import Union
-import requests
-import httpx
 import time
+if TYPE_CHECKING:
+    import requests
+    import httpx
 
 from ._constants import _API_TYPES_AZURE
 
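The requestor change pairs `from __future__ import annotations` with a `TYPE_CHECKING` guard (consistent with the `requires-python` bump to `>=3.7`, since the `annotations` future flag first appeared in Python 3.7): annotations stay unevaluated strings at runtime, so `requests` and `httpx` are imported only by static type checkers. A generic sketch of the pattern (the helper function is hypothetical):

```python
from __future__ import annotations
from typing import Union, TYPE_CHECKING

if TYPE_CHECKING:
    # visible to type checkers (mypy, pyright) but never executed,
    # so neither library becomes a hard runtime dependency
    import requests
    import httpx


def status_of(response: Union[requests.Response, httpx.Response]) -> int:
    # the annotation above is a plain string at runtime thanks to the
    # __future__ import, so these names need not exist when this runs
    return response.status_code
```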
4 changes: 2 additions & 2 deletions tests/test_prompt.py
@@ -2,10 +2,10 @@
 from handyllm import PromptConverter
 converter = PromptConverter()
 
-converter.read_substitute_content('../assets/substitute.txt') # read substitute map
+converter.read_substitute_content('./assets/substitute.txt') # read substitute map
 
 # chat can be used as the message parameter for OpenAI API
-chat = converter.rawfile2chat('../assets/prompt.txt') # variables are substituted according to map
+chat = converter.rawfile2chat('./assets/prompt.txt') # variables are substituted according to map
 # print(json.dumps(chat, indent=2))
 print(converter.chat2raw(chat))
 print('-----')
Expand Down
