Skip to content
This repository has been archived by the owner on Dec 6, 2023. It is now read-only.

Commit

Permalink
update: the stitch_prompt function should be defined by each subclass
Browse files Browse the repository at this point in the history
  • Loading branch information
biswaroop1547 committed Jul 28, 2023
1 parent 7334409 commit 72f86e3
Showing 1 changed file with 25 additions and 20 deletions.
45 changes: 25 additions & 20 deletions cht-llama-v2/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,28 +31,10 @@ def generate(
def embeddings(cls, text) -> None:
pass

@abstractmethod
@staticmethod
def stitch_prompt(messages: list) -> str:
system_prompt_template = "<s>[INST] <<SYS>>\n{}\n<</SYS>>\n\n" # noqa
default_system_text = "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information." # noqa
user_prompt_template = "{} [/INST] " # noqa
assistant_prompt_template = "{} </s><s>[INST] " # noqa

system_prompt, chat_prompt = "", ""
for message in messages:
role = message["role"]
content = message["content"]
if role == "system":
system_prompt = system_prompt_template.format(content)
elif role == "user":
chat_prompt += user_prompt_template.format(content)
elif role == "assistant":
chat_prompt += assistant_prompt_template.format(content)

if not system_prompt:
system_prompt = system_prompt_template.format(default_system_text)

return system_prompt + chat_prompt
pass


class LlamaBasedModel(ChatModel):
Expand Down Expand Up @@ -110,3 +92,26 @@ def get_model(cls) -> Pipeline:
)
cls.stopping_criteria = LlamaStoppingCriteria
return cls.model

@staticmethod
def stitch_prompt(messages: list) -> str:
system_prompt_template = "<s>[INST] <<SYS>>\n{}\n<</SYS>>\n\n" # noqa
default_system_text = "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information." # noqa
user_prompt_template = "{} [/INST] " # noqa
assistant_prompt_template = "{} </s><s>[INST] " # noqa

system_prompt, chat_prompt = "", ""
for message in messages:
role = message["role"]
content = message["content"]
if role == "system":
system_prompt = system_prompt_template.format(content)
elif role == "user":
chat_prompt += user_prompt_template.format(content)
elif role == "assistant":
chat_prompt += assistant_prompt_template.format(content)

if not system_prompt:
system_prompt = system_prompt_template.format(default_system_text)

return system_prompt + chat_prompt

0 comments on commit 72f86e3

Please sign in to comment.