From ebeba91140626ac27a41c5ec3952525855c2456d Mon Sep 17 00:00:00 2001
From: biswaroop1547
Date: Fri, 28 Jul 2023 11:38:18 +0000
Subject: [PATCH] update: build version

---
 cht-llama-v2/build.sh  | 2 +-
 cht-llama-v2/models.py | 6 ++++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/cht-llama-v2/build.sh b/cht-llama-v2/build.sh
index 523c5f5..935850a 100755
--- a/cht-llama-v2/build.sh
+++ b/cht-llama-v2/build.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 set -e
-export VERSION=1.0.0
+export VERSION=1.0.1
 
 source "$(dirname "${BASH_SOURCE[0]}")/../utils.sh"
 build_gpu ghcr.io/premai-io/chat-llama-2-7b-gpu llama-2-7b-hf ${@:1}
diff --git a/cht-llama-v2/models.py b/cht-llama-v2/models.py
index 2a09098..4fd6eca 100644
--- a/cht-llama-v2/models.py
+++ b/cht-llama-v2/models.py
@@ -32,7 +32,6 @@ def embeddings(cls, text) -> None:
         pass
 
     @abstractmethod
-    @staticmethod
     def stitch_prompt(messages: list) -> str:
         pass
 
@@ -68,7 +67,10 @@ def generate(
             do_sample=kwargs.get("do_sample", True),
             stop_sequence=stop[0] if stop else None,
             stopping_criteria=cls.stopping_criteria(stop, prompt, cls.tokenizer),
-            )[0]["generated_text"].rstrip(stop[0] if stop else "")
+            )[0]["generated_text"]
+            .rstrip(stop[0] if stop else "")
+            .rsplit(".", 1)[0]
+            .strip()
         ]
 
     @classmethod