Skip to content

Commit

Permalink
Merge pull request #790 from thepetk/ft/add_bearer_auth_support
Browse files Browse the repository at this point in the history
Add Bearer (token) authentication support to recipes
  • Loading branch information
rhatdan authored Oct 2, 2024
2 parents 1e3999d + 9bd66e0 commit 33f0d6c
Show file tree
Hide file tree
Showing 6 changed files with 30 additions and 10 deletions.
8 changes: 6 additions & 2 deletions recipes/audio/audio_to_text/app/whisper_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,17 @@
# Streamlit UI: upload an audio file and POST it to the Whisper model
# service; renders the transcribed/translated text returned by the service.
st.markdown("Upload an audio file you wish to have translated")
endpoint = os.getenv("MODEL_ENDPOINT", default="http://0.0.0.0:8001")
endpoint = f"{endpoint}/inference"
endpoint_bearer = os.getenv("MODEL_ENDPOINT_BEARER")
# Keyword arguments forwarded to requests.post; carries the Authorization
# header only when a bearer token is configured in the environment.
request_kwargs = {}
if endpoint_bearer is not None:
    request_kwargs["headers"] = {"Authorization": f"Bearer {endpoint_bearer}"}
audio = st.file_uploader("", type=["wav", "mp3", "mp4", "flac"], accept_multiple_files=False)
# read audio file
if audio:
    audio_bytes = audio.read()
    st.audio(audio_bytes, format='audio/wav', start_time=0)
    # Send the raw bytes as a multipart upload alongside any auth header.
    request_kwargs["files"] = {'file': audio_bytes}
    response = requests.post(endpoint, **request_kwargs)
    response_json = response.json()
    st.subheader("Translated Text")
    st.text_area(label="", value=response_json['text'], height=300)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,11 @@

st.title("🕵️‍♀️ Object Detection")
# Model endpoint and optional bearer token are both environment-driven.
endpoint = os.getenv("MODEL_ENDPOINT", default="http://0.0.0.0:8000")
endpoint_bearer = os.getenv("MODEL_ENDPOINT_BEARER")
headers = {
    "accept": "application/json",
    "Content-Type": "application/json",
}
# Only attach the Authorization header when a non-empty token is present.
if endpoint_bearer:
    headers["Authorization"] = f"Bearer {endpoint_bearer}"
image = st.file_uploader("Upload Image")
window = st.empty()

Expand Down
10 changes: 7 additions & 3 deletions recipes/natural_language_processing/chatbot/app/chatbot_ui.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,10 @@
# Base URL of the model service's OpenAI-compatible v1 API.
model_service = os.getenv("MODEL_ENDPOINT",
                          "http://localhost:8001")
model_service = f"{model_service}/v1"
model_service_bearer = os.getenv("MODEL_ENDPOINT_BEARER")
# Forwarded to every requests call; gains an Authorization header
# only when a bearer token is configured.
request_kwargs = {}
if model_service_bearer is not None:
    request_kwargs["headers"] = {"Authorization": f"Bearer {model_service_bearer}"}

@st.cache_resource(show_spinner=False)
def checking_model_service():
Expand All @@ -20,8 +24,8 @@ def checking_model_service():
ready = False
while not ready:
try:
request_cpp = requests.get(f'{model_service}/models')
request_ollama = requests.get(f'{model_service[:-2]}api/tags')
request_cpp = requests.get(f'{model_service}/models', **request_kwargs)
request_ollama = requests.get(f'{model_service[:-2]}api/tags', **request_kwargs)
if request_cpp.status_code == 200:
server = "Llamacpp_Python"
ready = True
Expand All @@ -37,7 +41,7 @@ def checking_model_service():

def get_models():
try:
response = requests.get(f"{model_service[:-2]}api/tags")
response = requests.get(f"{model_service[:-2]}api/tags", **request_kwargs)
return [i["name"].split(":")[0] for i in
json.loads(response.content)["models"]]
except:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@

# OpenAI-compatible v1 endpoint of the model service.
model_service = os.getenv("MODEL_ENDPOINT", "http://localhost:8001")
model_service = f"{model_service}/v1"
model_service_bearer = os.getenv("MODEL_ENDPOINT_BEARER")
# Empty kwargs when no token is set; otherwise a bearer Authorization header.
request_kwargs = (
    {} if model_service_bearer is None
    else {"headers": {"Authorization": f"Bearer {model_service_bearer}"}}
)

@st.cache_resource(show_spinner=False)
def checking_model_service():
Expand All @@ -18,7 +22,7 @@ def checking_model_service():
ready = False
while not ready:
try:
request = requests.get(f'{model_service}/models')
request = requests.get(f'{model_service}/models', **request_kwargs)
if request.status_code == 200:
ready = True
except:
Expand Down
3 changes: 2 additions & 1 deletion recipes/natural_language_processing/rag/app/rag_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@

# Configuration for the RAG app, taken from the environment.
model_service = os.getenv("MODEL_ENDPOINT", "http://0.0.0.0:8001")
model_service = f"{model_service}/v1"
model_service_bearer = os.getenv("MODEL_ENDPOINT_BEARER")
model_name = os.getenv("MODEL_NAME", "")
# os.getenv returns a *str* when CHUNK_SIZE is set but the int default 150
# otherwise; coerce so downstream splitter arithmetic always sees an int.
chunk_size = int(os.getenv("CHUNK_SIZE", 150))
embedding_model = os.getenv("EMBEDDING_MODEL", "BAAI/bge-base-en-v1.5")
Expand Down Expand Up @@ -75,7 +76,7 @@ def read_file(file):


llm = ChatOpenAI(base_url=model_service,
api_key="EMPTY",
api_key="EMPTY" if model_service_bearer is None else model_service_bearer,
model=model_name,
streaming=True,
callbacks=[StreamlitCallbackHandler(st.container(),
Expand Down
10 changes: 7 additions & 3 deletions recipes/natural_language_processing/summarizer/app/summarizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
# v1 API base URL of the model service (OpenAI-compatible).
model_service = f'{os.getenv("MODEL_ENDPOINT", "http://localhost:8001")}/v1'
model_service_bearer = os.getenv("MODEL_ENDPOINT_BEARER")
# Shared kwargs for all HTTP calls; includes the bearer Authorization
# header only when a token is configured.
request_kwargs = {}
if model_service_bearer is not None:
    request_kwargs["headers"] = {"Authorization": f"Bearer {model_service_bearer}"}

@st.cache_resource(show_spinner=False)
def checking_model_service():
Expand All @@ -23,7 +27,7 @@ def checking_model_service():
ready = False
while not ready:
try:
request = requests.get(f'{model_service}/models')
request = requests.get(f'{model_service}/models', **request_kwargs)
if request.status_code == 200:
ready = True
except:
Expand Down Expand Up @@ -53,8 +57,8 @@ def chunk_text(text):
text_chunks = text_splitter.create_documents([text])
for chunk in text_chunks:
chunk = chunk.page_content
count = requests.post(f"{model_service[:-2]}extras/tokenize/count",
json={"input":chunk}).content
chunk_kwargs = request_kwargs | {"json": {"input": chunk}}
count = requests.post(f"{model_service[:-2]}/v1/extras/tokenize/count", **chunk_kwargs).content
count = json.loads(count)["count"]
if count >= 2048:
split_append_chunk(chunk, chunks)
Expand Down

0 comments on commit 33f0d6c

Please sign in to comment.