
Commit 084ed8e

Merge branch 'master' into features/worker
Gabriele Panico committed May 27, 2024
2 parents f651faa + cd5fa6c commit 084ed8e
Showing 8 changed files with 51 additions and 44 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,11 @@
*Andrea Sponziello*
### **Copyright**: *Tiledesk SRL*

## [2024-05-20]

### 0.1.17
- added: PINECONE_TYPE = "serverless|pod"

## [2024-05-18]

### 0.1.16
1 change: 1 addition & 0 deletions README.md
@@ -13,6 +13,7 @@ pip install -e .

```commandline
export REDIS_URL="redis://localhost:6379/0"
export PINECONE_TYPE="serverless|pod"
export PINECONE_API_KEY="pinecone api key"
export PINECONE_TEXT_KEY="pinecone field for text - default text in pod content"
export PINECONE_INDEX="pinecone index name"
2 changes: 1 addition & 1 deletion entrypoint.sh
@@ -53,5 +53,5 @@ fi

echo "start gunicorn with $ENVIRON --workers $WORKERS --timeout $TIMEOUT --max-requests $MAXREQUESTS --max-requests-jitter $MAXRJITTER --graceful-timeout $GRACEFULTIMEOUT"

gunicorn --bind 0.0.0.0:8000 --workers $WORKERS --timeout $TIMEOUT --max-requests $MAXREQUESTS --max-requests-jitter $MAXRJITTER --graceful-timeout $GRACEFULTIMEOUT --env ENVIRON="$environment" --log-config-json log_conf.json --worker-class uvicorn.workers.UvicornWorker tilellm.__main__:app
gunicorn --bind 0.0.0.0:8000 --workers $WORKERS --timeout $TIMEOUT --max-requests $MAXREQUESTS --max-requests-jitter $MAXRJITTER --graceful-timeout $GRACEFULTIMEOUT --env PINECONE_TYPE="$environment" --log-config-json log_conf.json --worker-class uvicorn.workers.UvicornWorker tilellm.__main__:app
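
The only functional change here is which variable gunicorn exports into its workers: the `--env KEY=VALUE` flag sets the pair in each worker's environment, so the application now receives `PINECONE_TYPE` instead of `ENVIRON`. A minimal sketch of what a worker sees, mirroring the read in `tilellm/__main__.py` below (the actual value depends on what `entrypoint.sh` passes as `$environment`):

```python
# minimal sketch: what a worker started with --env PINECONE_TYPE="$environment" observes
import os

pinecone_type = os.environ.get("PINECONE_TYPE", "serverless")  # "serverless" or "pod"
print(pinecone_type)
```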

4 changes: 3 additions & 1 deletion log_conf.json
@@ -25,11 +25,13 @@
"stream": "ext://sys.stderr"
}
},

"root": {
"level": "INFO",
"handlers": [
"stdout","stderr"
]
],
"propagate": false
},
"formatters": {
"simple": {
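
For context, a JSON file of this shape is handed to Python's `logging.config.dictConfig` when gunicorn is launched with `--log-config-json log_conf.json` (see `entrypoint.sh` above); the new `"propagate": false` entry is simply part of that dictionary. A small sketch of the equivalent manual load, assuming `log_conf.json` sits in the working directory:

```python
# sketch: applying a dictConfig-style JSON file such as log_conf.json by hand
import json
import logging
import logging.config

with open("log_conf.json") as f:
    logging.config.dictConfig(json.load(f))

logging.getLogger(__name__).info("logging now follows log_conf.json")
```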
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tilellm"
version = "0.1.16"
version = "0.1.17"
description = "tiledesk for RAG"
authors = ["Gianluca Lorenzo <gianluca.lorenzo@gmail.com>"]
repository = "https://github.com/Tiledesk/tiledesk-llm"
72 changes: 36 additions & 36 deletions tilellm/__main__.py
@@ -46,7 +46,7 @@
# args = parser.parse_args()

ENVIRONMENTS = {
'dev': '.environ',
'serverless': '.environ',
'prod': '.environ.prod',
}

@@ -55,7 +55,7 @@
logger = logging.getLogger(__name__)


environment = os.environ.get("ENVIRON", "dev")
environment = os.environ.get("PINECONE_TYPE", "serverless")
# environment = "prod"
load_dotenv(ENVIRONMENTS.get(environment) or '.environ')
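
The environment-file selection is now driven by `PINECONE_TYPE` (default `serverless`) rather than `ENVIRON` (default `dev`). Note that the README documents `PINECONE_TYPE="serverless|pod"`, while `ENVIRONMENTS` only maps `serverless` and `prod`, so a value of `pod` falls through to the `.environ` default. A quick illustration of that lookup (a sketch, not repository code):

```python
# "pod" is not a key in ENVIRONMENTS, so .get() returns None and `or` falls back to ".environ"
ENVIRONMENTS = {
    "serverless": ".environ",
    "prod": ".environ.prod",
}

for value in ("serverless", "prod", "pod"):
    print(value, "->", ENVIRONMENTS.get(value) or ".environ")
# serverless -> .environ
# prod -> .environ.prod
# pod -> .environ
```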

@@ -270,18 +270,18 @@ async def create_scrape_item_single(item: ItemSingle, redis_client: aioredis.cli

logger.info(f"webhook: {webhook}, token: {token}")

if webhook:
res = PineconeIndexingResult(id=item.id, status=200)
try:
async with aiohttp.ClientSession() as session:
res = await session.post(webhook,
json=res.model_dump(exclude_none=True),
headers={"Content-Type": "application/json",
"X-Auth-Token": token})
logger.info(f"200 {await res.json()}")
except Exception as ewh:
logger.error(ewh)
pass
# if webhook:
# res = PineconeIndexingResult(id=item.id, status=200)
# try:
# async with aiohttp.ClientSession() as session:
# res = await session.post(webhook,
# json=res.model_dump(exclude_none=True),
# headers={"Content-Type": "application/json",
# "X-Auth-Token": token})
# logger.info(f"200 {await res.json()}")
# except Exception as ewh:
# logger.error(ewh)
# pass

pc_result = await add_pc_item(item)
# import datetime
@@ -299,20 +299,20 @@ async def create_scrape_item_single(item: ItemSingle, redis_client: aioredis.cli
scrape_status_response.model_dump_json(),
ex=expiration_in_seconds)

logger.debug(f"End {add_to_queue}")
if webhook:
try:
async with aiohttp.ClientSession() as session:
res = await session.post(webhook,
json=pc_result.model_dump(exclude_none=True),
headers={"Content-Type": "application/json",
"X-Auth-Token": token})
logger.info(f"300 {await res.json()}")
except Exception as ewh:
logger.error(ewh)
pass

return JSONResponse(content={"message": f"Item {item.id} created successfully"})
# logger.debug(f"End {add_to_queue}")
# if webhook:
# try:
# async with aiohttp.ClientSession() as session:
# res = await session.post(webhook,
# json=pc_result.model_dump(exclude_none=True),
# headers={"Content-Type": "application/json",
# "X-Auth-Token": token})
# logger.info(f"300 {await res.json()}")
# except Exception as ewh:
# logger.error(ewh)
# pass

return JSONResponse(content=pc_result.model_dump(exclude_none=True)) # {"message": f"Item {item.id} created successfully"})

except Exception as e:
scrape_status_response = ScrapeStatusResponse(status_message="Error",
@@ -324,14 +324,14 @@ async def create_scrape_item_single(item: ItemSingle, redis_client: aioredis.cli

logger.error(f"Error {add_to_queue}")
import traceback
if webhook:
res = PineconeIndexingResult(id=item.id, status=400, error=repr(e))
async with aiohttp.ClientSession() as session:
response = await session.post(webhook, json=res.model_dump(exclude_none=True),
headers={"Content-Type": "application/json", "X-Auth-Token": token})
logger.error(response)
logger.error(f"{await response.json()}")
logger.error(f"Error {e}, webhook: {webhook}")
# if webhook:
# res = PineconeIndexingResult(id=item.id, status=400, error=repr(e))
# async with aiohttp.ClientSession() as session:
# response = await session.post(webhook, json=res.model_dump(exclude_none=True),
# headers={"Content-Type": "application/json", "X-Auth-Token": token})
# logger.error(response)
# logger.error(f"{await response.json()}")
# logger.error(f"Error {e}, webhook: {webhook}")
traceback.print_exc()
logger.error(e)
raise HTTPException(status_code=400, detail=repr(e))
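
With the webhook callbacks commented out, the endpoint now returns the indexing result itself instead of a generic confirmation message, and errors still surface as an `HTTPException` with status 400. Based only on the `PineconeIndexingResult` fields visible in this diff (`id`, `status`, `error`), the success payload looks roughly like the sketch below, which uses a stand-in model rather than the real one from the package:

```python
# stand-in model for illustration only; the real PineconeIndexingResult lives in the tilellm package
from typing import Optional
from pydantic import BaseModel


class PineconeIndexingResult(BaseModel):
    id: Optional[str] = None
    status: Optional[int] = None
    error: Optional[str] = None


result = PineconeIndexingResult(id="item-1", status=200)
print(result.model_dump(exclude_none=True))  # {'id': 'item-1', 'status': 200}
```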
1 change: 1 addition & 0 deletions tilellm/controller/openai_controller.py
@@ -320,6 +320,7 @@ async def delete_id_from_namespace(metadata_id:str, namespace:str):
try:
return await delete_pc_ids_namespace(metadata_id=metadata_id, namespace=namespace)
except Exception as ex:
logger.error(ex)
raise ex


8 changes: 3 additions & 5 deletions tilellm/store/pinecone_repository.py
@@ -33,7 +33,7 @@ async def add_pc_item(item):
try:
await delete_pc_ids_namespace(metadata_id=metadata_id, namespace=namespace)
except Exception as ex:
logger.error(ex)
logger.warning(ex)
pass

emb_dimension = get_embeddings_dimension(embedding)
@@ -196,9 +196,7 @@ async def delete_pc_ids_namespace(metadata_id: str, namespace: str):
offset += len(ids)

except Exception as ex:

logger.error(ex)

# logger.error(ex)
raise ex


@@ -456,7 +454,7 @@ async def create_pc_index(embeddings, emb_dimension):
else:
logger.debug(f'Create index {const.PINECONE_INDEX} and embeddings ...')

if os.environ.get("ENVIRON") == "dev":
if os.environ.get("PINECONE_TYPE") == "serverless":
pc.create_index(const.PINECONE_INDEX,
dimension=emb_dimension,
metric='cosine',
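
The dev/serverless rename also reaches index creation: when `PINECONE_TYPE` is `serverless` the index is created as shown, and the collapsed `else` branch presumably builds a pod-based index. A minimal sketch of both branches, assuming the pinecone-client v3 API; the index name, dimension, cloud/region and pod settings are illustrative, not taken from the repository:

```python
# minimal sketch of serverless vs pod index creation (pinecone-client v3 style)
import os

from pinecone import Pinecone, PodSpec, ServerlessSpec

pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])

if os.environ.get("PINECONE_TYPE") == "serverless":
    pc.create_index("example-index", dimension=1536, metric="cosine",
                    spec=ServerlessSpec(cloud="aws", region="us-west-2"))
else:
    pc.create_index("example-index", dimension=1536, metric="cosine",
                    spec=PodSpec(environment="us-east1-gcp", pod_type="p1.x1"))
```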
