diff --git a/.github/workflows/python_tag_and_deploy.yml b/.github/workflows/python_tag_and_deploy.yml new file mode 100644 index 00000000..fa46dd33 --- /dev/null +++ b/.github/workflows/python_tag_and_deploy.yml @@ -0,0 +1,89 @@ +name: Ahnlich Python Client Tag and Deploy + +on: + pull_request: + branches: ["main"] + types: + - closed + +jobs: + check_version_changes_and_tag: + if: github.event.pull_request.merged == true + runs-on: ubuntu-latest + outputs: + client_version: ${{ steps.get_version.outputs.CLIENT_VERSION }} + tag_version: ${{ steps.get_version.outputs.TAG_VERSION }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Check for changes in .bumpversion.cfg and CLIENT_VERSION + id: check_version_changes + working-directory: ./sdk/ahnlich-client-py + run: | + if git diff HEAD^ HEAD --exit-code .bumpversion.cfg CLIENT_VERSION MSG_TAG > /dev/null; then + echo "No changes in .bumpversion.cfg or CLIENT_VERSION." + echo "CLIENT_VERSION_CHANGED=true" >> $GITHUB_OUTPUT + else + echo "Changes detected in .bumpversion.cfg or CLIENT_VERSION." + echo "CLIENT_VERSION_CHANGED=true" >> $GITHUB_OUTPUT + fi + + - name: Get client version + id: get_version + working-directory: ./sdk/ahnlich-client-py + if: steps.check_version_changes.outputs.CLIENT_VERSION_CHANGED == 'true' + run: | + CLIENT_VERSION=$(cat CLIENT_VERSION) + echo "CLIENT_VERSION=${CLIENT_VERSION}" >> $GITHUB_OUTPUT + echo "TAG_VERSION=client/py/${CLIENT_VERSION}" >> $GITHUB_OUTPUT + + + - name: Set tag message + id: set_tag_message + working-directory: ./sdk/ahnlich-client-py + if: steps.check_version_changes.outputs.CLIENT_VERSION_CHANGED == 'true' + run: | + echo "TAG_MESSAGE=$(cat MSG_TAG)" >> $GITHUB_OUTPUT + + - name: Create new tag + if: steps.check_version_changes.outputs.CLIENT_VERSION_CHANGED == 'true' + working-directory: ./sdk/ahnlich-client-py + run: | + git config --global user.name '${{github.actor}}' + git config --global user.email '${{github.actor}}@users.noreply.github.com' + git tag -a "${{ steps.get_version.outputs.TAG_VERSION }}" -m "${{steps.set_tag_message.outputs.TAG_MESSAGE}}" + git push origin "${{ steps.get_version.outputs.TAG_VERSION }}" + + + deploy_tag: + runs-on: ubuntu-latest + needs: check_version_changes_and_tag + if: ${{ needs.check_version_changes_and_tag.result == 'success' }} + steps: + + - name: Checkout tag + uses: actions/checkout@v4 + with: + ref: ${{needs.check_version_changes_and_tag.outputs.tag_version}} + - uses: actions/setup-python@v5 + with: + python-version: 3.11 + - name: Run image + uses: abatilo/actions-poetry@v3 + with: + poetry-version: 1.7.0 + + - name: Deploy using tag + working-directory: ./sdk/ahnlich-client-py + run: | + echo "Deploying tag ${{needs.check_version_changes_and_tag.outputs.tag_version}}" + # Add your deployment commands here + poetry config repositories.test-pypi https://test.pypi.org/legacy/ + echo "${{needs.check_version_changes_and_tag.outputs.tag_version}}" > CLIENT_VERSION + poetry config pypi-token.test-pypi ${{ secrets.TEST_PYPI_TOKEN }} + poetry build && poetry publish -r test-pypi + + diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 45ea265a..14284c11 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,14 +1,16 @@ -name: Rust Test +name: Ahnlich TestSuite on: push: branches: ["main"] paths: - ahnlich/** + - sdk/** pull_request: branches: ["main"] paths: - ahnlich/** + - sdk/** env: CARGO_TERM_COLOR: always @@ -43,3 +45,62 @@ jobs: run: | make test + + 
run-python-tests: + if: ${{ always() && needs.build.result == 'success' }} + runs-on: ubuntu-latest + needs: ["build"] + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: 3.11 + - name: Run image + uses: abatilo/actions-poetry@v3 + with: + poetry-version: 1.7.0 + - name: View poetry --help + run: poetry --help + - name: Cache using poetry lock + uses: actions/cache@v4 + with: + path: ./.venv + key: venv-${{ hashFiles('poetry.lock') }} + + - name: Installing poetry dependencies + working-directory: ./sdk/ahnlich-client-py + run: poetry install + + - name: Set up Rust cache + uses: Swatinem/rust-cache@v2 + with: + workspaces: ahnlich + + - name: Run Ahnlich DB Server + working-directory: ./ahnlich + run: | + cargo run --bin ahnlich-db run --port 1349 & + echo $! > server_pid.txt + + + - name: Wait for Ahnlich DB to be ready + working-directory: ./ahnlich + run: | + chmod +x wait-for-it.sh + ./wait-for-it.sh localhost 1349 -t 30 + + - name: Run Python Client Tests + working-directory: ./sdk/ahnlich-client-py + env: + AHNLICH_DB_HOST: "127.0.0.1" + AHNLICH_DB_PORT: 1349 + run: | + poetry run pytest + + - name: Stop Ahnlich DB Server + working-directory: ./ahnlich + run: | + kill $(cat server_pid.txt) + + diff --git a/README.md b/README.md index 48c2df07..32d82ef7 100644 --- a/README.md +++ b/README.md @@ -10,3 +10,29 @@ In-memory vector key value store ## Architecture ![Architecture Diagram](assets/ahnlich.jpeg) + + + + +## Development + +### Using Spec documents to interact with Ahnlich DB + +To generate the spec documents, run +```bash +cargo run --bin typegen generate +``` +It is worth noting that any changes to the types crate, requires you to run the above command. This helps keep our spec document and types crate in sync. + +To Convert spec documents to a programming language, run: + +```bash + cargo run --bin typegen create-client +``` +Available languages are: +- python +- golang +- typescript. + +In order to communicate effectively with the ahnlich db, you would have to extend the bincode serialization protocol automatically provided by `serde_generate`. +Your message(in bytes) should be serialized and deserialized in the following format => `AHNLICH_HEADERS` + `VERSION` + `QUERY/SERVER_RESPONSE`. Bytes are `Little Endian`. \ No newline at end of file diff --git a/ahnlich/Makefile b/ahnlich/Makefile index 2faa5d12..4669ddbf 100644 --- a/ahnlich/Makefile +++ b/ahnlich/Makefile @@ -3,6 +3,18 @@ # # change the 20 value in printf to adjust width # # Use ' ## some comment' behind a command and it will be added to the help message automatically + +# Define paths to the project files +RUST_PROJECT_DIR := ../ahnlich +RUST_TYPES_NAME := types +PYTHON_PROJECT_DIR := ../sdk/ahnlich-client-py +CARGO_TOML := $(RUST_PROJECT_DIR)/Cargo.toml +PYPOETRY_TOML := $(PYTHON_PROJECT_DIR)/pyproject.toml + +# Default version bump rule +BUMP_RULE := patch + + help: ## Show this help message @awk 'BEGIN {FS = ":.*?## "}; /^[a-zA-Z0-9_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | grep -v '^help:.*?## ' @@ -21,7 +33,23 @@ check: ## cargo check test: ## cargo test cargo nextest run --no-capture -all: format check test clippy ## format, clip and test code + +bump-protocol-version: ## Bump project versions. Rules for bumpversion: patch, minor, major. 
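+# `cargo set-version` comes from the cargo-edit plugin (`cargo install cargo-edit`).
+# This rule bumps the Rust `types` crate and the Python client together so the protocol spec stays in sync.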
+ @echo "Bumping Rust project version to $${RULE:-$(BUMP_RULE)}" + @cd $(RUST_PROJECT_DIR) && cargo set-version --bump $(BUMP_RULE) --package $(RUST_TYPES_NAME) + @echo "Rust project version bumped to $(BUMP_RULE)" + @echo "Bumping Python project version with rule $(BUMP_RULE)" + @cd $(PYTHON_PROJECT_DIR) && poetry version $(BUMP_RULE) + @echo "Python project version bumped using rule $(BUMP_RULE)" + +bump-python-version: ## Bump python client versions. Rules for bump-python-version: patch, minor, major. + @echo "Bumping Python client version with rule $(BUMP_RULE)" + @cd $(PYTHON_PROJECT_DIR) && poetry run bumpversion $(BUMP_RULE) + @echo "Python client version bumped using rule $(BUMP_RULE)" + + + +all: format check test clippy bumpversion bump-python-version ## format, clip, test code, and bumpversions # --------------Configuration------------- # diff --git a/ahnlich/db/Cargo.toml b/ahnlich/db/Cargo.toml index 955726cb..8e4611ef 100644 --- a/ahnlich/db/Cargo.toml +++ b/ahnlich/db/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [[bin]] -name = "anlich-db" +name = "ahnlich-db" path = "src/main.rs" [lib] diff --git a/ahnlich/db/src/lib.rs b/ahnlich/db/src/lib.rs index f7dc44f0..f5f2f568 100644 --- a/ahnlich/db/src/lib.rs +++ b/ahnlich/db/src/lib.rs @@ -228,10 +228,12 @@ impl ServerTask { self.reader.read_exact(&mut data).await?; // TODO: Add trace here to catch whenever queries could not be deserialized at all if let Ok(queries) = ServerQuery::deserialize(&data) { + tracing::debug!("Got Queries {:?}", queries); // TODO: Pass in store_handler and use to respond to queries let results = self.handle(queries.into_inner()); if let Ok(binary_results) = results.serialize() { self.reader.get_mut().write_all(&binary_results).await?; + tracing::debug!("Sent Response of length {}, {:?}", binary_results.len(), binary_results); } } else { tracing::error!("{}", self.prefix_log("Could not deserialize client message as server query")); diff --git a/ahnlich/typegen/src/main.rs b/ahnlich/typegen/src/main.rs index 4d7c5712..5f48ed3e 100644 --- a/ahnlich/typegen/src/main.rs +++ b/ahnlich/typegen/src/main.rs @@ -24,7 +24,6 @@ fn main() -> Result<(), Box> { { panic!("Cannot generate type specs into invalid directory") } - let output_dir = &config .output_dir .to_owned() @@ -61,8 +60,6 @@ fn main() -> Result<(), Box> { .to_owned() .unwrap_or(std::path::PathBuf::from(SDK_PATH)); - std::fs::create_dir_all(output_dir).expect("Failed to create sdk directory"); - generate_language_definition(config.language, input_dir, output_dir); println!("Language type definition generated"); diff --git a/ahnlich/typegen/src/tracers/mod.rs b/ahnlich/typegen/src/tracers/mod.rs index d35ba7a1..f36992d4 100644 --- a/ahnlich/typegen/src/tracers/mod.rs +++ b/ahnlich/typegen/src/tracers/mod.rs @@ -96,7 +96,7 @@ struct OutputFile<'a> { impl<'a> OutputFile<'a> { fn generate(&self, config: &CodeGeneratorConfig, registry: &Registry) { let extension: &str = (&self.language).into(); - let output_dir = self.output_dir.join(extension); + let output_dir = self.output_dir.join(format!("ahnlich-client-{extension}")); let _ = std::fs::create_dir_all(&output_dir); let output_file = output_dir.join(format!("{}.{extension}", self.output_file)); @@ -106,6 +106,7 @@ impl<'a> OutputFile<'a> { let _ = match self.language { Language::Python => { + let output_dir = output_dir.join(format!("ahnlich_client_{extension}")); let installer = serde_generate::python3::Installer::new(output_dir, None); installer.install_bincode_runtime().unwrap(); 
installer.install_serde_runtime().unwrap(); diff --git a/ahnlich/typegen/src/tracers/query.rs b/ahnlich/typegen/src/tracers/query.rs index 3981d71b..7e5a170c 100644 --- a/ahnlich/typegen/src/tracers/query.rs +++ b/ahnlich/typegen/src/tracers/query.rs @@ -9,7 +9,7 @@ use types::similarity::Algorithm; use types::{ keyval::{StoreKey, StoreName}, metadata::{MetadataKey, MetadataValue}, - query::Query, + query::{Query, ServerQuery}, }; pub fn trace_query_enum() -> Registry { @@ -78,6 +78,8 @@ pub fn trace_query_enum() -> Registry { condition: test_predicate_condition.clone(), }; + let server_query = ServerQuery::from_queries(&[deletepred_variant.clone(), set_query.clone()]); + let _ = tracer .trace_value(&mut samples, &create_store) .expect("Error tracing the variant"); @@ -100,6 +102,10 @@ pub fn trace_query_enum() -> Registry { .trace_value(&mut samples, &deletepred_variant) .expect("Error tracing the deletepred variant"); + let _ = tracer + .trace_value(&mut samples, &server_query) + .expect("Error tracing the server_query"); + // trace enums to fix missing variants error // // Also trace each enum type separately to fix any `MissingVariants` error. @@ -121,6 +127,11 @@ pub fn trace_query_enum() -> Registry { .inspect_err(|err| println!("Failed to parse type {}", err.explanation())) .unwrap(); + let _ = tracer + .trace_type::(&samples) + .inspect_err(|err| println!("Failed to parse type {}", err.explanation())) + .unwrap(); + tracer .registry() .expect("Failed to create registry for query") diff --git a/ahnlich/typegen/src/tracers/server_response.rs b/ahnlich/typegen/src/tracers/server_response.rs index 5d15ae9a..c71ff055 100644 --- a/ahnlich/typegen/src/tracers/server_response.rs +++ b/ahnlich/typegen/src/tracers/server_response.rs @@ -7,7 +7,7 @@ use types::similarity::Similarity; use types::{ keyval::{StoreKey, StoreName}, metadata::{MetadataKey, MetadataValue}, - server::{ConnectedClient, ServerInfo, ServerResponse, ServerType, StoreInfo, StoreUpsert}, + server::{ConnectedClient, ServerInfo, ServerResponse, ServerResult, ServerType, StoreInfo, StoreUpsert}, version::Version, }; @@ -100,6 +100,16 @@ pub fn trace_server_response_enum() -> Registry { .inspect_err(|err| println!("Failed to parse type {}", err.explanation())) .unwrap(); + let _ = tracer + .trace_type::>(&samples) + .inspect_err(|err| println!("Failed to parse type {}", err.explanation())) + .unwrap(); + + let _ = tracer + .trace_type::(&samples) + .inspect_err(|err| println!("Failed to parse type {}", err.explanation())) + .unwrap(); + tracer .registry() .expect("Failed to create registry for server response") diff --git a/ahnlich/wait-for-it.sh b/ahnlich/wait-for-it.sh new file mode 100644 index 00000000..84ea5443 --- /dev/null +++ b/ahnlich/wait-for-it.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +# Use this script to wait for a service to be ready + +host=$1 +shift +port=$1 +shift +cmd="$@" + +until nc -z -v -w30 $host $port; do + >&2 echo "Waiting for $host:$port to be available..." 
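+  # nc -z only probes that the TCP port is open (no data is sent), -v logs each attempt, -w30 caps a single probe at 30 seconds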
+ sleep 5 +done + +>&2 echo "$host:$port is available" +# exec $cmd diff --git a/sdk/ahnlich-client-py/.bumpversion.cfg b/sdk/ahnlich-client-py/.bumpversion.cfg new file mode 100644 index 00000000..4efd5f4a --- /dev/null +++ b/sdk/ahnlich-client-py/.bumpversion.cfg @@ -0,0 +1,5 @@ +[bumpversion] +current_version = 0.1.0 +commit = True + +[bumpversion:file:CLIENT_VERSION] diff --git a/sdk/ahnlich-client-py/.gitignore b/sdk/ahnlich-client-py/.gitignore new file mode 100644 index 00000000..3ed7d4c0 --- /dev/null +++ b/sdk/ahnlich-client-py/.gitignore @@ -0,0 +1,216 @@ +# Created by https://www.toptal.com/developers/gitignore/api/python,visualstudiocode,vim +# Edit at https://www.toptal.com/developers/gitignore?templates=python,visualstudiocode,vim + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +.python-version +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +### Python Patch ### +# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration +poetry.toml + +# ruff +.ruff_cache/ + +# LSP config files +pyrightconfig.json + +### Vim ### +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +*~ +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +# End of https://www.toptal.com/developers/gitignore/api/python,visualstudiocode,vim diff --git a/sdk/ahnlich-client-py/CLIENT_VERSION b/sdk/ahnlich-client-py/CLIENT_VERSION new file mode 100644 index 00000000..6e8bf73a --- /dev/null +++ b/sdk/ahnlich-client-py/CLIENT_VERSION @@ -0,0 +1 @@ +0.1.0 diff --git a/sdk/ahnlich-client-py/MSG_TAG b/sdk/ahnlich-client-py/MSG_TAG new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/sdk/ahnlich-client-py/MSG_TAG @@ -0,0 +1 @@ + diff --git a/sdk/ahnlich-client-py/README.md b/sdk/ahnlich-client-py/README.md new file mode 100644 index 00000000..d86f11dd --- /dev/null +++ b/sdk/ahnlich-client-py/README.md @@ -0,0 +1,372 @@ +## Ahnlich Client PY + +A Python client that interacts with both ahnlich DB and AI + + +[![Ahnlich TestSuite](https://github.com/deven96/ahnlich/actions/workflows/test.yml/badge.svg)](https://github.com/deven96/ahnlich/actions/workflows/test.yml) +[![Ahnlich Python Client Tag and Deploy](https://github.com/deven96/ahnlich/actions/workflows/python_tag_and_deploy.yml/badge.svg)](https://github.com/deven96/ahnlich/actions/workflows/python_tag_and_deploy.yml) + +## Usage Overview + +The following topics are covered: +* [Installation](#installation) +* [Package Information](#package-information) +* [Server Response](#server-response) +* [Initialization](#initialization) + * [Protocol](#protocol) + * [Client](#client) + +* [Requests](#requests) + * [Ping](#ping) + * [Info Server](#info-server) + * [List Connected Clients](#list-connected-clients) + * [List Stores](#list-stores) + * [Create Store](#create-store) + * [Set](#set) + * [Drop Store](#drop-store) + * [Get Key](#get-key) + * [Get By Predicate](#get-by-predicate) + * [Create Index](#create-index) + * [Drop Index](#drop-index) + * 
[Delete Key](#delete-key)
+  * [Delete Predicate](#delete-predicate)
+* [Bulk Requests](#bulk-requests)
+* [How to Deploy to Artifactory](#deploy-to-artifactory)
+* [Type Meanings](#type-meanings)
+* [Change Log](#change-log)
+
+## Installation
+
+- Using Poetry
+```bash
+poetry add ahnlich-client-py
+```
+- Using pip
+```bash
+pip3 install ahnlich-client-py
+```
+
+## Package Information
+The ahnlich client has some noteworthy modules that provide some context:
+- Bincode
+- Serde Types
+- Serde Binary
+
+The modules above are generated by `serde_generate`; they represent the primitive Rust types and provide the base bincode serialization capabilities.
+
+- Query: Generated from the spec document, contains all the types used to send a request to the ahnlich database
+- Server Response: Generated from the spec document, contains all the possible server responses
+- Builders: Request builders used to compose one or more queries before sending them
+- Exceptions: Possible client exceptions
+- Libs: Contains helpers, such as `create_store_key`
+
+
+## Server Response
+
+Every query type has an associated server response, all of which can be found in
+```py
+from ahnlich_client_py import server_response
+```
+
+## Initialization
+
+### Protocol
+
+```py
+from ahnlich_client_py import AhnlichProtocol
+pro = AhnlichProtocol(address="127.0.0.1", port=1369)
+```
+
+### Client
+
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+```
+
+
+## Requests
+
+### Ping
+
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.ping()
+```
+
+### Info Server
+
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.info_server()
+```
+
+### List Connected Clients
+
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.list_clients()
+```
+
+### List Stores
+
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.list_stores()
+```
+
+### Create Store
+
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.create_store(
+    store_name="test store",
+    dimension=5,
+    create_predicates=[
+        "job"
+    ],
+    error_if_exists=True
+)
+```
+Once the store dimension is fixed, all `store_keys` must conform to it.
+Note that we only accept one-dimensional arrays/vectors of length N, where N is the store dimension.
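+
+As a quick, illustrative sketch (the vectors and metadata below are made up for the example), `create_store_key` from `libs` builds the keys, and each vector's length has to match the store dimension declared above before it is passed to `set` (next section):
+
+```py
+from ahnlich_client_py.libs import create_store_key
+from ahnlich_client_py import AhnlichDBClient
+
+DIMENSION = 5  # must match the dimension passed to create_store
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+raw_vectors = [
+    ([5.0, 3.0, 4.0, 3.9, 4.9], {"job": "sorcerer"}),
+    ([1.0, 2.0, 3.0, 4.0, 5.0], {"job": "assassin"}),
+]
+inputs = []
+for data, metadata in raw_vectors:
+    # catch mismatched vectors locally before calling set
+    assert len(data) == DIMENSION, "store keys must match the store dimension"
+    inputs.append((create_store_key(data=data), metadata))
+
+response = client.set(store_name="test store", inputs=inputs)
+```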
+
+### Set
+```py
+from ahnlich_client_py.libs import create_store_key
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+store_key = create_store_key(data=[5.0, 3.0, 4.0, 3.9, 4.9])
+store_value = {"rank": "chunin"}
+
+response = client.set(
+    store_name="test store",
+    inputs=[(store_key, store_value)]
+)
+```
+
+### Drop Store
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.drop_store(
+    store_name="test store",
+    error_if_not_exists=True
+)
+```
+
+### Get Sim N
+Returns an array of tuples of (store_key, store_value), at most the specified `closest_n`.
+
+```py
+from ahnlich_client_py import AhnlichDBClient, query
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.get_sim_n(
+    store_name="test store",
+    search_input=some_store_key,
+    closest_n=3,
+    algorithm=query.Algorithm__CosineSimilarity(),
+    condition=None
+)
+```
+*`closest_n` must be a nonzero integer*
+
+### Get Key
+Returns an array of tuples of (store_key, store_value)
+
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+key = some_store_key
+response = client.get_key(
+    store_name="test store",
+    keys=[key]
+)
+```
+
+### Get By Predicate
+Same as Get Key but returns results based on the defined condition
+
+```py
+from ahnlich_client_py import AhnlichDBClient, query
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+condition = query.PredicateCondition__Value(
+    query.Predicate__Equals(key="job", value="sorcerer")
+)
+response = client.get_by_predicate(
+    store_name="test store",
+    condition=condition
+)
+```
+
+### Create Index
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.create_index(
+    store_name="test store",
+    predicates=["job", "rank"]
+)
+```
+
+### Drop Index
+```py
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+response = client.drop_index(
+    store_name="test store",
+    predicates=["job"],
+    error_if_not_exists=True
+)
+```
+
+### Delete Key
+```py
+from ahnlich_client_py.libs import create_store_key
+from ahnlich_client_py import AhnlichDBClient
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+store_key = create_store_key(data=[5.0, 3.0, 4.0, 3.9, 4.9])
+
+response = client.delete_key(
+    store_name="test store",
+    keys=[store_key]
+)
+```
+
+### Delete Predicate
+```py
+from ahnlich_client_py import AhnlichDBClient, query
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+condition = query.PredicateCondition__Value(
+    query.Predicate__Equals(key="job", value="sorcerer")
+)
+
+response = client.delete_predicate(
+    store_name="test store",
+    condition=condition
+)
+```
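+
+Note that some arguments are validated client side before anything is sent to the server: `dimension` (Create Store) and `closest_n` (Get Sim N) must be nonzero, otherwise an `AhnlichValidationError` is raised. A minimal sketch (the store name is illustrative):
+
+```py
+from ahnlich_client_py import AhnlichDBClient, exceptions
+
+client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+try:
+    # dimension must be a nonzero integer, so this fails locally
+    client.create_store(store_name="bad store", dimension=0)
+except exceptions.AhnlichValidationError as err:
+    print(f"rejected before reaching the server: {err}")
+```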
+
+## Bulk Requests
+The client can send multiple requests at once, and they are handled sequentially; the builder class takes care of this. The response is a list of the individual request responses.
+
+```py
+from ahnlich_client_py import AhnlichDBClient, server_response
+
+db_client = AhnlichDBClient(address="127.0.0.1", port=1369)
+
+request_builder = db_client.pipeline()
+request_builder.ping()
+request_builder.info_server()
+request_builder.list_clients()
+request_builder.list_stores()
+
+response: server_response.ServerResult = db_client.exec()
+```
+
+## Deploy to Artifactory
+
+Replace the contents of the `MSG_TAG` file with your new tag message.
+
+From your feature branch, either use the makefile:
+```bash
+make bump-python-version [major, minor, patch]
+```
+or
+```bash
+poetry run bumpversion [major, minor, patch]
+```
+
+When your PR is merged, a change in the client version file triggers a release build to PyPI.
+
+NOTE:
+1. `current_version` in `.bumpversion.cfg` and `CLIENT_VERSION` should be equal to the last/current version of the package (it will be upgraded to the next release according to the chosen rule: `major`, `minor` or `patch`)
+2. `bumpversion` requires a clean Git working directory to update the tag.
+
+## Type Meanings
+
+- Store Key: A one-dimensional vector
+- Store Value: A dictionary of text metadata associated with a store key
+- Store Predicates (predicate indices): Indices that speed up filtering of store values
+- Predicates: Operations used to filter data (Equals, NotEquals, In, NotIn)
+- PredicateConditions: Conditions that wrap a single predicate (Value) or tie multiple predicates together using And/Or.
+Example:
+Value
+```py
+condition = query.PredicateCondition__Value(
+    query.Predicate__Equals(key="job", value="sorcerer")
+)
+```
+And
+```py
+# And(tuple of two predicate conditions)
+condition = query.PredicateCondition__And(
+    (
+        query.PredicateCondition__Value(
+            query.Predicate__Equals(key="job", value="sorcerer")
+        ),
+        query.PredicateCondition__Value(
+            query.Predicate__Equals(key="rank", value="Chunin")
+        ),
+    )
+)
+```
+
+## Change Log
+
+| Version | Description |
+| ------- |:-------------:|
+| 0.1.0 | Base Python client to connect to ahnlich db.
Bincode serialization and deserialization implemented | +| | | + + diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/__init__.py b/sdk/ahnlich-client-py/ahnlich_client_py/__init__.py new file mode 100644 index 00000000..88721918 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/__init__.py @@ -0,0 +1,4 @@ +from ahnlich_client_py import builders, config, libs +from ahnlich_client_py.client import AhnlichDBClient +from ahnlich_client_py.internals import query, server_response +from ahnlich_client_py.protocol import AhnlichProtocol diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/builders/__init__.py b/sdk/ahnlich-client-py/ahnlich_client_py/builders/__init__.py new file mode 100644 index 00000000..5823854f --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/builders/__init__.py @@ -0,0 +1,7 @@ +from .ai import AhnlichAIRequestBuilder +from .db import AhnlichDBRequestBuilder + +__all__ = [ + "AhnlichDBRequestBuilder", + "AhnlichAIRequestBuilder", +] diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/builders/ai.py b/sdk/ahnlich-client-py/ahnlich_client_py/builders/ai.py new file mode 100644 index 00000000..9f1b8e82 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/builders/ai.py @@ -0,0 +1,2 @@ +class AhnlichAIRequestBuilder: + pass diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/builders/db.py b/sdk/ahnlich-client-py/ahnlich_client_py/builders/db.py new file mode 100644 index 00000000..5b556907 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/builders/db.py @@ -0,0 +1,140 @@ +import typing + +import numpy as np + +from ahnlich_client_py import exceptions as ah_exceptions +from ahnlich_client_py.internals import query +from ahnlich_client_py.internals import serde_types as st +from ahnlich_client_py.protocol import AhnlichProtocol + + +class NonZeroSizeInteger: + def __init__(self, num: st.uint64) -> None: + + if num <= 0: + raise ah_exceptions.AhnlichValidationError( + "Ahnlich expects a Non zero value as integers" + ) + self.value = num + + +class AhnlichDBRequestBuilder: + def __init__(self) -> None: + self.queries: typing.List[query.Query] = [] + + def create_store( + self, + store_name: str, + dimension: st.uint64, + create_predicates: typing.Sequence[str] = None, + error_if_exists: bool = True, + ): + if not create_predicates: + create_predicates = [] + + non_zero = NonZeroSizeInteger(num=dimension) + self.queries.append( + query.Query__CreateStore( + store=store_name, + dimension=non_zero.value, + create_predicates=create_predicates, + error_if_exists=error_if_exists, + ) + ) + + def get_key(self, store_name: str, keys: typing.Sequence[query.Array]): + + self.queries.append(query.Query__GetKey(store=store_name, keys=keys)) + + def get_by_predicate(self, store_name: str, condition: query.PredicateCondition): + self.queries.append(query.Query__GetPred(store=store_name, condition=condition)) + + def get_sim_n( + self, + store_name: str, + search_input: query.Array, + closest_n: st.uint64, + algorithm: query.Algorithm, + condition: query.PredicateCondition = None, + ): + nonzero = NonZeroSizeInteger(closest_n) + self.queries.append( + query.Query__GetSimN( + store=store_name, + search_input=search_input, + closest_n=nonzero.value, + algorithm=algorithm, + condition=condition, + ) + ) + + def create_index(self, store_name: str, predicates: typing.Sequence[str]): + self.queries.append( + query.Query__CreateIndex(store=store_name, predicates=predicates) + ) + + def drop_index( + self, + store_name: str, + predicates: typing.Sequence[str], + 
error_if_not_exists: bool, + ): + self.queries.append( + query.Query__DropIndex( + store=store_name, + predicates=predicates, + error_if_not_exists=error_if_not_exists, + ) + ) + + def set( + self, + store_name, + inputs: typing.Sequence[typing.Tuple[query.Array, typing.Dict[str, str]]], + ): + self.queries.append(query.Query__Set(store=store_name, inputs=inputs)) + + def delete_key(self, store_name: str, keys: typing.Sequence[query.Array]): + self.queries.append(query.Query__DelKey(store=store_name, keys=keys)) + + def delete_predicate(self, store_name: str, condition: query.PredicateCondition): + self.queries.append(query.Query__DelPred(store=store_name, condition=condition)) + + def drop_store(self, store_name: str, error_if_not_exists: bool): + self.queries.append( + query.Query__DropStore( + store=store_name, error_if_not_exists=error_if_not_exists + ) + ) + + def list_stores(self): + self.queries.append(query.Query__ListStores()) + + def info_server(self): + self.queries.append(query.Query__InfoServer()) + + def list_clients(self): + self.queries.append(query.Query__ListClients()) + + def ping(self): + self.queries.append(query.Query__Ping()) + + def drop(self): + self.queries.clear() + + def to_server_query(self) -> query.ServerQuery: + if not self.queries: + raise ah_exceptions.AhnlichClientException( + "Must have atleast one request to be processed" + ) + # not optimal, but so far, recreating the list and dropping the internal store. + # seems straight forward + queries = self.queries[:] + + server_query = query.ServerQuery(queries=queries) + self.drop() + return server_query + + def execute_requests(self, protocol: AhnlichProtocol): + response = protocol.process_request(message=self.to_server_query()) + return response diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/client.py b/sdk/ahnlich-client-py/ahnlich_client_py/client.py new file mode 100644 index 00000000..3edc4bcc --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/client.py @@ -0,0 +1,140 @@ +import typing + +from ahnlich_client_py import builders, protocol +from ahnlich_client_py.internals import query +from ahnlich_client_py.internals import serde_types as st +from ahnlich_client_py.internals import server_response + + +class AhnlichDBClient: + """Wrapper for interacting with Ahnlich database or ai""" + + def __init__(self, address: str, port: int, timeout_sec: float = 5.0) -> None: + self.protocol = protocol.AhnlichProtocol( + address=address, port=port, timeout_sec=timeout_sec + ) + # would abstract this away eventually, but for now easy does it + self.builder = builders.AhnlichDBRequestBuilder() + + def get_key( + self, store_name: str, keys: typing.Sequence[query.Array] + ) -> server_response.ServerResult: + + self.builder.get_key(store_name=store_name, keys=keys) + return self.protocol.process_request(self.builder.to_server_query()) + + def get_by_predicate( + self, store_name: str, condition: query.PredicateCondition + ) -> server_response.ServerResult: + self.builder.get_by_predicate(store_name=store_name, condition=condition) + return self.protocol.process_request(self.builder.to_server_query()) + + def get_sim_n( + self, + store_name: str, + search_input: query.Array, + closest_n: st.uint64, + algorithm: query.Algorithm, + condition: query.PredicateCondition = None, + ) -> server_response.ServerResult: + self.builder.get_sim_n( + store_name=store_name, + search_input=search_input, + closest_n=closest_n, + algorithm=algorithm, + condition=condition, + ) + return 
self.protocol.process_request(self.builder.to_server_query()) + + def create_index( + self, store_name: str, predicates: typing.Sequence[str] + ) -> server_response.ServerResult: + self.builder.create_index(store_name=store_name, predicates=predicates) + return self.protocol.process_request(self.builder.to_server_query()) + + def drop_index( + self, + store_name: str, + predicates: typing.Sequence[str], + error_if_not_exists: bool, + ) -> server_response.ServerResult: + self.builder.drop_index( + store_name=store_name, + predicates=predicates, + error_if_not_exists=error_if_not_exists, + ) + return self.protocol.process_request(self.builder.to_server_query()) + + def set( + self, + store_name: str, + inputs: typing.Sequence[typing.Tuple[query.Array, typing.Dict[str, str]]], + ) -> server_response.ServerResult: + self.builder.set(store_name=store_name, inputs=inputs) + return self.protocol.process_request(self.builder.to_server_query()) + + def delete_key( + self, store_name: str, keys: typing.Sequence[query.Array] + ) -> server_response.ServerResult: + self.builder.delete_key(store_name=store_name, keys=keys) + return self.protocol.process_request(self.builder.to_server_query()) + + def delete_predicate( + self, store_name: str, condition: query.PredicateCondition + ) -> server_response.ServerResult: + self.builder.delete_predicate(store_name=store_name, condition=condition) + return self.protocol.process_request(self.builder.to_server_query()) + + def drop_store( + self, store_name: str, error_if_not_exists: bool + ) -> server_response.ServerResult: + self.builder.drop_store( + store_name=store_name, error_if_not_exists=error_if_not_exists + ) + return self.protocol.process_request(self.builder.to_server_query()) + + def create_store( + self, + store_name: str, + dimension: st.uint64, + create_predicates: typing.Sequence[str] = None, + error_if_exists: bool = True, + ) -> server_response.ServerResult: + if not create_predicates: + create_predicates = [] + self.builder.create_store( + store_name=store_name, + dimension=dimension, + create_predicates=create_predicates, + error_if_exists=error_if_exists, + ) + message = self.builder.to_server_query() + return self.protocol.process_request(message=message) + + def list_stores(self) -> server_response.ServerResult: + self.builder.list_stores() + return self.protocol.process_request(self.builder.to_server_query()) + + def info_server(self) -> server_response.ServerResult: + self.builder.info_server() + return self.protocol.process_request( + message=self.builder.to_server_query(), + ) + + def list_clients(self) -> server_response.ServerResult: + self.builder.list_clients() + return self.protocol.process_request( + message=self.builder.to_server_query(), + ) + + def ping(self) -> server_response.ServerResult: + self.builder.ping() + return self.protocol.process_request(message=self.builder.to_server_query()) + + def pipeline(self) -> builders.AhnlichDBRequestBuilder: + """Gives you a request builder to create multple requests""" + return self.builder + + def exec(self) -> server_response.ServerResult: + """Executes a pipelined request""" + return self.protocol.process_request(message=self.builder.to_server_query()) diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/config.py b/sdk/ahnlich-client-py/ahnlich_client_py/config.py new file mode 100644 index 00000000..81b6edc8 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/config.py @@ -0,0 +1,8 @@ +from pathlib import Path + +HEADER = b"AHNLICH;" +BUFFER_SIZE = 1024 +PACKAGE_NAME = 
"ahnlich-client-py" +BASE_DIR = Path(__file__).resolve().parent.parent + +AHNLICH_BIN_DIR = BASE_DIR.parent.parent / "ahnlich" diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/exceptions.py b/sdk/ahnlich-client-py/ahnlich_client_py/exceptions.py new file mode 100644 index 00000000..31442b47 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/exceptions.py @@ -0,0 +1,10 @@ +class AhnlichProtocolException(Exception): + pass + + +class AhnlichValidationError(Exception): + pass + + +class AhnlichClientException(Exception): + pass diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/internals/__init__.py b/sdk/ahnlich-client-py/ahnlich_client_py/internals/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/internals/bincode/__init__.py b/sdk/ahnlich-client-py/ahnlich_client_py/internals/bincode/__init__.py new file mode 100644 index 00000000..38cbd7ff --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/internals/bincode/__init__.py @@ -0,0 +1,77 @@ +# Copyright (c) Facebook, Inc. and its affiliates +# SPDX-License-Identifier: MIT OR Apache-2.0 + +import collections +import dataclasses +import io +import struct +import typing +from copy import copy +from typing import get_type_hints + +from ahnlich_client_py.internals import serde_binary as sb +from ahnlich_client_py.internals import serde_types as st + +# Maximum length in practice for sequences (e.g. in Java). +MAX_LENGTH = (1 << 31) - 1 + + +class BincodeSerializer(sb.BinarySerializer): + def __init__(self): + super().__init__(output=io.BytesIO(), container_depth_budget=None) + + def serialize_f32(self, value: st.float32): + self.output.write(struct.pack(" MAX_LENGTH: + raise st.SerializationError("Length exceeds the maximum supported value.") + self.output.write(int(value).to_bytes(8, "little", signed=False)) + + def serialize_variant_index(self, value: int): + self.output.write(int(value).to_bytes(4, "little", signed=False)) + + def sort_map_entries(self, offsets: typing.List[int]): + pass + + +class BincodeDeserializer(sb.BinaryDeserializer): + def __init__(self, content): + super().__init__(input=io.BytesIO(content), container_depth_budget=None) + + def deserialize_f32(self) -> st.float32: + (value,) = struct.unpack(" st.float64: + (value,) = struct.unpack(" int: + value = int.from_bytes(self.read(8), byteorder="little", signed=False) + if value > MAX_LENGTH: + raise st.DeserializationError("Length exceeds the maximum supported value.") + return value + + def deserialize_variant_index(self) -> int: + return int.from_bytes(self.read(4), byteorder="little", signed=False) + + def check_that_key_slices_are_increasing( + self, slice1: typing.Tuple[int, int], slice2: typing.Tuple[int, int] + ): + pass + + +def serialize(obj: typing.Any, obj_type) -> bytes: + serializer = BincodeSerializer() + serializer.serialize_any(obj, obj_type) + return serializer.get_buffer() + + +def deserialize(content: bytes, obj_type) -> typing.Tuple[typing.Any, bytes]: + deserializer = BincodeDeserializer(content) + value = deserializer.deserialize_any(obj_type) + return value, deserializer.get_remaining_buffer() diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/internals/query.py b/sdk/ahnlich-client-py/ahnlich_client_py/internals/query.py new file mode 100644 index 00000000..a8d62f7f --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/internals/query.py @@ -0,0 +1,298 @@ +# pyre-strict +import typing +from dataclasses import dataclass + +from ahnlich_client_py.internals import 
bincode +from ahnlich_client_py.internals import serde_types as st + + +class Algorithm: + VARIANTS = [] # type: typing.Sequence[typing.Type[Algorithm]] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, Algorithm) + + @staticmethod + def bincode_deserialize(input: bytes) -> "Algorithm": + v, buffer = bincode.deserialize(input, Algorithm) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class Algorithm__EuclideanDistance(Algorithm): + INDEX = 0 # type: int + pass + + +@dataclass(frozen=True) +class Algorithm__DotProductSimilarity(Algorithm): + INDEX = 1 # type: int + pass + + +@dataclass(frozen=True) +class Algorithm__CosineSimilarity(Algorithm): + INDEX = 2 # type: int + pass + + +Algorithm.VARIANTS = [ + Algorithm__EuclideanDistance, + Algorithm__DotProductSimilarity, + Algorithm__CosineSimilarity, +] + + +@dataclass(frozen=True) +class Array: + v: st.uint8 + dim: typing.Tuple[st.uint64] + data: typing.Sequence[st.float32] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, Array) + + @staticmethod + def bincode_deserialize(input: bytes) -> "Array": + v, buffer = bincode.deserialize(input, Array) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +class Predicate: + VARIANTS = [] # type: typing.Sequence[typing.Type[Predicate]] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, Predicate) + + @staticmethod + def bincode_deserialize(input: bytes) -> "Predicate": + v, buffer = bincode.deserialize(input, Predicate) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class Predicate__Equals(Predicate): + INDEX = 0 # type: int + key: str + value: str + + +@dataclass(frozen=True) +class Predicate__NotEquals(Predicate): + INDEX = 1 # type: int + key: str + value: str + + +@dataclass(frozen=True) +class Predicate__In(Predicate): + INDEX = 2 # type: int + key: str + value: typing.Sequence[str] + + +@dataclass(frozen=True) +class Predicate__NotIn(Predicate): + INDEX = 3 # type: int + key: str + value: typing.Sequence[str] + + +Predicate.VARIANTS = [ + Predicate__Equals, + Predicate__NotEquals, + Predicate__In, + Predicate__NotIn, +] + + +class PredicateCondition: + VARIANTS = [] # type: typing.Sequence[typing.Type[PredicateCondition]] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, PredicateCondition) + + @staticmethod + def bincode_deserialize(input: bytes) -> "PredicateCondition": + v, buffer = bincode.deserialize(input, PredicateCondition) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class PredicateCondition__Value(PredicateCondition): + INDEX = 0 # type: int + value: "Predicate" + + +@dataclass(frozen=True) +class PredicateCondition__And(PredicateCondition): + INDEX = 1 # type: int + value: typing.Tuple["PredicateCondition", "PredicateCondition"] + + +@dataclass(frozen=True) +class PredicateCondition__Or(PredicateCondition): + INDEX = 2 # type: int + value: typing.Tuple["PredicateCondition", "PredicateCondition"] + + +PredicateCondition.VARIANTS = [ + PredicateCondition__Value, + PredicateCondition__And, + PredicateCondition__Or, +] + + +class Query: + VARIANTS = [] # type: typing.Sequence[typing.Type[Query]] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, Query) + + @staticmethod + def bincode_deserialize(input: bytes) 
-> "Query": + v, buffer = bincode.deserialize(input, Query) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class Query__CreateStore(Query): + INDEX = 0 # type: int + store: str + dimension: st.uint64 + create_predicates: typing.Sequence[str] + error_if_exists: bool + + +@dataclass(frozen=True) +class Query__GetKey(Query): + INDEX = 1 # type: int + store: str + keys: typing.Sequence["Array"] + + +@dataclass(frozen=True) +class Query__GetPred(Query): + INDEX = 2 # type: int + store: str + condition: "PredicateCondition" + + +@dataclass(frozen=True) +class Query__GetSimN(Query): + INDEX = 3 # type: int + store: str + search_input: "Array" + closest_n: st.uint64 + algorithm: "Algorithm" + condition: typing.Optional["PredicateCondition"] + + +@dataclass(frozen=True) +class Query__CreateIndex(Query): + INDEX = 4 # type: int + store: str + predicates: typing.Sequence[str] + + +@dataclass(frozen=True) +class Query__DropIndex(Query): + INDEX = 5 # type: int + store: str + predicates: typing.Sequence[str] + error_if_not_exists: bool + + +@dataclass(frozen=True) +class Query__Set(Query): + INDEX = 6 # type: int + store: str + inputs: typing.Sequence[typing.Tuple["Array", typing.Dict[str, str]]] + + +@dataclass(frozen=True) +class Query__DelKey(Query): + INDEX = 7 # type: int + store: str + keys: typing.Sequence["Array"] + + +@dataclass(frozen=True) +class Query__DelPred(Query): + INDEX = 8 # type: int + store: str + condition: "PredicateCondition" + + +@dataclass(frozen=True) +class Query__DropStore(Query): + INDEX = 9 # type: int + store: str + error_if_not_exists: bool + + +@dataclass(frozen=True) +class Query__InfoServer(Query): + INDEX = 10 # type: int + pass + + +@dataclass(frozen=True) +class Query__ListStores(Query): + INDEX = 11 # type: int + pass + + +@dataclass(frozen=True) +class Query__ListClients(Query): + INDEX = 12 # type: int + pass + + +@dataclass(frozen=True) +class Query__Ping(Query): + INDEX = 13 # type: int + pass + + +Query.VARIANTS = [ + Query__CreateStore, + Query__GetKey, + Query__GetPred, + Query__GetSimN, + Query__CreateIndex, + Query__DropIndex, + Query__Set, + Query__DelKey, + Query__DelPred, + Query__DropStore, + Query__InfoServer, + Query__ListStores, + Query__ListClients, + Query__Ping, +] + + +@dataclass(frozen=True) +class ServerQuery: + queries: typing.Sequence["Query"] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, ServerQuery) + + @staticmethod + def bincode_deserialize(input: bytes) -> "ServerQuery": + v, buffer = bincode.deserialize(input, ServerQuery) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/internals/serde_binary/__init__.py b/sdk/ahnlich-client-py/ahnlich_client_py/internals/serde_binary/__init__.py new file mode 100644 index 00000000..a71b03f5 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/internals/serde_binary/__init__.py @@ -0,0 +1,418 @@ +# Copyright (c) Facebook, Inc. and its affiliates +# SPDX-License-Identifier: MIT OR Apache-2.0 + +""" +Module describing the "binary" serialization formats. + +Note: This internal module is currently only meant to share code between the BCS and bincode formats. Internal APIs could change in the future. 
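+
+In this client, the concrete `BincodeSerializer`/`BincodeDeserializer` in `internals.bincode` encode sequence lengths as unsigned 64-bit integers and enum variant indexes as unsigned 32-bit integers, both little-endian.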
+""" + +import collections +import dataclasses +import io +import typing +from typing import get_type_hints + +from ahnlich_client_py.internals import serde_types as st + + +@dataclasses.dataclass +class BinarySerializer: + """Serialization primitives for binary formats (abstract class). + + "Binary" serialization formats may differ in the way they encode sequence lengths, variant + index, and how they sort map entries (or not). + """ + + output: io.BytesIO + container_depth_budget: typing.Optional[int] + primitive_type_serializer: typing.Mapping = dataclasses.field(init=False) + + def __post_init__(self): + self.primitive_type_serializer = { + bool: self.serialize_bool, + st.uint8: self.serialize_u8, + st.uint16: self.serialize_u16, + st.uint32: self.serialize_u32, + st.uint64: self.serialize_u64, + st.uint128: self.serialize_u128, + st.int8: self.serialize_i8, + st.int16: self.serialize_i16, + st.int32: self.serialize_i32, + st.int64: self.serialize_i64, + st.int128: self.serialize_i128, + st.float32: self.serialize_f32, + st.float64: self.serialize_f64, + st.unit: self.serialize_unit, + st.char: self.serialize_char, + str: self.serialize_str, + bytes: self.serialize_bytes, + } + + def serialize_bytes(self, value: bytes): + self.serialize_len(len(value)) + self.output.write(value) + + def serialize_str(self, value: str): + self.serialize_bytes(value.encode()) + + def serialize_unit(self, value: st.unit): + pass + + def serialize_bool(self, value: bool): + self.output.write(int(value).to_bytes(1, "little", signed=False)) + + def serialize_u8(self, value: st.uint8): + self.output.write(int(value).to_bytes(1, "little", signed=False)) + + def serialize_u16(self, value: st.uint16): + self.output.write(int(value).to_bytes(2, "little", signed=False)) + + def serialize_u32(self, value: st.uint32): + self.output.write(int(value).to_bytes(4, "little", signed=False)) + + def serialize_u64(self, value: st.uint64): + self.output.write(int(value).to_bytes(8, "little", signed=False)) + + def serialize_u128(self, value: st.uint128): + self.output.write(int(value).to_bytes(16, "little", signed=False)) + + def serialize_i8(self, value: st.uint8): + self.output.write(int(value).to_bytes(1, "little", signed=True)) + + def serialize_i16(self, value: st.uint16): + self.output.write(int(value).to_bytes(2, "little", signed=True)) + + def serialize_i32(self, value: st.uint32): + self.output.write(int(value).to_bytes(4, "little", signed=True)) + + def serialize_i64(self, value: st.uint64): + self.output.write(int(value).to_bytes(8, "little", signed=True)) + + def serialize_i128(self, value: st.uint128): + self.output.write(int(value).to_bytes(16, "little", signed=True)) + + def serialize_f32(self, value: st.float32): + raise NotImplementedError + + def serialize_f64(self, value: st.float64): + raise NotImplementedError + + def serialize_char(self, value: st.char): + raise NotImplementedError + + def get_buffer_offset(self) -> int: + return len(self.output.getbuffer()) + + def get_buffer(self) -> bytes: + return self.output.getvalue() + + def increase_container_depth(self): + if self.container_depth_budget is not None: + if self.container_depth_budget == 0: + raise st.SerializationError("Exceeded maximum container depth") + self.container_depth_budget -= 1 + + def decrease_container_depth(self): + if self.container_depth_budget is not None: + self.container_depth_budget += 1 + + def serialize_len(self, value: int): + raise NotImplementedError + + def serialize_variant_index(self, value: int): + raise 
NotImplementedError + + def sort_map_entries(self, offsets: typing.List[int]): + raise NotImplementedError + + # noqa: C901 + def serialize_any(self, obj: typing.Any, obj_type): + if obj_type in self.primitive_type_serializer: + self.primitive_type_serializer[obj_type](obj) + + elif hasattr(obj_type, "__origin__"): # Generic type + types = getattr(obj_type, "__args__") + + if getattr(obj_type, "__origin__") == collections.abc.Sequence: # Sequence + assert len(types) == 1 + item_type = types[0] + self.serialize_len(len(obj)) + for item in obj: + self.serialize_any(item, item_type) + + elif getattr(obj_type, "__origin__") == tuple: # Tuple + if len(types) != 1 or types[0] != (): + for i in range(len(obj)): + self.serialize_any(obj[i], types[i]) + + elif getattr(obj_type, "__origin__") == typing.Union: # Option + assert len(types) == 2 and types[1] == type(None) + if obj is None: + self.output.write(b"\x00") + else: + self.output.write(b"\x01") + self.serialize_any(obj, types[0]) + + elif getattr(obj_type, "__origin__") == dict: # Map + assert len(types) == 2 + self.serialize_len(len(obj)) + offsets = [] + for key, value in obj.items(): + offsets.append(self.get_buffer_offset()) + self.serialize_any(key, types[0]) + self.serialize_any(value, types[1]) + self.sort_map_entries(offsets) + + else: + raise st.SerializationError("Unexpected type", obj_type) + + else: + if not dataclasses.is_dataclass(obj_type): # Enum + if not hasattr(obj_type, "VARIANTS"): + raise st.SerializationError("Unexpected type", obj_type) + if not hasattr(obj, "INDEX"): + raise st.SerializationError( + "Wrong Value for the type", obj, obj_type + ) + self.serialize_variant_index(obj.__class__.INDEX) + # Proceed to variant + obj_type = obj_type.VARIANTS[obj.__class__.INDEX] + if not dataclasses.is_dataclass(obj_type): + raise st.SerializationError("Unexpected type", obj_type) + + # pyre-ignore + if not isinstance(obj, obj_type): + raise st.SerializationError("Wrong Value for the type", obj, obj_type) + + # Content of struct or variant + fields = dataclasses.fields(obj_type) + types = get_type_hints(obj_type) + self.increase_container_depth() + for field in fields: + field_value = obj.__dict__[field.name] + field_type = types[field.name] + self.serialize_any(field_value, field_type) + self.decrease_container_depth() + + +@dataclasses.dataclass +class BinaryDeserializer: + """Deserialization primitives for binary formats (abstract class). + + "Binary" serialization formats may differ in the way they encode sequence lengths, variant + index, and how they verify the ordering of keys in map entries (or not). 
+ """ + + input: io.BytesIO + container_depth_budget: typing.Optional[int] + primitive_type_deserializer: typing.Mapping = dataclasses.field(init=False) + + def __post_init__(self): + self.primitive_type_deserializer = { + bool: self.deserialize_bool, + st.uint8: self.deserialize_u8, + st.uint16: self.deserialize_u16, + st.uint32: self.deserialize_u32, + st.uint64: self.deserialize_u64, + st.uint128: self.deserialize_u128, + st.int8: self.deserialize_i8, + st.int16: self.deserialize_i16, + st.int32: self.deserialize_i32, + st.int64: self.deserialize_i64, + st.int128: self.deserialize_i128, + st.float32: self.deserialize_f32, + st.float64: self.deserialize_f64, + st.unit: self.deserialize_unit, + st.char: self.deserialize_char, + str: self.deserialize_str, + bytes: self.deserialize_bytes, + } + + def read(self, length: int) -> bytes: + value = self.input.read(length) + if value is None or len(value) < length: + raise st.DeserializationError("Input is too short") + return value + + def deserialize_bytes(self) -> bytes: + length = self.deserialize_len() + return self.read(length) + + def deserialize_str(self) -> str: + content = self.deserialize_bytes() + try: + return content.decode() + except UnicodeDecodeError: + raise st.DeserializationError("Invalid unicode string:", content) + + def deserialize_unit(self) -> st.unit: + pass + + def deserialize_bool(self) -> bool: + b = int.from_bytes(self.read(1), byteorder="little", signed=False) + if b == 0: + return False + elif b == 1: + return True + else: + raise st.DeserializationError("Unexpected boolean value:", b) + + def deserialize_u8(self) -> st.uint8: + return st.uint8(int.from_bytes(self.read(1), byteorder="little", signed=False)) + + def deserialize_u16(self) -> st.uint16: + return st.uint16(int.from_bytes(self.read(2), byteorder="little", signed=False)) + + def deserialize_u32(self) -> st.uint32: + return st.uint32(int.from_bytes(self.read(4), byteorder="little", signed=False)) + + def deserialize_u64(self) -> st.uint64: + return st.uint64(int.from_bytes(self.read(8), byteorder="little", signed=False)) + + def deserialize_u128(self) -> st.uint128: + return st.uint128( + int.from_bytes(self.read(16), byteorder="little", signed=False) + ) + + def deserialize_i8(self) -> st.int8: + return st.int8(int.from_bytes(self.read(1), byteorder="little", signed=True)) + + def deserialize_i16(self) -> st.int16: + return st.int16(int.from_bytes(self.read(2), byteorder="little", signed=True)) + + def deserialize_i32(self) -> st.int32: + return st.int32(int.from_bytes(self.read(4), byteorder="little", signed=True)) + + def deserialize_i64(self) -> st.int64: + return st.int64(int.from_bytes(self.read(8), byteorder="little", signed=True)) + + def deserialize_i128(self) -> st.int128: + return st.int128(int.from_bytes(self.read(16), byteorder="little", signed=True)) + + def deserialize_f32(self) -> st.float32: + raise NotImplementedError + + def deserialize_f64(self) -> st.float64: + raise NotImplementedError + + def deserialize_char(self) -> st.char: + raise NotImplementedError + + def get_buffer_offset(self) -> int: + return self.input.tell() + + def get_remaining_buffer(self) -> bytes: + buf = self.input.getbuffer() + offset = self.get_buffer_offset() + return bytes(buf[offset:]) + + def increase_container_depth(self): + if self.container_depth_budget is not None: + if self.container_depth_budget == 0: + raise st.DeserializationError("Exceeded maximum container depth") + self.container_depth_budget -= 1 + + def decrease_container_depth(self): + if 
self.container_depth_budget is not None: + self.container_depth_budget += 1 + + def deserialize_len(self) -> int: + raise NotImplementedError + + def deserialize_variant_index(self) -> int: + raise NotImplementedError + + def check_that_key_slices_are_increasing( + self, slice1: typing.Tuple[int, int], slice2: typing.Tuple[int, int] + ) -> bool: + raise NotImplementedError + + # noqa + def deserialize_any(self, obj_type) -> typing.Any: + if obj_type in self.primitive_type_deserializer: + return self.primitive_type_deserializer[obj_type]() + + elif hasattr(obj_type, "__origin__"): # Generic type + types = getattr(obj_type, "__args__") + if getattr(obj_type, "__origin__") == collections.abc.Sequence: # Sequence + assert len(types) == 1 + item_type = types[0] + length = self.deserialize_len() + result = [] + for i in range(0, length): + item = self.deserialize_any(item_type) + result.append(item) + + return result + + elif getattr(obj_type, "__origin__") == tuple: # Tuple + result = [] + if len(types) == 1 and types[0] == (): + return tuple() + for i in range(len(types)): + item = self.deserialize_any(types[i]) + result.append(item) + return tuple(result) + + elif getattr(obj_type, "__origin__") == typing.Union: # Option + assert len(types) == 2 and types[1] == type(None) + tag = int.from_bytes(self.read(1), byteorder="little", signed=False) + if tag == 0: + return None + elif tag == 1: + return self.deserialize_any(types[0]) + else: + raise st.DeserializationError("Wrong tag for Option value") + + elif getattr(obj_type, "__origin__") == dict: # Map + assert len(types) == 2 + length = self.deserialize_len() + result = dict() + previous_key_slice = None + for i in range(0, length): + key_start = self.get_buffer_offset() + key = self.deserialize_any(types[0]) + key_end = self.get_buffer_offset() + value = self.deserialize_any(types[1]) + + key_slice = (key_start, key_end) + if previous_key_slice is not None: + self.check_that_key_slices_are_increasing( + previous_key_slice, key_slice + ) + previous_key_slice = key_slice + + result[key] = value + + return result + + else: + raise st.DeserializationError("Unexpected type", obj_type) + + else: + # handle structs + if dataclasses.is_dataclass(obj_type): + values = [] + fields = dataclasses.fields(obj_type) + typing_hints = get_type_hints(obj_type) + self.increase_container_depth() + for field in fields: + field_type = typing_hints[field.name] + field_value = self.deserialize_any(field_type) + values.append(field_value) + self.decrease_container_depth() + return obj_type(*values) + + # handle variant + elif hasattr(obj_type, "VARIANTS"): + variant_index = self.deserialize_variant_index() + if variant_index not in range(len(obj_type.VARIANTS)): + raise st.DeserializationError( + "Unexpected variant index", variant_index + ) + new_type = obj_type.VARIANTS[variant_index] + return self.deserialize_any(new_type) + + else: + raise st.DeserializationError("Unexpected type", obj_type) diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/internals/serde_types/__init__.py b/sdk/ahnlich-client-py/ahnlich_client_py/internals/serde_types/__init__.py new file mode 100644 index 00000000..1c85909c --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/internals/serde_types/__init__.py @@ -0,0 +1,75 @@ +# Copyright (c) Facebook, Inc. 
and its affiliates +# SPDX-License-Identifier: MIT OR Apache-2.0 + +import typing +from dataclasses import dataclass + +import numpy as np + + +class SerializationError(ValueError): + """Error raised during Serialization""" + + pass + + +class DeserializationError(ValueError): + """Error raised during Deserialization""" + + pass + + +@dataclass(init=False) +class uint128: + high: np.uint64 + low: np.uint64 + + def __init__(self, num): + self.high = np.uint64(num >> 64) + self.low = np.uint64(num & 0xFFFFFFFFFFFFFFFF) + + def __int__(self): + return (int(self.high) << 64) | int(self.low) + + +@dataclass(init=False) +class int128: + high: np.int64 + low: np.uint64 + + def __init__(self, num): + self.high = np.int64(num >> 64) + self.low = np.uint64(num & 0xFFFFFFFFFFFFFFFF) + + def __int__(self): + return (int(self.high) << 64) | int(self.low) + + +@dataclass(init=False) +class char: + value: str + + def __init__(self, s): + if len(s) != 1: + raise ValueError("`char` expects a single unicode character") + self.value = s + + def __str__(self): + return self.value + + +unit = typing.Type[None] + +bool = bool +int8 = np.int8 +int16 = np.int16 +int32 = np.int32 +int64 = np.int64 + +uint8 = np.uint8 +uint16 = np.uint16 +uint32 = np.uint32 +uint64 = np.uint64 + +float32 = np.float32 +float64 = np.float64 diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/internals/server_response.py b/sdk/ahnlich-client-py/ahnlich_client_py/internals/server_response.py new file mode 100644 index 00000000..8f4b818d --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/internals/server_response.py @@ -0,0 +1,299 @@ +# pyre-strict +import typing +from dataclasses import dataclass + +from ahnlich_client_py.internals import bincode +from ahnlich_client_py.internals import serde_types as st + + +@dataclass(frozen=True) +class Array: + v: st.uint8 + dim: typing.Tuple[st.uint64] + data: typing.Sequence[st.float32] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, Array) + + @staticmethod + def bincode_deserialize(input: bytes) -> "Array": + v, buffer = bincode.deserialize(input, Array) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class ConnectedClient: + address: str + time_connected: "SystemTime" + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, ConnectedClient) + + @staticmethod + def bincode_deserialize(input: bytes) -> "ConnectedClient": + v, buffer = bincode.deserialize(input, ConnectedClient) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +class Result: + VARIANTS = [] # type: typing.Sequence[typing.Type[Result]] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, Result) + + @staticmethod + def bincode_deserialize(input: bytes) -> "Result": + v, buffer = bincode.deserialize(input, Result) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class Result__Ok(Result): + INDEX = 0 # type: int + value: "ServerResponse" + + +@dataclass(frozen=True) +class Result__Err(Result): + INDEX = 1 # type: int + value: str + + +Result.VARIANTS = [ + Result__Ok, + Result__Err, +] + + +@dataclass(frozen=True) +class ServerInfo: + address: str + version: "Version" + type: "ServerType" + limit: st.uint64 + remaining: st.uint64 + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, ServerInfo) + + @staticmethod + def bincode_deserialize(input: bytes) 
-> "ServerInfo": + v, buffer = bincode.deserialize(input, ServerInfo) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +class ServerResponse: + VARIANTS = [] # type: typing.Sequence[typing.Type[ServerResponse]] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, ServerResponse) + + @staticmethod + def bincode_deserialize(input: bytes) -> "ServerResponse": + v, buffer = bincode.deserialize(input, ServerResponse) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class ServerResponse__Unit(ServerResponse): + INDEX = 0 # type: int + pass + + +@dataclass(frozen=True) +class ServerResponse__Pong(ServerResponse): + INDEX = 1 # type: int + pass + + +@dataclass(frozen=True) +class ServerResponse__ClientList(ServerResponse): + INDEX = 2 # type: int + value: typing.Sequence["ConnectedClient"] + + +@dataclass(frozen=True) +class ServerResponse__StoreList(ServerResponse): + INDEX = 3 # type: int + value: typing.Sequence["StoreInfo"] + + +@dataclass(frozen=True) +class ServerResponse__InfoServer(ServerResponse): + INDEX = 4 # type: int + value: "ServerInfo" + + +@dataclass(frozen=True) +class ServerResponse__Set(ServerResponse): + INDEX = 5 # type: int + value: "StoreUpsert" + + +@dataclass(frozen=True) +class ServerResponse__Get(ServerResponse): + INDEX = 6 # type: int + value: typing.Sequence[typing.Tuple["Array", typing.Dict[str, str]]] + + +@dataclass(frozen=True) +class ServerResponse__GetSimN(ServerResponse): + INDEX = 7 # type: int + value: typing.Sequence[typing.Tuple["Array", typing.Dict[str, str], "Similarity"]] + + +@dataclass(frozen=True) +class ServerResponse__Del(ServerResponse): + INDEX = 8 # type: int + value: st.uint64 + + +@dataclass(frozen=True) +class ServerResponse__CreateIndex(ServerResponse): + INDEX = 9 # type: int + value: st.uint64 + + +ServerResponse.VARIANTS = [ + ServerResponse__Unit, + ServerResponse__Pong, + ServerResponse__ClientList, + ServerResponse__StoreList, + ServerResponse__InfoServer, + ServerResponse__Set, + ServerResponse__Get, + ServerResponse__GetSimN, + ServerResponse__Del, + ServerResponse__CreateIndex, +] + + +@dataclass(frozen=True) +class ServerResult: + results: typing.Sequence["Result"] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, ServerResult) + + @staticmethod + def bincode_deserialize(input: bytes) -> "ServerResult": + v, buffer = bincode.deserialize(input, ServerResult) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +class ServerType: + VARIANTS = [] # type: typing.Sequence[typing.Type[ServerType]] + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, ServerType) + + @staticmethod + def bincode_deserialize(input: bytes) -> "ServerType": + v, buffer = bincode.deserialize(input, ServerType) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class ServerType__Database(ServerType): + INDEX = 0 # type: int + pass + + +ServerType.VARIANTS = [ + ServerType__Database, +] + + +@dataclass(frozen=True) +class Similarity: + value: st.float32 + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, Similarity) + + @staticmethod + def bincode_deserialize(input: bytes) -> "Similarity": + v, buffer = bincode.deserialize(input, Similarity) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + 
+@dataclass(frozen=True) +class StoreInfo: + name: str + len: st.uint64 + size_in_bytes: st.uint64 + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, StoreInfo) + + @staticmethod + def bincode_deserialize(input: bytes) -> "StoreInfo": + v, buffer = bincode.deserialize(input, StoreInfo) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class StoreUpsert: + inserted: st.uint64 + updated: st.uint64 + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, StoreUpsert) + + @staticmethod + def bincode_deserialize(input: bytes) -> "StoreUpsert": + v, buffer = bincode.deserialize(input, StoreUpsert) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class SystemTime: + secs_since_epoch: st.uint64 + nanos_since_epoch: st.uint32 + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, SystemTime) + + @staticmethod + def bincode_deserialize(input: bytes) -> "SystemTime": + v, buffer = bincode.deserialize(input, SystemTime) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v + + +@dataclass(frozen=True) +class Version: + major: st.uint8 + minor: st.uint16 + patch: st.uint16 + + def bincode_serialize(self) -> bytes: + return bincode.serialize(self, Version) + + @staticmethod + def bincode_deserialize(input: bytes) -> "Version": + v, buffer = bincode.deserialize(input, Version) + if buffer: + raise st.DeserializationError("Some input bytes were not read") + return v diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/libs.py b/sdk/ahnlich-client-py/ahnlich_client_py/libs.py new file mode 100644 index 00000000..66888369 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/libs.py @@ -0,0 +1,13 @@ +import typing + +import numpy as np + +from ahnlich_client_py.internals import query +from ahnlich_client_py.internals import serde_types as st + + +def create_store_key(data: typing.List[float], v: int = 1) -> query.Array: + np_array = np.array(data, dtype=np.float32) + dimensions = (st.uint64(np_array.shape[0]),) + store_key = query.Array(v=st.uint8(v), dim=dimensions, data=np_array.tolist()) + return store_key diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/protocol.py b/sdk/ahnlich-client-py/ahnlich_client_py/protocol.py new file mode 100644 index 00000000..4cea3852 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/protocol.py @@ -0,0 +1,74 @@ +import socket + +from ahnlich_client_py import config +from ahnlich_client_py.exceptions import AhnlichProtocolException +from ahnlich_client_py.internals import query, server_response + + +class AhnlichProtocol: + def __init__(self, address: str, port: int, timeout_sec: float = 5.0): + self.address = address + self.port = port + self.conn = self.connect() + self.version = self.get_version() + self.timeout_sec = timeout_sec + + def serialize_query(self, server_query: query.ServerQuery) -> bytes: + version = self.version.bincode_serialize() + response = server_query.bincode_serialize() + response_length = int(len(response)).to_bytes(8, "little") + return config.HEADER + version + response_length + response + + def deserialize_server_response(self, b: bytes) -> server_response.ServerResult: + return server_response.ServerResult([]).bincode_deserialize(b) + + def connect(self) -> socket.socket: + tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + tcp_socket.connect((self.address, self.port)) + return 
tcp_socket + + def send(self, message: query.ServerQuery): + serialized_bin = self.serialize_query(message) + self.conn.sendall(serialized_bin) + + def receive(self) -> server_response.ServerResult: + header = self.conn.recv(8) + if header == b"": + self.conn.close() + raise AhnlichProtocolException("socket connection broken") + + if header != config.HEADER: + raise AhnlichProtocolException("Fake server") + # ignore version of 5 bytes + _version = self.conn.recv(5) + length = self.conn.recv(8) + # header length u64, little endian + length_to_read = int.from_bytes(length, byteorder="little") + # information data + self.conn.settimeout(self.timeout_sec) + data = self.conn.recv(length_to_read) + response = self.deserialize_server_response(data) + return response + + def process_request( + self, message: query.ServerQuery + ) -> server_response.ServerResult: + self.send(message=message) + response = self.receive() + return response + + @staticmethod + def get_version() -> server_response.Version: + from importlib import metadata + + try: + str_version = metadata.version(config.PACKAGE_NAME) + except metadata.PackageNotFoundError: + import toml + + with open(config.BASE_DIR / "pyproject.toml", "r") as f: + reader = toml.load(f) + str_version = reader["tool"]["poetry"]["version"] + + # split and convert from str to int + return server_response.Version(*map(lambda x: int(x), str_version.split("."))) diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/tests/__init__.py b/sdk/ahnlich-client-py/ahnlich_client_py/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/tests/conftest.py b/sdk/ahnlich-client-py/ahnlich_client_py/tests/conftest.py new file mode 100644 index 00000000..83c4d6c5 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/tests/conftest.py @@ -0,0 +1,71 @@ +import os +import random +import signal +import socket +import subprocess +import time + +import pytest + +from ahnlich_client_py import client, config +from ahnlich_client_py.libs import create_store_key + + +def is_port_occupied(port, host="127.0.0.1") -> bool: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.settimeout(1) + result = sock.connect_ex((host, port)) + return result == 0 + + +@pytest.fixture(scope="module") +def db_client(): + host = os.environ.get("AHNLICH_DB_HOST", "127.0.0.1") + port = int(os.environ.get("AHNLICH_DB_PORT", 1369)) + timeout_sec = float(os.environ.get("AHNLICH_DB_CLIENT_TIMEOUT", 5.0)) + return client.AhnlichDBClient(address=host, port=port, timeout_sec=timeout_sec) + + +@pytest.fixture +def random_port(): + port = random.randint(5000, 8000) + return port + + +@pytest.fixture +def spin_up_ahnlich_db(random_port): + port = random_port + command = f"cargo run --bin ahnlich-db run --port {port}".split(" ") + process = subprocess.Popen(args=command, cwd=config.AHNLICH_BIN_DIR) + while not is_port_occupied(port): + time.sleep(0.2) + yield port + # cleanup + os.kill(process.pid, signal.SIGINT) + # wait for process to clean up + process.wait(5) + + +@pytest.fixture(scope="module") +def module_scopped_ahnlich_db(): + port = 8001 + command = f"cargo run --bin ahnlich-db run --port {port}".split(" ") + process = subprocess.Popen(args=command, cwd=config.AHNLICH_BIN_DIR) + while not is_port_occupied(port): + time.sleep(0.2) + yield port + # cleanup + os.kill(process.pid, signal.SIGINT) + # wait for process to clean up + process.wait(5) + + +@pytest.fixture +def store_key(): + sample_array = [1.0, 2.0, 3.0, 4.0, 
5.0] + return create_store_key(sample_array) + + +@pytest.fixture +def store_value(): + return dict(job="sorcerer") diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_client_store_commands.py b/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_client_store_commands.py new file mode 100644 index 00000000..c65f3ead --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_client_store_commands.py @@ -0,0 +1,292 @@ +from ahnlich_client_py.client import AhnlichDBClient +from ahnlich_client_py.internals import query, server_response +from ahnlich_client_py.libs import create_store_key + +store_payload_no_predicates = { + "store_name": "Diretnan Station", + "dimension": 5, + "error_if_exists": True, +} + +store_payload_with_predicates = { + "store_name": "Diretnan Predication", + "dimension": 5, + "error_if_exists": True, + "create_predicates": ["is_tyrannical", "rank"], +} + + +def test_client_sends_create_stores_succeeds(module_scopped_ahnlich_db): + port = module_scopped_ahnlich_db + + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + response: server_response.ServerResult = db_client.create_store( + **store_payload_no_predicates + ) + + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Unit() + ) + + +def test_client_sends_list_stores_on_existing_database_succeeds( + module_scopped_ahnlich_db, +): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + response: server_response.ServerResult = db_client.list_stores() + store_list: server_response.ServerResponse__StoreList = response.results[0].value + store_info: server_response.StoreInfo = store_list.value[0] + assert store_info.name == store_payload_no_predicates["store_name"] + assert isinstance(response.results[0], server_response.Result__Ok) + + +def test_client_sends_create_stores_with_predicates_succeeds(module_scopped_ahnlich_db): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + response: server_response.ServerResult = db_client.create_store( + **store_payload_with_predicates + ) + + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Unit() + ) + + +def test_client_list_stores_finds_created_store_with_predicate( + module_scopped_ahnlich_db, +): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + response: server_response.ServerResult = db_client.list_stores() + assert isinstance(response.results[0], server_response.Result__Ok) + + store_lists: server_response.ServerResponse__StoreList = response.results[0].value + assert len(store_lists.value) == 2 + store_info: server_response.StoreInfo = store_lists.value[0] + queried_store_names = [] + for store_info in store_lists.value: + queried_store_names.append(store_info.name) + assert store_payload_with_predicates["store_name"] in queried_store_names + + +def test_client_set_in_store_succeeds( + module_scopped_ahnlich_db, store_key, store_value +): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + store_key_2 = create_store_key(data=[5.0, 3.0, 4.0, 3.9, 4.9]) + + # prepare data + store_data = { + "store_name": store_payload_no_predicates["store_name"], + "inputs": [(store_key, store_value), (store_key_2, {"rank": "chunin"})], + } + # process data + response: server_response.ServerResult = db_client.set(**store_data) + + assert isinstance(response.results[0], server_response.Result__Ok) 
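+    # Both entries in store_data are new to the store, so the Set below is expected to report inserted=2, updated=0.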
+ + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Set( + server_response.StoreUpsert(inserted=2, updated=0) + ) + ) + + +def test_client_get_key_succeeds(module_scopped_ahnlich_db, store_key, store_value): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + # prepare data + get_key_data = { + "store_name": store_payload_no_predicates["store_name"], + "keys": [store_key], + } + # process data + response: server_response.ServerResult = db_client.get_key(**get_key_data) + assert isinstance(response.results[0], server_response.Result__Ok) + expected_result = [(store_key, store_value)] + actual_response = response.results[0].value.value + assert len(actual_response) == len(expected_result) + assert actual_response[0][0].data == store_key.data + + +def test_client_get_by_predicate_fails_no_index_found_in_store( + module_scopped_ahnlich_db, store_key, store_value +): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + # prepare data + get_predicate_data = { + "store_name": store_payload_no_predicates["store_name"], + "condition": query.PredicateCondition__Value( + query.Predicate__Equals(key="job", value="sorcerer") + ), + } + # process data + response: server_response.ServerResult = db_client.get_by_predicate( + **get_predicate_data + ) + assert isinstance(response.results[0], server_response.Result__Err) + + error_message = "Predicate job not found in store" + error_response = response.results[0].value + assert error_message.lower() in error_response.lower() + + +def test_client_create_index_succeeds(module_scopped_ahnlich_db): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + create_index_data = { + "store_name": store_payload_no_predicates["store_name"], + "predicates": ["job", "rank"], + } + response: server_response.ServerResult = db_client.create_index(**create_index_data) + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__CreateIndex(2) + ) + + +def test_client_get_by_predicate_succeeds( + module_scopped_ahnlich_db, store_key, store_value +): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + # prepare data + get_predicate_data = { + "store_name": store_payload_no_predicates["store_name"], + "condition": query.PredicateCondition__Value( + query.Predicate__Equals(key="job", value="sorcerer") + ), + } + # process data + response: server_response.ServerResult = db_client.get_by_predicate( + **get_predicate_data + ) + assert isinstance(response.results[0], server_response.Result__Ok) + expected_result = [(store_key, store_value)] + actual_response = response.results[0].value.value + assert len(actual_response) == len(expected_result) + assert actual_response[0][0].data == store_key.data + + +def test_client_get_sim_n_succeeds(module_scopped_ahnlich_db, store_key, store_value): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + # closest to 1.0,2.0,3.0,4.0,5.0 + search_input = create_store_key(data=[1.0, 2.0, 3.0, 3.9, 4.9]) + + # prepare data + get_sim_n_data = { + "store_name": store_payload_no_predicates["store_name"], + "closest_n": 1, + "search_input": search_input, + "algorithm": query.Algorithm__CosineSimilarity(), + } + # process data + response: server_response.ServerResult = db_client.get_sim_n(**get_sim_n_data) + + actual_results: 
server_response.ServerResponse__GetSimN = response.results[ + 0 + ].value.value + + expected_results = [ + store_key, + store_value, + 0.9999504, + ] + assert len(actual_results) == 1 + assert actual_results[0][1] == expected_results[1] + assert str(expected_results[2]) in str(actual_results[0]).lower() + + +def test_client_drop_index_succeeds(module_scopped_ahnlich_db): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + create_index_data = { + "store_name": store_payload_no_predicates["store_name"], + "predicates": ["to_drop"], + } + response: server_response.ServerResult = db_client.create_index(**create_index_data) + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__CreateIndex(1) + ) + + drop_index_data = { + "store_name": store_payload_no_predicates["store_name"], + "predicates": ["to_drop"], + "error_if_not_exists": True, + } + + response: server_response.ServerResult = db_client.drop_index(**drop_index_data) + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Del(1) + ) + + +def test_client_delete_predicate_succeeds(module_scopped_ahnlich_db): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + delete_predicate_data = { + "store_name": store_payload_no_predicates["store_name"], + "condition": query.PredicateCondition__Value( + query.Predicate__Equals(key="rank", value="chunin") + ), + } + + response: server_response.ServerResult = db_client.delete_predicate( + **delete_predicate_data + ) + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Del(1) + ) + + +def test_client_delete_key_succeeds(module_scopped_ahnlich_db, store_key): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + delete_key_data = { + "store_name": store_payload_no_predicates["store_name"], + "keys": [store_key], + } + + response: server_response.ServerResult = db_client.delete_key(**delete_key_data) + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Del(1) + ) + + +def test_client_drop_store_succeeds(module_scopped_ahnlich_db): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + + drop_store_data = { + "store_name": store_payload_no_predicates["store_name"], + "error_if_not_exists": True, + } + + response: server_response.ServerResult = db_client.drop_store(**drop_store_data) + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Del(1) + ) + + +def test_client_list_stores_reflects_dropped_store( + module_scopped_ahnlich_db, +): + port = module_scopped_ahnlich_db + db_client = AhnlichDBClient(address="127.0.0.1", port=port) + response: server_response.ServerResult = db_client.list_stores() + store_list: server_response.ServerResponse__StoreList = response.results[0].value + assert len(store_list.value) == 1 + store_info: server_response.StoreInfo = store_list.value[0] + assert store_info.name == store_payload_with_predicates["store_name"] + assert isinstance(response.results[0], server_response.Result__Ok) diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_client_unit_commands.py b/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_client_unit_commands.py new file mode 100644 index 00000000..bf8fe9cf --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_client_unit_commands.py @@ -0,0 +1,35 
@@ +from ahnlich_client_py import client +from ahnlich_client_py.internals import server_response + + +def test_client_sends_ping_to_db_success(db_client): + response: server_response.ServerResult = db_client.ping() + + assert len(response.results) == 1 + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Pong() + ) + + +def test_client_sends_list_clients_to_db_success(db_client): + + response: server_response.ServerResult = db_client.list_clients() + assert len(response.results) == 1 + + +def test_client_sends_info_server_to_db_success(db_client): + response: server_response.ServerResult = db_client.info_server() + assert len(response.results) == 1 + info_server: server_response.ServerInfo = response.results[0].value + assert info_server.value.version == db_client.protocol.version + assert info_server.value.type == server_response.ServerType__Database() + + +def test_client_sends_list_stores_to_fresh_database_succeeds(spin_up_ahnlich_db): + port = spin_up_ahnlich_db + db_client = client.AhnlichDBClient(address="127.0.0.1", port=port) + response: server_response.ServerResult = db_client.list_stores() + + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__StoreList([]) + ) diff --git a/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_db_builders.py b/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_db_builders.py new file mode 100644 index 00000000..5c098077 --- /dev/null +++ b/sdk/ahnlich-client-py/ahnlich_client_py/tests/test_db_builders.py @@ -0,0 +1,28 @@ +from ahnlich_client_py import client +from ahnlich_client_py.internals import server_response + + +def test_client_sends_bulk_unit_requests_to_db_succeeds(spin_up_ahnlich_db): + port = spin_up_ahnlich_db + db_client = client.AhnlichDBClient(address="127.0.0.1", port=port) + request_builder = db_client.pipeline() + request_builder.ping() + request_builder.info_server() + request_builder.list_clients() + request_builder.list_stores() + + response: server_response.ServerResult = db_client.exec() + + assert len(response.results) == 4 + assert response.results[0] == server_response.Result__Ok( + server_response.ServerResponse__Pong() + ) + # assert info servers + info_server: server_response.ServerInfo = response.results[1].value + assert info_server.value.version == db_client.protocol.version + assert info_server.value.type == server_response.ServerType__Database() + + # assert list_stores + assert response.results[3] == server_response.Result__Ok( + server_response.ServerResponse__StoreList([]) + ) diff --git a/sdk/ahnlich-client-py/poetry.lock b/sdk/ahnlich-client-py/poetry.lock new file mode 100644 index 00000000..3dd475e6 --- /dev/null +++ b/sdk/ahnlich-client-py/poetry.lock @@ -0,0 +1,326 @@ +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. + +[[package]] +name = "astroid" +version = "3.2.2" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, + {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, +] + +[[package]] +name = "black" +version = "24.4.2" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = 
["uvloop (>=0.15.2)"] + +[[package]] +name = "bump2version" +version = "1.0.1" +description = "Version-bump your software with a single command!" +optional = false +python-versions = ">=3.5" +files = [ + {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, + {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dill" +version = "0.3.8" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pylint" +version = "3.2.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"}, + {file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"}, +] + +[package.dependencies] +astroid = ">=3.2.2,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pytest" +version = "8.2.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomlkit" +version = "0.12.5" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, + {file = "tomlkit-0.12.5.tar.gz", hash = 
"sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "7b449a2fcfdfae35c09e0fea287dba525e9d4cb74004c39d5ecbcd5a7e1e3c23" diff --git a/sdk/ahnlich-client-py/pyproject.toml b/sdk/ahnlich-client-py/pyproject.toml new file mode 100644 index 00000000..ecde0dc5 --- /dev/null +++ b/sdk/ahnlich-client-py/pyproject.toml @@ -0,0 +1,27 @@ +[tool.poetry] +name = "ahnlich-client-py" +version = "0.1.0" +description = "A python client for interacting with Ahnlich DB and AI" +authors = ["David Onuh "] +readme = "README.md" + +packages = [ + { include = "ahnlich_client_py" }, +] + +[tool.poetry.dependencies] +python = "^3.11" +numpy = "^1.26.4" +toml = "^0.10.2" +bump2version = "^1.0.1" + +[tool.poetry.group.dev.dependencies] +black = "^24.4.2" +isort = "^5.13.2" +pytest = "^8.2.2" +pylint = "^3.2.3" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + diff --git a/type_specs/query.json b/type_specs/query.json index 86d7001c..ece20cb7 100644 --- a/type_specs/query.json +++ b/type_specs/query.json @@ -315,5 +315,16 @@ "Ping": "UNIT" } } + }, + "ServerQuery": { + "STRUCT": [ + { + "queries": { + "SEQ": { + "TYPENAME": "Query" + } + } + } + ] } } \ No newline at end of file diff --git a/type_specs/server_response.json b/type_specs/server_response.json index 4de14e9d..d3fc7615 100644 --- a/type_specs/server_response.json +++ b/type_specs/server_response.json @@ -30,6 +30,22 @@ } ] }, + "Result": { + "ENUM": { + "0": { + "Ok": { + "NEWTYPE": { + "TYPENAME": "ServerResponse" + } + } + }, + "1": { + "Err": { + "NEWTYPE": "STR" + } + } + } + }, "ServerInfo": { "STRUCT": [ { @@ -146,6 +162,17 @@ } } }, + "ServerResult": { + "STRUCT": [ + { + "results": { + "SEQ": { + "TYPENAME": "Result" + } + } + } + ] + }, "ServerType": { "ENUM": { "0": {