From 3a0883a2c2d2de831c3f5324e31efd81f068d1a8 Mon Sep 17 00:00:00 2001 From: Gabriel Fior Date: Wed, 2 Oct 2024 16:13:58 -0300 Subject: [PATCH] 404 add methods to pmat for storing prediction into ipfs and adding that to contract mapping (#446) * Small log | PMAT integration for agent result mapping | test * Subgraph integration for result mapping * WIP * Added IPFS upload to DeployableTraderAgent * Added test for ipfs upload/unpin * Removed test hardcoding * Local test deploy working * Small fixes before PR review * Fixing CI * Added txHashes to contract prediction being stored on chain * Fixed isort * Missing secrets * Added safety margin * Fixing test * Adding new test * Making tests pass * local_web3 session scoped * Tests finally passing locally * Fixing unit tests * Increasing rtol on market_moving bet test * Fixed isort * Fixed mypy * Incrased timeout of test * Making tests pass * Reactivating tests * Update tests_integration/tools/ipfs/test_ipfs_handler.py Co-authored-by: Evan Griffiths <56087052+evangriffiths@users.noreply.github.com> * Implemented PR comments * Extracted local_chain test to other PR * Fixed isort --------- Co-authored-by: Evan Griffiths <56087052+evangriffiths@users.noreply.github.com> --- .env.example | 4 +- .github/workflows/python_ci.yaml | 2 + poetry.lock | 67 ++++--- .../abis/omen_agentresultmapping.abi.json | 171 ++++++++++++++++++ prediction_market_agent_tooling/config.py | 15 ++ .../deploy/agent.py | 67 ++++++- .../markets/omen/data_models.py | 28 ++- .../markets/omen/omen_contracts.py | 40 ++++ .../markets/omen/omen_subgraph_handler.py | 35 ++++ .../tools/ipfs/ipfs_handler.py | 33 ++++ pyproject.toml | 1 + scripts/store_prediction.py | 51 ++++++ .../omen/test_omen_subgraph_handler.py | 49 +++-- tests/markets/test_betting_strategies.py | 2 +- .../tools/ipfs/test_ipfs_handler.py | 32 ++++ .../conftest.py | 18 +- .../markets/omen/test_local_chain.py | 2 +- .../markets/omen/test_omen.py | 24 +++ 18 files changed, 594 insertions(+), 47 
deletions(-) create mode 100644 prediction_market_agent_tooling/abis/omen_agentresultmapping.abi.json create mode 100644 prediction_market_agent_tooling/tools/ipfs/ipfs_handler.py create mode 100644 scripts/store_prediction.py create mode 100644 tests_integration/tools/ipfs/test_ipfs_handler.py diff --git a/.env.example b/.env.example index 63365c39..ee40fcd2 100644 --- a/.env.example +++ b/.env.example @@ -1,4 +1,6 @@ MANIFOLD_API_KEY= BET_FROM_PRIVATE_KEY= OPENAI_API_KEY= -GRAPH_API_KEY= \ No newline at end of file +GRAPH_API_KEY= +PINATA_API_KEY= +PINATA_API_SECRET= \ No newline at end of file diff --git a/.github/workflows/python_ci.yaml b/.github/workflows/python_ci.yaml index 0d40f0d7..4e9fb11c 100644 --- a/.github/workflows/python_ci.yaml +++ b/.github/workflows/python_ci.yaml @@ -16,6 +16,8 @@ env: GNOSIS_RPC_URL: ${{ secrets.GNOSIS_RPC_URL }} GRAPH_API_KEY: ${{ secrets.GRAPH_API_KEY }} METACULUS_API_KEY: ${{ secrets.METACULUS_API_KEY }} + PINATA_API_KEY: ${{ secrets.PINATA_API_KEY }} + PINATA_API_SECRET: ${{ secrets.PINATA_API_SECRET }} jobs: mypy: diff --git a/poetry.lock b/poetry.lock index 753be1cc..7f1c7026 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -2010,13 +2010,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.6" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -2027,7 +2027,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httplib2" @@ -3149,13 +3149,13 @@ files = [ [[package]] name = "openai" -version = "1.50.2" +version = "1.51.0" description = "The official Python library for the openai API" optional = true python-versions = ">=3.7.1" files = [ - {file = "openai-1.50.2-py3-none-any.whl", hash = "sha256:822dd2051baa3393d0d5406990611975dd6f533020dc9375a34d4fe67e8b75f7"}, - {file = "openai-1.50.2.tar.gz", hash = "sha256:3987ae027152fc8bea745d60b02c8f4c4a76e1b5c70e73565fa556db6f78c9e6"}, + {file = "openai-1.51.0-py3-none-any.whl", hash = "sha256:d9affafb7e51e5a27dce78589d4964ce4d6f6d560307265933a94b2e3f3c5d2c"}, + {file = "openai-1.51.0.tar.gz", hash = "sha256:8dc4f9d75ccdd5466fc8c99a952186eddceb9fd6ba694044773f3736a847149d"}, ] [package.dependencies] @@ -3257,31 +3257,43 @@ python-versions = ">=3.9" files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file 
= "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", 
hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, @@ -3472,6 +3484,20 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa typing = ["typing-extensions"] xmp = 
["defusedxml"] +[[package]] +name = "pinatapy-vourhey" +version = "0.2.0" +description = "Non-official Pinata.cloud library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pinatapy-vourhey-0.2.0.tar.gz", hash = "sha256:453a32629ad11b92dd7153feb64d4302c8d17036cc17c5c3d8108809700bdc2b"}, + {file = "pinatapy_vourhey-0.2.0-py3-none-any.whl", hash = "sha256:86c204949eb9dd8e21ca28e5ee1f7b774a72f57f998ef7fba8a4ca41dd5a2554"}, +] + +[package.dependencies] +requests = "*" + [[package]] name = "pipe" version = "2.2" @@ -3871,7 +3897,6 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] @@ -3882,7 +3907,6 @@ description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] @@ -4536,18 +4560,19 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.8.1" +version = "13.9.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, - {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, + {file = "rich-13.9.1-py3-none-any.whl", hash = 
"sha256:b340e739f30aa58921dc477b8adaa9ecdb7cecc217be01d93730ee1bc8aa83be"}, + {file = "rich-13.9.1.tar.gz", hash = "sha256:097cffdf85db1babe30cc7deba5ab3a29e1b9885047dab24c57e9a7f8a9c1466"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] @@ -5041,13 +5066,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "streamlit" -version = "1.38.0" +version = "1.39.0" description = "A faster way to build and share data apps" optional = false python-versions = "!=3.9.7,>=3.8" files = [ - {file = "streamlit-1.38.0-py2.py3-none-any.whl", hash = "sha256:0653ecfe86fef0f1608e3e082aef7eb335d8713f6f31e9c3b19486d1c67d7c41"}, - {file = "streamlit-1.38.0.tar.gz", hash = "sha256:c4bf36b3ef871499ed4594574834583113f93f077dd3035d516d295786f2ad63"}, + {file = "streamlit-1.39.0-py2.py3-none-any.whl", hash = "sha256:a359fc54ed568b35b055ff1d453c320735539ad12e264365a36458aef55a5fba"}, + {file = "streamlit-1.39.0.tar.gz", hash = "sha256:fef9de7983c4ee65c08e85607d7ffccb56b00482b1041fa62f90e4815d39df3a"}, ] [package.dependencies] @@ -5058,18 +5083,18 @@ click = ">=7.0,<9" gitpython = ">=3.0.7,<3.1.19 || >3.1.19,<4" numpy = ">=1.20,<3" packaging = ">=20,<25" -pandas = ">=1.3.0,<3" +pandas = ">=1.4.0,<3" pillow = ">=7.1.0,<11" protobuf = ">=3.20,<6" pyarrow = ">=7.0" pydeck = ">=0.8.0b4,<1" requests = ">=2.27,<3" rich = ">=10.14.0,<14" -tenacity = ">=8.1.0,<9" +tenacity = ">=8.1.0,<10" toml = ">=0.10.1,<2" tornado = ">=6.0.3,<7" typing-extensions = ">=4.3.0,<5" -watchdog = {version = ">=2.1.5,<5", markers = "platform_system != \"Darwin\""} +watchdog = {version = ">=2.1.5,<6", markers = "platform_system != \"Darwin\""} [package.extras] snowflake = ["snowflake-connector-python (>=2.8.0)", "snowflake-snowpark-python[modin] (>=1.17.0)"] @@ -5837,4 +5862,4 @@ openai = ["openai"] [metadata] 
lock-version = "2.0" python-versions = ">=3.10,<3.12" -content-hash = "658abc9433182dac32862d8925b9fd262609b55adf55ecc7bbbed1af39709883" +content-hash = "a8aa7db40788e0cbabcab2be15e479007ffab8542ef3266743f29cd85b1cacd1" diff --git a/prediction_market_agent_tooling/abis/omen_agentresultmapping.abi.json b/prediction_market_agent_tooling/abis/omen_agentresultmapping.abi.json new file mode 100644 index 00000000..7d0f38b2 --- /dev/null +++ b/prediction_market_agent_tooling/abis/omen_agentresultmapping.abi.json @@ -0,0 +1,171 @@ + [ + { + "type": "constructor", + "inputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "addPrediction", + "inputs": [ + { + "name": "marketAddress", + "type": "address", + "internalType": "address" + }, + { + "name": "prediction", + "type": "tuple", + "internalType": "struct Prediction", + "components": [ + { + "name": "publisherAddress", + "type": "address", + "internalType": "address" + }, + { + "name": "ipfsHash", + "type": "bytes32", + "internalType": "bytes32" + }, + { + "name": "txHashes", + "type": "bytes32[]", + "internalType": "bytes32[]" + }, + { + "name": "estimatedProbabilityBps", + "type": "uint16", + "internalType": "uint16" + } + ] + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "getPredictionByIndex", + "inputs": [ + { + "name": "marketAddress", + "type": "address", + "internalType": "address" + }, + { + "name": "index", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [ + { + "name": "", + "type": "tuple", + "internalType": "struct Prediction", + "components": [ + { + "name": "publisherAddress", + "type": "address", + "internalType": "address" + }, + { + "name": "ipfsHash", + "type": "bytes32", + "internalType": "bytes32" + }, + { + "name": "txHashes", + "type": "bytes32[]", + "internalType": "bytes32[]" + }, + { + "name": "estimatedProbabilityBps", + "type": "uint16", + "internalType": "uint16" + } + ] + } + ], + 
"stateMutability": "view" + }, + { + "type": "function", + "name": "getPredictions", + "inputs": [ + { + "name": "marketAddress", + "type": "address", + "internalType": "address" + } + ], + "outputs": [ + { + "name": "", + "type": "tuple[]", + "internalType": "struct Prediction[]", + "components": [ + { + "name": "publisherAddress", + "type": "address", + "internalType": "address" + }, + { + "name": "ipfsHash", + "type": "bytes32", + "internalType": "bytes32" + }, + { + "name": "txHashes", + "type": "bytes32[]", + "internalType": "bytes32[]" + }, + { + "name": "estimatedProbabilityBps", + "type": "uint16", + "internalType": "uint16" + } + ] + } + ], + "stateMutability": "view" + }, + { + "type": "event", + "name": "PredictionAdded", + "inputs": [ + { + "name": "marketAddress", + "type": "address", + "indexed": true, + "internalType": "address" + }, + { + "name": "estimatedProbabilityBps", + "type": "uint16", + "indexed": false, + "internalType": "uint16" + }, + { + "name": "publisherAddress", + "type": "address", + "indexed": true, + "internalType": "address" + }, + { + "name": "txHashes", + "type": "bytes32[]", + "indexed": false, + "internalType": "bytes32[]" + }, + { + "name": "ipfsHash", + "type": "bytes32", + "indexed": false, + "internalType": "bytes32" + } + ], + "anonymous": false + } + ] \ No newline at end of file diff --git a/prediction_market_agent_tooling/config.py b/prediction_market_agent_tooling/config.py index c124625b..1be6fa0b 100644 --- a/prediction_market_agent_tooling/config.py +++ b/prediction_market_agent_tooling/config.py @@ -45,6 +45,9 @@ class APIKeys(BaseSettings): LANGFUSE_HOST: t.Optional[str] = None LANGFUSE_DEPLOYMENT_VERSION: t.Optional[str] = None + PINATA_API_KEY: t.Optional[SecretStr] = None + PINATA_API_SECRET: t.Optional[SecretStr] = None + TAVILY_API_KEY: t.Optional[SecretStr] = None SQLALCHEMY_DB_URL: t.Optional[SecretStr] = None @@ -148,6 +151,18 @@ def default_enable_langfuse(self) -> bool: and self.LANGFUSE_HOST is not 
None ) + @property + def pinata_api_key(self) -> SecretStr: + return check_not_none( + self.PINATA_API_KEY, "PINATA_API_KEY missing in the environment." + ) + + @property + def pinata_api_secret(self) -> SecretStr: + return check_not_none( + self.PINATA_API_SECRET, "PINATA_API_SECRET missing in the environment." + ) + @property def tavily_api_key(self) -> SecretStr: return check_not_none( diff --git a/prediction_market_agent_tooling/deploy/agent.py b/prediction_market_agent_tooling/deploy/agent.py index cf421c29..6c77dc66 100644 --- a/prediction_market_agent_tooling/deploy/agent.py +++ b/prediction_market_agent_tooling/deploy/agent.py @@ -10,6 +10,7 @@ from pydantic import BaseModel, BeforeValidator, computed_field from typing_extensions import Annotated +from web3 import Web3 from prediction_market_agent_tooling.config import APIKeys from prediction_market_agent_tooling.deploy.betting_strategy import ( @@ -30,7 +31,7 @@ gcp_function_is_active, gcp_resolve_api_keys_secrets, ) -from prediction_market_agent_tooling.gtypes import xDai, xdai_type +from prediction_market_agent_tooling.gtypes import HexStr, xDai, xdai_type from prediction_market_agent_tooling.loggers import logger from prediction_market_agent_tooling.markets.agent_market import ( AgentMarket, @@ -47,17 +48,27 @@ MarketType, have_bet_on_market_since, ) +from prediction_market_agent_tooling.markets.omen.data_models import ( + ContractPrediction, + IPFSAgentResult, +) from prediction_market_agent_tooling.markets.omen.omen import ( is_minimum_required_balance, redeem_from_all_user_positions, withdraw_wxdai_to_xdai_to_keep_balance, ) +from prediction_market_agent_tooling.markets.omen.omen_contracts import ( + OmenAgentResultMappingContract, +) from prediction_market_agent_tooling.monitor.monitor_app import ( MARKET_TYPE_TO_DEPLOYED_AGENT, ) +from prediction_market_agent_tooling.tools.hexbytes_custom import HexBytes +from prediction_market_agent_tooling.tools.ipfs.ipfs_handler import IPFSHandler from 
prediction_market_agent_tooling.tools.is_predictable import is_predictable_binary from prediction_market_agent_tooling.tools.langfuse_ import langfuse_context, observe from prediction_market_agent_tooling.tools.utils import DatetimeWithTimezone, utcnow +from prediction_market_agent_tooling.tools.web3_utils import ipfscidv0_to_byte32 MAX_AVAILABLE_MARKETS = 20 TRADER_TAG = "trader" @@ -291,6 +302,7 @@ def __init__( ) -> None: super().__init__(enable_langfuse=enable_langfuse) self.place_bet = place_bet + self.ipfs_handler = IPFSHandler(APIKeys()) def get_betting_strategy(self, market: AgentMarket) -> BettingStrategy: user_id = market.get_user_id(api_keys=APIKeys()) @@ -461,7 +473,7 @@ def process_market( placed_trades = [] if self.place_bet: for trade in trades: - logger.info(f"Executing trade {trade}") + logger.info(f"Executing trade {trade} on market {market.id}") match trade.trade_type: case TradeType.BUY: @@ -476,18 +488,61 @@ def process_market( raise ValueError(f"Unexpected trade type {trade.trade_type}.") placed_trades.append(PlacedTrade.from_trade(trade, id)) - self.after_process_market(market_type, market) - processed_market = ProcessedMarket(answer=answer, trades=placed_trades) self.update_langfuse_trace_by_processed_market(market_type, processed_market) + self.after_process_market( + market_type, market, processed_market=processed_market + ) + logger.info(f"Processed market {market.question=} from {market.url=}.") return processed_market def after_process_market( - self, market_type: MarketType, market: AgentMarket + self, + market_type: MarketType, + market: AgentMarket, + processed_market: ProcessedMarket, ) -> None: - pass + if market_type != MarketType.OMEN: + logger.info( + f"Skipping after_process_market since market_type {market_type} != OMEN" + ) + return + keys = APIKeys() + self.store_prediction( + market_id=market.id, processed_market=processed_market, keys=keys + ) + + def store_prediction( + self, market_id: str, processed_market: 
ProcessedMarket, keys: APIKeys + ) -> None: + reasoning = ( + processed_market.answer.reasoning + if processed_market.answer.reasoning + else "" + ) + ipfs_hash = self.ipfs_handler.store_agent_result( + IPFSAgentResult(reasoning=reasoning) + ) + + tx_hashes = [ + HexBytes(HexStr(i.id)) for i in processed_market.trades if i.id is not None + ] + prediction = ContractPrediction( + publisher=keys.public_key, + ipfs_hash=ipfscidv0_to_byte32(ipfs_hash), + tx_hashes=tx_hashes, + estimated_probability_bps=int(processed_market.answer.p_yes * 10000), + ) + tx_receipt = OmenAgentResultMappingContract().add_prediction( + api_keys=keys, + market_address=Web3.to_checksum_address(market_id), + prediction=prediction, + ) + logger.info( + f"Added prediction to market {market_id}. - receipt {tx_receipt['transactionHash'].hex()}." + ) def before_process_markets(self, market_type: MarketType) -> None: """ diff --git a/prediction_market_agent_tooling/markets/omen/data_models.py b/prediction_market_agent_tooling/markets/omen/data_models.py index 3c0d8546..a429085d 100644 --- a/prediction_market_agent_tooling/markets/omen/data_models.py +++ b/prediction_market_agent_tooling/markets/omen/data_models.py @@ -2,7 +2,7 @@ from datetime import datetime import pytz -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict, Field, computed_field from web3 import Web3 from prediction_market_agent_tooling.gtypes import ( @@ -742,3 +742,29 @@ class CreatedMarket(BaseModel): initial_funds: Wei fee: Wei distribution_hint: list[OmenOutcomeToken] | None + + +class ContractPrediction(BaseModel): + model_config = ConfigDict(populate_by_name=True) + publisher: str = Field(..., alias="publisherAddress") + ipfs_hash: HexBytes = Field(..., alias="ipfsHash") + tx_hashes: list[HexBytes] = Field(..., alias="txHashes") + estimated_probability_bps: int = Field(..., alias="estimatedProbabilityBps") + + @computed_field # type: ignore[prop-decorator] # Mypy issue: 
https://github.com/python/mypy/issues/14461 + @property + def publisher_checksummed(self) -> ChecksumAddress: + return Web3.to_checksum_address(self.publisher) + + @staticmethod + def from_tuple(values: tuple[t.Any]) -> "ContractPrediction": + data = {k: v for k, v in zip(ContractPrediction.model_fields.keys(), values)} + return ContractPrediction.model_validate(data) + + +class IPFSAgentResult(BaseModel): + reasoning: str + + model_config = ConfigDict( + extra="forbid", + ) diff --git a/prediction_market_agent_tooling/markets/omen/omen_contracts.py b/prediction_market_agent_tooling/markets/omen/omen_contracts.py index d9f9b4df..2adcfadd 100644 --- a/prediction_market_agent_tooling/markets/omen/omen_contracts.py +++ b/prediction_market_agent_tooling/markets/omen/omen_contracts.py @@ -25,6 +25,7 @@ from prediction_market_agent_tooling.markets.omen.data_models import ( INVALID_ANSWER_HEX_BYTES, ConditionPreparationEvent, + ContractPrediction, FPMMFundingAddedEvent, OmenFixedProductMarketMakerCreationEvent, RealitioLogNewQuestionEvent, @@ -777,6 +778,45 @@ def isPendingArbitration( return is_pending_arbitration +class OmenAgentResultMappingContract(ContractOnGnosisChain): + # Contract ABI taken from built https://github.com/gnosis/labs-contracts. 
+ + abi: ABI = abi_field_validator( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), + "../../abis/omen_agentresultmapping.abi.json", + ) + ) + + address: ChecksumAddress = Web3.to_checksum_address( + "0x260E1077dEA98e738324A6cEfB0EE9A272eD471a" + ) + + def get_predictions( + self, + market_address: ChecksumAddress, + web3: Web3 | None = None, + ) -> list[ContractPrediction]: + prediction_tuples = self.call( + "getPredictions", function_params=[market_address], web3=web3 + ) + return [ContractPrediction.from_tuple(p) for p in prediction_tuples] + + def add_prediction( + self, + api_keys: APIKeys, + market_address: ChecksumAddress, + prediction: ContractPrediction, + web3: Web3 | None = None, + ) -> TxReceipt: + return self.send( + api_keys=api_keys, + function_name="addPrediction", + function_params=[market_address, prediction.model_dump(by_alias=True)], + web3=web3, + ) + + class OmenThumbnailMapping(ContractOnGnosisChain): # Contract ABI taken from built https://github.com/gnosis/labs-contracts. 
abi: ABI = abi_field_validator( diff --git a/prediction_market_agent_tooling/markets/omen/omen_subgraph_handler.py b/prediction_market_agent_tooling/markets/omen/omen_subgraph_handler.py index ea1f6b8e..e830127b 100644 --- a/prediction_market_agent_tooling/markets/omen/omen_subgraph_handler.py +++ b/prediction_market_agent_tooling/markets/omen/omen_subgraph_handler.py @@ -20,6 +20,7 @@ from prediction_market_agent_tooling.markets.agent_market import FilterBy, SortBy from prediction_market_agent_tooling.markets.omen.data_models import ( OMEN_BINARY_MARKET_OUTCOMES, + ContractPrediction, OmenBet, OmenMarket, OmenPosition, @@ -60,6 +61,8 @@ class OmenSubgraphHandler(metaclass=SingletonMeta): OMEN_IMAGE_MAPPING_GRAPH_URL = "https://gateway-arbitrum.network.thegraph.com/api/{graph_api_key}/subgraphs/id/EWN14ciGK53PpUiKSm7kMWQ6G4iz3tDrRLyZ1iXMQEdu" + OMEN_AGENT_RESULT_MAPPING_GRAPH_URL = "https://gateway-arbitrum.network.thegraph.com/api/{graph_api_key}/subgraphs/id/GoE3UFyc8Gg9xzv92oinonyhRCphpGu62qB2Eh2XvJ8F" + INVALID_ANSWER = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" def __init__(self) -> None: @@ -96,6 +99,12 @@ def __init__(self) -> None: ) ) + self.omen_agent_result_mapping_subgraph = self.sg.load_subgraph( + self.OMEN_AGENT_RESULT_MAPPING_GRAPH_URL.format( + graph_api_key=keys.graph_api_key.get_secret_value() + ) + ) + def _get_fields_for_bets(self, bets_field: FieldPath) -> list[FieldPath]: markets = bets_field.fpmm fields_for_markets = self._get_fields_for_markets(markets) @@ -797,3 +806,29 @@ def get_market_image(self, market_id: HexAddress) -> ImageType | None: if image_url is not None else None ) + + def get_agent_results_for_market( + self, market_id: HexAddress | None = None + ) -> list[ContractPrediction]: + where_stms = {} + if market_id: + where_stms["marketAddress"] = market_id.lower() + + prediction_added = ( + self.omen_agent_result_mapping_subgraph.Query.predictionAddeds( + where=where_stms, + orderBy="blockNumber", + 
orderDirection="asc", + ) + ) + fields = [ + prediction_added.publisherAddress, + prediction_added.ipfsHash, + prediction_added.txHashes, + prediction_added.estimatedProbabilityBps, + ] + result = self.sg.query_json(fields) + items = self._parse_items_from_json(result) + if not items: + return [] + return [ContractPrediction.model_validate(i) for i in items] diff --git a/prediction_market_agent_tooling/tools/ipfs/ipfs_handler.py b/prediction_market_agent_tooling/tools/ipfs/ipfs_handler.py new file mode 100644 index 00000000..ea263274 --- /dev/null +++ b/prediction_market_agent_tooling/tools/ipfs/ipfs_handler.py @@ -0,0 +1,33 @@ +import json +import tempfile + +from pinatapy import PinataPy + +from prediction_market_agent_tooling.config import APIKeys +from prediction_market_agent_tooling.gtypes import IPFSCIDVersion0 +from prediction_market_agent_tooling.markets.omen.data_models import IPFSAgentResult + + +class IPFSHandler: + def __init__(self, api_keys: APIKeys): + self.pinata = PinataPy( + api_keys.pinata_api_key.get_secret_value(), + api_keys.pinata_api_secret.get_secret_value(), + ) + + def upload_file(self, file_path: str) -> IPFSCIDVersion0: + return IPFSCIDVersion0( + self.pinata.pin_file_to_ipfs(file_path, save_absolute_paths=False)[ + "IpfsHash" + ] + ) + + def store_agent_result(self, agent_result: IPFSAgentResult) -> IPFSCIDVersion0: + with tempfile.NamedTemporaryFile(mode="r+", encoding="utf-8") as json_file: + json.dump(agent_result.model_dump(), json_file, indent=4) + json_file.flush() + ipfs_hash = self.upload_file(json_file.name) + return ipfs_hash + + def unpin_file(self, hash_to_remove: str) -> None: + self.pinata.remove_pin_from_ipfs(hash_to_remove=hash_to_remove) diff --git a/pyproject.toml b/pyproject.toml index 5087fe62..fb9e1b72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,7 @@ sqlmodel = "^0.0.21" psycopg2-binary = "^2.9.9" base58 = ">=1.0.2,<2.0" loky = "^3.4.1" +pinatapy-vourhey = "^0.2.0" [tool.poetry.extras] openai = 
["openai"] diff --git a/scripts/store_prediction.py b/scripts/store_prediction.py new file mode 100644 index 00000000..42526671 --- /dev/null +++ b/scripts/store_prediction.py @@ -0,0 +1,51 @@ +import typer +from web3 import Web3 + +from prediction_market_agent_tooling.config import APIKeys +from prediction_market_agent_tooling.gtypes import private_key_type +from prediction_market_agent_tooling.loggers import logger +from prediction_market_agent_tooling.markets.omen.data_models import ContractPrediction +from prediction_market_agent_tooling.markets.omen.omen_contracts import ( + OmenAgentResultMappingContract, +) +from prediction_market_agent_tooling.tools.hexbytes_custom import HexBytes + + +def main( + from_private_key: str = typer.Option(), +) -> None: + """ + Helper script to create a market on Omen, usage: + + ```bash + python scripts/store_prediction.py \ + --from-private-key your-private-key + ``` + + Market can be created also on the web: https://aiomen.eth.limo/#/create + """ + + agent_result_mapping = OmenAgentResultMappingContract() + api_keys = APIKeys( + BET_FROM_PRIVATE_KEY=private_key_type(from_private_key), + SAFE_ADDRESS=None, + ) + market_address = Web3.to_checksum_address(api_keys.public_key) + dummy_transaction_hash = ( + "0x3750ffa211dab39b4d0711eb27b02b56a17fa9d257ee549baa3110725fd1d41b" + ) + contract_prediction = ContractPrediction( + tx_hashes=[HexBytes(dummy_transaction_hash)], + estimated_probability_bps=5454, + ipfs_hash=HexBytes(dummy_transaction_hash), + publisher=api_keys.public_key, + ) + tx_hash = agent_result_mapping.add_prediction( + api_keys=api_keys, market_address=market_address, prediction=contract_prediction + ) + + logger.info(f"Added prediction, tx_hash {tx_hash}") + + +if __name__ == "__main__": + typer.run(main) diff --git a/tests/markets/omen/test_omen_subgraph_handler.py b/tests/markets/omen/test_omen_subgraph_handler.py index f29f1276..9ba95cc5 100644 --- a/tests/markets/omen/test_omen_subgraph_handler.py +++ 
def test_omen_get_non_existing_market_by_id(
    omen_subgraph_handler: OmenSubgraphHandler,
) -> None:
    """Looking up a market id that does not exist must raise a ValueError."""
    missing_id = HexAddress(HexStr("0x123"))
    with pytest.raises(ValueError) as e:
        omen_subgraph_handler.get_omen_market_by_market_id(missing_id)
    assert "Fetched wrong number of markets. Expected 1 but got 0" in str(e)


def test_get_existing_image(omen_subgraph_handler: OmenSubgraphHandler) -> None:
    """A market known to have an image returns both its URL and the image itself."""
    market_id = HexAddress(HexStr("0x0539590c0cf0d929e3f40b290fda04b9b4a8cf68"))
    expected_url = (
        "https://ipfs.io/ipfs/QmRiPQ4x7jAgKMyJMV8Uqw7vQir6vmgB851TXe7CgQx7cV"
    )
    image_url = omen_subgraph_handler.get_market_image_url(market_id)
    assert image_url == expected_url
    image = omen_subgraph_handler.get_market_image(market_id)
    assert image is not None
def test_will_return_non_wxdai_markets_if_asked_for(
    omen_subgraph_handler: OmenSubgraphHandler,
) -> None:
    """With the collateral filter disabled, non-wxDai markets are returned."""
    markets = omen_subgraph_handler.get_omen_binary_markets(
        limit=None,
        id_in=[MARKET_ID_WITH_SDAI_AS_COLLATERAL],
        collateral_token_address_in=None,
    )
    assert len(markets) == 1, "Should have returned that one market with the given ID."


def test_get_predictions_from_market(
    omen_subgraph_handler: OmenSubgraphHandler,
) -> None:
    """At least one agent prediction exists and the oldest one has the expected publisher."""
    # NOTE(review): the test name suggests filtering by market, but no market
    # address is passed — confirm get_agent_results_for_market's default behavior.
    predictions = omen_subgraph_handler.get_agent_results_for_market()
    # We have at least 1 prediction.
    # We hardcode the first publisher we added a prediction for.
    first_publisher = Web3.to_checksum_address(
        "0x134f193625bbc38f31aeeecf41f5f96c1ad6ea9a"
    )
    assert len(predictions) >= 1
    # We can get the 0th element since we sort by block number asc.
    assert predictions[0].publisher_checksummed == first_publisher
@pytest.fixture(scope="module")
def test_ipfs_handler() -> t.Generator[IPFSHandler, None, None]:
    """Module-scoped IPFSHandler built from environment-provided API keys."""
    keys = APIKeys()
    yield IPFSHandler(keys)


def test_ipfs_upload_and_removal(test_ipfs_handler: IPFSHandler) -> None:
    """Round-trip: pin a unique file, fetch it from a public gateway, then unpin it."""
    # We add the current datetime to avoid uploading an existing file (CID is content-based).
    # datetime.utcnow() is deprecated since Python 3.12; use an aware UTC timestamp instead.
    temp_string = f"Hello World {datetime.datetime.now(datetime.timezone.utc)}"
    with NamedTemporaryFile() as temp_file:
        temp_file.write(temp_string.encode("utf-8"))
        temp_file.flush()
        ipfs_hash = test_ipfs_handler.upload_file(temp_file.name)

    # assert uploaded
    # can take a while to be available for download
    r = requests.get(f"https://gateway.pinata.cloud/ipfs/{ipfs_hash}", timeout=60)
    r.raise_for_status()
    assert r.text == temp_string
    # remove from IPFS
    test_ipfs_handler.unpin_file(ipfs_hash)
def create_and_fund_random_account(
    web3: Web3, private_key: PrivateKey, deposit_amount: xDai = xDai(10)
) -> LocalAccount:
    """Create a brand-new local account and seed it with ``deposit_amount`` xDai
    sent from the account behind ``private_key``."""
    new_account: LocalAccount = Account.create()
    send_xdai_to(
        web3=web3,
        from_private_key=private_key,
        value=xdai_to_wei(deposit_amount),
        to_address=new_account.address,
    )
    return new_account
def test_add_predictions(local_web3: Web3, test_keys: APIKeys) -> None:
    """Store a prediction on the result-mapping contract and read it back unchanged."""
    mapping_contract = OmenAgentResultMappingContract()
    # A real market address is not needed on the local chain; reuse our own address.
    market_address = test_keys.public_key
    dummy_tx_hash = (
        "0x3750ffa211dab39b4d0711eb27b02b56a17fa9d257ee549baa3110725fd1d41b"
    )
    prediction = ContractPrediction(
        publisher=test_keys.public_key,
        ipfs_hash=HexBytes(dummy_tx_hash),
        tx_hashes=[HexBytes(dummy_tx_hash)],
        estimated_probability_bps=5454,
    )

    mapping_contract.add_prediction(
        test_keys, market_address, prediction, web3=local_web3
    )
    stored = mapping_contract.get_predictions(market_address, web3=local_web3)
    assert len(stored) == 1
    assert stored[0] == prediction
test_place_bet_with_prev_existing_positions( local_web3: Web3, test_keys: APIKeys ) -> None: