From d06f99d2a409168301c638c39f0c4262ad6956db Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 11:12:22 -0500 Subject: [PATCH 01/55] Add cm api client to dependencies, pin python 3.11 --- .python-version | 1 + poetry.lock | 129 +++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 2 +- 3 files changed, 130 insertions(+), 2 deletions(-) create mode 100644 .python-version diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..2c07333 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.11 diff --git a/poetry.lock b/poetry.lock index 4b7abfe..a1d31cb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -774,6 +774,28 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "coinmetrics-api-client" +version = "2024.11.21.20" +description = "Python client for Coin Metrics API v4." +optional = false +python-versions = "<4.0.0,>=3.7.1" +files = [ + {file = "coinmetrics_api_client-2024.11.21.20-py3-none-any.whl", hash = "sha256:21ec15cba06416b450e7c5168c8a2dc42767f9df970ab23330f151205f6cd81f"}, + {file = "coinmetrics_api_client-2024.11.21.20.tar.gz", hash = "sha256:5dae5e5b3724865006c9f035734f82b35d4294ead17d892cda6773db006e3f2a"}, +] + +[package.dependencies] +orjson = ">=3.6.0,<4.0.0" +python-dateutil = ">=2.8.2,<3.0.0" +requests = ">=2.24.0,<3.0.0" +tqdm = ">=4.64.1,<5.0.0" +typer = ">=0.7.0" +websocket-client = ">=1.2.1,<2.0.0" + +[package.extras] +pandas = ["pandas (>=1.3.3,<2.0.0)"] + [[package]] name = "colorama" version = "0.4.6" @@ -1954,6 +1976,90 @@ files = [ {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, ] +[[package]] +name = "orjson" +version = "3.10.12" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"orjson-3.10.12-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ece01a7ec71d9940cc654c482907a6b65df27251255097629d0dea781f255c6d"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c34ec9aebc04f11f4b978dd6caf697a2df2dd9b47d35aa4cc606cabcb9df69d7"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6ec8658da3480939c79b9e9e27e0db31dffcd4ba69c334e98c9976ac29140e"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17e6baf4cf01534c9de8a16c0c611f3d94925d1701bf5f4aff17003677d8ced"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6402ebb74a14ef96f94a868569f5dccf70d791de49feb73180eb3c6fda2ade56"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0000758ae7c7853e0a4a6063f534c61656ebff644391e1f81698c1b2d2fc8cd2"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:888442dcee99fd1e5bd37a4abb94930915ca6af4db50e23e746cdf4d1e63db13"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c1f7a3ce79246aa0e92f5458d86c54f257fb5dfdc14a192651ba7ec2c00f8a05"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:802a3935f45605c66fb4a586488a38af63cb37aaad1c1d94c982c40dcc452e85"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1da1ef0113a2be19bb6c557fb0ec2d79c92ebd2fed4cfb1b26bab93f021fb885"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a3273e99f367f137d5b3fecb5e9f45bcdbfac2a8b2f32fbc72129bbd48789c2"}, + {file = "orjson-3.10.12-cp310-none-win32.whl", hash = "sha256:475661bf249fd7907d9b0a2a2421b4e684355a77ceef85b8352439a9163418c3"}, + {file = "orjson-3.10.12-cp310-none-win_amd64.whl", hash = 
"sha256:87251dc1fb2b9e5ab91ce65d8f4caf21910d99ba8fb24b49fd0c118b2362d509"}, + {file = "orjson-3.10.12-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a734c62efa42e7df94926d70fe7d37621c783dea9f707a98cdea796964d4cf74"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:750f8b27259d3409eda8350c2919a58b0cfcd2054ddc1bd317a643afc646ef23"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb52c22bfffe2857e7aa13b4622afd0dd9d16ea7cc65fd2bf318d3223b1b6252"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:440d9a337ac8c199ff8251e100c62e9488924c92852362cd27af0e67308c16ef"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9e15c06491c69997dfa067369baab3bf094ecb74be9912bdc4339972323f252"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:362d204ad4b0b8724cf370d0cd917bb2dc913c394030da748a3bb632445ce7c4"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b57cbb4031153db37b41622eac67329c7810e5f480fda4cfd30542186f006ae"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:165c89b53ef03ce0d7c59ca5c82fa65fe13ddf52eeb22e859e58c237d4e33b9b"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5dee91b8dfd54557c1a1596eb90bcd47dbcd26b0baaed919e6861f076583e9da"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a4e1cfb72de6f905bdff061172adfb3caf7a4578ebf481d8f0530879476c07"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:038d42c7bc0606443459b8fe2d1f121db474c49067d8d14c6a075bbea8bf14dd"}, + {file = "orjson-3.10.12-cp311-none-win32.whl", hash = 
"sha256:03b553c02ab39bed249bedd4abe37b2118324d1674e639b33fab3d1dafdf4d79"}, + {file = "orjson-3.10.12-cp311-none-win_amd64.whl", hash = "sha256:8b8713b9e46a45b2af6b96f559bfb13b1e02006f4242c156cbadef27800a55a8"}, + {file = "orjson-3.10.12-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:53206d72eb656ca5ac7d3a7141e83c5bbd3ac30d5eccfe019409177a57634b0d"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac8010afc2150d417ebda810e8df08dd3f544e0dd2acab5370cfa6bcc0662f8f"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed459b46012ae950dd2e17150e838ab08215421487371fa79d0eced8d1461d70"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dcb9673f108a93c1b52bfc51b0af422c2d08d4fc710ce9c839faad25020bb69"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22a51ae77680c5c4652ebc63a83d5255ac7d65582891d9424b566fb3b5375ee9"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910fdf2ac0637b9a77d1aad65f803bac414f0b06f720073438a7bd8906298192"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:24ce85f7100160936bc2116c09d1a8492639418633119a2224114f67f63a4559"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a76ba5fc8dd9c913640292df27bff80a685bed3a3c990d59aa6ce24c352f8fc"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ff70ef093895fd53f4055ca75f93f047e088d1430888ca1229393a7c0521100f"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4244b7018b5753ecd10a6d324ec1f347da130c953a9c88432c7fbc8875d13be"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:16135ccca03445f37921fa4b585cff9a58aa8d81ebcb27622e69bfadd220b32c"}, + {file = "orjson-3.10.12-cp312-none-win32.whl", hash = "sha256:2d879c81172d583e34153d524fcba5d4adafbab8349a7b9f16ae511c2cee8708"}, + {file = "orjson-3.10.12-cp312-none-win_amd64.whl", hash = "sha256:fc23f691fa0f5c140576b8c365bc942d577d861a9ee1142e4db468e4e17094fb"}, + {file = "orjson-3.10.12-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47962841b2a8aa9a258b377f5188db31ba49af47d4003a32f55d6f8b19006543"}, + {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6334730e2532e77b6054e87ca84f3072bee308a45a452ea0bffbbbc40a67e296"}, + {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:accfe93f42713c899fdac2747e8d0d5c659592df2792888c6c5f829472e4f85e"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a7974c490c014c48810d1dede6c754c3cc46598da758c25ca3b4001ac45b703f"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3f250ce7727b0b2682f834a3facff88e310f52f07a5dcfd852d99637d386e79e"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f31422ff9486ae484f10ffc51b5ab2a60359e92d0716fcce1b3593d7bb8a9af6"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5f29c5d282bb2d577c2a6bbde88d8fdcc4919c593f806aac50133f01b733846e"}, + {file = "orjson-3.10.12-cp313-none-win32.whl", hash = "sha256:f45653775f38f63dc0e6cd4f14323984c3149c05d6007b58cb154dd080ddc0dc"}, + {file = "orjson-3.10.12-cp313-none-win_amd64.whl", hash = "sha256:229994d0c376d5bdc91d92b3c9e6be2f1fbabd4cc1b59daae1443a46ee5e9825"}, + {file = "orjson-3.10.12-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7d69af5b54617a5fac5c8e5ed0859eb798e2ce8913262eb522590239db6c6763"}, + {file = 
"orjson-3.10.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ed119ea7d2953365724a7059231a44830eb6bbb0cfead33fcbc562f5fd8f935"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c5fc1238ef197e7cad5c91415f524aaa51e004be5a9b35a1b8a84ade196f73f"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43509843990439b05f848539d6f6198d4ac86ff01dd024b2f9a795c0daeeab60"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f72e27a62041cfb37a3de512247ece9f240a561e6c8662276beaf4d53d406db4"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a904f9572092bb6742ab7c16c623f0cdccbad9eeb2d14d4aa06284867bddd31"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:855c0833999ed5dc62f64552db26f9be767434917d8348d77bacaab84f787d7b"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:897830244e2320f6184699f598df7fb9db9f5087d6f3f03666ae89d607e4f8ed"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:0b32652eaa4a7539f6f04abc6243619c56f8530c53bf9b023e1269df5f7816dd"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:36b4aa31e0f6a1aeeb6f8377769ca5d125db000f05c20e54163aef1d3fe8e833"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5535163054d6cbf2796f93e4f0dbc800f61914c0e3c4ed8499cf6ece22b4a3da"}, + {file = "orjson-3.10.12-cp38-none-win32.whl", hash = "sha256:90a5551f6f5a5fa07010bf3d0b4ca2de21adafbbc0af6cb700b63cd767266cb9"}, + {file = "orjson-3.10.12-cp38-none-win_amd64.whl", hash = "sha256:703a2fb35a06cdd45adf5d733cf613cbc0cb3ae57643472b16bc22d325b5fb6c"}, + {file = "orjson-3.10.12-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:f29de3ef71a42a5822765def1febfb36e0859d33abf5c2ad240acad5c6a1b78d"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de365a42acc65d74953f05e4772c974dad6c51cfc13c3240899f534d611be967"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a5a0158648a67ff0004cb0df5df7dcc55bfc9ca154d9c01597a23ad54c8d0c"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c47ce6b8d90fe9646a25b6fb52284a14ff215c9595914af63a5933a49972ce36"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0eee4c2c5bfb5c1b47a5db80d2ac7aaa7e938956ae88089f098aff2c0f35d5d8"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35d3081bbe8b86587eb5c98a73b97f13d8f9fea685cf91a579beddacc0d10566"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c23a6e90383884068bc2dba83d5222c9fcc3b99a0ed2411d38150734236755"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5472be7dc3269b4b52acba1433dac239215366f89dc1d8d0e64029abac4e714e"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:7319cda750fca96ae5973efb31b17d97a5c5225ae0bc79bf5bf84df9e1ec2ab6"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:74d5ca5a255bf20b8def6a2b96b1e18ad37b4a122d59b154c458ee9494377f80"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ff31d22ecc5fb85ef62c7d4afe8301d10c558d00dd24274d4bbe464380d3cd69"}, + {file = "orjson-3.10.12-cp39-none-win32.whl", hash = "sha256:c22c3ea6fba91d84fcb4cda30e64aff548fcf0c44c876e681f47d61d24b12e6b"}, + {file = "orjson-3.10.12-cp39-none-win_amd64.whl", hash = "sha256:be604f60d45ace6b0b33dd990a66b4526f1a7a186ac411c942674625456ca548"}, + {file = "orjson-3.10.12.tar.gz", hash = 
"sha256:0a78bbda3aea0f9f079057ee1ee8a1ecf790d4f1af88dd67493c6b8ee52506ff"}, +] + [[package]] name = "packaging" version = "24.2" @@ -3506,6 +3612,27 @@ files = [ {file = "toolz-1.0.0.tar.gz", hash = "sha256:2c86e3d9a04798ac556793bced838816296a2f085017664e4995cb40a1047a02"}, ] +[[package]] +name = "tqdm" +version = "4.67.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typer" version = "0.13.0" @@ -3989,4 +4116,4 @@ propcache = ">=0.2.0" [metadata] lock-version = "2.0" python-versions = ">= 3.9, < 3.12" -content-hash = "365cbd298623955c424f43db374933ddd86d2b18c592a95cb44d48fd821b100a" +content-hash = "e812a8e353fe8f309bc25e6eb9b48fcc7ad2b5c95c135ad640b29afd1ad12163" diff --git a/pyproject.toml b/pyproject.toml index 58c07c7..c4943cf 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ pydantic = "^2.3.0" requests = "^2.32.3" # Subnet Specific Dependencies -# coinmetrics-api-client = "^2024.10.31.17" +coinmetrics-api-client = "^2024.11.21.20" pytz = "^2024.2" pandas = "^2.2.3" gitpython = "^3.1.43" From 8378e07294407c2201716bc6adb528f876e9c306 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 12:02:04 -0500 Subject: [PATCH 02/55] Update pre commit branch name check --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 
index 704824e..3ddb7af 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -94,7 +94,7 @@ repos: language: system pass_filenames: false always_run: true - args: [--branch, main, --branch, staging, --branch, dev] + args: [--branch, master, --branch, staging, --branch, dev] - id: trailing-whitespace name: trim trailing whitespace description: trims trailing whitespace. From 9973fdb791f1661d264b849a6540e71369df7d23 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 12:02:26 -0500 Subject: [PATCH 03/55] Add file for release notes --- docs/Release Notes.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 docs/Release Notes.md diff --git a/docs/Release Notes.md b/docs/Release Notes.md new file mode 100644 index 0000000..439a8fc --- /dev/null +++ b/docs/Release Notes.md @@ -0,0 +1,8 @@ +Release Notes +============= + +1.0.0 +----- +Release on +- ... +- ... From e2e6783276f9e467266e8e1293dd2c5409856816 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 12:03:37 -0500 Subject: [PATCH 04/55] Read version from toml file --- precog/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/precog/__init__.py b/precog/__init__.py index 29e2dca..e51a7ec 100644 --- a/precog/__init__.py +++ b/precog/__init__.py @@ -1,5 +1,5 @@ -import json +import importlib.metadata -__version__ = "0.0.0" +__version__ = importlib.metadata.version(__name__ or __package__) version_split = __version__.split(".") __spec_version__ = (1000 * int(version_split[0])) + (10 * int(version_split[1])) + (1 * int(version_split[2])) From 8cd804b02883859acd443c40e17f5140484c7dd0 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 12:05:00 -0500 Subject: [PATCH 05/55] Test package version, adds layer to prevent accidental versioning --- tests/__init__.py | 0 tests/test_package.py | 14 ++++++++++++++ 2 files changed, 14 insertions(+) create mode 100644 tests/__init__.py create mode 100644 
tests/test_package.py diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_package.py b/tests/test_package.py new file mode 100644 index 0000000..e3e55f8 --- /dev/null +++ b/tests/test_package.py @@ -0,0 +1,14 @@ +import unittest + +from precog import __version__ + + +class TestPackage(unittest.TestCase): + + def setUp(self): + pass + + def test_package_version(self): + # Check that version is as expected + # Must update to increment package version successfully + self.assertEqual(__version__, "0.1.0") From 1fe713e0b9b76c49701a070224f681d3eff72b29 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 12:27:04 -0500 Subject: [PATCH 06/55] Add License --- LICENSE | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7562375 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Opentensor + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. From 4120ea07f5045b3e8076ae8bfe68abb999dd6896 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 12:29:43 -0500 Subject: [PATCH 07/55] Add basic badges. Not functional since repo is private --- README.md | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 35b40d9..ad58612 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,10 @@ | | | | :-: | :-: | -Badges here? +| **Status** |

| +| **Activity** |

| +| **Compatibility** |
| +| **Social** | | @@ -20,8 +23,8 @@ Badges here? - [Running a Miner](#running-a-miner) - [Running a Validator](#running-a-validator) - [About the Rewards Mechanism](#about-the-rewards-mechanism) -- [Compute Requirements](#compute-requirements) - [Roadmap](#roadmap) +- [Compute Requirements](#compute-requirements) - [License](#license) --- @@ -56,7 +59,7 @@ cd precog Create and source a python virtual environment: ``` -python3 -m venv +python3 -m venv source .venv/bin/activate ``` @@ -75,23 +78,23 @@ Start by editing the Makefile with you wallet and network information. ### Running a Miner TODO: write this \ -Base miner: +Base miner: 1. Run the command: - ``` - make miner + ``` + make miner ``` Custom miner: -1. Write a custom forward function stored in precog/miners/your_function.py +1. Write a custom forward function stored in precog/miners/your_function.py - This function should handle how the miner responds to requests from the validator - Within the function, synapse.predictions and synapse.interval should be set. - - See [forward.py](https://github.com/coinmetrics/precog/blob/master/precog/miners/forward.py) for an example + - See [forward.py](https://github.com/coinmetrics/precog/blob/master/precog/miners/forward.py) for an example 2. Add a command to Makefile. - copy the miner command and rename it (e.g. miner_custom) in Makefile - replace the --forward_function argument with your_function 3. Run the Command: - ``` - make miner_custom + ``` + make miner_custom ``` @@ -119,7 +122,7 @@ TODO: update these This repository is licensed under the MIT License. 
```text # The MIT License (MIT) -# Copyright Β© 2024 Foundry Digital LLC +# Copyright Β© 2024 CoinMetrics LLC # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation From 2d1f497eb6e0076fb143510db5cb4f540a92b550 Mon Sep 17 00:00:00 2001 From: umocm Date: Tue, 3 Dec 2024 14:04:44 -0600 Subject: [PATCH 08/55] First update to cm_utils. Added ReferenceRateUSD get method. Added some other default parameters --- precog/utils/cm_data.py | 119 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 108 insertions(+), 11 deletions(-) diff --git a/precog/utils/cm_data.py b/precog/utils/cm_data.py index 8ea492f..a318be2 100644 --- a/precog/utils/cm_data.py +++ b/precog/utils/cm_data.py @@ -1,4 +1,6 @@ import pandas as pd +from typing import Optional, Union +from datetime import datetime, date from coinmetrics.api_client import CoinMetricsClient @@ -15,31 +17,126 @@ def api_key(self): def client(self): return self._client - def get_pair_candles(self, pairs: list, page_size: int = 10000, **kwargs) -> pd.DataFrame: - """Fetches candles for specific asset pairs from CoinMetrics Python client. + def get_CM_ReferenceRate( + self, + assets: Union[list, str], + start: Optional[Union[datetime, date, str]] = None, + end: Optional[Union[datetime, date, str]] = None, + end_inclusive: bool = True, + frequency: str = "1s", + page_size: int = 10000, + parallelize: bool = False, + time_inc_parallel: pd.Timedelta = pd.Timedelta("1h"), + **kwargs, + ) -> pd.DataFrame: + """Fetches CM Reference Rate for specific asset ticker or list of tickers from CoinMetrics Python client. 
Returns: - DataFrame: Available pair candles + DataFrame: Reference Rate of assets over time, with columns + ['asset', 'time', 'ReferenceRateUSD'] Notes: CM API Reference: https://coinmetrics.github.io/api-client-python/site/api_client.html#get_pair_candles """ - pair_candles = self.client.get_pair_candles(pairs, page_size, **kwargs) - return pair_candles.to_dataframe() + reference_rate = self.client.get_asset_metrics( + assets, + metrics="ReferenceRateUSD", + start_time=start, + end_time=end, + end_inclusive=end_inclusive, + frequency=frequency, + page_size=page_size, + **kwargs, + ) + + if parallelize: + reference_rate_df = reference_rate.parallel(time_increment=time_inc_parallel).to_dataframe() + else: + reference_rate_df = reference_rate.to_dataframe() + + reference_rate_df = reference_rate_df.sort_values("time").reset_index(drop=True) + return reference_rate_df + + def get_pair_candles( + self, + pairs: Union[list, str], + start: Optional[Union[datetime, date, str]] = None, + end: Optional[Union[datetime, date, str]] = None, + end_inclusive: bool = True, + frequency: str = "1h", + page_size: int = 10000, + parallelize: bool = False, + time_inc_parallel: pd.Timedelta = pd.Timedelta("1d"), + **kwargs, + ) -> pd.DataFrame: + """Fetches candles for specific asset pairs from CoinMetrics Python client. + Note 'pair' must be in format {base}-{quote} (ie. pair='btc-usd') - def get_market_open_interest(self, markets: list, page_size: int = 10000, **kwargs) -> pd.DataFrame: - """Fetches available market open interest from CoinMetrics Python client. 
+ Returns: + DataFrame: Available pair candles with columns: + ['pair', 'time', 'price_open', 'price_close', 'price_high', 'price_low'] + Notes: + CM API Reference: https://coinmetrics.github.io/api-client-python/site/api_client.html#get_pair_candles + """ + + pair_candles = self.client.get_pair_candles( + pairs, + start_time=start, + end_time=end, + end_inclusive=end_inclusive, + frequency=frequency, + page_size=page_size, + **kwargs, + ) + + if parallelize: + pair_candles_df = pair_candles.parallel(time_increment=time_inc_parallel).to_dataframe() + else: + pair_candles_df = pair_candles.to_dataframe() + + pair_candles_df = pair_candles_df.sort_values("time").reset_index(drop=True) + return pair_candles_df + + def get_open_interest_catalog(self, base: str = "btc", quote: str = "usd", market_type: str = "future", **kwargs): + """Returns the CM Catalog for active markets by base asset, quote asset, and type ('spot', 'option', or 'future') + + Args: + base (str, optional): Base Asset of Market. Defaults to "btc". + quote (str, optional): Quote Asset of Market. Defaults to "usd". + market_type (str, optional): Market type ('spot', 'option', 'future'). Defaults to "spot". + Returns: - DataFrame: Available market open interest + catalog (pd.DataFrame): Dataframe containing active markets with columns + ['market', 'min_time', 'max_time'] + """ + catalog = self.client.catalog_market_open_interest_v2( + base=base, quote=quote, market_type=market_type, page_size=10000, paging_from="end" + ).to_dataframe() + + return catalog + + def get_market_open_interest( + self, markets: list, page_size: int = 10000, parallelize=False, **kwargs + ) -> pd.DataFrame: + """Fetches available market open interest from CoinMetrics Python client. + Possible markets can be obtained from the get_open_interest_catalog() method + + Returns: + DataFrame: Open Interest of unsettled derivatives contracts. 
Columns are: + [market, time, contract_count, value_usd, database_time, exchange_time] Notes: CM API Reference: https://coinmetrics.github.io/api-client-python/site/api_client.html#get_market_open_interest """ - market_open_interest = self.client.get_market_open_interest(markets, page_size, **kwargs) - return market_open_interest.to_dataframe() + market_open_interest = self.client.get_market_open_interest(markets, page_size=page_size, **kwargs) + + if parallelize: + return market_open_interest.parallel().to_dataframe() + else: + return market_open_interest.to_dataframe() def get_market_funding_rates(self, markets: list, page_size: int = 10000, **kwargs) -> pd.DataFrame: """Fetches available market funding rates from CoinMetrics Python client. @@ -51,7 +148,7 @@ def get_market_funding_rates(self, markets: list, page_size: int = 10000, **kwar CM API Reference: https://coinmetrics.github.io/api-client-python/site/api_client.html#get_market_funding_rates """ - market_funding_rates = self.client.get_market_funding_rates(markets, page_size, **kwargs) + market_funding_rates = self.client.get_market_funding_rates(markets, page_size=page_size, **kwargs) return market_funding_rates.to_dataframe() # def get_time_reference_rate(self): From 7dd0a434bc3af122fc245366bc417819b660f7de Mon Sep 17 00:00:00 2001 From: umocm Date: Tue, 3 Dec 2024 14:55:04 -0600 Subject: [PATCH 09/55] Trivial Addition to cm_utils --- precog/utils/cm_data.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/precog/utils/cm_data.py b/precog/utils/cm_data.py index a318be2..5d4b086 100644 --- a/precog/utils/cm_data.py +++ b/precog/utils/cm_data.py @@ -106,7 +106,7 @@ def get_open_interest_catalog(self, base: str = "btc", quote: str = "usd", marke base (str, optional): Base Asset of Market. Defaults to "btc". quote (str, optional): Quote Asset of Market. Defaults to "usd". market_type (str, optional): Market type ('spot', 'option', 'future'). Defaults to "spot". 
- + Returns: catalog (pd.DataFrame): Dataframe containing active markets with columns ['market', 'min_time', 'max_time'] @@ -114,15 +114,19 @@ def get_open_interest_catalog(self, base: str = "btc", quote: str = "usd", marke catalog = self.client.catalog_market_open_interest_v2( base=base, quote=quote, market_type=market_type, page_size=10000, paging_from="end" ).to_dataframe() - + return catalog def get_market_open_interest( self, markets: list, page_size: int = 10000, parallelize=False, **kwargs ) -> pd.DataFrame: - """Fetches available market open interest from CoinMetrics Python client. + """Fetches available market open interest from CoinMetrics Python client. Possible markets can be obtained from the get_open_interest_catalog() method + Args: + markets (list): List of derivatives markets to get the Open Interest for. + Note there is a character limit to the query, so may need to be done in chunks for a long list + Returns: DataFrame: Open Interest of unsettled derivatives contracts. Columns are: [market, time, contract_count, value_usd, database_time, exchange_time] From c84f4e93b2fefa6b4e9d2f837fb2992f8a6dea5b Mon Sep 17 00:00:00 2001 From: umocm Date: Tue, 3 Dec 2024 15:06:23 -0600 Subject: [PATCH 10/55] Take 3 Testing Pull Request rule enforcement --- precog/utils/cm_data.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/precog/utils/cm_data.py b/precog/utils/cm_data.py index 5d4b086..f1545a5 100644 --- a/precog/utils/cm_data.py +++ b/precog/utils/cm_data.py @@ -28,11 +28,22 @@ def get_CM_ReferenceRate( parallelize: bool = False, time_inc_parallel: pd.Timedelta = pd.Timedelta("1h"), **kwargs, - ) -> pd.DataFrame: + ) -> pd.DataFrame: """Fetches CM Reference Rate for specific asset ticker or list of tickers from CoinMetrics Python client. 
+ Args: + assets (Union[list, str]): Asset ticker or list of tickers to retrieve CM Reference Rates for + start (Optional[Union[datetime, date, str]], optional): Start time of data, if None will return earliest available. Defaults to None. + end (Optional[Union[datetime, date, str]], optional): End time of data, if None will return earliest available. Defaults to None. + end_inclusive (bool, optional): Whether to include a data point occuring at the "end" time. Defaults to True. + frequency (str, optional): Frequency of prices - '200ms', '1s', '1m', '1m', '1d'. Defaults to "1s". + page_size (int, optional): Page size of return, recommended 10000. Defaults to 10000. + parallelize (bool, optional): Whether to parallelize query into multiple queries. + Can speed up retrieval but may go over usage limits. Defaults to False. + time_inc_parallel (pd.Timedelta, optional): If using parallelize, time interval queried by each thread. Defaults to pd.Timedelta("1h"). + Returns: - DataFrame: Reference Rate of assets over time, with columns + pd.DataFrame: Reference Rate of assets over time, with columns ['asset', 'time', 'ReferenceRateUSD'] Notes: From bae5ce6b68ad0e027278a18b75fb84e4563ae42c Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 16:06:37 -0500 Subject: [PATCH 11/55] First attempt at CI workflow --- .github/workflows/build.yml | 84 +++++++++++++++++++++++++++++++++++++ .github/workflows/ci.yml | 71 +++++++++++++++++++++++++++++++ 2 files changed, 155 insertions(+) create mode 100644 .github/workflows/build.yml create mode 100644 .github/workflows/ci.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..4af059f --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,84 @@ +name: Cache Build + +on: + workflow_call: + inputs: + command: + required: true + type: string + name: + required: true + type: string + +jobs: + reusable-build: + name: ${{ inputs.name }} + runs-on: ubuntu-latest + 
steps: + + #------------------------------------------------ + # check-out repo and set-up python + #------------------------------------------------ + - name: Check out repository + uses: actions/checkout@v4 + - name: Set up python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + #------------------------------------------------ + # ----- install & configure poetry ----- + #------------------------------------------------ + - name: Load cached Poetry installation + id: cached-poetry + uses: actions/cache@v4 + with: + path: ~/.local # the path depends on the OS + key: poetry-0 # increment to reset cache + + - name: Install Poetry + if: steps.cached-poetry.outputs.cache-hit != 'true' + uses: snok/install-poetry@v1 + with: + version: latest + virtualenvs-create: true + virtualenvs-in-project: true + virtualenvs-path: .venv + installer-parallel: true + + # If cache was loaded, we must redo configuration + - name: Configure poetry + if: steps.cached-poetry.outputs.cache-hit == 'true' + run: | + poetry config virtualenvs.create true + poetry config virtualenvs.in-project true + poetry config virtualenvs.path .venv + + #------------------------------------------------ + # Load cached venv if exists + #------------------------------------------------ + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v4 + with: + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} + + #------------------------------------------------ + # Install dependencies if cache does not exist + #------------------------------------------------ + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + + #------------------------------------------------ + # Install your root project + #------------------------------------------------ + - name: Install project + run: poetry install 
--no-interaction + + #------------------------------------------------ + # Run custom command(s) within venv + #------------------------------------------------ + - name: Run commands + run: ${{ inputs.command }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..63e9043 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,71 @@ +name: CI + +on: + pull_request: + push: + branches: [master, staging, dev] + +jobs: + #---------------------------------------------- + # Build Environment + #---------------------------------------------- + build: + name: Build + uses: ./.github/workflows/build.yml + with: + name: Cache + command: | + poetry run python -m pip list + poetry run python --version + poetry --version + poetry run echo "Build successful" + + #---------------------------------------------- + # Run Linters + #---------------------------------------------- + lint-black: + name: Linter + needs: build + uses: ./.github/workflows/build.yml + with: + name: Black + command: poetry run python -m black --check . + lint-isort: + name: Linter + needs: build + uses: ./.github/workflows/build.yml + with: + name: Isort + command: poetry run python -m isort --check-only . + lint-mypy: + name: Linter + needs: build + if: false # This condition ensures the job is never executed + uses: ./.github/workflows/build.yml + with: + name: Mypy + command: poetry run python -m mypy --verbose 0 . + lint-flake8: + name: Linter + needs: build + uses: ./.github/workflows/build.yml + with: + name: Flake8 + command: poetry run python -m flake8 . 
+ + #---------------------------------------------- + # Run Tests + #---------------------------------------------- + test-unittest: + name: Tests + needs: [ + lint-black, + lint-isort, + lint-mypy, + lint-flake8, + ] + if: ${{ always() }} # will run the tests regardless of linting success + uses: ./.github/workflows/build.yml + with: + name: Unittests + command: poetry run pytest tests/ From 6cb85d327d91bde13f3b4d1387392571014c7888 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Tue, 3 Dec 2024 16:15:26 -0500 Subject: [PATCH 12/55] Run pre commit on all files --- precog/utils/cm_data.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/precog/utils/cm_data.py b/precog/utils/cm_data.py index 5d4b086..288fe0c 100644 --- a/precog/utils/cm_data.py +++ b/precog/utils/cm_data.py @@ -1,6 +1,7 @@ -import pandas as pd +from datetime import date, datetime from typing import Optional, Union -from datetime import datetime, date + +import pandas as pd from coinmetrics.api_client import CoinMetricsClient @@ -124,7 +125,7 @@ def get_market_open_interest( Possible markets can be obtained from the get_open_interest_catalog() method Args: - markets (list): List of derivatives markets to get the Open Interest for. + markets (list): List of derivatives markets to get the Open Interest for. 
Note there is a character limit to the query, so may need to be done in chunks for a long list Returns: From 4559373274157b86ca2e8791334e8f6c5d284102 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Wed, 4 Dec 2024 10:55:30 -0500 Subject: [PATCH 13/55] Run pre-commit on this file --- precog/utils/cm_data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/precog/utils/cm_data.py b/precog/utils/cm_data.py index 0ac822b..da239ba 100644 --- a/precog/utils/cm_data.py +++ b/precog/utils/cm_data.py @@ -29,7 +29,7 @@ def get_CM_ReferenceRate( parallelize: bool = False, time_inc_parallel: pd.Timedelta = pd.Timedelta("1h"), **kwargs, - ) -> pd.DataFrame: + ) -> pd.DataFrame: """Fetches CM Reference Rate for specific asset ticker or list of tickers from CoinMetrics Python client. Args: @@ -39,7 +39,7 @@ def get_CM_ReferenceRate( end_inclusive (bool, optional): Whether to include a data point occuring at the "end" time. Defaults to True. frequency (str, optional): Frequency of prices - '200ms', '1s', '1m', '1m', '1d'. Defaults to "1s". page_size (int, optional): Page size of return, recommended 10000. Defaults to 10000. - parallelize (bool, optional): Whether to parallelize query into multiple queries. + parallelize (bool, optional): Whether to parallelize query into multiple queries. Can speed up retrieval but may go over usage limits. Defaults to False. time_inc_parallel (pd.Timedelta, optional): If using parallelize, time interval queried by each thread. Defaults to pd.Timedelta("1h"). 
From 6ecdd830b5508ca61b29eadeaa7bdb944af5708f Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 12:20:27 -0500 Subject: [PATCH 14/55] organized parameters in Makefile --- Makefile | 59 +++++++++++++------------------------ precog/utils/general.py | 16 ++++++++++ precog/validators/reward.py | 17 +---------- 3 files changed, 37 insertions(+), 55 deletions(-) diff --git a/Makefile b/Makefile index cee96c2..fdf4fda 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,19 @@ -network = ws://127.0.0.1:9944 -netuid = 1 +## Network Parameters ## +finney = wss://entrypoint-finney.opentensor.ai:443 +testnet = wss://test.finney.opentensor.ai:443 +locanet = ws://127.0.0.1:9944 + +testnet_netuid = 256 +localnet_netuid = 1 logging_level = trace # options= ['info', 'debug', 'trace'] -coldkey = cm-owner + +netuid = $(testnet_netuid) +network = $(testnet) + +## User Parameters +coldkey = default +validator_hotkey = validator +miner_hotkey = miner metagraph: btcli subnet metagraph --netuid $(netuid) --subtensor.chain_endpoint $(network) @@ -16,28 +28,18 @@ validator: python start_validator.py \ --neuron.name validator \ --wallet.name $(coldkey) \ - --wallet.hotkey validator \ - --subtensor.chain_endpoint $(network) \ + --wallet.hotkey $(validator_hotkey) \ + --network $(network) \ --axon.port 30335 \ --netuid $(netuid) \ --logging.level $(logging_level) -validator2: - python start_validator.py \ - --neuron.name validator2 \ - --wallet.name $(coldkey) \ - --wallet.hotkey validator2 \ - --subtensor.chain_endpoint $(network) \ - --axon.port 30339 \ - --netuid $(netuid) \ - --logging.level $(logging_level) - miner: python start_miner.py \ --neuron.name miner \ --wallet.name $(coldkey) \ - --wallet.hotkey miner \ - --subtensor.chain_endpoint $(network) \ + --wallet.hotkey $(miner_hotkey) \ + --network $(network) \ --axon.port 30336 \ --netuid $(netuid) \ --logging.level $(logging_level) \ @@ -49,31 +51,10 @@ miner2: --neuron.name miner2 \ --wallet.name $(coldkey) \ --wallet.hotkey 
miner2 \ - --subtensor.chain_endpoint $(network) \ + --network $(network) \ --axon.port 30337 \ --netuid $(netuid) \ --logging.level $(logging_level) \ --timeout 16 \ --forward_function forward_bad -miner3: - python start_miner.py \ - --neuron.name miner3 \ - --wallet.name $(coldkey) \ - --wallet.hotkey miner3 \ - --subtensor.chain_endpoint $(network) \ - --axon.port 30338 \ - --netuid $(netuid) \ - --logging.level $(logging_level) \ - --timeout 16 \ - --forward_function forward - -setup_local: - btcli wallet faucet --wallet.name $(coldkey) --subtensor.chain_endpoint $(network) ;\ - btcli subnet create --wallet.name $(coldkey) --subtensor.chain_endpoint $(network) ;\ - btcli subnet register \ - --wallet.name $(coldkey) \ - --wallet.hotkey validator \ - --netuid $(netuid) - --subtensor.chain_endpoint $(network) ;\ - btcli stake add --wallet.name $(coldkey) --wallet.hotkey validator --amount 1024 ;\ diff --git a/precog/utils/general.py b/precog/utils/general.py index fbe3d5c..912b5f1 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -84,3 +84,19 @@ def get_version() -> Optional[str]: raise Exception("Version information not found") return version_match.group() + + +def rank(vector): + if vector is None or len(vector) <= 1: + return np.array([0]) + else: + # Sort the array and get the indices that would sort it + sorted_indices = np.argsort(vector) + sorted_vector = vector[sorted_indices] + # Create a mask for where each new unique value starts in the sorted array + unique_mask = np.concatenate(([True], sorted_vector[1:] != sorted_vector[:-1])) + # Use cumulative sum of the unique mask to get the ranks, then assign back in original order + ranks = np.cumsum(unique_mask) - 1 + rank_vector = np.empty_like(vector, dtype=int) + rank_vector[sorted_indices] = ranks + return rank_vector diff --git a/precog/validators/reward.py b/precog/validators/reward.py index 22e14c8..bad11fc 100644 --- a/precog/validators/reward.py +++ b/precog/validators/reward.py @@ 
-5,6 +5,7 @@ import numpy as np from precog.protocol import Challenge +from precog.utils.general import rank from precog.utils.timestamp import align_timepoints, get_now, mature_dictionary, round_minute_down @@ -49,22 +50,6 @@ def calc_rewards( return rewards -def rank(vector): - if vector is None or len(vector) <= 1: - return np.array([0]) - else: - # Sort the array and get the indices that would sort it - sorted_indices = np.argsort(vector) - sorted_vector = vector[sorted_indices] - # Create a mask for where each new unique value starts in the sorted array - unique_mask = np.concatenate(([True], sorted_vector[1:] != sorted_vector[:-1])) - # Use cumulative sum of the unique mask to get the ranks, then assign back in original order - ranks = np.cumsum(unique_mask) - 1 - rank_vector = np.empty_like(vector, dtype=int) - rank_vector[sorted_indices] = ranks - return rank_vector - - def interval_error(intervals, cm_prices): if intervals is None: return np.array([0]) From c1b016cf5184fd177b608631fd855a88d68ef420 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 12:25:44 -0500 Subject: [PATCH 15/55] moved to more advanced loop handler --- precog/utils/general.py | 31 +++++++++++--- precog/validators/weight_setter.py | 69 +++++++++++------------------- 2 files changed, 52 insertions(+), 48 deletions(-) diff --git a/precog/utils/general.py b/precog/utils/general.py index 912b5f1..f0a343f 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -1,9 +1,11 @@ import argparse +import asyncio import re from typing import Optional import bittensor as bt import git +from numpy import argsort, array, concatenate, cumsum, empty_like import requests from precog.utils.classes import NestedNamespace @@ -88,15 +90,34 @@ def get_version() -> Optional[str]: def rank(vector): if vector is None or len(vector) <= 1: - return np.array([0]) + return array([0]) else: # Sort the array and get the indices that would sort it - sorted_indices = np.argsort(vector) + 
sorted_indices = argsort(vector) sorted_vector = vector[sorted_indices] # Create a mask for where each new unique value starts in the sorted array - unique_mask = np.concatenate(([True], sorted_vector[1:] != sorted_vector[:-1])) + unique_mask = concatenate(([True], sorted_vector[1:] != sorted_vector[:-1])) # Use cumulative sum of the unique mask to get the ranks, then assign back in original order - ranks = np.cumsum(unique_mask) - 1 - rank_vector = np.empty_like(vector, dtype=int) + ranks = cumsum(unique_mask) - 1 + rank_vector = empty_like(vector, dtype=int) rank_vector[sorted_indices] = ranks return rank_vector + + +async def loop_handler(self, func, sleep_time=120): + try: + while not self.stop_event.is_set(): + async with self.lock: + await func() + await asyncio.sleep(sleep_time) + except asyncio.CancelledError: + bt.logging.error(f"{func.__name__} cancelled") + raise + except KeyboardInterrupt: + raise + except Exception as e: + bt.logging.error(f"{func.__name__} raised error: {e}") + raise + finally: + async with self.lock: + self.stop_event.set() diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 7fcd768..54e3ab6 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -11,6 +11,7 @@ from precog.protocol import Challenge from precog.utils.bittensor import check_uid_availability, print_info, setup_bittensor_objects from precog.utils.classes import MinerHistory +from precog.utils.general import loop_handler from precog.utils.timestamp import elapsed_seconds, get_before, get_now, is_query_time, iso8601_to_datetime from precog.utils.wandb import log_wandb, setup_wandb from precog.validators.reward import calc_rewards @@ -51,10 +52,10 @@ def __init__(self, config=None, loop=None): self.stop_event = asyncio.Event() bt.logging.info("Setup complete, starting loop") self.loop.create_task( - self.loop_handler(self.scheduled_prediction_request, sleep_time=self.config.print_cadence) + 
loop_handler(self.scheduled_prediction_request, sleep_time=self.config.print_cadence) ) - self.loop.create_task(self.loop_handler(self.resync_metagraph, sleep_time=self.resync_metagraph_rate)) - self.loop.create_task(self.loop_handler(self.set_weights, sleep_time=self.set_weights_rate)) + self.loop.create_task(loop_handler(self.resync_metagraph, sleep_time=self.resync_metagraph_rate)) + self.loop.create_task(loop_handler(self.set_weights, sleep_time=self.set_weights_rate)) def __exit__(self, exc_type, exc_value, traceback): self.save_state() @@ -67,21 +68,6 @@ def __exit__(self, exc_type, exc_value, traceback): bt.logging.error(f"Error on __exit__ function: {e}") self.loop.stop() - async def loop_handler(self, func, sleep_time=120): - try: - while not self.stop_event.is_set(): - await func() - await asyncio.sleep(sleep_time) - except asyncio.exceptions.CancelledError: - raise - except KeyboardInterrupt: - raise - except Exception: - raise - finally: - async with self.lock: - self.stop_event.set() - self.__exit__(None, None, None) async def get_available_uids(self): miner_uids = [] @@ -93,23 +79,22 @@ async def get_available_uids(self): async def resync_metagraph(self, force=False): """Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph.""" - async with self.lock: - self.blocks_since_sync = self.current_block - self.last_sync - if self.blocks_since_sync >= self.resync_metagraph_rate or force: - bt.logging.info("Syncing Metagraph...") - self.metagraph.sync(subtensor=self.subtensor) - bt.logging.info("Metagraph updated, re-syncing hotkeys, dendrite pool and moving averages") - # Zero out all hotkeys that have been replaced. 
- self.available_uids = asyncio.run(self.get_available_uids()) - for uid, hotkey in enumerate(self.metagraph.hotkeys): - if (uid not in self.MinerHistory and uid in self.available_uids) or self.hotkeys[uid] != hotkey: - bt.logging.info(f"Replacing hotkey on {uid} with {self.metagraph.hotkeys[uid]}") - self.hotkeys[uid] = hotkey - self.scores[uid] = 0 # hotkey has been replaced - self.MinerHistory[uid] = MinerHistory(uid, timezone=self.timezone) - self.moving_average_scores[uid] = 0 - self.last_sync = self.subtensor.get_current_block() - self.save_state() + self.blocks_since_sync = self.current_block - self.last_sync + if self.blocks_since_sync >= self.resync_metagraph_rate or force: + bt.logging.info("Syncing Metagraph...") + self.metagraph.sync(subtensor=self.subtensor) + bt.logging.info("Metagraph updated, re-syncing hotkeys, dendrite pool and moving averages") + # Zero out all hotkeys that have been replaced. + self.available_uids = asyncio.run(self.get_available_uids()) + for uid, hotkey in enumerate(self.metagraph.hotkeys): + if (uid not in self.MinerHistory and uid in self.available_uids) or self.hotkeys[uid] != hotkey: + bt.logging.info(f"Replacing hotkey on {uid} with {self.metagraph.hotkeys[uid]}") + self.hotkeys[uid] = hotkey + self.scores[uid] = 0 # hotkey has been replaced + self.MinerHistory[uid] = MinerHistory(uid, timezone=self.timezone) + self.moving_average_scores[uid] = 0 + self.last_sync = self.subtensor.get_current_block() + self.save_state() def query_miners(self): timestamp = get_now().isoformat() @@ -133,9 +118,8 @@ def node_query(self, module, method, params): async def set_weights(self): if self.blocks_since_last_update >= self.set_weights_rate: - async with self.lock: - uids = array(self.available_uids) - weights = [self.moving_average_scores[uid] for uid in self.available_uids] + uids = array(self.available_uids) + weights = [self.moving_average_scores[uid] for uid in self.available_uids] for i, j in zip(weights, self.available_uids): 
bt.logging.debug(f"UID: {j} | Weight: {i}") if sum(weights) == 0: @@ -162,11 +146,10 @@ async def set_weights(self): "Failed to set weights this iteration with message:", msg, ) - async with self.lock: - self.current_block = self.subtensor.get_current_block() - self.blocks_since_last_update = ( - self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] - ) + self.current_block = self.subtensor.get_current_block() + self.blocks_since_last_update = ( + self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] + ) async def scheduled_prediction_request(self): if not hasattr(self, "timestamp"): From 3d87eb63ce208b0f0673cd65368958ad33ea47b2 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 12:28:59 -0500 Subject: [PATCH 16/55] --network flag not appropriate for the parser --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index fdf4fda..aec52f8 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ validator: --neuron.name validator \ --wallet.name $(coldkey) \ --wallet.hotkey $(validator_hotkey) \ - --network $(network) \ + --subtensor.network $(network) \ --axon.port 30335 \ --netuid $(netuid) \ --logging.level $(logging_level) @@ -39,7 +39,7 @@ miner: --neuron.name miner \ --wallet.name $(coldkey) \ --wallet.hotkey $(miner_hotkey) \ - --network $(network) \ + --subtensor.network $(network) \ --axon.port 30336 \ --netuid $(netuid) \ --logging.level $(logging_level) \ @@ -51,7 +51,7 @@ miner2: --neuron.name miner2 \ --wallet.name $(coldkey) \ --wallet.hotkey miner2 \ - --network $(network) \ + --subtensor.network $(network) \ --axon.port 30337 \ --netuid $(netuid) \ --logging.level $(logging_level) \ From 8b428e0fbb030357dc305758dfd1f0db35e4bd9e Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 12:30:32 -0500 Subject: [PATCH 17/55] --network flag not appropriate for the parser --- Makefile | 6 +++--- 1 
file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index aec52f8..bf6383e 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ validator: --neuron.name validator \ --wallet.name $(coldkey) \ --wallet.hotkey $(validator_hotkey) \ - --subtensor.network $(network) \ + --subtensor.chain_endpoint $(network) \ --axon.port 30335 \ --netuid $(netuid) \ --logging.level $(logging_level) @@ -39,7 +39,7 @@ miner: --neuron.name miner \ --wallet.name $(coldkey) \ --wallet.hotkey $(miner_hotkey) \ - --subtensor.network $(network) \ + --subtensor.chain_endpoint $(network) \ --axon.port 30336 \ --netuid $(netuid) \ --logging.level $(logging_level) \ @@ -51,7 +51,7 @@ miner2: --neuron.name miner2 \ --wallet.name $(coldkey) \ --wallet.hotkey miner2 \ - --subtensor.network $(network) \ + --subtensor.chain_endpoint $(network) \ --axon.port 30337 \ --netuid $(netuid) \ --logging.level $(logging_level) \ From 7df5c8fe257c5302e355702bd17e15eb4b10cea4 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 12:32:04 -0500 Subject: [PATCH 18/55] missing arg in loop handler --- precog/validators/weight_setter.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 54e3ab6..4986b03 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -51,11 +51,9 @@ def __init__(self, config=None, loop=None): setup_wandb(self) self.stop_event = asyncio.Event() bt.logging.info("Setup complete, starting loop") - self.loop.create_task( - loop_handler(self.scheduled_prediction_request, sleep_time=self.config.print_cadence) - ) - self.loop.create_task(loop_handler(self.resync_metagraph, sleep_time=self.resync_metagraph_rate)) - self.loop.create_task(loop_handler(self.set_weights, sleep_time=self.set_weights_rate)) + self.loop.create_task(loop_handler(self, self.scheduled_prediction_request, sleep_time=self.config.print_cadence)) + 
self.loop.create_task(loop_handler(self, self.resync_metagraph, sleep_time=self.resync_metagraph_rate)) + self.loop.create_task(loop_handler(self, self.set_weights, sleep_time=self.set_weights_rate)) def __exit__(self, exc_type, exc_value, traceback): self.save_state() From 121d4185da6cb38ef54b64a8719f4b477b1fec5c Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 12:40:20 -0500 Subject: [PATCH 19/55] reduced vpermit limit to 2 in makefile --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index bf6383e..7e4c62f 100644 --- a/Makefile +++ b/Makefile @@ -44,6 +44,7 @@ miner: --netuid $(netuid) \ --logging.level $(logging_level) \ --timeout 16 \ + --vpermit_tao_limit 2 \ --forward_function forward miner2: From b16f89d57054fe06878645768ecfcc8685ca2df5 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 12:46:48 -0500 Subject: [PATCH 20/55] unnecessary asyncio lock blocks runtime --- precog/validators/weight_setter.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 4986b03..1199827 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -165,12 +165,11 @@ async def scheduled_prediction_request(self): except Exception as e: bt.logging.error(f"Failed to calculate rewards with error: {e}") # Adjust the scores based on responses from miners and update moving average. 
- async with self.lock: - for i, value in zip(self.available_uids, rewards): - self.moving_average_scores[i] = (1 - self.config.alpha) * self.moving_average_scores[ - i - ] + self.config.alpha * value - self.scores = list(self.moving_average_scores.values()) + for i, value in zip(self.available_uids, rewards): + self.moving_average_scores[i] = (1 - self.config.alpha) * self.moving_average_scores[ + i + ] + self.config.alpha * value + self.scores = list(self.moving_average_scores.values()) if self.config.wandb_on: log_wandb(responses, rewards, self.available_uids) else: From 4b68f67211a693486a895005dcd3be269756a748 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 12:53:25 -0500 Subject: [PATCH 21/55] linters --- Makefile | 1 - precog/utils/general.py | 2 +- precog/validators/weight_setter.py | 5 +++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 7e4c62f..721c621 100644 --- a/Makefile +++ b/Makefile @@ -58,4 +58,3 @@ miner2: --logging.level $(logging_level) \ --timeout 16 \ --forward_function forward_bad - diff --git a/precog/utils/general.py b/precog/utils/general.py index f0a343f..59b9fc2 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -5,8 +5,8 @@ import bittensor as bt import git -from numpy import argsort, array, concatenate, cumsum, empty_like import requests +from numpy import argsort, array, concatenate, cumsum, empty_like from precog.utils.classes import NestedNamespace diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 1199827..067aa5b 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -51,7 +51,9 @@ def __init__(self, config=None, loop=None): setup_wandb(self) self.stop_event = asyncio.Event() bt.logging.info("Setup complete, starting loop") - self.loop.create_task(loop_handler(self, self.scheduled_prediction_request, sleep_time=self.config.print_cadence)) + self.loop.create_task( + loop_handler(self, 
self.scheduled_prediction_request, sleep_time=self.config.print_cadence) + ) self.loop.create_task(loop_handler(self, self.resync_metagraph, sleep_time=self.resync_metagraph_rate)) self.loop.create_task(loop_handler(self, self.set_weights, sleep_time=self.set_weights_rate)) @@ -66,7 +68,6 @@ def __exit__(self, exc_type, exc_value, traceback): bt.logging.error(f"Error on __exit__ function: {e}") self.loop.stop() - async def get_available_uids(self): miner_uids = [] for uid in range(len(self.metagraph.S)): From b8cb6bac0b0e203d5f056f0659c2bc72f2213ba5 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 13:02:03 -0500 Subject: [PATCH 22/55] invalid ip error on weight setting --- precog/validators/weight_setter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 067aa5b..eb2a4a4 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -47,6 +47,7 @@ def __init__(self, config=None, loop=None): self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] ) self.tempo = self.node_query("SubtensorModule", "Tempo", [self.config.netuid]) + bt.logging.debug(f"Config:{self.config}") if self.config.wandb_on: setup_wandb(self) self.stop_event = asyncio.Event() @@ -135,7 +136,6 @@ async def set_weights(self): uids=uint_uids, weights=uint_weights, wait_for_inclusion=True, - wait_for_finalization=True, version_key=__spec_version__, ) if result: From c34f9f28573353cf88545963b9947005f198b7fe Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 13:15:54 -0500 Subject: [PATCH 23/55] moved from chain_endpoint to network --- Makefile | 8 ++++---- precog/validators/weight_setter.py | 1 - 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 721c621..bb1c0fc 100644 --- a/Makefile +++ b/Makefile @@ -8,7 +8,7 @@ localnet_netuid = 1 logging_level = trace # options= ['info', 
'debug', 'trace'] netuid = $(testnet_netuid) -network = $(testnet) +network = test ## User Parameters coldkey = default @@ -29,7 +29,7 @@ validator: --neuron.name validator \ --wallet.name $(coldkey) \ --wallet.hotkey $(validator_hotkey) \ - --subtensor.chain_endpoint $(network) \ + --network $(network) \ --axon.port 30335 \ --netuid $(netuid) \ --logging.level $(logging_level) @@ -39,7 +39,7 @@ miner: --neuron.name miner \ --wallet.name $(coldkey) \ --wallet.hotkey $(miner_hotkey) \ - --subtensor.chain_endpoint $(network) \ + --network $(network) \ --axon.port 30336 \ --netuid $(netuid) \ --logging.level $(logging_level) \ @@ -52,7 +52,7 @@ miner2: --neuron.name miner2 \ --wallet.name $(coldkey) \ --wallet.hotkey miner2 \ - --subtensor.chain_endpoint $(network) \ + --network $(network) \ --axon.port 30337 \ --netuid $(netuid) \ --logging.level $(logging_level) \ diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index eb2a4a4..86a0b89 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -47,7 +47,6 @@ def __init__(self, config=None, loop=None): self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] ) self.tempo = self.node_query("SubtensorModule", "Tempo", [self.config.netuid]) - bt.logging.debug(f"Config:{self.config}") if self.config.wandb_on: setup_wandb(self) self.stop_event = asyncio.Event() From a497a2701e8d28ae95fb74ee85fefefc2a19df8b Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 13:20:35 -0500 Subject: [PATCH 24/55] changed how network is handled in config --- precog/utils/bittensor.py | 10 ++++------ precog/utils/general.py | 6 ++---- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/precog/utils/bittensor.py b/precog/utils/bittensor.py index 7903a33..2d976df 100644 --- a/precog/utils/bittensor.py +++ b/precog/utils/bittensor.py @@ -5,15 +5,13 @@ def setup_bittensor_objects(self): # if chain enpoint isn't 
set, use the network arg - if self.config.subtensor.chain_endpoint is None: - self.config.subtensor.chain_endpoint = bt.subtensor.determine_chain_endpoint_and_network( - self.config.subtensor.network - )[1] + if self.config.chain_endpoint is None: + self.config.chain_endpoint = bt.subtensor.determine_chain_endpoint_and_network(self.config.network)[1] else: # if chain endpoint is set, overwrite network arg - self.config.subtensor.network = self.config.subtensor.chain_endpoint + self.config.network = self.config.subtensor.chain_endpoint # Initialize subtensor. - self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.network) + self.subtensor = bt.subtensor(config=self.config, network=self.config.network) self.metagraph = self.subtensor.metagraph(self.config.netuid) self.wallet = bt.wallet(name=self.config.wallet.name, hotkey=self.config.wallet.hotkey) self.dendrite = bt.dendrite(wallet=self.wallet) diff --git a/precog/utils/general.py b/precog/utils/general.py index 59b9fc2..af17e3e 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -27,11 +27,9 @@ def parse_arguments(parser: Optional[argparse.ArgumentParser] = None): """ if parser is None: parser = argparse.ArgumentParser(description="Configuration") + parser.add_argument("--chain_endpoint", type=str, default=None) # for testnet: wss://test.finney.opentensor.ai:443 parser.add_argument( - "--subtensor.chain_endpoint", type=str, default=None - ) # for testnet: wss://test.finney.opentensor.ai:443 - parser.add_argument( - "--subtensor.network", + "--network", choices=["finney", "test", "local"], default="finney", ) From 9313a46eb4998a41ac9449a18fcdd7e74f36a104 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 13:21:46 -0500 Subject: [PATCH 25/55] typo --- precog/utils/bittensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/precog/utils/bittensor.py b/precog/utils/bittensor.py index 2d976df..42790bc 100644 --- a/precog/utils/bittensor.py +++ 
b/precog/utils/bittensor.py @@ -9,7 +9,7 @@ def setup_bittensor_objects(self): self.config.chain_endpoint = bt.subtensor.determine_chain_endpoint_and_network(self.config.network)[1] else: # if chain endpoint is set, overwrite network arg - self.config.network = self.config.subtensor.chain_endpoint + self.config.network = self.config.chain_endpoint # Initialize subtensor. self.subtensor = bt.subtensor(config=self.config, network=self.config.network) self.metagraph = self.subtensor.metagraph(self.config.netuid) From 603c9fa3b966d5f0de24eb28cff76ecbf8bfd8d3 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 13:27:22 -0500 Subject: [PATCH 26/55] reverted some changes --- Makefile | 6 +++--- precog/utils/bittensor.py | 10 ++++++---- precog/utils/general.py | 6 ++++-- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/Makefile b/Makefile index bb1c0fc..44b4b24 100644 --- a/Makefile +++ b/Makefile @@ -8,7 +8,7 @@ localnet_netuid = 1 logging_level = trace # options= ['info', 'debug', 'trace'] netuid = $(testnet_netuid) -network = test +network = $(testnet) ## User Parameters coldkey = default @@ -29,7 +29,7 @@ validator: --neuron.name validator \ --wallet.name $(coldkey) \ --wallet.hotkey $(validator_hotkey) \ - --network $(network) \ + --subtensor.network $(network) \ --axon.port 30335 \ --netuid $(netuid) \ --logging.level $(logging_level) @@ -39,7 +39,7 @@ miner: --neuron.name miner \ --wallet.name $(coldkey) \ --wallet.hotkey $(miner_hotkey) \ - --network $(network) \ + --subtensor.network $(network) \ --axon.port 30336 \ --netuid $(netuid) \ --logging.level $(logging_level) \ diff --git a/precog/utils/bittensor.py b/precog/utils/bittensor.py index 42790bc..7903a33 100644 --- a/precog/utils/bittensor.py +++ b/precog/utils/bittensor.py @@ -5,13 +5,15 @@ def setup_bittensor_objects(self): # if chain enpoint isn't set, use the network arg - if self.config.chain_endpoint is None: - self.config.chain_endpoint = 
bt.subtensor.determine_chain_endpoint_and_network(self.config.network)[1] + if self.config.subtensor.chain_endpoint is None: + self.config.subtensor.chain_endpoint = bt.subtensor.determine_chain_endpoint_and_network( + self.config.subtensor.network + )[1] else: # if chain endpoint is set, overwrite network arg - self.config.network = self.config.chain_endpoint + self.config.subtensor.network = self.config.subtensor.chain_endpoint # Initialize subtensor. - self.subtensor = bt.subtensor(config=self.config, network=self.config.network) + self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.network) self.metagraph = self.subtensor.metagraph(self.config.netuid) self.wallet = bt.wallet(name=self.config.wallet.name, hotkey=self.config.wallet.hotkey) self.dendrite = bt.dendrite(wallet=self.wallet) diff --git a/precog/utils/general.py b/precog/utils/general.py index af17e3e..59b9fc2 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -27,9 +27,11 @@ def parse_arguments(parser: Optional[argparse.ArgumentParser] = None): """ if parser is None: parser = argparse.ArgumentParser(description="Configuration") - parser.add_argument("--chain_endpoint", type=str, default=None) # for testnet: wss://test.finney.opentensor.ai:443 parser.add_argument( - "--network", + "--subtensor.chain_endpoint", type=str, default=None + ) # for testnet: wss://test.finney.opentensor.ai:443 + parser.add_argument( + "--subtensor.network", choices=["finney", "test", "local"], default="finney", ) From 6aa5ed1cebc3df36df81543786a5326d5cdebc5b Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 13:28:40 -0500 Subject: [PATCH 27/55] fixed Makefile error --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 44b4b24..721c621 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ validator: --neuron.name validator \ --wallet.name $(coldkey) \ --wallet.hotkey $(validator_hotkey) \ - --subtensor.network 
$(network) \ + --subtensor.chain_endpoint $(network) \ --axon.port 30335 \ --netuid $(netuid) \ --logging.level $(logging_level) @@ -39,7 +39,7 @@ miner: --neuron.name miner \ --wallet.name $(coldkey) \ --wallet.hotkey $(miner_hotkey) \ - --subtensor.network $(network) \ + --subtensor.chain_endpoint $(network) \ --axon.port 30336 \ --netuid $(netuid) \ --logging.level $(logging_level) \ @@ -52,7 +52,7 @@ miner2: --neuron.name miner2 \ --wallet.name $(coldkey) \ --wallet.hotkey miner2 \ - --network $(network) \ + --subtensor.chain_endpoint $(network) \ --axon.port 30337 \ --netuid $(netuid) \ --logging.level $(logging_level) \ From 64c486a26d44f1cc6d50e933920026b4ac716f0d Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 15:48:41 -0500 Subject: [PATCH 28/55] added try block for when node-query fails --- precog/utils/bittensor.py | 4 ++-- precog/validators/weight_setter.py | 11 +++++++---- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/precog/utils/bittensor.py b/precog/utils/bittensor.py index 7903a33..ef406b8 100644 --- a/precog/utils/bittensor.py +++ b/precog/utils/bittensor.py @@ -13,9 +13,9 @@ def setup_bittensor_objects(self): # if chain endpoint is set, overwrite network arg self.config.subtensor.network = self.config.subtensor.chain_endpoint # Initialize subtensor. - self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.network) + self.subtensor = bt.subtensor(config=self.config) self.metagraph = self.subtensor.metagraph(self.config.netuid) - self.wallet = bt.wallet(name=self.config.wallet.name, hotkey=self.config.wallet.hotkey) + self.wallet = bt.wallet(config=self.config) self.dendrite = bt.dendrite(wallet=self.wallet) self.axon = bt.axon(wallet=self.wallet, config=self.config, port=self.config.axon.port) # Connect the validator to the network. 
diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 86a0b89..d3f08fb 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -144,10 +144,13 @@ async def set_weights(self): "Failed to set weights this iteration with message:", msg, ) - self.current_block = self.subtensor.get_current_block() - self.blocks_since_last_update = ( - self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] - ) + try: + self.current_block = self.subtensor.get_current_block() + self.blocks_since_last_update = ( + self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] + ) + except Exception: + bt.logging.error("Failed to get current block, skipping block update") async def scheduled_prediction_request(self): if not hasattr(self, "timestamp"): From db6e44dac4c9e0606dc9640aa20bb7d0cdf0989a Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 15:50:21 -0500 Subject: [PATCH 29/55] missing network info from subtensor call --- precog/utils/bittensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/precog/utils/bittensor.py b/precog/utils/bittensor.py index ef406b8..29256de 100644 --- a/precog/utils/bittensor.py +++ b/precog/utils/bittensor.py @@ -13,7 +13,7 @@ def setup_bittensor_objects(self): # if chain endpoint is set, overwrite network arg self.config.subtensor.network = self.config.subtensor.chain_endpoint # Initialize subtensor. 
- self.subtensor = bt.subtensor(config=self.config) + self.subtensor = bt.subtensor(config=self.config, chain_endpoint=self.config.subtensor.chain_endpoint) self.metagraph = self.subtensor.metagraph(self.config.netuid) self.wallet = bt.wallet(config=self.config) self.dendrite = bt.dendrite(wallet=self.wallet) From 3b086787f254aa4d208b0d245bd2f3217101a682 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 15:56:39 -0500 Subject: [PATCH 30/55] set block update to before weight setting code in set_weights loop --- precog/utils/bittensor.py | 2 +- precog/validators/weight_setter.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/precog/utils/bittensor.py b/precog/utils/bittensor.py index 29256de..844ff70 100644 --- a/precog/utils/bittensor.py +++ b/precog/utils/bittensor.py @@ -13,7 +13,7 @@ def setup_bittensor_objects(self): # if chain endpoint is set, overwrite network arg self.config.subtensor.network = self.config.subtensor.chain_endpoint # Initialize subtensor. 
- self.subtensor = bt.subtensor(config=self.config, chain_endpoint=self.config.subtensor.chain_endpoint) + self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.chain_endpoint) self.metagraph = self.subtensor.metagraph(self.config.netuid) self.wallet = bt.wallet(config=self.config) self.dendrite = bt.dendrite(wallet=self.wallet) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index d3f08fb..a704bd1 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -116,6 +116,13 @@ def node_query(self, module, method, params): return result async def set_weights(self): + try: + self.current_block = self.subtensor.get_current_block() + self.blocks_since_last_update = ( + self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] + ) + except Exception: + bt.logging.error("Failed to get current block, skipping block update") if self.blocks_since_last_update >= self.set_weights_rate: uids = array(self.available_uids) weights = [self.moving_average_scores[uid] for uid in self.available_uids] @@ -144,13 +151,6 @@ async def set_weights(self): "Failed to set weights this iteration with message:", msg, ) - try: - self.current_block = self.subtensor.get_current_block() - self.blocks_since_last_update = ( - self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] - ) - except Exception: - bt.logging.error("Failed to get current block, skipping block update") async def scheduled_prediction_request(self): if not hasattr(self, "timestamp"): From fff5587d36686bc9db0a7c6b946117a881a861f8 Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 15:58:31 -0500 Subject: [PATCH 31/55] added reinitializing subtensor to resync metagraph code --- precog/validators/weight_setter.py | 1 + 1 file changed, 1 insertion(+) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py 
index a704bd1..6981c7e 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -80,6 +80,7 @@ async def resync_metagraph(self, force=False): """Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph.""" self.blocks_since_sync = self.current_block - self.last_sync if self.blocks_since_sync >= self.resync_metagraph_rate or force: + self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.chain_endpoint) bt.logging.info("Syncing Metagraph...") self.metagraph.sync(subtensor=self.subtensor) bt.logging.info("Metagraph updated, re-syncing hotkeys, dendrite pool and moving averages") From 781080f473125fdbb6f3350e5894b5baff62505f Mon Sep 17 00:00:00 2001 From: hscott Date: Thu, 5 Dec 2024 16:53:08 -0500 Subject: [PATCH 32/55] reduced set_weights and resync_metagraph cadence --- precog/validators/weight_setter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 6981c7e..61454dd 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -27,8 +27,8 @@ def __init__(self, config=None, loop=None): self.prediction_interval = self.config.prediction_interval # in minutes self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict self.last_sync = 0 - self.set_weights_rate = 100 # in blocks - self.resync_metagraph_rate = 20 # in blocks + self.set_weights_rate = 150 # in blocks + self.resync_metagraph_rate = 25 # in blocks bt.logging.info( f"Running validator for subnet: {self.config.netuid} on network: {self.config.subtensor.network}" ) From 2d616c34d01ea51a9670dae00595e6acd0eca7ce Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 10:40:46 -0500 Subject: [PATCH 33/55] trying to catch and restart websocket closed exceptions --- precog/validators/validator.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/precog/validators/validator.py b/precog/validators/validator.py index d09c2c2..79aae3c 100755 --- a/precog/validators/validator.py +++ b/precog/validators/validator.py @@ -2,6 +2,7 @@ from pathlib import Path import bittensor as bt +import websocket from precog.utils.classes import Config from precog.utils.general import parse_arguments @@ -27,6 +28,9 @@ async def main(self): except BrokenPipeError: bt.logging.error("Recieved a Broken Pipe substrate error") asyncio.run(self.reset_instance()) + except websocket._exceptions.WebSocketConnectionClosedException: + bt.logging.error("Recieved a websocket closed error, restarting validator") + asyncio.run(self.reset_instance()) except Exception as e: bt.logging.error(f"Unhandled exception: {e}") finally: From 8390195754862efb235d40a1e5863dd93f7ad0d4 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Fri, 6 Dec 2024 11:00:48 -0500 Subject: [PATCH 34/55] Make iso8601 string format compatible with CM API --- precog/utils/timestamp.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/precog/utils/timestamp.py b/precog/utils/timestamp.py index 16d382d..11d77b6 100644 --- a/precog/utils/timestamp.py +++ b/precog/utils/timestamp.py @@ -21,12 +21,12 @@ def get_now() -> datetime: return datetime.now(get_timezone()) -def get_before(minutes: int = 5) -> datetime: +def get_before(minutes: int = 5, seconds: int = 0) -> datetime: """ Get the datetime x minutes before now """ now = get_now() - return now - timedelta(minutes=minutes) + return now - timedelta(minutes=minutes, seconds=seconds) def get_midnight() -> datetime: @@ -66,7 +66,7 @@ def datetime_to_iso8601(timestamp: datetime) -> str: """ Convert datetime to iso 8601 string """ - return timestamp.isoformat() + return timestamp.strftime("%Y-%m-%dT%H:%M:%S.%fZ") def iso8601_to_datetime(timestamp: str) -> datetime: From 02bf5f9f983e1904d442a1f9c48f848f3449a399 Mon Sep 17 00:00:00 2001 From: Matthew Trudeau Date: Fri, 6 Dec 2024 11:03:15 -0500 Subject: [PATCH 
35/55] Implement naive base miner --- precog/miners/base_miner.py | 120 ++++++++++++++++++++++++++++++++++++ 1 file changed, 120 insertions(+) create mode 100644 precog/miners/base_miner.py diff --git a/precog/miners/base_miner.py b/precog/miners/base_miner.py new file mode 100644 index 0000000..33e933d --- /dev/null +++ b/precog/miners/base_miner.py @@ -0,0 +1,120 @@ +from datetime import timedelta +from typing import Tuple + +import bittensor as bt +import pandas as pd + +from precog.miners.miner import Miner +from precog.protocol import Challenge +from precog.utils.classes import Config +from precog.utils.cm_data import CMData +from precog.utils.general import parse_arguments +from precog.utils.timestamp import datetime_to_iso8601, iso8601_to_datetime + + +class BaseMiner(Miner): + + def get_point_estimate(self, timestamp: str) -> float: + """Make a naive forecast by predicting the most recent price + + Args: + timestamp (str): The current timestamp provided by the validator request + + Returns: + (float): The current BTC price tied to the provided timestamp + """ + # Create data gathering instance + cm = CMData() + + # Set the time range to be as small as possible for query speed + # Set the start time as 2 seconds prior to the provided time + start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(seconds=2)) + end_time: str = timestamp + + # Query CM API for a pandas dataframe with only one record + price_data: pd.DataFrame = cm.get_CM_ReferenceRate(assets="BTC", start=start_time, end=end_time) + + # Get current price closest to the provided timestamp + btc_price: float = float(price_data["ReferenceRateUSD"].iloc[-1]) + + # Return the current price of BTC as our point estimate + return btc_price + + def get_prediction_interval(self, timestamp: str, point_estimate: float) -> Tuple[float, float]: + """Make a naive multi-step prediction interval by estimating + the sample standard deviation + + Args: + timestamp (str): The current 
timestamp provided by the validator request + point_estimate (float): The center of the prediction interval + + Returns: + (float): The 90% naive prediction interval lower bound + (float): The 90% naive prediction interval upper bound + + Notes: + Make reasonable assumptions that the 1s BTC price residuals are + uncorrelated and normally distributed + """ + # Create data gathering instance + cm = CMData() + + # Set the time range to be 24 hours + start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(days=1)) + end_time: str = timestamp + + # Query CM API for sample standard deviation of the 1s residuals + historical_price_data: pd.DataFrame = cm.get_CM_ReferenceRate( + assets="BTC", start=start_time, end=end_time, frequency="1s" + ) + residuals: pd.Series = historical_price_data["ReferenceRateUSD"].diff() + sample_std_dev: float = float(residuals.std()) + + # We have the standard deviation of the 1s residuals + # We are forecasting forward 5m, which is 300s + # We must scale the 1s sample standard deviation to reflect a 300s forecast + # Make reasonable assumptions that the 1s residuals are uncorrelated and normally distributed + # To do this naively, we multiply the std dev by the square root of the number of time steps + time_steps: int = 300 + naive_forecast_std_dev: float = sample_std_dev * (time_steps**0.5) + + # For a 90% prediction interval, we use the coefficient 1.64 + # Make reasonable assumptions that the 1s residuals are uncorrelated and normally distributed + coefficient: float = 1.64 + + # Calculate the lower bound and upper bound + lower_bound: float = point_estimate - coefficient * naive_forecast_std_dev + upper_bound: float = point_estimate + coefficient * naive_forecast_std_dev + + # Return the naive prediction interval for our forecast + return lower_bound, upper_bound + + async def forward(self, synapse: Challenge) -> Challenge: + bt.logging.info( + f"πŸ‘ˆ Received prediction request from: {synapse.dendrite.hotkey} for 
timestamp: {synapse.timestamp}" + ) + + # Get the naive point estimate + point_estimate: float = self.get_point_estimate(timestamp=synapse.timestamp) + + # Get the naive prediction interval + prediction_interval: Tuple[float, float] = self.get_prediction_interval( + timestamp=synapse.timestamp, point_estimate=point_estimate + ) + + synapse.prediction = point_estimate + synapse.interval = prediction_interval + + if synapse.prediction is not None: + bt.logging.success(f"Predicted price: {synapse.prediction} | Predicted Interval: {synapse.interval}") + else: + bt.logging.info("No prediction for this request.") + return synapse + + +# Run the miner +if __name__ == "__main__": + args = parse_arguments() + config = Config(args) + miner = BaseMiner(config=config) + miner.loop.run_forever() From 813cc6c7d4e05b533e22bf29fc0d8af34e9c7212 Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 11:40:43 -0500 Subject: [PATCH 36/55] added return_exceptions to asyncio gather --- precog/utils/general.py | 2 +- precog/validators/weight_setter.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/precog/utils/general.py b/precog/utils/general.py index 59b9fc2..91bcfb7 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -117,7 +117,7 @@ async def loop_handler(self, func, sleep_time=120): raise except Exception as e: bt.logging.error(f"{func.__name__} raised error: {e}") - raise + raise e finally: async with self.lock: self.stop_event.set() diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 61454dd..8df0458 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -63,7 +63,7 @@ def __exit__(self, exc_type, exc_value, traceback): pending = asyncio.all_tasks(self.loop) for task in pending: task.cancel() - asyncio.gather(*pending) + asyncio.gather(*pending, return_exceptions=True) except Exception as e: bt.logging.error(f"Error on __exit__ function: {e}") self.loop.stop() 
From c9677f1dc1a3b3d4cb7d32aade201b24130c6839 Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 11:57:08 -0500 Subject: [PATCH 37/55] moved error handling to weight_setter --- precog/validators/validator.py | 15 --------------- precog/validators/weight_setter.py | 24 +++++++++++++++++++----- 2 files changed, 19 insertions(+), 20 deletions(-) diff --git a/precog/validators/validator.py b/precog/validators/validator.py index 79aae3c..b8bf9c3 100755 --- a/precog/validators/validator.py +++ b/precog/validators/validator.py @@ -1,9 +1,6 @@ import asyncio from pathlib import Path -import bittensor as bt -import websocket - from precog.utils.classes import Config from precog.utils.general import parse_arguments from precog.validators.weight_setter import weight_setter @@ -23,18 +20,6 @@ def __init__(self): async def main(self): loop = asyncio.get_event_loop() self.weight_setter = weight_setter(config=self.config, loop=loop) - try: - loop.run_forever() - except BrokenPipeError: - bt.logging.error("Recieved a Broken Pipe substrate error") - asyncio.run(self.reset_instance()) - except websocket._exceptions.WebSocketConnectionClosedException: - bt.logging.error("Recieved a websocket closed error, restarting validator") - asyncio.run(self.reset_instance()) - except Exception as e: - bt.logging.error(f"Unhandled exception: {e}") - finally: - bt.logging.info("Exiting Validator") async def reset_instance(self): self.__init__() diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 8df0458..7f2b3b9 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -56,6 +56,11 @@ def __init__(self, config=None, loop=None): ) self.loop.create_task(loop_handler(self, self.resync_metagraph, sleep_time=self.resync_metagraph_rate)) self.loop.create_task(loop_handler(self, self.set_weights, sleep_time=self.set_weights_rate)) + try: + self.loop.run_forever() + except Exception as e: + bt.logging.error(f"Error on loop: 
{e}") + self.__reset_instance__() def __exit__(self, exc_type, exc_value, traceback): self.save_state() @@ -63,10 +68,15 @@ def __exit__(self, exc_type, exc_value, traceback): pending = asyncio.all_tasks(self.loop) for task in pending: task.cancel() - asyncio.gather(*pending, return_exceptions=True) except Exception as e: bt.logging.error(f"Error on __exit__ function: {e}") - self.loop.stop() + finally: + asyncio.gather(*pending, return_exceptions=True) + self.loop.stop() + + def __reset_instance__(self): + self.__exit__(None, None, None) + self.__init__(self.config, self.loop) async def get_available_uids(self): miner_uids = [] @@ -119,9 +129,7 @@ def node_query(self, module, method, params): async def set_weights(self): try: self.current_block = self.subtensor.get_current_block() - self.blocks_since_last_update = ( - self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] - ) + self.blocks_since_last_update = self.current_block - self.last_update except Exception: bt.logging.error("Failed to get current block, skipping block update") if self.blocks_since_last_update >= self.set_weights_rate: @@ -147,6 +155,12 @@ async def set_weights(self): ) if result: bt.logging.success("βœ… Set Weights on chain successfully!") + try: + self.last_update = self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[ + self.my_uid + ] + except Exception: + pass else: bt.logging.debug( "Failed to set weights this iteration with message:", From 0b3467ee9ec50e0a251212adcc0ce2f2371c7d8c Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 12:11:07 -0500 Subject: [PATCH 38/55] removed node_query function --- precog/validators/weight_setter.py | 30 ++++++++---------------------- 1 file changed, 8 insertions(+), 22 deletions(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 7f2b3b9..219f26a 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py 
@@ -3,9 +3,9 @@ import pickle import bittensor as bt +import websocket from numpy import array from pytz import timezone -from substrateinterface import SubstrateInterface from precog import __spec_version__ from precog.protocol import Challenge @@ -41,12 +41,10 @@ def __init__(self, config=None, loop=None): self.save_state() else: self.load_state() - self.node = SubstrateInterface(url=self.config.subtensor.chain_endpoint) self.current_block = self.subtensor.get_current_block() - self.blocks_since_last_update = ( - self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid] + self.blocks_since_last_update = self.subtensor.blocks_since_last_update( + neutid=self.config.netuid, uid=self.my_uid ) - self.tempo = self.node_query("SubtensorModule", "Tempo", [self.config.netuid]) if self.config.wandb_on: setup_wandb(self) self.stop_event = asyncio.Event() @@ -58,9 +56,10 @@ def __init__(self, config=None, loop=None): self.loop.create_task(loop_handler(self, self.set_weights, sleep_time=self.set_weights_rate)) try: self.loop.run_forever() + except websocket._exceptions.WebSocketConnectionClosedException: + self.__reset_instance__() except Exception as e: bt.logging.error(f"Error on loop: {e}") - self.__reset_instance__() def __exit__(self, exc_type, exc_value, traceback): self.save_state() @@ -117,19 +116,12 @@ def query_miners(self): ) return responses, timestamp - def node_query(self, module, method, params): - try: - result = self.node.query(module, method, params).value - except Exception: - # reinitilize node - self.node = SubstrateInterface(url=self.subtensor.chain_endpoint) - result = self.node.query(module, method, params).value - return result - async def set_weights(self): try: self.current_block = self.subtensor.get_current_block() - self.blocks_since_last_update = self.current_block - self.last_update + self.blocks_since_last_update = self.subtensor.blocks_since_last_update( + neutid=self.config.netuid, 
uid=self.my_uid + ) except Exception: bt.logging.error("Failed to get current block, skipping block update") if self.blocks_since_last_update >= self.set_weights_rate: @@ -155,12 +147,6 @@ async def set_weights(self): ) if result: bt.logging.success("βœ… Set Weights on chain successfully!") - try: - self.last_update = self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[ - self.my_uid - ] - except Exception: - pass else: bt.logging.debug( "Failed to set weights this iteration with message:", From ca2b7b35d415b548314a20df7366b8737eab0ec8 Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 12:12:43 -0500 Subject: [PATCH 39/55] typo --- precog/validators/weight_setter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 219f26a..c8c4dff 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -43,7 +43,7 @@ def __init__(self, config=None, loop=None): self.load_state() self.current_block = self.subtensor.get_current_block() self.blocks_since_last_update = self.subtensor.blocks_since_last_update( - neutid=self.config.netuid, uid=self.my_uid + netuid=self.config.netuid, uid=self.my_uid ) if self.config.wandb_on: setup_wandb(self) @@ -120,7 +120,7 @@ async def set_weights(self): try: self.current_block = self.subtensor.get_current_block() self.blocks_since_last_update = self.subtensor.blocks_since_last_update( - neutid=self.config.netuid, uid=self.my_uid + netuid=self.config.netuid, uid=self.my_uid ) except Exception: bt.logging.error("Failed to get current block, skipping block update") From 4a4cb8c0fac6c4d27a428e6e1e51ffc27f82787d Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 12:19:05 -0500 Subject: [PATCH 40/55] simplified block handling code --- precog/validators/weight_setter.py | 38 +++++++++++++----------------- 1 file changed, 17 insertions(+), 21 deletions(-) diff --git 
a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index c8c4dff..c8de0eb 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -28,7 +28,7 @@ def __init__(self, config=None, loop=None): self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict self.last_sync = 0 self.set_weights_rate = 150 # in blocks - self.resync_metagraph_rate = 25 # in blocks + self.resync_metagraph_rate = 600 # in seconds bt.logging.info( f"Running validator for subnet: {self.config.netuid} on network: {self.config.subtensor.network}" ) @@ -41,7 +41,6 @@ def __init__(self, config=None, loop=None): self.save_state() else: self.load_state() - self.current_block = self.subtensor.get_current_block() self.blocks_since_last_update = self.subtensor.blocks_since_last_update( netuid=self.config.netuid, uid=self.my_uid ) @@ -57,6 +56,7 @@ def __init__(self, config=None, loop=None): try: self.loop.run_forever() except websocket._exceptions.WebSocketConnectionClosedException: + bt.logging.info("Caught websocket connection closed exception") self.__reset_instance__() except Exception as e: bt.logging.error(f"Error on loop: {e}") @@ -85,25 +85,22 @@ async def get_available_uids(self): miner_uids.append(uid) return miner_uids - async def resync_metagraph(self, force=False): + async def resync_metagraph(self): """Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph.""" - self.blocks_since_sync = self.current_block - self.last_sync - if self.blocks_since_sync >= self.resync_metagraph_rate or force: - self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.chain_endpoint) - bt.logging.info("Syncing Metagraph...") - self.metagraph.sync(subtensor=self.subtensor) - bt.logging.info("Metagraph updated, re-syncing hotkeys, dendrite pool and moving averages") - # Zero out all hotkeys that have been replaced. 
- self.available_uids = asyncio.run(self.get_available_uids()) - for uid, hotkey in enumerate(self.metagraph.hotkeys): - if (uid not in self.MinerHistory and uid in self.available_uids) or self.hotkeys[uid] != hotkey: - bt.logging.info(f"Replacing hotkey on {uid} with {self.metagraph.hotkeys[uid]}") - self.hotkeys[uid] = hotkey - self.scores[uid] = 0 # hotkey has been replaced - self.MinerHistory[uid] = MinerHistory(uid, timezone=self.timezone) - self.moving_average_scores[uid] = 0 - self.last_sync = self.subtensor.get_current_block() - self.save_state() + self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.chain_endpoint) + bt.logging.info("Syncing Metagraph...") + self.metagraph.sync(subtensor=self.subtensor) + bt.logging.info("Metagraph updated, re-syncing hotkeys, dendrite pool and moving averages") + # Zero out all hotkeys that have been replaced. + self.available_uids = asyncio.run(self.get_available_uids()) + for uid, hotkey in enumerate(self.metagraph.hotkeys): + if (uid not in self.MinerHistory and uid in self.available_uids) or self.hotkeys[uid] != hotkey: + bt.logging.info(f"Replacing hotkey on {uid} with {self.metagraph.hotkeys[uid]}") + self.hotkeys[uid] = hotkey + self.scores[uid] = 0 # hotkey has been replaced + self.MinerHistory[uid] = MinerHistory(uid, timezone=self.timezone) + self.moving_average_scores[uid] = 0 + self.save_state() def query_miners(self): timestamp = get_now().isoformat() @@ -118,7 +115,6 @@ def query_miners(self): async def set_weights(self): try: - self.current_block = self.subtensor.get_current_block() self.blocks_since_last_update = self.subtensor.blocks_since_last_update( netuid=self.config.netuid, uid=self.my_uid ) From ed937b4b5689fd613fc6d9a9ce57123cc113aa27 Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 12:22:16 -0500 Subject: [PATCH 41/55] now updates current block on calls to set_weights --- precog/validators/weight_setter.py | 4 +++- 1 file changed, 3 insertions(+), 1 
deletion(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index c8de0eb..7d07e20 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -27,7 +27,7 @@ def __init__(self, config=None, loop=None): self.prediction_interval = self.config.prediction_interval # in minutes self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict self.last_sync = 0 - self.set_weights_rate = 150 # in blocks + self.set_weights_rate = 100 # in blocks self.resync_metagraph_rate = 600 # in seconds bt.logging.info( f"Running validator for subnet: {self.config.netuid} on network: {self.config.subtensor.network}" @@ -41,6 +41,7 @@ def __init__(self, config=None, loop=None): self.save_state() else: self.load_state() + self.current_block = self.subtensor.get_current_block() self.blocks_since_last_update = self.subtensor.blocks_since_last_update( netuid=self.config.netuid, uid=self.my_uid ) @@ -118,6 +119,7 @@ async def set_weights(self): self.blocks_since_last_update = self.subtensor.blocks_since_last_update( netuid=self.config.netuid, uid=self.my_uid ) + self.current_block = self.subtensor.get_current_block() except Exception: bt.logging.error("Failed to get current block, skipping block update") if self.blocks_since_last_update >= self.set_weights_rate: From 9573841332bc62127cdc8852c6c764a63499704f Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 12:36:14 -0500 Subject: [PATCH 42/55] resets blocks_since_last_update on successful set weights --- precog/validators/weight_setter.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 7d07e20..097cf3c 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -120,8 +120,8 @@ async def set_weights(self): netuid=self.config.netuid, uid=self.my_uid ) self.current_block = self.subtensor.get_current_block() - except 
Exception: - bt.logging.error("Failed to get current block, skipping block update") + except Exception as e: + bt.logging.error(f"Failed to get current block with error {e}, skipping block update") if self.blocks_since_last_update >= self.set_weights_rate: uids = array(self.available_uids) weights = [self.moving_average_scores[uid] for uid in self.available_uids] @@ -145,6 +145,7 @@ async def set_weights(self): ) if result: bt.logging.success("βœ… Set Weights on chain successfully!") + self.blocks_since_last_update = 0 else: bt.logging.debug( "Failed to set weights this iteration with message:", From 2908deaf601d9957de7948bbb1738f4b8a7968ba Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 13:06:19 -0500 Subject: [PATCH 43/55] now pulls weights_rate_limit from subnet hyperparams instead of setting own value --- precog/utils/general.py | 19 +++++++++++++++++-- precog/validators/weight_setter.py | 9 +++++---- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/precog/utils/general.py b/precog/utils/general.py index 91bcfb7..8e4f4e7 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -1,7 +1,7 @@ import argparse import asyncio import re -from typing import Optional +from typing import Any, Callable, Optional import bittensor as bt import git @@ -104,7 +104,7 @@ def rank(vector): return rank_vector -async def loop_handler(self, func, sleep_time=120): +async def loop_handler(self, func: Callable, sleep_time: float = 120): try: while not self.stop_event.is_set(): async with self.lock: @@ -121,3 +121,18 @@ async def loop_handler(self, func, sleep_time=120): finally: async with self.lock: self.stop_event.set() + + +def func_with_retry(func: Callable, max_attempts: int = 3, delay: float = 2, *args, **kwargs) -> Any: + attempt = 0 + while attempt < max_attempts: + try: + result = func(*args, **kwargs) + return result + except Exception as e: + attempt += 1 + print(f"Attempt {attempt} failed with error: {e}") + if attempt == 
max_attempts: + raise + else: + print(f"Retrying... (Attempt {attempt + 1})") diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 097cf3c..8cb881e 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -11,7 +11,7 @@ from precog.protocol import Challenge from precog.utils.bittensor import check_uid_availability, print_info, setup_bittensor_objects from precog.utils.classes import MinerHistory -from precog.utils.general import loop_handler +from precog.utils.general import func_with_retry, loop_handler from precog.utils.timestamp import elapsed_seconds, get_before, get_now, is_query_time, iso8601_to_datetime from precog.utils.wandb import log_wandb, setup_wandb from precog.validators.reward import calc_rewards @@ -26,8 +26,8 @@ def __init__(self, config=None, loop=None): self.timezone = timezone("UTC") self.prediction_interval = self.config.prediction_interval # in minutes self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict + self.hyperparams = func_with_retry(self.subtensor.get_subnet_hyperparameters, netuid=self.config.netuid) self.last_sync = 0 - self.set_weights_rate = 100 # in blocks self.resync_metagraph_rate = 600 # in seconds bt.logging.info( f"Running validator for subnet: {self.config.netuid} on network: {self.config.subtensor.network}" @@ -53,10 +53,11 @@ def __init__(self, config=None, loop=None): loop_handler(self, self.scheduled_prediction_request, sleep_time=self.config.print_cadence) ) self.loop.create_task(loop_handler(self, self.resync_metagraph, sleep_time=self.resync_metagraph_rate)) - self.loop.create_task(loop_handler(self, self.set_weights, sleep_time=self.set_weights_rate)) + self.loop.create_task(loop_handler(self, self.set_weights, sleep_time=self.hyperparameters.weights_rate_limit)) try: self.loop.run_forever() except websocket._exceptions.WebSocketConnectionClosedException: + # TODO: Exceptions are not being caught in this loop 
bt.logging.info("Caught websocket connection closed exception") self.__reset_instance__() except Exception as e: @@ -122,7 +123,7 @@ async def set_weights(self): self.current_block = self.subtensor.get_current_block() except Exception as e: bt.logging.error(f"Failed to get current block with error {e}, skipping block update") - if self.blocks_since_last_update >= self.set_weights_rate: + if self.blocks_since_last_update >= self.hyperparameters.weights_rate_limit: uids = array(self.available_uids) weights = [self.moving_average_scores[uid] for uid in self.available_uids] for i, j in zip(weights, self.available_uids): From 855cd94656cb3bcfb689026ba67c52e5bf05c306 Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 13:07:26 -0500 Subject: [PATCH 44/55] typo --- precog/validators/weight_setter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 8cb881e..0921933 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -26,7 +26,7 @@ def __init__(self, config=None, loop=None): self.timezone = timezone("UTC") self.prediction_interval = self.config.prediction_interval # in minutes self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict - self.hyperparams = func_with_retry(self.subtensor.get_subnet_hyperparameters, netuid=self.config.netuid) + self.hyperparameters = func_with_retry(self.subtensor.get_subnet_hyperparameters, netuid=self.config.netuid) self.last_sync = 0 self.resync_metagraph_rate = 600 # in seconds bt.logging.info( From 790e359113020d6ca8ded2c5bfc47c90a3c17158 Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 13:08:42 -0500 Subject: [PATCH 45/55] missed assignment in utils function --- precog/utils/bittensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/precog/utils/bittensor.py b/precog/utils/bittensor.py index 844ff70..890e390 100644 --- a/precog/utils/bittensor.py 
+++ b/precog/utils/bittensor.py @@ -46,7 +46,7 @@ def setup_bittensor_objects(self): def print_info(self) -> None: if self.config.neuron.type == "Validator": - weight_timing = self.set_weights_rate - self.blocks_since_last_update + weight_timing = self.hyperparameters.weights_rate_limit - self.blocks_since_last_update if weight_timing <= 0: weight_timing = "a few" # hashtag aesthetic af log = ( From 756fda16e1763c3211ac38e9e3bb965516be5cec Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 13:10:08 -0500 Subject: [PATCH 46/55] unused variable removed --- precog/validators/weight_setter.py | 1 - 1 file changed, 1 deletion(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 0921933..ecd0b80 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -27,7 +27,6 @@ def __init__(self, config=None, loop=None): self.prediction_interval = self.config.prediction_interval # in minutes self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict self.hyperparameters = func_with_retry(self.subtensor.get_subnet_hyperparameters, netuid=self.config.netuid) - self.last_sync = 0 self.resync_metagraph_rate = 600 # in seconds bt.logging.info( f"Running validator for subnet: {self.config.netuid} on network: {self.config.subtensor.network}" From 64d4841d8373d601e80d1a330c59331a508e33ba Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 13:21:35 -0500 Subject: [PATCH 47/55] added retry wrapper to get_block calls in set_weights --- precog/validators/weight_setter.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index ecd0b80..56de5ef 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -24,8 +24,6 @@ def __init__(self, config=None, loop=None): self.lock = asyncio.Lock() setup_bittensor_objects(self) self.timezone = timezone("UTC") - 
self.prediction_interval = self.config.prediction_interval # in minutes - self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict self.hyperparameters = func_with_retry(self.subtensor.get_subnet_hyperparameters, netuid=self.config.netuid) self.resync_metagraph_rate = 600 # in seconds bt.logging.info( @@ -61,6 +59,8 @@ def __init__(self, config=None, loop=None): self.__reset_instance__() except Exception as e: bt.logging.error(f"Error on loop: {e}") + finally: + self.__exit__(None, None, None) def __exit__(self, exc_type, exc_value, traceback): self.save_state() @@ -116,10 +116,10 @@ def query_miners(self): async def set_weights(self): try: - self.blocks_since_last_update = self.subtensor.blocks_since_last_update( - netuid=self.config.netuid, uid=self.my_uid + self.blocks_since_last_update = func_with_retry( + self.subtensor.blocks_since_last_update, netuid=self.config.netuid, uid=self.my_uid ) - self.current_block = self.subtensor.get_current_block() + self.current_block = func_with_retry(self.subtensor.get_current_block) except Exception as e: bt.logging.error(f"Failed to get current block with error {e}, skipping block update") if self.blocks_since_last_update >= self.hyperparameters.weights_rate_limit: From d4c58ec1e3ecaba66940a7694813d119c20c246e Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 13:24:03 -0500 Subject: [PATCH 48/55] added delay functionality to func_with_retry --- precog/utils/general.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/precog/utils/general.py b/precog/utils/general.py index 8e4f4e7..52352c6 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -1,6 +1,7 @@ import argparse import asyncio import re +import time from typing import Any, Callable, Optional import bittensor as bt @@ -123,7 +124,7 @@ async def loop_handler(self, func: Callable, sleep_time: float = 120): self.stop_event.set() -def func_with_retry(func: Callable, max_attempts: int = 3, delay: float 
= 2, *args, **kwargs) -> Any: +def func_with_retry(func: Callable, max_attempts: int = 3, delay: float = 1, *args, **kwargs) -> Any: attempt = 0 while attempt < max_attempts: try: @@ -135,4 +136,5 @@ def func_with_retry(func: Callable, max_attempts: int = 3, delay: float = 2, *ar if attempt == max_attempts: raise else: - print(f"Retrying... (Attempt {attempt + 1})") + print(f"Retrying... (Attempt {attempt + 1}) in {delay} seconds") + time.sleep(delay) From ef5f00b6c0aff39d56de8db288c8ceecec1ec979 Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 13:25:29 -0500 Subject: [PATCH 49/55] missing variable --- precog/validators/weight_setter.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 56de5ef..32d2fb9 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -24,6 +24,8 @@ def __init__(self, config=None, loop=None): self.lock = asyncio.Lock() setup_bittensor_objects(self) self.timezone = timezone("UTC") + self.prediction_interval = self.config.prediction_interval # in seconds + self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict self.hyperparameters = func_with_retry(self.subtensor.get_subnet_hyperparameters, netuid=self.config.netuid) self.resync_metagraph_rate = 600 # in seconds bt.logging.info( From 189062be08cf1720be6782c019e3c8701e1b542b Mon Sep 17 00:00:00 2001 From: hscott Date: Fri, 6 Dec 2024 13:32:50 -0500 Subject: [PATCH 50/55] changed print statements to bt.logging statements --- precog/utils/general.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/precog/utils/general.py b/precog/utils/general.py index 52352c6..11e6485 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -132,9 +132,9 @@ def func_with_retry(func: Callable, max_attempts: int = 3, delay: float = 1, *ar return result except Exception as e: attempt += 1 - print(f"Attempt {attempt} failed with error: 
{e}") + bt.logging.debug(f"Function {func} failed: Attempt {attempt} of {max_attempts} with error: {e}") if attempt == max_attempts: + bt.logging.error(f"Function {func} failed {max_attempts} times, skipping.") raise else: - print(f"Retrying... (Attempt {attempt + 1}) in {delay} seconds") time.sleep(delay) From 1bdf196c513ddb62035f0b817bf95b000821633b Mon Sep 17 00:00:00 2001 From: hscott Date: Mon, 9 Dec 2024 09:35:45 -0500 Subject: [PATCH 51/55] initial api integration into reward.py function --- precog/utils/general.py | 8 ++++++++ precog/utils/timestamp.py | 22 ++++++---------------- precog/validators/reward.py | 25 +++++++++++++++---------- 3 files changed, 29 insertions(+), 26 deletions(-) diff --git a/precog/utils/general.py b/precog/utils/general.py index 11e6485..00e7c79 100644 --- a/precog/utils/general.py +++ b/precog/utils/general.py @@ -8,6 +8,7 @@ import git import requests from numpy import argsort, array, concatenate, cumsum, empty_like +from pandas import DataFrame from precog.utils.classes import NestedNamespace @@ -138,3 +139,10 @@ def func_with_retry(func: Callable, max_attempts: int = 3, delay: float = 1, *ar raise else: time.sleep(delay) + + +def pd_to_dict(data: DataFrame) -> dict: + price_dict = {} + for i in range(len(data)): + price_dict[data.time[i].to_pydatetime()] = data.iloc[i]["ReferenceRateUSD"].item() + return price_dict diff --git a/precog/utils/timestamp.py b/precog/utils/timestamp.py index 11d77b6..9a5d071 100644 --- a/precog/utils/timestamp.py +++ b/precog/utils/timestamp.py @@ -150,13 +150,12 @@ def is_query_time(prediction_interval: int, timestamp: str, tolerance: int = 120 return beginning_of_epoch -def align_timepoints(filtered_pred_dict, cm_data, cm_timestamps): +def align_timepoints(filtered_pred_dict, cm_dict): """Takes in a dictionary of predictions and aligns them to a list of coinmetrics prices. Args: filtered_pred_dict (dict): {datetime: float} dictionary of predictions. 
- cm_data (List[float]): price data from coinmetrics corresponding to the datetimes in cm_timestamps. - cm_timestamps (List[datetime]): timestamps corresponding to the values in cm_data. + cm_data (dict): {datetime: float} dictionary of coinmetrics prices. Returns: @@ -164,23 +163,14 @@ def align_timepoints(filtered_pred_dict, cm_data, cm_timestamps): aligned_cm_data (List[float]): The values in cm_data where cm_timestamps matches the timestamps in filtered_pred_dict. aligned_timestamps (List[datetime]): The timestamps corresponding to the values in aligned_pred_values and aligned_cm_data. """ - if len(cm_data) != len(cm_timestamps): - raise ValueError("cm_data and cm_timepoints must be of the same length.") - aligned_pred_values = [] aligned_cm_data = [] aligned_timestamps = [] - - # Convert cm_timepoints to a set for faster lookup - cm_timestamps_set = set(cm_timestamps) - # Loop through filtered_pred_dict to find matching datetime keys - for timestamp, pred_value in filtered_pred_dict.items(): - if timestamp in cm_timestamps_set: - # Find the index in cm_timepoints to get corresponding cm_data - index = cm_timestamps.index(timestamp) - aligned_pred_values.append(pred_value) + for timestamp, value in filtered_pred_dict.items(): + if timestamp in cm_dict: + aligned_pred_values.append(value) aligned_timestamps.append(timestamp) - aligned_cm_data.append(cm_data[index]) + aligned_cm_data.append(cm_dict[timestamp]) return aligned_pred_values, aligned_cm_data, aligned_timestamps diff --git a/precog/validators/reward.py b/precog/validators/reward.py index bad11fc..b758340 100644 --- a/precog/validators/reward.py +++ b/precog/validators/reward.py @@ -3,10 +3,12 @@ import bittensor as bt import numpy as np +from pandas import DataFrame from precog.protocol import Challenge -from precog.utils.general import rank -from precog.utils.timestamp import align_timepoints, get_now, mature_dictionary, round_minute_down +from precog.utils.cm_data import CMData +from 
precog.utils.general import pd_to_dict, rank +from precog.utils.timestamp import align_timepoints, datetime_to_iso8601, iso8601_to_datetime, mature_dictionary ################################################################################ @@ -20,21 +22,24 @@ def calc_rewards( decay = 0.9 weights = np.linspace(0, len(self.available_uids) - 1, len(self.available_uids)) decayed_weights = decay**weights - # cm_prices, cm_timestamps = get_cm_prices() # fake placeholder to get the past hours prices - cm_prices = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] - cm_timestamps = [ - round_minute_down(get_now()) - timedelta(minutes=(i + 1) * 5) for i in range(12) - ] # placeholder to align cm price timepoints to the timestamps in history - cm_timestamps.reverse() + timestamp = responses[0].timestamp + cm = CMData() + start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(hours=1)) + end_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp)) # built-ins handle CM API's formatting + # Query CM API for sample standard deviation of the 1s residuals + historical_price_data: DataFrame = cm.get_CM_ReferenceRate( + assets="BTC", start=start_time, end=end_time, frequency="1s" + ) + cm_data = pd_to_dict(historical_price_data) for uid, response in zip(self.available_uids, responses): current_miner = self.MinerHistory[uid] self.MinerHistory[uid].add_prediction(response.timestamp, response.prediction, response.interval) prediction_dict, interval_dict = current_miner.format_predictions(response.timestamp) mature_time_dict = mature_dictionary(prediction_dict) - preds, price, aligned_pred_timestamps = align_timepoints(mature_time_dict, cm_prices, cm_timestamps) + preds, price, aligned_pred_timestamps = align_timepoints(mature_time_dict, cm_data) for i, j, k in zip(preds, price, aligned_pred_timestamps): bt.logging.debug(f"Prediction: {i} | Price: {j} | Aligned Prediction: {k}") - inters, interval_prices, aligned_int_timestamps = align_timepoints(interval_dict, 
cm_prices, cm_timestamps) + inters, interval_prices, aligned_int_timestamps = align_timepoints(interval_dict, cm_data) for i, j, k in zip(inters, interval_prices, aligned_int_timestamps): bt.logging.debug(f"Interval: {i} | Interval Price: {j} | Aligned TS: {k}") point_errors.append(point_error(preds, price)) From cf13fc17fe59cb1c20817f10c8ff9889677ebc66 Mon Sep 17 00:00:00 2001 From: hscott Date: Mon, 9 Dec 2024 09:45:30 -0500 Subject: [PATCH 52/55] turned base miner into a forward function for importing --- precog/miners/base_miner.py | 203 +++++++++++++++++------------------ precog/miners/forward_bad.py | 30 ------ 2 files changed, 96 insertions(+), 137 deletions(-) delete mode 100644 precog/miners/forward_bad.py diff --git a/precog/miners/base_miner.py b/precog/miners/base_miner.py index 33e933d..ba00ba0 100644 --- a/precog/miners/base_miner.py +++ b/precog/miners/base_miner.py @@ -4,117 +4,106 @@ import bittensor as bt import pandas as pd -from precog.miners.miner import Miner from precog.protocol import Challenge -from precog.utils.classes import Config from precog.utils.cm_data import CMData -from precog.utils.general import parse_arguments from precog.utils.timestamp import datetime_to_iso8601, iso8601_to_datetime -class BaseMiner(Miner): +def get_point_estimate(timestamp: str) -> float: + """Make a naive forecast by predicting the most recent price - def get_point_estimate(self, timestamp: str) -> float: - """Make a naive forecast by predicting the most recent price + Args: + timestamp (str): The current timestamp provided by the validator request - Args: - timestamp (str): The current timestamp provided by the validator request - - Returns: - (float): The current BTC price tied to the provided timestamp - """ - # Create data gathering instance - cm = CMData() - - # Set the time range to be as small as possible for query speed - # Set the start time as 2 seconds prior to the provided time - start_time: str = 
datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(seconds=2)) - end_time: str = timestamp - - # Query CM API for a pandas dataframe with only one record - price_data: pd.DataFrame = cm.get_CM_ReferenceRate(assets="BTC", start=start_time, end=end_time) - - # Get current price closest to the provided timestamp - btc_price: float = float(price_data["ReferenceRateUSD"].iloc[-1]) - - # Return the current price of BTC as our point estimate - return btc_price - - def get_prediction_interval(self, timestamp: str, point_estimate: float) -> Tuple[float, float]: - """Make a naive multi-step prediction interval by estimating - the sample standard deviation - - Args: - timestamp (str): The current timestamp provided by the validator request - point_estimate (float): The center of the prediction interval - - Returns: - (float): The 90% naive prediction interval lower bound - (float): The 90% naive prediction interval upper bound - - Notes: - Make reasonable assumptions that the 1s BTC price residuals are - uncorrelated and normally distributed - """ - # Create data gathering instance - cm = CMData() - - # Set the time range to be 24 hours - start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(days=1)) - end_time: str = timestamp - - # Query CM API for sample standard deviation of the 1s residuals - historical_price_data: pd.DataFrame = cm.get_CM_ReferenceRate( - assets="BTC", start=start_time, end=end_time, frequency="1s" - ) - residuals: pd.Series = historical_price_data["ReferenceRateUSD"].diff() - sample_std_dev: float = float(residuals.std()) - - # We have the standard deviation of the 1s residuals - # We are forecasting forward 5m, which is 300s - # We must scale the 1s sample standard deviation to reflect a 300s forecast - # Make reasonable assumptions that the 1s residuals are uncorrelated and normally distributed - # To do this naively, we multiply the std dev by the square root of the number of time steps - time_steps: int = 300 
- naive_forecast_std_dev: float = sample_std_dev * (time_steps**0.5) - - # For a 90% prediction interval, we use the coefficient 1.64 - # Make reasonable assumptions that the 1s residuals are uncorrelated and normally distributed - coefficient: float = 1.64 - - # Calculate the lower bound and upper bound - lower_bound: float = point_estimate - coefficient * naive_forecast_std_dev - upper_bound: float = point_estimate + coefficient * naive_forecast_std_dev - - # Return the naive prediction interval for our forecast - return lower_bound, upper_bound - - async def forward(self, synapse: Challenge) -> Challenge: - bt.logging.info( - f"πŸ‘ˆ Received prediction request from: {synapse.dendrite.hotkey} for timestamp: {synapse.timestamp}" - ) - - # Get the naive point estimate - point_estimate: float = self.get_point_estimate(timestamp=synapse.timestamp) - - # Get the naive prediction interval - prediction_interval: Tuple[float, float] = self.get_prediction_interval( - timestamp=synapse.timestamp, point_estimate=point_estimate - ) - - synapse.prediction = point_estimate - synapse.interval = prediction_interval - - if synapse.prediction is not None: - bt.logging.success(f"Predicted price: {synapse.prediction} | Predicted Interval: {synapse.interval}") - else: - bt.logging.info("No prediction for this request.") - return synapse - - -# Run the miner -if __name__ == "__main__": - args = parse_arguments() - config = Config(args) - miner = BaseMiner(config=config) - miner.loop.run_forever() + Returns: + (float): The current BTC price tied to the provided timestamp + """ + # Create data gathering instance + cm = CMData() + + # Set the time range to be as small as possible for query speed + # Set the start time as 2 seconds prior to the provided time + start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(seconds=2)) + end_time: str = timestamp + + # Query CM API for a pandas dataframe with only one record + price_data: pd.DataFrame = 
cm.get_CM_ReferenceRate(assets="BTC", start=start_time, end=end_time) + + # Get current price closest to the provided timestamp + btc_price: float = float(price_data["ReferenceRateUSD"].iloc[-1]) + + # Return the current price of BTC as our point estimate + return btc_price + + +def get_prediction_interval(timestamp: str, point_estimate: float) -> Tuple[float, float]: + """Make a naive multi-step prediction interval by estimating + the sample standard deviation + + Args: + timestamp (str): The current timestamp provided by the validator request + point_estimate (float): The center of the prediction interval + + Returns: + (float): The 90% naive prediction interval lower bound + (float): The 90% naive prediction interval upper bound + + Notes: + Make reasonable assumptions that the 1s BTC price residuals are + uncorrelated and normally distributed + """ + # Create data gathering instance + cm = CMData() + + # Set the time range to be 24 hours + start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(days=1)) + end_time: str = timestamp + + # Query CM API for sample standard deviation of the 1s residuals + historical_price_data: pd.DataFrame = cm.get_CM_ReferenceRate( + assets="BTC", start=start_time, end=end_time, frequency="1s" + ) + residuals: pd.Series = historical_price_data["ReferenceRateUSD"].diff() + sample_std_dev: float = float(residuals.std()) + + # We have the standard deviation of the 1s residuals + # We are forecasting forward 5m, which is 300s + # We must scale the 1s sample standard deviation to reflect a 300s forecast + # Make reasonable assumptions that the 1s residuals are uncorrelated and normally distributed + # To do this naively, we multiply the std dev by the square root of the number of time steps + time_steps: int = 300 + naive_forecast_std_dev: float = sample_std_dev * (time_steps**0.5) + + # For a 90% prediction interval, we use the coefficient 1.64 + # Make reasonable assumptions that the 1s residuals are 
uncorrelated and normally distributed + coefficient: float = 1.64 + + # Calculate the lower bound and upper bound + lower_bound: float = point_estimate - coefficient * naive_forecast_std_dev + upper_bound: float = point_estimate + coefficient * naive_forecast_std_dev + + # Return the naive prediction interval for our forecast + return lower_bound, upper_bound + + +async def forward(synapse: Challenge) -> Challenge: + bt.logging.info( + f"πŸ‘ˆ Received prediction request from: {synapse.dendrite.hotkey} for timestamp: {synapse.timestamp}" + ) + + # Get the naive point estimate + point_estimate: float = get_point_estimate(timestamp=synapse.timestamp) + + # Get the naive prediction interval + prediction_interval: Tuple[float, float] = get_prediction_interval( + timestamp=synapse.timestamp, point_estimate=point_estimate + ) + + synapse.prediction = point_estimate + synapse.interval = prediction_interval + + if synapse.prediction is not None: + bt.logging.success(f"Predicted price: {synapse.prediction} | Predicted Interval: {synapse.interval}") + else: + bt.logging.info("No prediction for this request.") + return synapse diff --git a/precog/miners/forward_bad.py b/precog/miners/forward_bad.py deleted file mode 100644 index 831c62c..0000000 --- a/precog/miners/forward_bad.py +++ /dev/null @@ -1,30 +0,0 @@ -import bittensor as bt - -from precog.protocol import Challenge - - -def forward(synapse: Challenge) -> Challenge: - """ - Optimized forward function for low latency and caching. 
- """ - bt.logging.info( - f"πŸ‘ˆ Received prediction request from: {synapse.dendrite.hotkey} for timestamp: {synapse.timestamp}" - ) - middle_intervals = [ - [0, 30], - [5, 7], - [2, 20], - [3, 12], - [1, 15], - [0, 30], - [5, 20], - [7, 10], - [1, 12], - [9, 12], - [4, 13], - [0, 30], - ] - synapse.prediction = [30] - synapse.interval = middle_intervals[0] - - return synapse From 3d21396707c65395491a082e30feca21550d8063 Mon Sep 17 00:00:00 2001 From: hscott Date: Mon, 9 Dec 2024 10:22:49 -0500 Subject: [PATCH 53/55] fixed iso_to_datetime function to handle z formatted strings --- precog/miners/base_miner.py | 2 +- precog/miners/miner.py | 2 +- precog/utils/timestamp.py | 2 +- precog/validators/weight_setter.py | 13 ++++++++++--- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/precog/miners/base_miner.py b/precog/miners/base_miner.py index ba00ba0..620c7cb 100644 --- a/precog/miners/base_miner.py +++ b/precog/miners/base_miner.py @@ -86,7 +86,7 @@ def get_prediction_interval(timestamp: str, point_estimate: float) -> Tuple[floa return lower_bound, upper_bound -async def forward(synapse: Challenge) -> Challenge: +def forward(synapse: Challenge) -> Challenge: bt.logging.info( f"πŸ‘ˆ Received prediction request from: {synapse.dendrite.hotkey} for timestamp: {synapse.timestamp}" ) diff --git a/precog/miners/miner.py b/precog/miners/miner.py index 535c073..a603728 100644 --- a/precog/miners/miner.py +++ b/precog/miners/miner.py @@ -22,7 +22,7 @@ class Miner: def __init__(self, config=None): args = parse_arguments() config = Config(args) - self.forward_module = importlib.import_module(f"precog.miners.{config.forward_function}", package="forward") + self.forward_module = importlib.import_module(f"precog.miners.{config.forward_function}") self.config = config self.config.neuron.type = "Miner" setup_bittensor_objects(self) diff --git a/precog/utils/timestamp.py b/precog/utils/timestamp.py index 9a5d071..e0d2d30 100644 --- a/precog/utils/timestamp.py +++ 
b/precog/utils/timestamp.py @@ -73,7 +73,7 @@ def iso8601_to_datetime(timestamp: str) -> datetime: """ Convert iso 8601 string to datetime """ - return datetime.fromisoformat(timestamp) + return datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%fZ") def posix_to_datetime(timestamp: float) -> datetime: diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py index 32d2fb9..9f99398 100755 --- a/precog/validators/weight_setter.py +++ b/precog/validators/weight_setter.py @@ -12,7 +12,14 @@ from precog.utils.bittensor import check_uid_availability, print_info, setup_bittensor_objects from precog.utils.classes import MinerHistory from precog.utils.general import func_with_retry, loop_handler -from precog.utils.timestamp import elapsed_seconds, get_before, get_now, is_query_time, iso8601_to_datetime +from precog.utils.timestamp import ( + datetime_to_iso8601, + elapsed_seconds, + get_before, + get_now, + is_query_time, + iso8601_to_datetime, +) from precog.utils.wandb import log_wandb, setup_wandb from precog.validators.reward import calc_rewards @@ -106,7 +113,7 @@ async def resync_metagraph(self): self.save_state() def query_miners(self): - timestamp = get_now().isoformat() + timestamp = datetime_to_iso8601(get_now()) synapse = Challenge(timestamp=timestamp) responses = self.dendrite.query( # Send the query to selected miner axons in the network. @@ -156,7 +163,7 @@ async def set_weights(self): async def scheduled_prediction_request(self): if not hasattr(self, "timestamp"): - self.timestamp = get_before(minutes=self.prediction_interval).isoformat() + self.timestamp = datetime_to_iso8601(get_before(minutes=self.prediction_interval)) query_lag = elapsed_seconds(get_now(), iso8601_to_datetime(self.timestamp)) if len(self.available_uids) == 0: bt.logging.info("No miners available. 
Sleeping for 10 minutes...") From 52e3901dd3aa31b394be3eb34db6f469b31c08c1 Mon Sep 17 00:00:00 2001 From: hscott Date: Mon, 9 Dec 2024 10:38:44 -0500 Subject: [PATCH 54/55] added new function to handle cm-specific timestamp formatting --- precog/miners/base_miner.py | 10 +++++----- precog/utils/timestamp.py | 13 ++++++++++--- precog/validators/reward.py | 6 +++--- 3 files changed, 18 insertions(+), 11 deletions(-) diff --git a/precog/miners/base_miner.py b/precog/miners/base_miner.py index 620c7cb..4a2281e 100644 --- a/precog/miners/base_miner.py +++ b/precog/miners/base_miner.py @@ -6,7 +6,7 @@ from precog.protocol import Challenge from precog.utils.cm_data import CMData -from precog.utils.timestamp import datetime_to_iso8601, iso8601_to_datetime +from precog.utils.timestamp import datetime_to_CM_timestamp, iso8601_to_datetime def get_point_estimate(timestamp: str) -> float: @@ -23,8 +23,8 @@ def get_point_estimate(timestamp: str) -> float: # Set the time range to be as small as possible for query speed # Set the start time as 2 seconds prior to the provided time - start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(seconds=2)) - end_time: str = timestamp + start_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp) - timedelta(days=1)) + end_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp)) # built-ins handle CM API's formatting # Query CM API for a pandas dataframe with only one record price_data: pd.DataFrame = cm.get_CM_ReferenceRate(assets="BTC", start=start_time, end=end_time) @@ -56,8 +56,8 @@ def get_prediction_interval(timestamp: str, point_estimate: float) -> Tuple[floa cm = CMData() # Set the time range to be 24 hours - start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(days=1)) - end_time: str = timestamp + start_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp) - timedelta(days=1)) + end_time: str = 
datetime_to_CM_timestamp(iso8601_to_datetime(timestamp)) # built-ins handle CM API's formatting # Query CM API for sample standard deviation of the 1s residuals historical_price_data: pd.DataFrame = cm.get_CM_ReferenceRate( diff --git a/precog/utils/timestamp.py b/precog/utils/timestamp.py index e0d2d30..f253601 100644 --- a/precog/utils/timestamp.py +++ b/precog/utils/timestamp.py @@ -66,14 +66,14 @@ def datetime_to_iso8601(timestamp: datetime) -> str: """ Convert datetime to iso 8601 string """ - return timestamp.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + return timestamp.isoformat() def iso8601_to_datetime(timestamp: str) -> datetime: """ Convert iso 8601 string to datetime """ - return datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%fZ") + return datetime.fromisoformat(timestamp) def posix_to_datetime(timestamp: float) -> datetime: @@ -83,6 +83,13 @@ def posix_to_datetime(timestamp: float) -> datetime: return datetime.fromtimestamp(timestamp, tz=get_timezone()) +def datetime_to_CM_timestamp(timestamp: datetime) -> str: + """ + Convert datetime to a coinmetrics API timestamp string + """ + return timestamp.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + + ############################### # FUNCTIONS # ############################### @@ -155,7 +162,7 @@ def align_timepoints(filtered_pred_dict, cm_dict): Args: filtered_pred_dict (dict): {datetime: float} dictionary of predictions. - cm_data (dict): {datetime: float} dictionary of coinmetrics prices. + cm_data (dict): {datetime: float} dictionary of prices. 
Returns: diff --git a/precog/validators/reward.py b/precog/validators/reward.py index b758340..e63e928 100644 --- a/precog/validators/reward.py +++ b/precog/validators/reward.py @@ -8,7 +8,7 @@ from precog.protocol import Challenge from precog.utils.cm_data import CMData from precog.utils.general import pd_to_dict, rank -from precog.utils.timestamp import align_timepoints, datetime_to_iso8601, iso8601_to_datetime, mature_dictionary +from precog.utils.timestamp import align_timepoints, datetime_to_CM_timestamp, iso8601_to_datetime, mature_dictionary ################################################################################ @@ -24,8 +24,8 @@ def calc_rewards( decayed_weights = decay**weights timestamp = responses[0].timestamp cm = CMData() - start_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp) - timedelta(hours=1)) - end_time: str = datetime_to_iso8601(iso8601_to_datetime(timestamp)) # built-ins handle CM API's formatting + start_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp) - timedelta(hours=1)) + end_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp)) # built-ins handle CM API's formatting # Query CM API for sample standard deviation of the 1s residuals historical_price_data: DataFrame = cm.get_CM_ReferenceRate( assets="BTC", start=start_time, end=end_time, frequency="1s" From c0dd328729dbd4b7f420b9e89f4e7bf8139ea91f Mon Sep 17 00:00:00 2001 From: hscott Date: Mon, 9 Dec 2024 10:42:40 -0500 Subject: [PATCH 55/55] changed challenge prediction field from list to float --- precog/protocol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/precog/protocol.py b/precog/protocol.py index e90e4ab..7fd3e45 100644 --- a/precog/protocol.py +++ b/precog/protocol.py @@ -35,7 +35,7 @@ class Challenge(bt.Synapse): ) # Optional request output, filled by recieving axon. 
- prediction: Optional[List[float]] = pydantic.Field( + prediction: Optional[float] = pydantic.Field( default=None, title="Predictions", description="The predictions to send to the dendrite caller",