diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..4af059f
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,84 @@
+name: Cache Build
+
+on:
+ workflow_call:
+ inputs:
+ command:
+ required: true
+ type: string
+ name:
+ required: true
+ type: string
+
+jobs:
+ reusable-build:
+ name: ${{ inputs.name }}
+ runs-on: ubuntu-latest
+ steps:
+
+ #------------------------------------------------
+ # check-out repo and set-up python
+ #------------------------------------------------
+ - name: Check out repository
+ uses: actions/checkout@v4
+ - name: Set up python
+ id: setup-python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+
+ #------------------------------------------------
+ # ----- install & configure poetry -----
+ #------------------------------------------------
+ - name: Load cached Poetry installation
+ id: cached-poetry
+ uses: actions/cache@v4
+ with:
+ path: ~/.local # the path depends on the OS
+ key: poetry-0 # increment to reset cache
+
+ - name: Install Poetry
+ if: steps.cached-poetry.outputs.cache-hit != 'true'
+ uses: snok/install-poetry@v1
+ with:
+ version: latest
+ virtualenvs-create: true
+ virtualenvs-in-project: true
+ virtualenvs-path: .venv
+ installer-parallel: true
+
+ # If cache was loaded, we must redo configuration
+ - name: Configure poetry
+ if: steps.cached-poetry.outputs.cache-hit == 'true'
+ run: |
+ poetry config virtualenvs.create true
+ poetry config virtualenvs.in-project true
+ poetry config virtualenvs.path .venv
+
+ #------------------------------------------------
+ # Load cached venv if exists
+ #------------------------------------------------
+ - name: Load cached venv
+ id: cached-poetry-dependencies
+ uses: actions/cache@v4
+ with:
+ path: .venv
+ key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }}
+
+ #------------------------------------------------
+ # Install dependencies if cache does not exist
+ #------------------------------------------------
+ - name: Install dependencies
+ if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
+ run: poetry install --no-interaction --no-root
+
+ #------------------------------------------------
+ # Install your root project
+ #------------------------------------------------
+ - name: Install project
+ run: poetry install --no-interaction
+
+ #------------------------------------------------
+ # Run custom command(s) within venv
+ #------------------------------------------------
+ - name: Run commands
+ run: ${{ inputs.command }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..63e9043
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,71 @@
+name: CI
+
+on:
+ pull_request:
+ push:
+ branches: [master, staging, dev]
+
+jobs:
+ #----------------------------------------------
+ # Build Environment
+ #----------------------------------------------
+ build:
+ name: Build
+ uses: ./.github/workflows/build.yml
+ with:
+ name: Cache
+ command: |
+ poetry run python -m pip list
+ poetry run python --version
+ poetry --version
+ poetry run echo "Build successful"
+
+ #----------------------------------------------
+ # Run Linters
+ #----------------------------------------------
+ lint-black:
+ name: Linter
+ needs: build
+ uses: ./.github/workflows/build.yml
+ with:
+ name: Black
+ command: poetry run python -m black --check .
+ lint-isort:
+ name: Linter
+ needs: build
+ uses: ./.github/workflows/build.yml
+ with:
+ name: Isort
+ command: poetry run python -m isort --check-only .
+ lint-mypy:
+ name: Linter
+ needs: build
+ if: false # This condition ensures the job is never executed
+ uses: ./.github/workflows/build.yml
+ with:
+ name: Mypy
+ command: poetry run python -m mypy --verbose .
+ lint-flake8:
+ name: Linter
+ needs: build
+ uses: ./.github/workflows/build.yml
+ with:
+ name: Flake8
+ command: poetry run python -m flake8 .
+
+ #----------------------------------------------
+ # Run Tests
+ #----------------------------------------------
+ test-unittest:
+ name: Tests
+ needs: [
+ lint-black,
+ lint-isort,
+ lint-mypy,
+ lint-flake8,
+ ]
+ if: ${{ always() }} # will run the tests regardless of linting success
+ uses: ./.github/workflows/build.yml
+ with:
+ name: Unittests
+ command: poetry run pytest tests/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 704824e..3ddb7af 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -94,7 +94,7 @@ repos:
language: system
pass_filenames: false
always_run: true
- args: [--branch, main, --branch, staging, --branch, dev]
+ args: [--branch, master, --branch, staging, --branch, dev]
- id: trailing-whitespace
name: trim trailing whitespace
description: trims trailing whitespace.
diff --git a/.python-version b/.python-version
new file mode 100644
index 0000000..2c07333
--- /dev/null
+++ b/.python-version
@@ -0,0 +1 @@
+3.11
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..7562375
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Opentensor
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/Makefile b/Makefile
index cee96c2..721c621 100644
--- a/Makefile
+++ b/Makefile
@@ -1,7 +1,19 @@
-network = ws://127.0.0.1:9944
-netuid = 1
+## Network Parameters ##
+finney = wss://entrypoint-finney.opentensor.ai:443
+testnet = wss://test.finney.opentensor.ai:443
+localnet = ws://127.0.0.1:9944
+
+testnet_netuid = 256
+localnet_netuid = 1
logging_level = trace # options= ['info', 'debug', 'trace']
-coldkey = cm-owner
+
+netuid = $(testnet_netuid)
+network = $(testnet)
+
+## User Parameters
+coldkey = default
+validator_hotkey = validator
+miner_hotkey = miner
metagraph:
btcli subnet metagraph --netuid $(netuid) --subtensor.chain_endpoint $(network)
@@ -16,32 +28,23 @@ validator:
python start_validator.py \
--neuron.name validator \
--wallet.name $(coldkey) \
- --wallet.hotkey validator \
+ --wallet.hotkey $(validator_hotkey) \
--subtensor.chain_endpoint $(network) \
--axon.port 30335 \
--netuid $(netuid) \
--logging.level $(logging_level)
-validator2:
- python start_validator.py \
- --neuron.name validator2 \
- --wallet.name $(coldkey) \
- --wallet.hotkey validator2 \
- --subtensor.chain_endpoint $(network) \
- --axon.port 30339 \
- --netuid $(netuid) \
- --logging.level $(logging_level)
-
miner:
python start_miner.py \
--neuron.name miner \
--wallet.name $(coldkey) \
- --wallet.hotkey miner \
+ --wallet.hotkey $(miner_hotkey) \
--subtensor.chain_endpoint $(network) \
--axon.port 30336 \
--netuid $(netuid) \
--logging.level $(logging_level) \
--timeout 16 \
+ --vpermit_tao_limit 2 \
--forward_function forward
miner2:
@@ -55,25 +58,3 @@ miner2:
--logging.level $(logging_level) \
--timeout 16 \
--forward_function forward_bad
-
-miner3:
- python start_miner.py \
- --neuron.name miner3 \
- --wallet.name $(coldkey) \
- --wallet.hotkey miner3 \
- --subtensor.chain_endpoint $(network) \
- --axon.port 30338 \
- --netuid $(netuid) \
- --logging.level $(logging_level) \
- --timeout 16 \
- --forward_function forward
-
-setup_local:
- btcli wallet faucet --wallet.name $(coldkey) --subtensor.chain_endpoint $(network) ;\
- btcli subnet create --wallet.name $(coldkey) --subtensor.chain_endpoint $(network) ;\
- btcli subnet register \
- --wallet.name $(coldkey) \
- --wallet.hotkey validator \
- --netuid $(netuid)
- --subtensor.chain_endpoint $(network) ;\
- btcli stake add --wallet.name $(coldkey) --wallet.hotkey validator --amount 1024 ;\
diff --git a/README.md b/README.md
index 35b40d9..ad58612 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,10 @@
| | |
| :-: | :-: |
-Badges here?
+| **Status** | |
+| **Activity** | |
+| **Compatibility** | |
+| **Social** | |
@@ -20,8 +23,8 @@ Badges here?
- [Running a Miner](#running-a-miner)
- [Running a Validator](#running-a-validator)
- [About the Rewards Mechanism](#about-the-rewards-mechanism)
-- [Compute Requirements](#compute-requirements)
- [Roadmap](#roadmap)
+- [Compute Requirements](#compute-requirements)
- [License](#license)
---
@@ -56,7 +59,7 @@ cd precog
Create and source a python virtual environment:
```
-python3 -m venv
+python3 -m venv .venv
source .venv/bin/activate
```
@@ -75,23 +78,23 @@ Start by editing the Makefile with your wallet and network information.
### Running a Miner
TODO: write this \
-Base miner:
+Base miner:
1. Run the command:
- ```
- make miner
+ ```
+ make miner
```
Custom miner:
-1. Write a custom forward function stored in precog/miners/your_function.py
+1. Write a custom forward function stored in precog/miners/your_function.py
- This function should handle how the miner responds to requests from the validator
- Within the function, synapse.predictions and synapse.interval should be set.
- - See [forward.py](https://github.com/coinmetrics/precog/blob/master/precog/miners/forward.py) for an example
+ - See [forward.py](https://github.com/coinmetrics/precog/blob/master/precog/miners/forward.py) for an example
2. Add a command to Makefile.
- copy the miner command and rename it (e.g. miner_custom) in Makefile
- replace the --forward_function argument with your_function
3. Run the Command:
- ```
- make miner_custom
+ ```
+ make miner_custom
```
@@ -119,7 +122,7 @@ TODO: update these
This repository is licensed under the MIT License.
```text
# The MIT License (MIT)
-# Copyright © 2024 Foundry Digital LLC
+# Copyright © 2024 CoinMetrics LLC
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the “Software”), to deal in the Software without restriction, including without limitation
diff --git a/docs/Release Notes.md b/docs/Release Notes.md
new file mode 100644
index 0000000..439a8fc
--- /dev/null
+++ b/docs/Release Notes.md
@@ -0,0 +1,8 @@
+Release Notes
+=============
+
+1.0.0
+-----
+Release on
+- ...
+- ...
diff --git a/poetry.lock b/poetry.lock
index 4b7abfe..a1d31cb 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -774,6 +774,28 @@ files = [
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
+[[package]]
+name = "coinmetrics-api-client"
+version = "2024.11.21.20"
+description = "Python client for Coin Metrics API v4."
+optional = false
+python-versions = "<4.0.0,>=3.7.1"
+files = [
+ {file = "coinmetrics_api_client-2024.11.21.20-py3-none-any.whl", hash = "sha256:21ec15cba06416b450e7c5168c8a2dc42767f9df970ab23330f151205f6cd81f"},
+ {file = "coinmetrics_api_client-2024.11.21.20.tar.gz", hash = "sha256:5dae5e5b3724865006c9f035734f82b35d4294ead17d892cda6773db006e3f2a"},
+]
+
+[package.dependencies]
+orjson = ">=3.6.0,<4.0.0"
+python-dateutil = ">=2.8.2,<3.0.0"
+requests = ">=2.24.0,<3.0.0"
+tqdm = ">=4.64.1,<5.0.0"
+typer = ">=0.7.0"
+websocket-client = ">=1.2.1,<2.0.0"
+
+[package.extras]
+pandas = ["pandas (>=1.3.3,<2.0.0)"]
+
[[package]]
name = "colorama"
version = "0.4.6"
@@ -1954,6 +1976,90 @@ files = [
{file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"},
]
+[[package]]
+name = "orjson"
+version = "3.10.12"
+description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "orjson-3.10.12-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ece01a7ec71d9940cc654c482907a6b65df27251255097629d0dea781f255c6d"},
+ {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c34ec9aebc04f11f4b978dd6caf697a2df2dd9b47d35aa4cc606cabcb9df69d7"},
+ {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6ec8658da3480939c79b9e9e27e0db31dffcd4ba69c334e98c9976ac29140e"},
+ {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17e6baf4cf01534c9de8a16c0c611f3d94925d1701bf5f4aff17003677d8ced"},
+ {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6402ebb74a14ef96f94a868569f5dccf70d791de49feb73180eb3c6fda2ade56"},
+ {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0000758ae7c7853e0a4a6063f534c61656ebff644391e1f81698c1b2d2fc8cd2"},
+ {file = "orjson-3.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:888442dcee99fd1e5bd37a4abb94930915ca6af4db50e23e746cdf4d1e63db13"},
+ {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c1f7a3ce79246aa0e92f5458d86c54f257fb5dfdc14a192651ba7ec2c00f8a05"},
+ {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:802a3935f45605c66fb4a586488a38af63cb37aaad1c1d94c982c40dcc452e85"},
+ {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1da1ef0113a2be19bb6c557fb0ec2d79c92ebd2fed4cfb1b26bab93f021fb885"},
+ {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a3273e99f367f137d5b3fecb5e9f45bcdbfac2a8b2f32fbc72129bbd48789c2"},
+ {file = "orjson-3.10.12-cp310-none-win32.whl", hash = "sha256:475661bf249fd7907d9b0a2a2421b4e684355a77ceef85b8352439a9163418c3"},
+ {file = "orjson-3.10.12-cp310-none-win_amd64.whl", hash = "sha256:87251dc1fb2b9e5ab91ce65d8f4caf21910d99ba8fb24b49fd0c118b2362d509"},
+ {file = "orjson-3.10.12-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a734c62efa42e7df94926d70fe7d37621c783dea9f707a98cdea796964d4cf74"},
+ {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:750f8b27259d3409eda8350c2919a58b0cfcd2054ddc1bd317a643afc646ef23"},
+ {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb52c22bfffe2857e7aa13b4622afd0dd9d16ea7cc65fd2bf318d3223b1b6252"},
+ {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:440d9a337ac8c199ff8251e100c62e9488924c92852362cd27af0e67308c16ef"},
+ {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9e15c06491c69997dfa067369baab3bf094ecb74be9912bdc4339972323f252"},
+ {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:362d204ad4b0b8724cf370d0cd917bb2dc913c394030da748a3bb632445ce7c4"},
+ {file = "orjson-3.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b57cbb4031153db37b41622eac67329c7810e5f480fda4cfd30542186f006ae"},
+ {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:165c89b53ef03ce0d7c59ca5c82fa65fe13ddf52eeb22e859e58c237d4e33b9b"},
+ {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5dee91b8dfd54557c1a1596eb90bcd47dbcd26b0baaed919e6861f076583e9da"},
+ {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a4e1cfb72de6f905bdff061172adfb3caf7a4578ebf481d8f0530879476c07"},
+ {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:038d42c7bc0606443459b8fe2d1f121db474c49067d8d14c6a075bbea8bf14dd"},
+ {file = "orjson-3.10.12-cp311-none-win32.whl", hash = "sha256:03b553c02ab39bed249bedd4abe37b2118324d1674e639b33fab3d1dafdf4d79"},
+ {file = "orjson-3.10.12-cp311-none-win_amd64.whl", hash = "sha256:8b8713b9e46a45b2af6b96f559bfb13b1e02006f4242c156cbadef27800a55a8"},
+ {file = "orjson-3.10.12-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:53206d72eb656ca5ac7d3a7141e83c5bbd3ac30d5eccfe019409177a57634b0d"},
+ {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac8010afc2150d417ebda810e8df08dd3f544e0dd2acab5370cfa6bcc0662f8f"},
+ {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed459b46012ae950dd2e17150e838ab08215421487371fa79d0eced8d1461d70"},
+ {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dcb9673f108a93c1b52bfc51b0af422c2d08d4fc710ce9c839faad25020bb69"},
+ {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22a51ae77680c5c4652ebc63a83d5255ac7d65582891d9424b566fb3b5375ee9"},
+ {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910fdf2ac0637b9a77d1aad65f803bac414f0b06f720073438a7bd8906298192"},
+ {file = "orjson-3.10.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:24ce85f7100160936bc2116c09d1a8492639418633119a2224114f67f63a4559"},
+ {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a76ba5fc8dd9c913640292df27bff80a685bed3a3c990d59aa6ce24c352f8fc"},
+ {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ff70ef093895fd53f4055ca75f93f047e088d1430888ca1229393a7c0521100f"},
+ {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4244b7018b5753ecd10a6d324ec1f347da130c953a9c88432c7fbc8875d13be"},
+ {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:16135ccca03445f37921fa4b585cff9a58aa8d81ebcb27622e69bfadd220b32c"},
+ {file = "orjson-3.10.12-cp312-none-win32.whl", hash = "sha256:2d879c81172d583e34153d524fcba5d4adafbab8349a7b9f16ae511c2cee8708"},
+ {file = "orjson-3.10.12-cp312-none-win_amd64.whl", hash = "sha256:fc23f691fa0f5c140576b8c365bc942d577d861a9ee1142e4db468e4e17094fb"},
+ {file = "orjson-3.10.12-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47962841b2a8aa9a258b377f5188db31ba49af47d4003a32f55d6f8b19006543"},
+ {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6334730e2532e77b6054e87ca84f3072bee308a45a452ea0bffbbbc40a67e296"},
+ {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:accfe93f42713c899fdac2747e8d0d5c659592df2792888c6c5f829472e4f85e"},
+ {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a7974c490c014c48810d1dede6c754c3cc46598da758c25ca3b4001ac45b703f"},
+ {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3f250ce7727b0b2682f834a3facff88e310f52f07a5dcfd852d99637d386e79e"},
+ {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f31422ff9486ae484f10ffc51b5ab2a60359e92d0716fcce1b3593d7bb8a9af6"},
+ {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5f29c5d282bb2d577c2a6bbde88d8fdcc4919c593f806aac50133f01b733846e"},
+ {file = "orjson-3.10.12-cp313-none-win32.whl", hash = "sha256:f45653775f38f63dc0e6cd4f14323984c3149c05d6007b58cb154dd080ddc0dc"},
+ {file = "orjson-3.10.12-cp313-none-win_amd64.whl", hash = "sha256:229994d0c376d5bdc91d92b3c9e6be2f1fbabd4cc1b59daae1443a46ee5e9825"},
+ {file = "orjson-3.10.12-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7d69af5b54617a5fac5c8e5ed0859eb798e2ce8913262eb522590239db6c6763"},
+ {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ed119ea7d2953365724a7059231a44830eb6bbb0cfead33fcbc562f5fd8f935"},
+ {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c5fc1238ef197e7cad5c91415f524aaa51e004be5a9b35a1b8a84ade196f73f"},
+ {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43509843990439b05f848539d6f6198d4ac86ff01dd024b2f9a795c0daeeab60"},
+ {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f72e27a62041cfb37a3de512247ece9f240a561e6c8662276beaf4d53d406db4"},
+ {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a904f9572092bb6742ab7c16c623f0cdccbad9eeb2d14d4aa06284867bddd31"},
+ {file = "orjson-3.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:855c0833999ed5dc62f64552db26f9be767434917d8348d77bacaab84f787d7b"},
+ {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:897830244e2320f6184699f598df7fb9db9f5087d6f3f03666ae89d607e4f8ed"},
+ {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:0b32652eaa4a7539f6f04abc6243619c56f8530c53bf9b023e1269df5f7816dd"},
+ {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:36b4aa31e0f6a1aeeb6f8377769ca5d125db000f05c20e54163aef1d3fe8e833"},
+ {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5535163054d6cbf2796f93e4f0dbc800f61914c0e3c4ed8499cf6ece22b4a3da"},
+ {file = "orjson-3.10.12-cp38-none-win32.whl", hash = "sha256:90a5551f6f5a5fa07010bf3d0b4ca2de21adafbbc0af6cb700b63cd767266cb9"},
+ {file = "orjson-3.10.12-cp38-none-win_amd64.whl", hash = "sha256:703a2fb35a06cdd45adf5d733cf613cbc0cb3ae57643472b16bc22d325b5fb6c"},
+ {file = "orjson-3.10.12-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f29de3ef71a42a5822765def1febfb36e0859d33abf5c2ad240acad5c6a1b78d"},
+ {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de365a42acc65d74953f05e4772c974dad6c51cfc13c3240899f534d611be967"},
+ {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a5a0158648a67ff0004cb0df5df7dcc55bfc9ca154d9c01597a23ad54c8d0c"},
+ {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c47ce6b8d90fe9646a25b6fb52284a14ff215c9595914af63a5933a49972ce36"},
+ {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0eee4c2c5bfb5c1b47a5db80d2ac7aaa7e938956ae88089f098aff2c0f35d5d8"},
+ {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35d3081bbe8b86587eb5c98a73b97f13d8f9fea685cf91a579beddacc0d10566"},
+ {file = "orjson-3.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c23a6e90383884068bc2dba83d5222c9fcc3b99a0ed2411d38150734236755"},
+ {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5472be7dc3269b4b52acba1433dac239215366f89dc1d8d0e64029abac4e714e"},
+ {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:7319cda750fca96ae5973efb31b17d97a5c5225ae0bc79bf5bf84df9e1ec2ab6"},
+ {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:74d5ca5a255bf20b8def6a2b96b1e18ad37b4a122d59b154c458ee9494377f80"},
+ {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ff31d22ecc5fb85ef62c7d4afe8301d10c558d00dd24274d4bbe464380d3cd69"},
+ {file = "orjson-3.10.12-cp39-none-win32.whl", hash = "sha256:c22c3ea6fba91d84fcb4cda30e64aff548fcf0c44c876e681f47d61d24b12e6b"},
+ {file = "orjson-3.10.12-cp39-none-win_amd64.whl", hash = "sha256:be604f60d45ace6b0b33dd990a66b4526f1a7a186ac411c942674625456ca548"},
+ {file = "orjson-3.10.12.tar.gz", hash = "sha256:0a78bbda3aea0f9f079057ee1ee8a1ecf790d4f1af88dd67493c6b8ee52506ff"},
+]
+
[[package]]
name = "packaging"
version = "24.2"
@@ -3506,6 +3612,27 @@ files = [
{file = "toolz-1.0.0.tar.gz", hash = "sha256:2c86e3d9a04798ac556793bced838816296a2f085017664e4995cb40a1047a02"},
]
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+description = "Fast, Extensible Progress Meter"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"},
+ {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"]
+discord = ["requests"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
[[package]]
name = "typer"
version = "0.13.0"
@@ -3989,4 +4116,4 @@ propcache = ">=0.2.0"
[metadata]
lock-version = "2.0"
python-versions = ">= 3.9, < 3.12"
-content-hash = "365cbd298623955c424f43db374933ddd86d2b18c592a95cb44d48fd821b100a"
+content-hash = "e812a8e353fe8f309bc25e6eb9b48fcc7ad2b5c95c135ad640b29afd1ad12163"
diff --git a/precog/__init__.py b/precog/__init__.py
index 29e2dca..e51a7ec 100644
--- a/precog/__init__.py
+++ b/precog/__init__.py
@@ -1,5 +1,5 @@
-import json
+import importlib.metadata
-__version__ = "0.0.0"
+__version__ = importlib.metadata.version(__name__ or __package__)
version_split = __version__.split(".")
__spec_version__ = (1000 * int(version_split[0])) + (10 * int(version_split[1])) + (1 * int(version_split[2]))
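For reference, the `__spec_version__` packing above folds a semantic version into one integer. A quick worked example with a hypothetical version string:

```
# Worked example of the spec-version packing (hypothetical version string)
version = "1.2.3"
major, minor, patch = (int(part) for part in version.split("."))
spec_version = (1000 * major) + (10 * minor) + (1 * patch)
assert spec_version == 1023  # 1000*1 + 10*2 + 1*3
```

Note the packing only stays collision-free while the minor version stays below 100 and the patch below 10.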
diff --git a/precog/miners/base_miner.py b/precog/miners/base_miner.py
new file mode 100644
index 0000000..4a2281e
--- /dev/null
+++ b/precog/miners/base_miner.py
@@ -0,0 +1,109 @@
+from datetime import timedelta
+from typing import Tuple
+
+import bittensor as bt
+import pandas as pd
+
+from precog.protocol import Challenge
+from precog.utils.cm_data import CMData
+from precog.utils.timestamp import datetime_to_CM_timestamp, iso8601_to_datetime
+
+
+def get_point_estimate(timestamp: str) -> float:
+ """Make a naive forecast by predicting the most recent price
+
+ Args:
+ timestamp (str): The current timestamp provided by the validator request
+
+ Returns:
+ (float): The current BTC price tied to the provided timestamp
+ """
+ # Create data gathering instance
+ cm = CMData()
+
+ # Set the time range to be as small as possible for query speed
+ # Set the start time as 1 day prior to the provided time
+ start_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp) - timedelta(days=1))
+ end_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp)) # built-ins handle CM API's formatting
+
+ # Query CM API for a pandas dataframe with only one record
+ price_data: pd.DataFrame = cm.get_CM_ReferenceRate(assets="BTC", start=start_time, end=end_time)
+
+ # Get current price closest to the provided timestamp
+ btc_price: float = float(price_data["ReferenceRateUSD"].iloc[-1])
+
+ # Return the current price of BTC as our point estimate
+ return btc_price
+
+
+def get_prediction_interval(timestamp: str, point_estimate: float) -> Tuple[float, float]:
+ """Make a naive multi-step prediction interval by estimating
+ the sample standard deviation
+
+ Args:
+ timestamp (str): The current timestamp provided by the validator request
+ point_estimate (float): The center of the prediction interval
+
+ Returns:
+ (float): The 90% naive prediction interval lower bound
+ (float): The 90% naive prediction interval upper bound
+
+ Notes:
+ Make reasonable assumptions that the 1s BTC price residuals are
+ uncorrelated and normally distributed
+ """
+ # Create data gathering instance
+ cm = CMData()
+
+ # Set the time range to be 24 hours
+ start_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp) - timedelta(days=1))
+ end_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp)) # built-ins handle CM API's formatting
+
+ # Query CM API for sample standard deviation of the 1s residuals
+ historical_price_data: pd.DataFrame = cm.get_CM_ReferenceRate(
+ assets="BTC", start=start_time, end=end_time, frequency="1s"
+ )
+ residuals: pd.Series = historical_price_data["ReferenceRateUSD"].diff()
+ sample_std_dev: float = float(residuals.std())
+
+ # We have the standard deviation of the 1s residuals
+ # We are forecasting forward 5m, which is 300s
+ # We must scale the 1s sample standard deviation to reflect a 300s forecast
+ # Make reasonable assumptions that the 1s residuals are uncorrelated and normally distributed
+ # To do this naively, we multiply the std dev by the square root of the number of time steps
+ time_steps: int = 300
+ naive_forecast_std_dev: float = sample_std_dev * (time_steps**0.5)
+
+ # For a 90% prediction interval, we use the coefficient 1.64
+ # Make reasonable assumptions that the 1s residuals are uncorrelated and normally distributed
+ coefficient: float = 1.64
+
+ # Calculate the lower bound and upper bound
+ lower_bound: float = point_estimate - coefficient * naive_forecast_std_dev
+ upper_bound: float = point_estimate + coefficient * naive_forecast_std_dev
+
+ # Return the naive prediction interval for our forecast
+ return lower_bound, upper_bound
+
+
+def forward(synapse: Challenge) -> Challenge:
+ bt.logging.info(
+ f"👈 Received prediction request from: {synapse.dendrite.hotkey} for timestamp: {synapse.timestamp}"
+ )
+
+ # Get the naive point estimate
+ point_estimate: float = get_point_estimate(timestamp=synapse.timestamp)
+
+ # Get the naive prediction interval
+ prediction_interval: Tuple[float, float] = get_prediction_interval(
+ timestamp=synapse.timestamp, point_estimate=point_estimate
+ )
+
+ synapse.prediction = point_estimate
+ synapse.interval = prediction_interval
+
+ if synapse.prediction is not None:
+ bt.logging.success(f"Predicted price: {synapse.prediction} | Predicted Interval: {synapse.interval}")
+ else:
+ bt.logging.info("No prediction for this request.")
+ return synapse
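The interval construction above is worth seeing end to end: the sample standard deviation of the 1s residuals is scaled by √300 for the 5-minute horizon, then widened by 1.64 on each side for a 90% interval. A minimal standalone sketch on synthetic prices (not the CM API), under the same assumption of uncorrelated, normally distributed residuals:

```
import numpy as np

# Synthetic 24h of 1s BTC-like prices; a stand-in for the CM Reference Rate
rng = np.random.default_rng(0)
prices = 60_000 + np.cumsum(rng.normal(0, 5, size=86_400))

residuals = np.diff(prices)
sample_std = residuals.std(ddof=1)
horizon_std = sample_std * np.sqrt(300)  # scale 1s std dev to a 300s forecast

point = prices[-1]
lower, upper = point - 1.64 * horizon_std, point + 1.64 * horizon_std
print(f"90% interval: [{lower:.2f}, {upper:.2f}] around {point:.2f}")
```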
diff --git a/precog/miners/forward_bad.py b/precog/miners/forward_bad.py
deleted file mode 100644
index 831c62c..0000000
--- a/precog/miners/forward_bad.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import bittensor as bt
-
-from precog.protocol import Challenge
-
-
-def forward(synapse: Challenge) -> Challenge:
- """
- Optimized forward function for low latency and caching.
- """
- bt.logging.info(
- f"👈 Received prediction request from: {synapse.dendrite.hotkey} for timestamp: {synapse.timestamp}"
- )
- middle_intervals = [
- [0, 30],
- [5, 7],
- [2, 20],
- [3, 12],
- [1, 15],
- [0, 30],
- [5, 20],
- [7, 10],
- [1, 12],
- [9, 12],
- [4, 13],
- [0, 30],
- ]
- synapse.prediction = [30]
- synapse.interval = middle_intervals[0]
-
- return synapse
diff --git a/precog/miners/miner.py b/precog/miners/miner.py
index 535c073..a603728 100644
--- a/precog/miners/miner.py
+++ b/precog/miners/miner.py
@@ -22,7 +22,7 @@ class Miner:
def __init__(self, config=None):
args = parse_arguments()
config = Config(args)
- self.forward_module = importlib.import_module(f"precog.miners.{config.forward_function}", package="forward")
+ self.forward_module = importlib.import_module(f"precog.miners.{config.forward_function}")
self.config = config
self.config.neuron.type = "Miner"
setup_bittensor_objects(self)
diff --git a/precog/protocol.py b/precog/protocol.py
index e90e4ab..7fd3e45 100644
--- a/precog/protocol.py
+++ b/precog/protocol.py
@@ -35,7 +35,7 @@ class Challenge(bt.Synapse):
)
# Optional request output, filled by receiving axon.
- prediction: Optional[List[float]] = pydantic.Field(
+ prediction: Optional[float] = pydantic.Field(
default=None,
title="Predictions",
description="The predictions to send to the dendrite caller",
diff --git a/precog/utils/bittensor.py b/precog/utils/bittensor.py
index 7903a33..890e390 100644
--- a/precog/utils/bittensor.py
+++ b/precog/utils/bittensor.py
@@ -13,9 +13,9 @@ def setup_bittensor_objects(self):
# if chain endpoint is set, overwrite network arg
self.config.subtensor.network = self.config.subtensor.chain_endpoint
# Initialize subtensor.
- self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.network)
+ self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.chain_endpoint)
self.metagraph = self.subtensor.metagraph(self.config.netuid)
- self.wallet = bt.wallet(name=self.config.wallet.name, hotkey=self.config.wallet.hotkey)
+ self.wallet = bt.wallet(config=self.config)
self.dendrite = bt.dendrite(wallet=self.wallet)
self.axon = bt.axon(wallet=self.wallet, config=self.config, port=self.config.axon.port)
# Connect the validator to the network.
@@ -46,7 +46,7 @@ def setup_bittensor_objects(self):
def print_info(self) -> None:
if self.config.neuron.type == "Validator":
- weight_timing = self.set_weights_rate - self.blocks_since_last_update
+ weight_timing = self.hyperparameters.weights_rate_limit - self.blocks_since_last_update
if weight_timing <= 0:
weight_timing = "a few" # hashtag aesthetic af
log = (
diff --git a/precog/utils/cm_data.py b/precog/utils/cm_data.py
index 8ea492f..da239ba 100644
--- a/precog/utils/cm_data.py
+++ b/precog/utils/cm_data.py
@@ -1,3 +1,6 @@
+from datetime import date, datetime
+from typing import Optional, Union
+
import pandas as pd
from coinmetrics.api_client import CoinMetricsClient
@@ -15,31 +18,141 @@ def api_key(self):
def client(self):
return self._client
- def get_pair_candles(self, pairs: list, page_size: int = 10000, **kwargs) -> pd.DataFrame:
+ def get_CM_ReferenceRate(
+ self,
+ assets: Union[list, str],
+ start: Optional[Union[datetime, date, str]] = None,
+ end: Optional[Union[datetime, date, str]] = None,
+ end_inclusive: bool = True,
+ frequency: str = "1s",
+ page_size: int = 10000,
+ parallelize: bool = False,
+ time_inc_parallel: pd.Timedelta = pd.Timedelta("1h"),
+ **kwargs,
+ ) -> pd.DataFrame:
+ """Fetches CM Reference Rate for specific asset ticker or list of tickers from CoinMetrics Python client.
+
+ Args:
+ assets (Union[list, str]): Asset ticker or list of tickers to retrieve CM Reference Rates for
+ start (Optional[Union[datetime, date, str]], optional): Start time of data, if None will return earliest available. Defaults to None.
+ end (Optional[Union[datetime, date, str]], optional): End time of data, if None will return latest available. Defaults to None.
+ end_inclusive (bool, optional): Whether to include a data point occurring at the "end" time. Defaults to True.
+ frequency (str, optional): Frequency of prices - '200ms', '1s', '1m', '1h', '1d'. Defaults to "1s".
+ page_size (int, optional): Page size of return, recommended 10000. Defaults to 10000.
+ parallelize (bool, optional): Whether to parallelize query into multiple queries.
+ Can speed up retrieval but may go over usage limits. Defaults to False.
+ time_inc_parallel (pd.Timedelta, optional): If using parallelize, time interval queried by each thread. Defaults to pd.Timedelta("1h").
+
+ Returns:
+ pd.DataFrame: Reference Rate of assets over time, with columns
+ ['asset', 'time', 'ReferenceRateUSD']
+
+ Notes:
+ CM API Reference: https://coinmetrics.github.io/api-client-python/site/api_client.html#get_asset_metrics
+ """
+
+ reference_rate = self.client.get_asset_metrics(
+ assets,
+ metrics="ReferenceRateUSD",
+ start_time=start,
+ end_time=end,
+ end_inclusive=end_inclusive,
+ frequency=frequency,
+ page_size=page_size,
+ **kwargs,
+ )
+
+ if parallelize:
+ reference_rate_df = reference_rate.parallel(time_increment=time_inc_parallel).to_dataframe()
+ else:
+ reference_rate_df = reference_rate.to_dataframe()
+
+ reference_rate_df = reference_rate_df.sort_values("time").reset_index(drop=True)
+ return reference_rate_df
+
+ def get_pair_candles(
+ self,
+ pairs: Union[list, str],
+ start: Optional[Union[datetime, date, str]] = None,
+ end: Optional[Union[datetime, date, str]] = None,
+ end_inclusive: bool = True,
+ frequency: str = "1h",
+ page_size: int = 10000,
+ parallelize: bool = False,
+ time_inc_parallel: pd.Timedelta = pd.Timedelta("1d"),
+ **kwargs,
+ ) -> pd.DataFrame:
"""Fetches candles for specific asset pairs from CoinMetrics Python client.
+ Note 'pair' must be in format {base}-{quote} (e.g. pair='btc-usd')
Returns:
- DataFrame: Available pair candles
+ DataFrame: Available pair candles with columns:
+ ['pair', 'time', 'price_open', 'price_close', 'price_high', 'price_low']
Notes:
CM API Reference: https://coinmetrics.github.io/api-client-python/site/api_client.html#get_pair_candles
"""
- pair_candles = self.client.get_pair_candles(pairs, page_size, **kwargs)
- return pair_candles.to_dataframe()
+ pair_candles = self.client.get_pair_candles(
+ pairs,
+ start_time=start,
+ end_time=end,
+ end_inclusive=end_inclusive,
+ frequency=frequency,
+ page_size=page_size,
+ **kwargs,
+ )
+
+ if parallelize:
+ pair_candles_df = pair_candles.parallel(time_increment=time_inc_parallel).to_dataframe()
+ else:
+ pair_candles_df = pair_candles.to_dataframe()
+
+ pair_candles_df = pair_candles_df.sort_values("time").reset_index(drop=True)
+ return pair_candles_df
+
+ def get_open_interest_catalog(self, base: str = "btc", quote: str = "usd", market_type: str = "future", **kwargs):
+ """Returns the CM Catalog for active markets by base asset, quote asset, and type ('spot', 'option', or 'future')
+
+ Args:
+ base (str, optional): Base Asset of Market. Defaults to "btc".
+ quote (str, optional): Quote Asset of Market. Defaults to "usd".
+ market_type (str, optional): Market type ('spot', 'option', 'future'). Defaults to "future".
+
+ Returns:
+ catalog (pd.DataFrame): Dataframe containing active markets with columns
+ ['market', 'min_time', 'max_time']
+ """
+ catalog = self.client.catalog_market_open_interest_v2(
+ base=base, quote=quote, market_type=market_type, page_size=10000, paging_from="end"
+ ).to_dataframe()
+
+ return catalog
- def get_market_open_interest(self, markets: list, page_size: int = 10000, **kwargs) -> pd.DataFrame:
+ def get_market_open_interest(
+ self, markets: list, page_size: int = 10000, parallelize=False, **kwargs
+ ) -> pd.DataFrame:
"""Fetches available market open interest from CoinMetrics Python client.
+ Possible markets can be obtained from the get_open_interest_catalog() method
+
+ Args:
+ markets (list): List of derivatives markets to get the Open Interest for.
+ Note there is a character limit to the query, so may need to be done in chunks for a long list
Returns:
- DataFrame: Available market open interest
+ DataFrame: Open Interest of unsettled derivatives contracts. Columns are:
+ [market, time, contract_count, value_usd, database_time, exchange_time]
Notes:
CM API Reference: https://coinmetrics.github.io/api-client-python/site/api_client.html#get_market_open_interest
"""
- market_open_interest = self.client.get_market_open_interest(markets, page_size, **kwargs)
- return market_open_interest.to_dataframe()
+ market_open_interest = self.client.get_market_open_interest(markets, page_size=page_size, **kwargs)
+
+ if parallelize:
+ return market_open_interest.parallel().to_dataframe()
+ else:
+ return market_open_interest.to_dataframe()
def get_market_funding_rates(self, markets: list, page_size: int = 10000, **kwargs) -> pd.DataFrame:
"""Fetches available market funding rates from CoinMetrics Python client.
@@ -51,7 +164,7 @@ def get_market_funding_rates(self, markets: list, page_size: int = 10000, **kwar
CM API Reference: https://coinmetrics.github.io/api-client-python/site/api_client.html#get_market_funding_rates
"""
- market_funding_rates = self.client.get_market_funding_rates(markets, page_size, **kwargs)
+ market_funding_rates = self.client.get_market_funding_rates(markets, page_size=page_size, **kwargs)
return market_funding_rates.to_dataframe()
# def get_time_reference_rate(self):
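A hedged usage sketch of the reworked wrapper, assuming `CMData()` resolves a valid CoinMetrics API key on its own (e.g. from the environment); the query window below is hypothetical, and the column names follow the docstring above:

```
from precog.utils.cm_data import CMData

cm = CMData()  # assumes an API key is available to the client
df = cm.get_CM_ReferenceRate(
    assets="BTC",
    start="2024-11-01T00:00:00.000000Z",  # hypothetical query window
    end="2024-11-01T01:00:00.000000Z",
    frequency="1s",
)
print(df[["asset", "time", "ReferenceRateUSD"]].tail())
```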
diff --git a/precog/utils/general.py b/precog/utils/general.py
index fbe3d5c..00e7c79 100644
--- a/precog/utils/general.py
+++ b/precog/utils/general.py
@@ -1,10 +1,14 @@
import argparse
+import asyncio
import re
-from typing import Optional
+import time
+from typing import Any, Callable, Optional
import bittensor as bt
import git
import requests
+from numpy import argsort, array, concatenate, cumsum, empty_like
+from pandas import DataFrame
from precog.utils.classes import NestedNamespace
@@ -84,3 +88,61 @@ def get_version() -> Optional[str]:
raise Exception("Version information not found")
return version_match.group()
+
+
+def rank(vector):
+ if vector is None or len(vector) <= 1:
+ return array([0])
+ else:
+ # Sort the array and get the indices that would sort it
+ sorted_indices = argsort(vector)
+ sorted_vector = vector[sorted_indices]
+ # Create a mask for where each new unique value starts in the sorted array
+ unique_mask = concatenate(([True], sorted_vector[1:] != sorted_vector[:-1]))
+ # Use cumulative sum of the unique mask to get the ranks, then assign back in original order
+ ranks = cumsum(unique_mask) - 1
+ rank_vector = empty_like(vector, dtype=int)
+ rank_vector[sorted_indices] = ranks
+ return rank_vector
+
+
+async def loop_handler(self, func: Callable, sleep_time: float = 120):
+ try:
+ while not self.stop_event.is_set():
+ async with self.lock:
+ await func()
+ await asyncio.sleep(sleep_time)
+ except asyncio.CancelledError:
+ bt.logging.error(f"{func.__name__} cancelled")
+ raise
+ except KeyboardInterrupt:
+ raise
+ except Exception as e:
+ bt.logging.error(f"{func.__name__} raised error: {e}")
+ raise e
+ finally:
+ async with self.lock:
+ self.stop_event.set()
+
+
+def func_with_retry(func: Callable, max_attempts: int = 3, delay: float = 1, *args, **kwargs) -> Any:
+ attempt = 0
+ while attempt < max_attempts:
+ try:
+ result = func(*args, **kwargs)
+ return result
+ except Exception as e:
+ attempt += 1
+ bt.logging.debug(f"Function {func} failed: Attempt {attempt} of {max_attempts} with error: {e}")
+ if attempt == max_attempts:
+ bt.logging.error(f"Function {func} failed {max_attempts} times, skipping.")
+ raise
+ else:
+ time.sleep(delay)
+
+
+def pd_to_dict(data: DataFrame) -> dict:
+ price_dict = {}
+ for i in range(len(data)):
+ price_dict[data.time[i].to_pydatetime()] = data.iloc[i]["ReferenceRateUSD"].item()
+ return price_dict
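A small sketch of the new helpers on toy inputs rather than live API calls; `flaky` is a hypothetical callable used only to exercise the retry path:

```
import pandas as pd

from precog.utils.general import func_with_retry, pd_to_dict

# func_with_retry: retry a transiently failing callable
calls = {"n": 0}

def flaky() -> str:
    calls["n"] += 1
    if calls["n"] < 3:
        raise ConnectionError("transient failure")
    return "ok"

print(func_with_retry(flaky, max_attempts=3, delay=0.1))  # -> "ok" on the third try

# pd_to_dict: key a ReferenceRateUSD frame by python datetimes
frame = pd.DataFrame(
    {"time": pd.to_datetime(["2024-11-01T00:00:00Z"]), "ReferenceRateUSD": [60_000.0]}
)
print(pd_to_dict(frame))  # {datetime(2024, 11, 1, ...): 60000.0}
```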
diff --git a/precog/utils/timestamp.py b/precog/utils/timestamp.py
index 16d382d..f253601 100644
--- a/precog/utils/timestamp.py
+++ b/precog/utils/timestamp.py
@@ -21,12 +21,12 @@ def get_now() -> datetime:
return datetime.now(get_timezone())
-def get_before(minutes: int = 5) -> datetime:
+def get_before(minutes: int = 5, seconds: int = 0) -> datetime:
"""
Get the datetime x minutes before now
"""
now = get_now()
- return now - timedelta(minutes=minutes)
+ return now - timedelta(minutes=minutes, seconds=seconds)
def get_midnight() -> datetime:
@@ -83,6 +83,13 @@ def posix_to_datetime(timestamp: float) -> datetime:
return datetime.fromtimestamp(timestamp, tz=get_timezone())
+def datetime_to_CM_timestamp(timestamp: datetime) -> str:
+ """
+ Convert a datetime object to a CoinMetrics API timestamp string
+ """
+ return timestamp.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+
+
###############################
# FUNCTIONS #
###############################
@@ -150,13 +157,12 @@ def is_query_time(prediction_interval: int, timestamp: str, tolerance: int = 120
return beginning_of_epoch
-def align_timepoints(filtered_pred_dict, cm_data, cm_timestamps):
+def align_timepoints(filtered_pred_dict, cm_dict):
"""Takes in a dictionary of predictions and aligns them to a list of coinmetrics prices.
Args:
filtered_pred_dict (dict): {datetime: float} dictionary of predictions.
- cm_data (List[float]): price data from coinmetrics corresponding to the datetimes in cm_timestamps.
- cm_timestamps (List[datetime]): timestamps corresponding to the values in cm_data.
+ cm_dict (dict): {datetime: float} dictionary of prices.
Returns:
@@ -164,23 +170,14 @@ def align_timepoints(filtered_pred_dict, cm_data, cm_timestamps):
aligned_cm_data (List[float]): The values in cm_dict whose keys match the timestamps in filtered_pred_dict.
aligned_timestamps (List[datetime]): The timestamps corresponding to the values in aligned_pred_values and aligned_cm_data.
"""
- if len(cm_data) != len(cm_timestamps):
- raise ValueError("cm_data and cm_timepoints must be of the same length.")
-
aligned_pred_values = []
aligned_cm_data = []
aligned_timestamps = []
-
- # Convert cm_timepoints to a set for faster lookup
- cm_timestamps_set = set(cm_timestamps)
- # Loop through filtered_pred_dict to find matching datetime keys
- for timestamp, pred_value in filtered_pred_dict.items():
- if timestamp in cm_timestamps_set:
- # Find the index in cm_timepoints to get corresponding cm_data
- index = cm_timestamps.index(timestamp)
- aligned_pred_values.append(pred_value)
+ for timestamp, value in filtered_pred_dict.items():
+ if timestamp in cm_dict:
+ aligned_pred_values.append(value)
aligned_timestamps.append(timestamp)
- aligned_cm_data.append(cm_data[index])
+ aligned_cm_data.append(cm_dict[timestamp])
return aligned_pred_values, aligned_cm_data, aligned_timestamps
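A worked example of the dict-based alignment with hypothetical timestamps; predictions without a matching CM price are simply dropped:

```
from datetime import datetime, timezone

from precog.utils.timestamp import align_timepoints

t0 = datetime(2024, 11, 1, 0, 0, tzinfo=timezone.utc)
t1 = datetime(2024, 11, 1, 0, 5, tzinfo=timezone.utc)

predictions = {t0: 60_100.0, t1: 60_200.0}  # miner predictions
cm_dict = {t0: 60_050.0}                    # CM prices; t1 has no match

preds, prices, stamps = align_timepoints(predictions, cm_dict)
print(preds, prices, stamps)  # [60100.0] [60050.0] [t0]; unmatched t1 dropped
```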
diff --git a/precog/validators/reward.py b/precog/validators/reward.py
index 22e14c8..e63e928 100644
--- a/precog/validators/reward.py
+++ b/precog/validators/reward.py
@@ -3,9 +3,12 @@
import bittensor as bt
import numpy as np
+from pandas import DataFrame
from precog.protocol import Challenge
-from precog.utils.timestamp import align_timepoints, get_now, mature_dictionary, round_minute_down
+from precog.utils.cm_data import CMData
+from precog.utils.general import pd_to_dict, rank
+from precog.utils.timestamp import align_timepoints, datetime_to_CM_timestamp, iso8601_to_datetime, mature_dictionary
################################################################################
@@ -19,21 +22,24 @@ def calc_rewards(
decay = 0.9
weights = np.linspace(0, len(self.available_uids) - 1, len(self.available_uids))
decayed_weights = decay**weights
- # cm_prices, cm_timestamps = get_cm_prices() # fake placeholder to get the past hours prices
- cm_prices = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
- cm_timestamps = [
- round_minute_down(get_now()) - timedelta(minutes=(i + 1) * 5) for i in range(12)
- ] # placeholder to align cm price timepoints to the timestamps in history
- cm_timestamps.reverse()
+ timestamp = responses[0].timestamp
+ cm = CMData()
+ start_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp) - timedelta(hours=1))
+ end_time: str = datetime_to_CM_timestamp(iso8601_to_datetime(timestamp)) # built-ins handle CM API's formatting
+ # Query CM API for sample standard deviation of the 1s residuals
+ historical_price_data: DataFrame = cm.get_CM_ReferenceRate(
+ assets="BTC", start=start_time, end=end_time, frequency="1s"
+ )
+ cm_data = pd_to_dict(historical_price_data)
for uid, response in zip(self.available_uids, responses):
current_miner = self.MinerHistory[uid]
self.MinerHistory[uid].add_prediction(response.timestamp, response.prediction, response.interval)
prediction_dict, interval_dict = current_miner.format_predictions(response.timestamp)
mature_time_dict = mature_dictionary(prediction_dict)
- preds, price, aligned_pred_timestamps = align_timepoints(mature_time_dict, cm_prices, cm_timestamps)
+ preds, price, aligned_pred_timestamps = align_timepoints(mature_time_dict, cm_data)
for i, j, k in zip(preds, price, aligned_pred_timestamps):
bt.logging.debug(f"Prediction: {i} | Price: {j} | Aligned Prediction: {k}")
- inters, interval_prices, aligned_int_timestamps = align_timepoints(interval_dict, cm_prices, cm_timestamps)
+ inters, interval_prices, aligned_int_timestamps = align_timepoints(interval_dict, cm_data)
for i, j, k in zip(inters, interval_prices, aligned_int_timestamps):
bt.logging.debug(f"Interval: {i} | Interval Price: {j} | Aligned TS: {k}")
point_errors.append(point_error(preds, price))
@@ -49,22 +55,6 @@ def calc_rewards(
return rewards
-def rank(vector):
- if vector is None or len(vector) <= 1:
- return np.array([0])
- else:
- # Sort the array and get the indices that would sort it
- sorted_indices = np.argsort(vector)
- sorted_vector = vector[sorted_indices]
- # Create a mask for where each new unique value starts in the sorted array
- unique_mask = np.concatenate(([True], sorted_vector[1:] != sorted_vector[:-1]))
- # Use cumulative sum of the unique mask to get the ranks, then assign back in original order
- ranks = np.cumsum(unique_mask) - 1
- rank_vector = np.empty_like(vector, dtype=int)
- rank_vector[sorted_indices] = ranks
- return rank_vector
-
-
def interval_error(intervals, cm_prices):
if intervals is None:
return np.array([0])
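For intuition, a sketch of how the decayed weights could pair with the relocated `rank()` helper; the final pairing step is not shown in this hunk, so treat the last two lines as an assumption rather than the validator's exact logic:

```
import numpy as np

from precog.utils.general import rank

decay = 0.9
n_uids = 5
decayed_weights = decay ** np.linspace(0, n_uids - 1, n_uids)  # [1.0, 0.9, 0.81, ...]

point_errors = np.array([0.4, 0.1, 0.3, 0.2, 0.5])  # toy errors; lower is better
ranks = rank(point_errors)          # rank 0 = smallest error
rewards = decayed_weights[ranks]    # assumed rank -> weight pairing
print(rewards)                      # lowest-error miner receives weight 1.0
```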
diff --git a/precog/validators/validator.py b/precog/validators/validator.py
index d09c2c2..b8bf9c3 100755
--- a/precog/validators/validator.py
+++ b/precog/validators/validator.py
@@ -1,8 +1,6 @@
import asyncio
from pathlib import Path
-import bittensor as bt
-
from precog.utils.classes import Config
from precog.utils.general import parse_arguments
from precog.validators.weight_setter import weight_setter
@@ -22,15 +20,6 @@ def __init__(self):
async def main(self):
loop = asyncio.get_event_loop()
self.weight_setter = weight_setter(config=self.config, loop=loop)
- try:
- loop.run_forever()
- except BrokenPipeError:
- bt.logging.error("Recieved a Broken Pipe substrate error")
- asyncio.run(self.reset_instance())
- except Exception as e:
- bt.logging.error(f"Unhandled exception: {e}")
- finally:
- bt.logging.info("Exiting Validator")
async def reset_instance(self):
self.__init__()
diff --git a/precog/validators/weight_setter.py b/precog/validators/weight_setter.py
index 7fcd768..9f99398 100755
--- a/precog/validators/weight_setter.py
+++ b/precog/validators/weight_setter.py
@@ -3,15 +3,23 @@
import pickle
import bittensor as bt
+import websocket
from numpy import array
from pytz import timezone
-from substrateinterface import SubstrateInterface
from precog import __spec_version__
from precog.protocol import Challenge
from precog.utils.bittensor import check_uid_availability, print_info, setup_bittensor_objects
from precog.utils.classes import MinerHistory
-from precog.utils.timestamp import elapsed_seconds, get_before, get_now, is_query_time, iso8601_to_datetime
+from precog.utils.general import func_with_retry, loop_handler
+from precog.utils.timestamp import (
+ datetime_to_iso8601,
+ elapsed_seconds,
+ get_before,
+ get_now,
+ is_query_time,
+ iso8601_to_datetime,
+)
from precog.utils.wandb import log_wandb, setup_wandb
from precog.validators.reward import calc_rewards
@@ -23,11 +31,10 @@ def __init__(self, config=None, loop=None):
self.lock = asyncio.Lock()
setup_bittensor_objects(self)
self.timezone = timezone("UTC")
- self.prediction_interval = self.config.prediction_interval # in minutes
+ self.prediction_interval = self.config.prediction_interval # in seconds
self.N_TIMEPOINTS = self.config.N_TIMEPOINTS # number of timepoints to predict
- self.last_sync = 0
- self.set_weights_rate = 100 # in blocks
- self.resync_metagraph_rate = 20 # in blocks
+ self.hyperparameters = func_with_retry(self.subtensor.get_subnet_hyperparameters, netuid=self.config.netuid)
+ self.resync_metagraph_rate = 600 # in seconds
bt.logging.info(
f"Running validator for subnet: {self.config.netuid} on network: {self.config.subtensor.network}"
)
@@ -40,21 +47,29 @@ def __init__(self, config=None, loop=None):
self.save_state()
else:
self.load_state()
- self.node = SubstrateInterface(url=self.config.subtensor.chain_endpoint)
self.current_block = self.subtensor.get_current_block()
- self.blocks_since_last_update = (
- self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid]
+ self.blocks_since_last_update = self.subtensor.blocks_since_last_update(
+ netuid=self.config.netuid, uid=self.my_uid
)
- self.tempo = self.node_query("SubtensorModule", "Tempo", [self.config.netuid])
if self.config.wandb_on:
setup_wandb(self)
self.stop_event = asyncio.Event()
bt.logging.info("Setup complete, starting loop")
self.loop.create_task(
- self.loop_handler(self.scheduled_prediction_request, sleep_time=self.config.print_cadence)
+ loop_handler(self, self.scheduled_prediction_request, sleep_time=self.config.print_cadence)
)
- self.loop.create_task(self.loop_handler(self.resync_metagraph, sleep_time=self.resync_metagraph_rate))
- self.loop.create_task(self.loop_handler(self.set_weights, sleep_time=self.set_weights_rate))
+ self.loop.create_task(loop_handler(self, self.resync_metagraph, sleep_time=self.resync_metagraph_rate))
+ self.loop.create_task(loop_handler(self, self.set_weights, sleep_time=self.hyperparameters.weights_rate_limit))
+ try:
+ self.loop.run_forever()
+ except websocket._exceptions.WebSocketConnectionClosedException:
+ # TODO: Exceptions are not being caught in this loop
+ bt.logging.info("Caught websocket connection closed exception")
+ self.__reset_instance__()
+ except Exception as e:
+ bt.logging.error(f"Error on loop: {e}")
+ finally:
+ self.__exit__(None, None, None)
def __exit__(self, exc_type, exc_value, traceback):
self.save_state()
@@ -62,26 +77,15 @@ def __exit__(self, exc_type, exc_value, traceback):
pending = asyncio.all_tasks(self.loop)
for task in pending:
task.cancel()
- asyncio.gather(*pending)
except Exception as e:
bt.logging.error(f"Error on __exit__ function: {e}")
- self.loop.stop()
-
- async def loop_handler(self, func, sleep_time=120):
- try:
- while not self.stop_event.is_set():
- await func()
- await asyncio.sleep(sleep_time)
- except asyncio.exceptions.CancelledError:
- raise
- except KeyboardInterrupt:
- raise
- except Exception:
- raise
finally:
- async with self.lock:
- self.stop_event.set()
- self.__exit__(None, None, None)
+ asyncio.gather(*pending, return_exceptions=True)
+ self.loop.stop()
+
+ def __reset_instance__(self):
+ self.__exit__(None, None, None)
+ self.__init__(self.config, self.loop)
async def get_available_uids(self):
miner_uids = []
@@ -91,28 +95,25 @@ async def get_available_uids(self):
miner_uids.append(uid)
return miner_uids
- async def resync_metagraph(self, force=False):
+ async def resync_metagraph(self):
"""Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph."""
- async with self.lock:
- self.blocks_since_sync = self.current_block - self.last_sync
- if self.blocks_since_sync >= self.resync_metagraph_rate or force:
- bt.logging.info("Syncing Metagraph...")
- self.metagraph.sync(subtensor=self.subtensor)
- bt.logging.info("Metagraph updated, re-syncing hotkeys, dendrite pool and moving averages")
- # Zero out all hotkeys that have been replaced.
- self.available_uids = asyncio.run(self.get_available_uids())
- for uid, hotkey in enumerate(self.metagraph.hotkeys):
- if (uid not in self.MinerHistory and uid in self.available_uids) or self.hotkeys[uid] != hotkey:
- bt.logging.info(f"Replacing hotkey on {uid} with {self.metagraph.hotkeys[uid]}")
- self.hotkeys[uid] = hotkey
- self.scores[uid] = 0 # hotkey has been replaced
- self.MinerHistory[uid] = MinerHistory(uid, timezone=self.timezone)
- self.moving_average_scores[uid] = 0
- self.last_sync = self.subtensor.get_current_block()
- self.save_state()
+ self.subtensor = bt.subtensor(config=self.config, network=self.config.subtensor.chain_endpoint)
+ bt.logging.info("Syncing Metagraph...")
+ self.metagraph.sync(subtensor=self.subtensor)
+ bt.logging.info("Metagraph updated, re-syncing hotkeys, dendrite pool and moving averages")
+ # Zero out all hotkeys that have been replaced.
+ self.available_uids = asyncio.run(self.get_available_uids())
+ for uid, hotkey in enumerate(self.metagraph.hotkeys):
+ if (uid not in self.MinerHistory and uid in self.available_uids) or self.hotkeys[uid] != hotkey:
+ bt.logging.info(f"Replacing hotkey on {uid} with {self.metagraph.hotkeys[uid]}")
+ self.hotkeys[uid] = hotkey
+ self.scores[uid] = 0 # hotkey has been replaced
+ self.MinerHistory[uid] = MinerHistory(uid, timezone=self.timezone)
+ self.moving_average_scores[uid] = 0
+ self.save_state()
def query_miners(self):
- timestamp = get_now().isoformat()
+ timestamp = datetime_to_iso8601(get_now())
synapse = Challenge(timestamp=timestamp)
responses = self.dendrite.query(
# Send the query to selected miner axons in the network.
@@ -122,20 +123,17 @@ def query_miners(self):
)
return responses, timestamp
- def node_query(self, module, method, params):
- try:
- result = self.node.query(module, method, params).value
- except Exception:
- # reinitilize node
- self.node = SubstrateInterface(url=self.subtensor.chain_endpoint)
- result = self.node.query(module, method, params).value
- return result
-
async def set_weights(self):
- if self.blocks_since_last_update >= self.set_weights_rate:
- async with self.lock:
- uids = array(self.available_uids)
- weights = [self.moving_average_scores[uid] for uid in self.available_uids]
+ try:
+ self.blocks_since_last_update = func_with_retry(
+ self.subtensor.blocks_since_last_update, netuid=self.config.netuid, uid=self.my_uid
+ )
+ self.current_block = func_with_retry(self.subtensor.get_current_block)
+ except Exception as e:
+ bt.logging.error(f"Failed to get current block with error {e}, skipping block update")
+ if self.blocks_since_last_update >= self.hyperparameters.weights_rate_limit:
+ uids = array(self.available_uids)
+ weights = [self.moving_average_scores[uid] for uid in self.available_uids]
for i, j in zip(weights, self.available_uids):
bt.logging.debug(f"UID: {j} | Weight: {i}")
if sum(weights) == 0:
@@ -152,25 +150,20 @@ async def set_weights(self):
uids=uint_uids,
weights=uint_weights,
wait_for_inclusion=True,
- wait_for_finalization=True,
version_key=__spec_version__,
)
if result:
bt.logging.success("✅ Set Weights on chain successfully!")
+ self.blocks_since_last_update = 0
else:
bt.logging.debug(
"Failed to set weights this iteration with message:",
msg,
)
- async with self.lock:
- self.current_block = self.subtensor.get_current_block()
- self.blocks_since_last_update = (
- self.current_block - self.node_query("SubtensorModule", "LastUpdate", [self.config.netuid])[self.my_uid]
- )
async def scheduled_prediction_request(self):
if not hasattr(self, "timestamp"):
- self.timestamp = get_before(minutes=self.prediction_interval).isoformat()
+ self.timestamp = datetime_to_iso8601(get_before(minutes=self.prediction_interval))
query_lag = elapsed_seconds(get_now(), iso8601_to_datetime(self.timestamp))
if len(self.available_uids) == 0:
bt.logging.info("No miners available. Sleeping for 10 minutes...")
@@ -184,12 +177,11 @@ async def scheduled_prediction_request(self):
except Exception as e:
bt.logging.error(f"Failed to calculate rewards with error: {e}")
# Adjust the scores based on responses from miners and update moving average.
- async with self.lock:
- for i, value in zip(self.available_uids, rewards):
- self.moving_average_scores[i] = (1 - self.config.alpha) * self.moving_average_scores[
- i
- ] + self.config.alpha * value
- self.scores = list(self.moving_average_scores.values())
+ for i, value in zip(self.available_uids, rewards):
+ self.moving_average_scores[i] = (1 - self.config.alpha) * self.moving_average_scores[
+ i
+ ] + self.config.alpha * value
+ self.scores = list(self.moving_average_scores.values())
if self.config.wandb_on:
log_wandb(responses, rewards, self.available_uids)
else:
diff --git a/pyproject.toml b/pyproject.toml
index 58c07c7..c4943cf 100755
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,7 +23,7 @@ pydantic = "^2.3.0"
requests = "^2.32.3"
# Subnet Specific Dependencies
-# coinmetrics-api-client = "^2024.10.31.17"
+coinmetrics-api-client = "^2024.11.21.20"
pytz = "^2024.2"
pandas = "^2.2.3"
gitpython = "^3.1.43"
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/test_package.py b/tests/test_package.py
new file mode 100644
index 0000000..e3e55f8
--- /dev/null
+++ b/tests/test_package.py
@@ -0,0 +1,14 @@
+import unittest
+
+from precog import __version__
+
+
+class TestPackage(unittest.TestCase):
+
+ def setUp(self):
+ pass
+
+ def test_package_version(self):
+ # Check that version is as expected
+ # Must update to increment package version successfully
+ self.assertEqual(__version__, "0.1.0")