Merge pull request #2548 from opentensor/release/8.5.1

Release/8.5.1

ibraheem-opentensor authored Dec 17, 2024
2 parents f3c68ef + 4c74af0 commit cd2ccc2
Showing 12 changed files with 80 additions and 103 deletions.
13 changes: 13 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,18 @@
# Changelog

## 8.5.1 /2024-12-16

## What's Changed
* 8.5.0 bugfixes by @thewhaleking in https://github.com/opentensor/bittensor/pull/2541
* Removes substrate call in format_error_message by @thewhaleking in https://github.com/opentensor/bittensor/pull/2542
* Remove torch from the weights calls by @thewhaleking in https://github.com/opentensor/bittensor/pull/2543
* optional arg fix by @thewhaleking in https://github.com/opentensor/bittensor/pull/2544
* async cr3 not implemented by @thewhaleking in https://github.com/opentensor/bittensor/pull/2545
* Backmerge master to staging 851 by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/2546
* Adds retry in CRv3 by @ibraheem-opentensor in https://github.com/opentensor/bittensor/pull/2547

**Full Changelog**: https://github.com/opentensor/bittensor/compare/v8.5.0...v8.5.1

## 8.5.0 /2024-12-12

## What's Changed
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
8.5.0
8.5.1
6 changes: 4 additions & 2 deletions bittensor/core/async_subtensor.py
@@ -1596,9 +1596,11 @@ async def set_weights(
This function is crucial in shaping the network's collective intelligence, where each neuron's learning and contribution are influenced by the weights it sets towards others【81†source】.
"""
if self.commit_reveal_enabled(netuid=netuid) is True:
if (await self.commit_reveal_enabled(netuid=netuid)) is True:
# go with `commit reveal v3` extrinsic
raise NotImplemented("Not implemented yet for AsyncSubtensor. Coming soon.")
raise NotImplementedError(
"Not implemented yet for AsyncSubtensor. Coming soon."
)
else:
# go with classic `set weights extrinsic`
uid = await self.get_uid_for_hotkey_on_subnet(
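A minimal sketch (not taken from this diff) of what the corrected branch means for a caller, assuming an already-constructed and initialized `AsyncSubtensor` instance; construction details are omitted. `commit_reveal_enabled` is a coroutine here and must be awaited, and the commit-reveal v3 path remains unimplemented for `AsyncSubtensor` in 8.5.1.

```python
# Sketch only: assumes `subtensor` is an already-initialized AsyncSubtensor.
from bittensor.core.async_subtensor import AsyncSubtensor


async def describe_weights_path(subtensor: "AsyncSubtensor", netuid: int) -> str:
    # commit_reveal_enabled is a coroutine on AsyncSubtensor, so it must be awaited.
    if await subtensor.commit_reveal_enabled(netuid=netuid):
        # In 8.5.1, set_weights raises NotImplementedError on this branch.
        return "commit-reveal v3 (not yet implemented for AsyncSubtensor)"
    return "classic set_weights extrinsic"
```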
15 changes: 11 additions & 4 deletions bittensor/core/errors.py
@@ -17,10 +17,15 @@

from __future__ import annotations

from bittensor.core.synapse import Synapse
from typing import Optional, TYPE_CHECKING

from substrateinterface.exceptions import SubstrateRequestException

class ChainError(BaseException):
if TYPE_CHECKING:
from bittensor.core.synapse import Synapse


class ChainError(SubstrateRequestException):
"""Base error for any chain related errors."""


@@ -81,7 +86,9 @@ class InvalidRequestNameError(Exception):


class SynapseException(Exception):
def __init__(self, message="Synapse Exception", synapse: "Synapse" | None = None):
def __init__(
self, message="Synapse Exception", synapse: Optional["Synapse"] = None
):
self.message = message
self.synapse = synapse
super().__init__(self.message)
@@ -123,7 +130,7 @@ class SynapseDendriteNoneException(SynapseException):
def __init__(
self,
message="Synapse Dendrite is None",
synapse: "Synapse" | None = None,
synapse: Optional["Synapse"] = None,
):
self.message = message
super().__init__(self.message, synapse)
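The practical effect of re-basing `ChainError` on `SubstrateRequestException` is that chain failures can now be caught with the same handler as other substrate request errors. A small hedged sketch; the raised error below is simulated, not produced by a real chain call:

```python
from substrateinterface.exceptions import SubstrateRequestException

from bittensor.core.errors import ChainError


def simulate_chain_failure() -> None:
    # Stand-in for a failing chain interaction; purely illustrative.
    raise ChainError("simulated chain failure")


try:
    simulate_chain_failure()
except SubstrateRequestException as exc:
    # As of 8.5.1, ChainError subclasses SubstrateRequestException,
    # so this single handler also catches chain errors.
    print(f"caught: {exc}")
```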
16 changes: 5 additions & 11 deletions bittensor/core/extrinsics/async_weights.py
@@ -9,11 +9,11 @@
from bittensor.core.settings import version_as_int
from bittensor.utils import format_error_message
from bittensor.utils.btlogging import logging
from bittensor.utils.registration import torch, use_torch

if TYPE_CHECKING:
from bittensor_wallet import Wallet
from bittensor.core.async_subtensor import AsyncSubtensor
from bittensor.utils.registration import torch


async def _do_set_weights(
@@ -106,16 +106,10 @@ async def set_weights_extrinsic(
success (bool): Flag is ``true`` if extrinsic was finalized or included in the block. If we did not wait for finalization / inclusion, the response is ``true``.
"""
# First convert types.
if use_torch():
if isinstance(uids, list):
uids = torch.tensor(uids, dtype=torch.int64)
if isinstance(weights, list):
weights = torch.tensor(weights, dtype=torch.float32)
else:
if isinstance(uids, list):
uids = np.array(uids, dtype=np.int64)
if isinstance(weights, list):
weights = np.array(weights, dtype=np.float32)
if isinstance(uids, list):
uids = np.array(uids, dtype=np.int64)
if isinstance(weights, list):
weights = np.array(weights, dtype=np.float32)

# Reformat and normalize.
weight_uids, weight_vals = weight_utils.convert_weights_and_uids_for_emit(
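With `use_torch` removed from this extrinsic, list inputs are always converted to NumPy arrays before being reformatted for emission. A short sketch of that conversion under these assumptions; the `uids` and `weights` values are placeholders:

```python
import numpy as np

uids = [1, 2, 3]
weights = [0.1, 0.2, 0.7]

# The extrinsic now performs this conversion unconditionally; torch tensors
# are no longer produced at this stage.
if isinstance(uids, list):
    uids = np.array(uids, dtype=np.int64)
if isinstance(weights, list):
    weights = np.array(weights, dtype=np.float32)

print(uids.dtype, weights.dtype)  # int64 float32
```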
20 changes: 6 additions & 14 deletions bittensor/core/extrinsics/commit_reveal.py
@@ -9,12 +9,12 @@
from bittensor.utils import format_error_message
from bittensor.utils.btlogging import logging
from bittensor.utils.networking import ensure_connected
from bittensor.utils.registration import torch, use_torch
from bittensor.utils.weight_utils import convert_weights_and_uids_for_emit

if TYPE_CHECKING:
from bittensor_wallet import Wallet
from bittensor.core.subtensor import Subtensor
from bittensor.utils.registration import torch


@ensure_connected
@@ -74,9 +74,7 @@ def _do_commit_reveal_v3(
if response.is_success:
return True, None
else:
return False, format_error_message(
response.error_message, substrate=self.substrate
)
return False, format_error_message(response.error_message)


def commit_reveal_v3_extrinsic(
@@ -107,16 +105,10 @@ def commit_reveal_v3_extrinsic(
"""
try:
# Convert uids and weights
if use_torch():
if isinstance(uids, list):
uids = torch.tensor(uids, dtype=torch.int64)
if isinstance(weights, list):
weights = torch.tensor(weights, dtype=torch.float32)
else:
if isinstance(uids, list):
uids = np.array(uids, dtype=np.int64)
if isinstance(weights, list):
weights = np.array(weights, dtype=np.float32)
if isinstance(uids, list):
uids = np.array(uids, dtype=np.int64)
if isinstance(weights, list):
weights = np.array(weights, dtype=np.float32)

# Reformat and normalize.
uids, weights = convert_weights_and_uids_for_emit(uids, weights)
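The error path above now calls `format_error_message` with only the error payload; no substrate instance is threaded through. A hedged usage sketch — the error dictionary below is a made-up example payload, not a real chain response:

```python
from bittensor.utils import format_error_message

# Hypothetical error payload shaped like a dispatch error; for illustration only.
fake_error = {
    "type": "Module",
    "name": "CommitRevealDisabled",
    "docs": ["Commit-reveal is not enabled on this subnet."],
}

print(format_error_message(fake_error))
```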
2 changes: 1 addition & 1 deletion bittensor/core/settings.py
@@ -15,7 +15,7 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

__version__ = "8.5.0"
__version__ = "8.5.1"

import os
import re
45 changes: 29 additions & 16 deletions bittensor/core/subtensor.py
@@ -505,7 +505,7 @@ def query_runtime_api(
self,
runtime_api: str,
method: str,
params: Optional[Union[list[int], dict[str, int]]],
params: Optional[Union[list[int], dict[str, int]]] = None,
block: Optional[int] = None,
) -> Optional[str]:
"""
@@ -956,13 +956,13 @@ def get_neuron_certificate(

@networking.ensure_connected
def neuron_for_uid(
self, uid: Optional[int], netuid: int, block: Optional[int] = None
self, uid: int, netuid: int, block: Optional[int] = None
) -> "NeuronInfo":
"""
Retrieves detailed information about a specific neuron identified by its unique identifier (UID) within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive view of a neuron's attributes, including its stake, rank, and operational status.
Args:
uid (Optional[int]): The unique identifier of the neuron.
uid (int): The unique identifier of the neuron.
netuid (int): The unique identifier of the subnet.
block (Optional[int]): The blockchain block number for the query.
@@ -1814,23 +1814,36 @@ def set_weights(
This function is crucial in shaping the network's collective intelligence, where each neuron's learning and contribution are influenced by the weights it sets towards others【81†source】.
"""
retries = 0
success = False
uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid)

if self.commit_reveal_enabled(netuid=netuid) is True:
# go with `commit reveal v3` extrinsic
return commit_reveal_v3_extrinsic(
subtensor=self,
wallet=wallet,
netuid=netuid,
uids=uids,
weights=weights,
version_key=version_key,
wait_for_inclusion=wait_for_inclusion,
wait_for_finalization=wait_for_finalization,
)
message = "No attempt made. Perhaps it is too soon to commit weights!"
while (
self.blocks_since_last_update(netuid, uid) # type: ignore
> self.weights_rate_limit(netuid) # type: ignore
and retries < max_retries
and success is False
):
logging.info(
f"Committing weights for subnet #{netuid}. Attempt {retries + 1} of {max_retries}."
)
success, message = commit_reveal_v3_extrinsic(
subtensor=self,
wallet=wallet,
netuid=netuid,
uids=uids,
weights=weights,
version_key=version_key,
wait_for_inclusion=wait_for_inclusion,
wait_for_finalization=wait_for_finalization,
)
retries += 1
return success, message
else:
# go with classic `set weights` logic
uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid)
retries = 0
success = False
message = "No attempt made. Perhaps it is too soon to set weights!"
while (
self.blocks_since_last_update(netuid, uid) # type: ignore
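The commit-reveal branch now uses the same bounded retry pattern as the classic path: attempts are made only while the rate limit allows another commit and the retry budget is not exhausted. A condensed sketch of that control flow (the real loop re-queries `blocks_since_last_update` and `weights_rate_limit` from the chain; `commit_once` is a hypothetical stand-in for `commit_reveal_v3_extrinsic`):

```python
from typing import Callable, Tuple


def retry_commit(
    blocks_since_last_update: int,
    weights_rate_limit: int,
    commit_once: Callable[[], Tuple[bool, str]],
    max_retries: int = 5,
) -> Tuple[bool, str]:
    """Condensed mirror of the loop added above: retry while a new commit is allowed."""
    retries = 0
    success = False
    message = "No attempt made. Perhaps it is too soon to commit weights!"
    while (
        blocks_since_last_update > weights_rate_limit
        and retries < max_retries
        and success is False
    ):
        success, message = commit_once()
        retries += 1
    return success, message


# Example: the first attempt fails, the second succeeds, and the loop stops.
attempts = iter([(False, "commit failed"), (True, "Weights committed successfully")])
print(retry_commit(181, 180, lambda: next(attempts)))
```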
4 changes: 3 additions & 1 deletion bittensor/utils/axon_utils.py
@@ -21,7 +21,9 @@
NANOSECONDS_IN_SECOND = 1_000_000_000


def allowed_nonce_window_ns(current_time_ns: int, synapse_timeout: Optional[float]):
def allowed_nonce_window_ns(
current_time_ns: int, synapse_timeout: Optional[float] = None
) -> int:
"""
Calculates the allowed window for a nonce in nanoseconds.
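With `synapse_timeout` now defaulting to `None` and the return type annotated as `int`, the helper can be called with just the current time. A brief usage sketch under that assumption; the timestamp is whatever the local clock returns:

```python
import time

from bittensor.utils.axon_utils import allowed_nonce_window_ns

# synapse_timeout defaults to None as of this change, so it can be omitted.
nonce_window_ns = allowed_nonce_window_ns(current_time_ns=time.time_ns())
print(nonce_window_ns)
```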
47 changes: 0 additions & 47 deletions tests/unit_tests/extrinsics/test_async_weights.py
@@ -278,53 +278,6 @@ async def test_set_weights_extrinsic_exception(subtensor, mocker):
assert message == "Unexpected error"


@pytest.mark.asyncio
async def test_set_weights_extrinsic_if_use_torch(subtensor, mocker):
"""Tests set_weights_extrinsic when use_torch is True."""
# Preps
fake_wallet = mocker.Mock(autospec=Wallet)
fake_netuid = 1
fake_uids = [1, 2, 3]
fake_weights = [0.1, 0.2, 0.7]

mocked_use_torch = mocker.patch.object(
async_weights, "use_torch", return_value=True
)
mocked_torch_tensor = mocker.patch.object(
async_weights.torch, "tensor", return_value=mocker.Mock()
)

mocked_do_set_weights = mocker.patch.object(
async_weights, "_do_set_weights", return_value=(False, "Test error message")
)
mocked_convert_weights_and_uids_for_emit = mocker.patch.object(
async_weights.weight_utils,
"convert_weights_and_uids_for_emit",
return_value=(mocker.Mock(), mocker.Mock()),
)

# Call
result, message = await async_weights.set_weights_extrinsic(
subtensor=subtensor,
wallet=fake_wallet,
netuid=fake_netuid,
uids=fake_uids,
weights=fake_weights,
wait_for_inclusion=True,
wait_for_finalization=True,
)

# Asserts
mocked_do_set_weights.assert_called_once()
mocked_use_torch.assert_called_once()
mocked_convert_weights_and_uids_for_emit.assert_called()
mocked_torch_tensor.assert_called_with(
fake_weights, dtype=async_weights.torch.float32
)
assert result is False
assert message == "Test error message"


@pytest.mark.asyncio
async def test_do_commit_weights_success(subtensor, mocker):
"""Tests _do_commit_weights when the commit is successful."""
7 changes: 1 addition & 6 deletions tests/unit_tests/extrinsics/test_commit_reveal.py
@@ -146,9 +146,7 @@ def test_do_commit_reveal_v3_failure_due_to_error(mocker, subtensor):
wait_for_inclusion=True,
wait_for_finalization=True,
)
mocked_format_error_message.assert_called_once_with(
"Mocked error", substrate=subtensor.substrate
)
mocked_format_error_message.assert_called_once_with("Mocked error")
assert result == (False, "Formatted error")


@@ -163,7 +161,6 @@ def test_commit_reveal_v3_extrinsic_success_with_torch(mocker, subtensor, hyperp
fake_reveal_round = 1

# Mocks
mocker.patch.object(commit_reveal, "use_torch", return_value=True)

mocked_uids = mocker.Mock()
mocked_weights = mocker.Mock()
@@ -235,7 +232,6 @@ def test_commit_reveal_v3_extrinsic_success_with_numpy(mocker, subtensor, hyperp
fake_uids = np.array([1, 2, 3], dtype=np.int64)
fake_weights = np.array([0.1, 0.2, 0.7], dtype=np.float32)

mocker.patch.object(commit_reveal, "use_torch", return_value=False)
mock_convert = mocker.patch.object(
commit_reveal,
"convert_weights_and_uids_for_emit",
@@ -284,7 +280,6 @@ def test_commit_reveal_v3_extrinsic_response_false(mocker, subtensor, hyperparam
fake_reveal_round = 1

# Mocks
mocker.patch.object(commit_reveal, "use_torch", return_value=True)
mocker.patch.object(
commit_reveal,
"convert_weights_and_uids_for_emit",
6 changes: 6 additions & 0 deletions tests/unit_tests/test_subtensor.py
@@ -2850,6 +2850,12 @@ def test_set_weights_with_commit_reveal_enabled(subtensor, mocker):
mocked_commit_reveal_v3_extrinsic = mocker.patch.object(
subtensor_module, "commit_reveal_v3_extrinsic"
)
mocked_commit_reveal_v3_extrinsic.return_value = (
True,
"Weights committed successfully",
)
mocker.patch.object(subtensor, "blocks_since_last_update", return_value=181)
mocker.patch.object(subtensor, "weights_rate_limit", return_value=180)

# Call
result = subtensor.set_weights(
