Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
dc85442
Adds py.typed file for mypy to complain less
thewhaleking Feb 4, 2026
17282f6
Merge pull request #265 from opentensor/feat/thewhaleking/mypy
thewhaleking Feb 5, 2026
763fb14
Extend the records attribute for each iteration, otherwise the only r…
thewhaleking Feb 10, 2026
b740788
Adds new method to retrieve all records from query map results
thewhaleking Feb 10, 2026
430defe
Add unit tests
thewhaleking Feb 10, 2026
3f00397
Adds RuntimeCache.blocks_reverse which is just the {block_hash: block…
thewhaleking Feb 12, 2026
989fbce
Optimises the fetching of sync block->block hash and vice versa to en…
thewhaleking Feb 12, 2026
1fd809a
Same logic applied to async
thewhaleking Feb 12, 2026
1020c82
Add/fix tests
thewhaleking Feb 12, 2026
f4f176b
Fix for test
thewhaleking Feb 12, 2026
c3d4f93
Ensure ordereddict
thewhaleking Feb 12, 2026
6ce03d1
Test fixes
thewhaleking Feb 12, 2026
9eab469
Merge pull request #267 from opentensor/feat/thewhaleking/runtime-cac…
thewhaleking Feb 12, 2026
44d5da4
Merge branch 'staging' into feat/thewhaleking/improve-query-map-result
thewhaleking Feb 12, 2026
8f64dbe
Ruff
thewhaleking Feb 12, 2026
9f2d67e
Merge pull request #266 from opentensor/feat/thewhaleking/improve-que…
thewhaleking Feb 13, 2026
aa5f5a2
Use threaded bt-decode
thewhaleking Feb 13, 2026
cf58c58
Corrected
thewhaleking Feb 13, 2026
ca4acd9
Merge pull request #268 from opentensor/feat/thewhaleking/threaded-bt…
thewhaleking Feb 17, 2026
4dcf82b
handle new error fmt
ibraheem-abe Feb 19, 2026
e464b97
update sync_substrate
ibraheem-abe Feb 19, 2026
2901c97
Merge pull request #269 from opentensor/feat/mev-shield-error-handling
ibraheem-abe Feb 20, 2026
8a4fe3f
update changelog
ibraheem-abe Feb 20, 2026
09b7ce4
bump version
ibraheem-abe Feb 20, 2026
baaddf2
remove python 3.9 supp
ibraheem-abe Feb 20, 2026
3eb4eba
update workflows
ibraheem-abe Feb 20, 2026
2b7a5a9
Merge pull request #271 from opentensor/update-remove-python-3.9
ibraheem-abe Feb 20, 2026
63c3caa
update changelog
ibraheem-abe Feb 20, 2026
3696344
Merge pull request #270 from opentensor/changelog/162
ibraheem-abe Feb 20, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/e2e-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ jobs:
os:
- ubuntu-latest
test-file: ${{ fromJson(needs.find-tests.outputs.test-files) }}
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
steps:
- name: Check-out repository
uses: actions/checkout@v4
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/unit-and-integration-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jobs:
fail-fast: false
max-parallel: 5
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]

steps:
- name: Checkout repository
Expand Down
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,17 @@
# Changelog

## 1.6.2 /2025-02-19

### What's Changed
* Typing by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/265
* Cache Improvements by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/267
* improve (async) query map result by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/266
* Use threaded bt-decode by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/268
* Feat: Handle new error message fmt for InBlock source by @ibraheem-abe in https://github.com/opentensor/async-substrate-interface/pull/269
* Update: Remove python 3.9 support by @ibraheem-abe in https://github.com/opentensor/async-substrate-interface/pull/271

**Full Changelog**: https://github.com/opentensor/async-substrate-interface/compare/v1.6.1...v1.6.2

## 1.6.1 /2025-02-03
* RuntimeCache updates by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/260
* fix memory leak by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/261
Expand Down
113 changes: 83 additions & 30 deletions async_substrate_interface/async_substrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
from scalecodec.types import (
GenericCall,
GenericExtrinsic,
GenericRuntimeCallDefinition,
ss58_encode,
MultiAccountId,
)
Expand Down Expand Up @@ -74,12 +73,10 @@
_bt_decode_to_dict_or_list,
legacy_scale_decode,
convert_account_ids,
decode_query_map_async,
)
from async_substrate_interface.utils.storage import StorageKey
from async_substrate_interface.type_registry import _TYPE_REGISTRY
from async_substrate_interface.utils.decoding import (
decode_query_map,
)

ResultHandler = Callable[[dict, Any], Awaitable[tuple[dict, bool]]]

Expand Down Expand Up @@ -526,6 +523,18 @@ async def retrieve_next_page(self, start_key) -> list:
self.last_key = result.last_key
return result.records

async def retrieve_all_records(self) -> list[Any]:
"""
Retrieves all records from all subsequent pages for the AsyncQueryMapResult,
returning them as a list.

Side effect:
The self.records list will be populated fully after running this method.
"""
async for _ in self:
pass
return self.records

def __aiter__(self):
return self

Expand Down Expand Up @@ -558,6 +567,7 @@ async def __anext__(self):
self.loading_complete = True
raise StopAsyncIteration

self.records.extend(next_page)
# Update the buffer with the newly fetched records
self._buffer = iter(next_page)
return next(self._buffer)
Expand Down Expand Up @@ -1408,7 +1418,9 @@ async def decode_scale(
if runtime is None:
runtime = await self.init_runtime(block_hash=block_hash)
if runtime.metadata_v15 is not None and force_legacy is False:
obj = decode_by_type_string(type_string, runtime.registry, scale_bytes)
obj = await asyncio.to_thread(
decode_by_type_string, type_string, runtime.registry, scale_bytes
)
if self.decode_ss58:
try:
type_str_int = int(type_string.split("::")[1])
Expand Down Expand Up @@ -2762,19 +2774,34 @@ async def rpc_request(
logger.error(f"Substrate Request Exception: {result[payload_id]}")
raise SubstrateRequestException(result[payload_id][0])

@cached_fetcher(max_size=SUBSTRATE_CACHE_METHOD_SIZE)
async def get_block_hash(self, block_id: int) -> str:
async def get_block_hash(self, block_id: Optional[int]) -> str:
"""
Retrieves the hash of the specified block number
Retrieves the hash of the specified block number, or the chaintip if None
Args:
block_id: block number

Returns:
Hash of the block
"""
if block_id is None:
return await self.get_chain_head()
else:
if (block_hash := self.runtime_cache.blocks.get(block_id)) is not None:
return block_hash

block_hash = await self._cached_get_block_hash(block_id)
self.runtime_cache.add_item(block_hash=block_hash, block=block_id)
return block_hash

@cached_fetcher(max_size=SUBSTRATE_CACHE_METHOD_SIZE)
async def _cached_get_block_hash(self, block_id: int) -> str:
"""
        This method is intentionally a thin cached wrapper, so that a different cache backend can easily be
        dropped in, as is done with DiskCachedAsyncSubstrateInterface._cached_get_block_hash
"""
return await self._get_block_hash(block_id)

async def _get_block_hash(self, block_id: int) -> str:
async def _get_block_hash(self, block_id: Optional[int]) -> str:
return (await self.rpc_request("chain_getBlockHash", [block_id]))["result"]

async def get_chain_head(self) -> str:
Expand Down Expand Up @@ -3852,18 +3879,20 @@ async def query_map(
params=[result_keys, block_hash],
runtime=runtime,
)
changes = []
for result_group in response["result"]:
result = decode_query_map(
result_group["changes"],
prefix,
runtime,
param_types,
params,
value_type,
key_hashers,
ignore_decoding_errors,
self.decode_ss58,
)
changes.extend(result_group["changes"])
result = await decode_query_map_async(
changes,
prefix,
runtime,
param_types,
params,
value_type,
key_hashers,
ignore_decoding_errors,
self.decode_ss58,
)
else:
# storage item and value scale type are not included here because this is batch-decoded in rust
page_batches = [
Expand All @@ -3881,8 +3910,8 @@ async def query_map(
results: RequestResults = await self._make_rpc_request(
payloads, runtime=runtime
)
for result in results.values():
res = result[0]
for result_ in results.values():
res = result_[0]
if "error" in res:
err_msg = res["error"]["message"]
if (
Expand All @@ -3900,7 +3929,7 @@ async def query_map(
else:
for result_group in res["result"]:
changes.extend(result_group["changes"])
result = decode_query_map(
result = await decode_query_map_async(
changes,
prefix,
runtime,
Expand Down Expand Up @@ -4113,6 +4142,14 @@ async def result_handler(message: dict, subscription_id) -> tuple[dict, bool]:
"extrinsic_hash": "0x{}".format(extrinsic.extrinsic_hash.hex()),
"finalized": False,
}, True

elif "params" in message and message["params"].get("result") == "invalid":
failure_message = f"Subscription {subscription_id} invalid: {message}"
async with self.ws as ws:
await ws.unsubscribe(subscription_id)
logger.error(failure_message)
raise SubstrateRequestException(failure_message)

return message, False

if wait_for_inclusion or wait_for_finalization:
Expand Down Expand Up @@ -4250,13 +4287,25 @@ async def get_metadata_event(

async def get_block_number(self, block_hash: Optional[str] = None) -> int:
"""Async version of `substrateinterface.base.get_block_number` method."""
response = await self.rpc_request("chain_getHeader", [block_hash])
if block_hash is None:
return await self._get_block_number(None)
if (block := self.runtime_cache.blocks_reverse.get(block_hash)) is not None:
return block
block = await self._cached_get_block_number(block_hash)
self.runtime_cache.add_item(block_hash=block_hash, block=block)
return block

if response["result"]:
return int(response["result"]["number"], 16)
raise SubstrateRequestException(
f"Unable to retrieve block number for {block_hash}"
)
@cached_fetcher(max_size=SUBSTRATE_CACHE_METHOD_SIZE)
async def _cached_get_block_number(self, block_hash: str) -> int:
"""
        This method is intentionally a thin cached wrapper, so that a different cache backend can easily be
        dropped in, as is done with DiskCachedAsyncSubstrateInterface._cached_get_block_number
"""
return await self._get_block_number(block_hash=block_hash)

async def _get_block_number(self, block_hash: Optional[str]) -> int:
response = await self.rpc_request("chain_getHeader", [block_hash])
return int(response["result"]["number"], 16)

async def close(self):
"""
Expand Down Expand Up @@ -4351,9 +4400,13 @@ async def get_block_runtime_version_for(self, block_hash: str):
return await self._get_block_runtime_version_for(block_hash)

@async_sql_lru_cache(maxsize=SUBSTRATE_CACHE_METHOD_SIZE)
async def get_block_hash(self, block_id: int) -> str:
async def _cached_get_block_hash(self, block_id: int) -> str:
return await self._get_block_hash(block_id)

@async_sql_lru_cache(maxsize=SUBSTRATE_CACHE_METHOD_SIZE)
async def _cached_get_block_number(self, block_hash: str) -> int:
return await self._get_block_number(block_hash=block_hash)


async def get_async_substrate_interface(
url: str,
Expand Down
Empty file.
66 changes: 56 additions & 10 deletions async_substrate_interface/sync_substrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -482,6 +482,18 @@ def retrieve_next_page(self, start_key) -> list:
self.last_key = result.last_key
return result.records

def retrieve_all_records(self) -> list[Any]:
"""
Retrieves all records from all subsequent pages for the QueryMapResult,
returning them as a list.

Side effect:
The self.records list will be populated fully after running this method.
"""
for _ in self:
pass
return self.records

def __iter__(self):
return self

Expand Down Expand Up @@ -511,6 +523,7 @@ def __next__(self):
self.loading_complete = True
raise StopIteration

self.records.extend(next_page)
# Update the buffer with the newly fetched records
self._buffer = iter(next_page)
return next(self._buffer)
Expand Down Expand Up @@ -2052,8 +2065,21 @@ def rpc_request(
else:
raise SubstrateRequestException(result[payload_id][0])

def get_block_hash(self, block_id: Optional[int]) -> str:
"""
Retrieves the block hash for a given block number, or the chaintip hash if None
"""
if block_id is None:
return self.get_chain_head()
else:
if (block_hash := self.runtime_cache.blocks.get(block_id)) is not None:
return block_hash
block_hash = self._get_block_hash(block_id)
self.runtime_cache.add_item(block_hash=block_hash, block=block_id)
return block_hash

@functools.lru_cache(maxsize=SUBSTRATE_CACHE_METHOD_SIZE)
def get_block_hash(self, block_id: int) -> str:
def _get_block_hash(self, block_id: int) -> str:
return self.rpc_request("chain_getBlockHash", [block_id])["result"]

def get_chain_head(self) -> str:
Expand Down Expand Up @@ -3247,6 +3273,13 @@ def result_handler(message: dict, subscription_id) -> tuple[dict, bool]:
"extrinsic_hash": "0x{}".format(extrinsic.extrinsic_hash.hex()),
"finalized": False,
}, True

elif "params" in message and message["params"].get("result") == "invalid":
failure_message = f"Subscription {subscription_id} invalid: {message}"
self.rpc_request("author_unwatchExtrinsic", [subscription_id])
logger.error(failure_message)
raise SubstrateRequestException(failure_message)

return message, False

if wait_for_inclusion or wait_for_finalization:
Expand Down Expand Up @@ -3380,15 +3413,27 @@ def get_metadata_event(
return self._get_metadata_event(module_name, event_name, runtime)

def get_block_number(self, block_hash: Optional[str] = None) -> int:
"""Async version of `substrateinterface.base.get_block_number` method."""
response = self.rpc_request("chain_getHeader", [block_hash])

if response["result"]:
return int(response["result"]["number"], 16)
"""
Retrieves the block number for a given block hash or chaintip.
"""
if block_hash is None:
return self._get_block_number(None)
else:
raise SubstrateRequestException(
f"Unable to determine block number for {block_hash}"
)
if (
block_number := self.runtime_cache.blocks_reverse.get(block_hash)
) is not None:
return block_number
block_number = self._cached_get_block_number(block_hash=block_hash)
self.runtime_cache.add_item(block_hash=block_hash, block=block_number)
return block_number

@functools.lru_cache(maxsize=SUBSTRATE_CACHE_METHOD_SIZE)
def _cached_get_block_number(self, block_hash: Optional[str]) -> int:
return self._get_block_number(block_hash=block_hash)

def _get_block_number(self, block_hash: Optional[str]) -> int:
response = self.rpc_request("chain_getHeader", [block_hash])
return int(response["result"]["number"], 16)

def close(self):
"""
Expand All @@ -3404,6 +3449,7 @@ def close(self):
self.get_block_runtime_info.cache_clear()
self.get_block_runtime_version_for.cache_clear()
self.supports_rpc_method.cache_clear()
self.get_block_hash.cache_clear()
self._get_block_hash.cache_clear()
self._cached_get_block_number.cache_clear()

encode_scale = SubstrateMixin._encode_scale
Loading
Loading