From 0040f515a0caf378f0e78db27ccb6496c1da28e4 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Wed, 27 Dec 2023 15:29:23 -0300 Subject: [PATCH 01/38] chore(feature-activation): improve phased testing logs (#905) --- hathor/manager.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/hathor/manager.py b/hathor/manager.py index 43963478a..f31478232 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -1072,6 +1072,7 @@ def _log_feature_states(self, vertex: BaseTransaction) -> None: self.log.info( 'New block accepted with feature activation states', + block_hash=vertex.hash_hex, block_height=vertex.get_height(), features_states=state_by_feature ) @@ -1083,7 +1084,12 @@ def _log_feature_states(self, vertex: BaseTransaction) -> None: def _log_if_feature_is_active(self, block: Block, feature: Feature) -> None: """Log if a feature is ACTIVE for a block. Used as part of the Feature Activation Phased Testing.""" if self._feature_service.is_feature_active(block=block, feature=feature): - self.log.info('Feature is ACTIVE for block', feature=feature.value, block_height=block.get_height()) + self.log.info( + 'Feature is ACTIVE for block', + feature=feature.value, + block_hash=block.hash_hex, + block_height=block.get_height() + ) def has_sync_version_capability(self) -> bool: return self._settings.CAPABILITY_SYNC_VERSION in self.capabilities From c0364ba7a10d28bed7502214dc86b0de73740c43 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 28 Dec 2023 11:50:57 -0300 Subject: [PATCH 02/38] fix(reactor): fix reactor initialization on CLI tools (#906) --- hathor/cli/events_simulator/events_simulator.py | 4 ++-- hathor/cli/stratum_mining.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/hathor/cli/events_simulator/events_simulator.py b/hathor/cli/events_simulator/events_simulator.py index 600232429..9db73db66 100644 --- a/hathor/cli/events_simulator/events_simulator.py +++ b/hathor/cli/events_simulator/events_simulator.py @@ -44,7 +44,7 @@ def execute(args: Namespace) -> None: os.environ['HATHOR_CONFIG_YAML'] = UNITTESTS_SETTINGS_FILEPATH from hathor.cli.events_simulator.event_forwarding_websocket_factory import EventForwardingWebsocketFactory from hathor.cli.events_simulator.scenario import Scenario - from hathor.reactor import get_global_reactor + from hathor.reactor import initialize_global_reactor from hathor.simulator import Simulator try: @@ -53,7 +53,7 @@ def execute(args: Namespace) -> None: possible_scenarios = [scenario.name for scenario in Scenario] raise ValueError(f'Invalid scenario "{args.scenario}". 
Choose one of {possible_scenarios}') from e - reactor = get_global_reactor() + reactor = initialize_global_reactor() log = logger.new() simulator = Simulator(args.seed) simulator.start() diff --git a/hathor/cli/stratum_mining.py b/hathor/cli/stratum_mining.py index 799a210dc..679bba1ee 100644 --- a/hathor/cli/stratum_mining.py +++ b/hathor/cli/stratum_mining.py @@ -30,7 +30,7 @@ def create_parser() -> ArgumentParser: def execute(args: Namespace) -> None: from hathor.crypto.util import decode_address - from hathor.reactor import get_global_reactor + from hathor.reactor import initialize_global_reactor from hathor.stratum import StratumClient from hathor.wallet.exceptions import InvalidAddress @@ -43,7 +43,7 @@ def execute(args: Namespace) -> None: print('The given address is invalid') sys.exit(-1) - reactor = get_global_reactor() + reactor = initialize_global_reactor() miner = StratumClient(proc_count=args.nproc, address=address, reactor=reactor) miner.start() point = TCP4ClientEndpoint(reactor, args.host, args.port) From 126bae3647811ffe0cdc0cdf22d92726fa508bbb Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 29 Dec 2023 13:40:14 -0300 Subject: [PATCH 03/38] fix(events): fix events simulator CLI (#907) --- .../event_forwarding_websocket_factory.py | 1 + .../cli/events_simulator/events_simulator.py | 12 ++++-- tests/cli/test_events_simulator.py | 38 +++++++++++++++++++ 3 files changed, 47 insertions(+), 4 deletions(-) create mode 100644 tests/cli/test_events_simulator.py diff --git a/hathor/cli/events_simulator/event_forwarding_websocket_factory.py b/hathor/cli/events_simulator/event_forwarding_websocket_factory.py index 5b1dad9b3..f07a5b8e2 100644 --- a/hathor/cli/events_simulator/event_forwarding_websocket_factory.py +++ b/hathor/cli/events_simulator/event_forwarding_websocket_factory.py @@ -29,6 +29,7 @@ def __init__(self, simulator: 'Simulator', *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) def buildProtocol(self, _: IAddress) -> 'EventForwardingWebsocketProtocol': + from hathor.cli.events_simulator.event_forwarding_websocket_protocol import EventForwardingWebsocketProtocol protocol = EventForwardingWebsocketProtocol(self._simulator) protocol.factory = self return protocol diff --git a/hathor/cli/events_simulator/events_simulator.py b/hathor/cli/events_simulator/events_simulator.py index 9db73db66..135a95296 100644 --- a/hathor/cli/events_simulator/events_simulator.py +++ b/hathor/cli/events_simulator/events_simulator.py @@ -14,12 +14,16 @@ import os from argparse import ArgumentParser, Namespace +from typing import TYPE_CHECKING from autobahn.twisted.resource import WebSocketResource from structlog import get_logger from twisted.web.resource import Resource from twisted.web.server import Site +if TYPE_CHECKING: + from hathor.reactor import ReactorProtocol + DEFAULT_PORT = 8080 logger = get_logger() @@ -39,12 +43,11 @@ def create_parser() -> ArgumentParser: return parser -def execute(args: Namespace) -> None: +def execute(args: Namespace, reactor: 'ReactorProtocol') -> None: from hathor.conf import UNITTESTS_SETTINGS_FILEPATH os.environ['HATHOR_CONFIG_YAML'] = UNITTESTS_SETTINGS_FILEPATH from hathor.cli.events_simulator.event_forwarding_websocket_factory import EventForwardingWebsocketFactory from hathor.cli.events_simulator.scenario import Scenario - from hathor.reactor import initialize_global_reactor from hathor.simulator import Simulator try: @@ -53,7 +56,6 @@ def execute(args: Namespace) -> None: possible_scenarios = [scenario.name for scenario in 
Scenario] raise ValueError(f'Invalid scenario "{args.scenario}". Choose one of {possible_scenarios}') from e - reactor = initialize_global_reactor() log = logger.new() simulator = Simulator(args.seed) simulator.start() @@ -90,6 +92,8 @@ def execute(args: Namespace) -> None: def main(): + from hathor.reactor import initialize_global_reactor parser = create_parser() args = parser.parse_args() - execute(args) + reactor = initialize_global_reactor() + execute(args, reactor) diff --git a/tests/cli/test_events_simulator.py b/tests/cli/test_events_simulator.py new file mode 100644 index 000000000..83f6049e9 --- /dev/null +++ b/tests/cli/test_events_simulator.py @@ -0,0 +1,38 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from unittest.mock import Mock + +from hathor.cli.events_simulator.event_forwarding_websocket_factory import EventForwardingWebsocketFactory +from hathor.cli.events_simulator.events_simulator import create_parser, execute +from tests.test_memory_reactor_clock import TestMemoryReactorClock + + +def test_events_simulator() -> None: + parser = create_parser() + args = parser.parse_args(['--scenario', 'ONLY_LOAD']) + reactor = TestMemoryReactorClock() + + execute(args, reactor) + reactor.advance(1) + + factory = EventForwardingWebsocketFactory( + simulator=Mock(), + peer_id='test_peer_id', + network='test_network', + reactor=reactor, + event_storage=Mock() + ) + protocol = factory.buildProtocol(Mock()) + + assert protocol is not None From abbd05403ef45de1c2e9f68ef04b0d03a53f14ff Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Mon, 8 Jan 2024 18:25:48 -0300 Subject: [PATCH 04/38] tests(p2p): improve protocol test (#918) --- tests/p2p/test_protocol.py | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index 0cf572ec6..f643ed7d2 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -1,12 +1,13 @@ from json import JSONDecodeError from typing import Optional +from unittest.mock import Mock, patch from twisted.internet.defer import inlineCallbacks from twisted.python.failure import Failure from hathor.conf import HathorSettings from hathor.p2p.peer_id import PeerId -from hathor.p2p.protocol import HathorProtocol +from hathor.p2p.protocol import HathorLineReceiver, HathorProtocol from hathor.simulator import FakeConnection from hathor.util import json_dumps from tests import unittest @@ -103,12 +104,26 @@ def test_rate_limit(self): def test_invalid_size(self): self.conn.tr1.clear() - # Creating big payload - big_payload = '[' - for x in range(65536): - big_payload = '{}{}'.format(big_payload, x) - big_payload = '{}]'.format(big_payload) - self._send_cmd(self.conn.proto1, 'HELLO', big_payload) + cmd = b'HELLO ' + max_payload_bytes = HathorLineReceiver.MAX_LENGTH - len(cmd) + line_length_exceeded_wrapped = Mock(wraps=self.conn.proto1.lineLengthExceeded) + + biggest_valid_payload = bytes([1] * max_payload_bytes) + line = cmd + 
biggest_valid_payload + b'\r\n' + + with patch.object(self.conn.proto1, 'lineLengthExceeded', line_length_exceeded_wrapped): + self.conn.proto1.dataReceived(line) + + line_length_exceeded_wrapped.assert_not_called() + line_length_exceeded_wrapped.reset_mock() + + smallest_invalid_payload = bytes([1] * (max_payload_bytes + 1)) + line = cmd + smallest_invalid_payload + b'\r\n' + + with patch.object(self.conn.proto1, 'lineLengthExceeded', line_length_exceeded_wrapped): + self.conn.proto1.dataReceived(line) + + line_length_exceeded_wrapped.assert_called_once() self.assertTrue(self.conn.tr1.disconnecting) def test_invalid_payload(self): From 109ffc725fc99e6e1d7205563d727e66bba7a890 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Wed, 10 Jan 2024 16:58:00 +0100 Subject: [PATCH 05/38] tests(event): fix missing hash update --- hathor/cli/events_simulator/scenario.py | 1 + hathor/transaction/base_transaction.py | 2 ++ tests/event/test_event_simulation_scenarios.py | 6 +++--- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py index 5d029e309..7c08a72bc 100644 --- a/hathor/cli/events_simulator/scenario.py +++ b/hathor/cli/events_simulator/scenario.py @@ -134,6 +134,7 @@ def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManage settings.GENESIS_TX1_HASH, not_none(tx2.hash), ] + block.update_hash() assert manager.propagate_tx(block, fails_silently=False) simulator.run(60) diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 453deab09..ae65a5000 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -586,6 +586,8 @@ def update_hash(self) -> None: """ Update the hash of the transaction. """ self.hash = self.calculate_hash() + if metadata := getattr(self, '_metadata', None): + metadata.hash = self.hash def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) -> TransactionMetadata: """Return this tx's metadata. diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index 9acaab38d..65847491d 100644 --- a/tests/event/test_event_simulation_scenarios.py +++ b/tests/event/test_event_simulation_scenarios.py @@ -262,10 +262,10 @@ def test_unvoided_transaction(self): # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each twin tx, inverting the voided state of them. # noqa E501 # The order of events is important, we receive the voided txs first, then reverse topological ordering. 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], received_by=[], children=['66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, first_block='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], 
received_by=[], children=['24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, first_block='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 ] responses = _remove_timestamp(responses) From 2410024b7225fdc3b60ee9fe4c3a814745784567 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Mon, 8 Jan 2024 17:44:55 +0100 Subject: [PATCH 06/38] feat(debug): make it possible to connect a remote ipython shell --- hathor/builder/cli_builder.py | 16 ++ hathor/cli/run_node.py | 6 +- hathor/cli/run_node_args.py | 1 + hathor/cli/util.py | 13 +- hathor/ipykernel.py | 83 +++++++++ poetry.lock | 327 +++++++++++++++++++++++++++++++++- pyproject.toml | 2 +- 7 files changed, 435 insertions(+), 13 deletions(-) create mode 100644 hathor/ipykernel.py diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 8d3463176..368846ce7 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -270,6 +270,11 @@ def create_manager(self, reactor: Reactor) -> HathorManager: 
cpu_mining_service=cpu_mining_service ) + if self._args.x_ipython_kernel: + self.check_or_raise(self._args.x_asyncio_reactor, + '--x-ipython-kernel must be used with --x-asyncio-reactor') + self._start_ipykernel() + p2p_manager.set_manager(self.manager) if self._args.stratum: @@ -376,3 +381,14 @@ def create_wallet(self) -> BaseWallet: return wallet else: raise BuilderError('Invalid type of wallet') + + def _start_ipykernel(self) -> None: + # breakpoints are not expected to be used with the embeded ipykernel, to prevent this warning from being + # unnecessarily annoying, PYDEVD_DISABLE_FILE_VALIDATION should be set to 1 before debugpy is imported, or in + # practice, before importing hathor.ipykernel, if for any reason support for breakpoints is needed, the flag + # -Xfrozen_modules=off has to be passed to the python interpreter + # see: + # https://github.com/microsoft/debugpy/blob/main/src/debugpy/_vendored/pydevd/pydevd_file_utils.py#L587-L592 + os.environ['PYDEVD_DISABLE_FILE_VALIDATION'] = '1' + from hathor.ipykernel import embed_kernel + embed_kernel(self.manager, runtime_dir=self._args.data, extra_ns=dict(run_node=self)) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 55b9c1730..30bab7fb2 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -34,7 +34,8 @@ class RunNode: ('--x-sync-bridge', lambda args: bool(args.x_sync_bridge)), ('--x-sync-v2-only', lambda args: bool(args.x_sync_v2_only)), ('--x-enable-event-queue', lambda args: bool(args.x_enable_event_queue)), - ('--x-asyncio-reactor', lambda args: bool(args.x_asyncio_reactor)) + ('--x-asyncio-reactor', lambda args: bool(args.x_asyncio_reactor)), + ('--x-ipython-kernel', lambda args: bool(args.x_ipython_kernel)), ] @classmethod @@ -120,6 +121,9 @@ def create_parser(cls) -> ArgumentParser: help=f'Signal not support for a feature. 
One of {possible_features}') parser.add_argument('--x-asyncio-reactor', action='store_true', help='Use asyncio reactor instead of Twisted\'s default.') + # XXX: this is temporary, should be added as a sysctl instead before merging + parser.add_argument('--x-ipython-kernel', action='store_true', + help='Launch embedded IPython kernel for remote debugging') return parser def prepare(self, *, register_resources: bool = True) -> None: diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index 897555fbb..ca581bfed 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -73,3 +73,4 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): signal_support: set[Feature] signal_not_support: set[Feature] x_asyncio_reactor: bool + x_ipython_kernel: bool diff --git a/hathor/cli/util.py b/hathor/cli/util.py index 38555a294..84ac878fe 100644 --- a/hathor/cli/util.py +++ b/hathor/cli/util.py @@ -206,18 +206,25 @@ def setup_logging( 'level': 'INFO' if debug else 'WARN', 'propagate': False, }, - '': { + 'tornado': { # used by ipykernel's zmq 'handlers': handlers, - 'level': 'DEBUG' if debug else 'INFO', + 'level': 'INFO' if debug else 'WARN', + 'propagate': False, }, 'hathor.p2p.sync_v1': { 'handlers': handlers, 'level': 'DEBUG' if debug_sync else 'INFO', + 'propagate': False, }, 'hathor.p2p.sync_v2': { 'handlers': handlers, 'level': 'DEBUG' if debug_sync else 'INFO', - } + 'propagate': False, + }, + '': { + 'handlers': handlers, + 'level': 'DEBUG' if debug else 'INFO', + }, } }) diff --git a/hathor/ipykernel.py b/hathor/ipykernel.py new file mode 100644 index 000000000..31e942346 --- /dev/null +++ b/hathor/ipykernel.py @@ -0,0 +1,83 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from logging import getLogger +from typing import TYPE_CHECKING, Any, Optional + +from ipykernel.kernelapp import IPKernelApp as OriginalIPKernelApp + +if TYPE_CHECKING: + from hathor.manager import HathorManager + + +class IPKernelApp(OriginalIPKernelApp): + def __init__(self, runtime_dir: Optional[str] = None): + super().__init__() + # https://traitlets.readthedocs.io/en/stable/config-api.html#traitlets.config.Application.logging_config + self.logging_config: dict[str, Any] = {} # empty out logging config + # https://traitlets.readthedocs.io/en/stable/config-api.html#traitlets.config.LoggingConfigurable.log + self.log = getLogger('hathor.ipykernel') # use custom name for the logging adapter + if runtime_dir is not None: + # https://ipykernel.readthedocs.io/en/stable/api/ipykernel.html#ipykernel.kernelapp.IPKernelApp.connection_dir + # https://github.com/ipython/ipykernel/blob/main/ipykernel/kernelapp.py#L301-L320 + # if not defined now, when init_connection_file is called it will be set to 'kernel-.json', it is + # defined now because it's more convinient to have a fixed path that doesn't depend on the PID of the + # running process, which doesn't benefit us anyway since the data dir + self.connection_dir = runtime_dir + self.connection_file = 'kernel.json' + # https://ipykernel.readthedocs.io/en/stable/api/ipykernel.html#ipykernel.kernelapp.IPKernelApp.no_stderr + self.no_stderr = True # disable forwarding of stderr (because we use it for logging) + + # https://traitlets.readthedocs.io/en/stable/config-api.html#traitlets.config.Application.get_default_logging_config + def get_default_logging_config(self) -> dict[str, Any]: + # XXX: disable original logging setup + return {"version": 1, "disable_existing_loggers": False} + + # https://ipykernel.readthedocs.io/en/stable/api/ipykernel.html#ipykernel.kernelapp.IPKernelApp.init_signal + def init_signal(self) -> None: + # XXX: ignore registering of signals + pass + + # https://ipykernel.readthedocs.io/en/stable/api/ipykernel.html#ipykernel.kernelapp.IPKernelApp.log_connection_info + def log_connection_info(self) -> None: + # XXX: this method is only used to log this info, we can customize it freely + self.log.info(f'ipykernel connection enabled at {self.abs_connection_file}') + + # https://ipykernel.readthedocs.io/en/stable/api/ipykernel.html#ipykernel.kernelapp.IPKernelApp.configure_tornado_logger + def configure_tornado_logger(self) -> None: + # XXX: we already setup tornago logging on hathor.cli.util.setup_logging prevent this class from overriding it + pass + + # https://ipykernel.readthedocs.io/en/stable/api/ipykernel.html#ipykernel.kernelapp.IPKernelApp.start + def start(self): + # XXX: custom start to prevent it from running an event loop and capturing KeyboardInterrupt + self.kernel.start() + + +# https://ipykernel.readthedocs.io/en/stable/api/ipykernel.html#ipykernel.embed.embed_kernel +def embed_kernel(manager: 'HathorManager', *, + runtime_dir: Optional[str] = None, extra_ns: dict[str, Any] = {}) -> None: + """ Customized version of ipykernel.embed.embed_kernel that takes parameters specific to this project. + + In theory this method could be called multiple times, like the original ipykernel.embed.embed_kernel. 
+ """ + # get the app if it exists, or set it up if it doesn't + if IPKernelApp.initialized(): + app = IPKernelApp.instance() + else: + app = IPKernelApp.instance(runtime_dir=runtime_dir) + app.initialize([]) + app.kernel.user_ns = dict(manager=manager) | extra_ns + app.shell.set_completer_frame() + app.start() diff --git a/poetry.lock b/poetry.lock index 53b74c73a..bed67e61f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -361,6 +361,23 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "comm" +version = "0.2.0" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, + {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + [[package]] name = "configargparse" version = "1.5.3" @@ -498,6 +515,33 @@ sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] +[[package]] +name = "debugpy" +version = "1.8.0" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + 
{file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, +] + [[package]] name = "decorator" version = "5.1.1" @@ -760,6 +804,39 @@ files = [ [package.dependencies] sortedcontainers = ">=2.0,<3.0" +[[package]] +name = "ipykernel" +version = "6.27.1" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, + {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=20" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] + [[package]] name = "ipython" version = "8.7.0" @@ -776,6 +853,7 @@ appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" +ipykernel = {version = "*", optional = true, markers = "extra == \"kernel\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} @@ -837,6 +915,48 @@ docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alab qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +[[package]] +name = "jupyter-client" +version = "8.6.0" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, + {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, +] + +[package.dependencies] +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", 
"sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.5.1" +description = "Jupyter core package. A base package on which Jupyter projects rely." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.5.1-py3-none-any.whl", hash = "sha256:220dfb00c45f0d780ce132bb7976b58263f81a3ada6e90a9b6823785a424f739"}, + {file = "jupyter_core-5.5.1.tar.gz", hash = "sha256:1553311a97ccd12936037f36b9ab4d6ae8ceea6ad2d5c90d94a909e752178e40"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + [[package]] name = "matplotlib-inline" version = "0.1.6" @@ -1032,6 +1152,17 @@ mypy = ">=1.0.0,<1.6.0" [package.extras] test = ["lxml", "pytest (>=4.6)", "pytest-cov"] +[[package]] +name = "nest-asyncio" +version = "1.5.8" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, +] + [[package]] name = "packaging" version = "22.0" @@ -1094,6 +1225,21 @@ files = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + [[package]] name = "pluggy" version = "1.0.0" @@ -1137,6 +1283,34 @@ files = [ [package.dependencies] wcwidth = "*" +[[package]] +name = "psutil" +version = "5.9.7" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + [[package]] name = "ptyprocess" version = "0.7.0" @@ -1374,6 +1548,20 @@ psutil = ["psutil (>=3.0)"] setproctitle = ["setproctitle"] testing = ["filelock"] +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "python-healthchecklib" version = "0.1.0" @@ -1467,6 +1655,111 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "pyzmq" +version = "25.1.2" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.6" +files = [ + {file = 
"pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = 
"pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = 
"pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, +] + +[package.dependencies] +cffi = {version = "*", markers = 
"implementation_name == \"pypy\""} + [[package]] name = "requests" version = "2.28.1" @@ -1490,7 +1783,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rocksdb" -version = "0.9.1" +version = "0.9.2" description = "Python bindings for RocksDB" optional = false python-versions = "*" @@ -1753,22 +2046,40 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tornado" +version = "6.4" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, +] + [[package]] name = "traitlets" -version = "5.7.0" +version = "5.6.0" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.7" files = [ - {file = "traitlets-5.7.0-py3-none-any.whl", hash = "sha256:61832ea7b7f910f5745e27e9bb269a181fd15af76027d99560299209d5b17c94"}, - {file = "traitlets-5.7.0.tar.gz", hash = "sha256:bd0fca5c890a09bf66b33cce67ca14156b080429bc39c7ef26b075a4bd4f9fc3"}, + {file = "traitlets-5.6.0-py3-none-any.whl", hash = "sha256:1410755385d778aed847d68deb99b3ba30fbbf489e17a1e8cbb753060d5cce73"}, + {file = "traitlets-5.6.0.tar.gz", hash = "sha256:10b6ed1c9cedee83e795db70a8b9c2db157bb3778ec4587a349ecb7ef3b1033b"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] test = ["pre-commit", "pytest"] -typing = ["mypy (>=0.990)"] [[package]] name = "twisted" @@ -2146,4 +2457,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "2b20a90cf75e75bd32568e722489db53b4a4b490f4e3f084ff5734ea8137c37e" +content-hash = "608fb10f02f72ce1ad8e3d20b85ee32c725a867aadd5a5e27ea4a2faf3a06848" diff --git 
a/pyproject.toml b/pyproject.toml
index f40746b0d..56bee4d19 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -58,7 +58,7 @@ colorama = "~0.4.6"
 configargparse = "~1.5.3"
 cryptography = "~38.0.3"
 graphviz = "~0.20.1"
-ipython = "~8.7.0"
+ipython = {version = "~8.7.0", extras = ["kernel"]}
 mnemonic = "~0.20"
 prometheus_client = "~0.15.0"
 pyopenssl = "=22.1.0"

From 8e579c442895ccb601366d2cc301b36fc9d11eb4 Mon Sep 17 00:00:00 2001
From: Jan Segre
Date: Mon, 30 Oct 2023 18:43:03 +0100
Subject: [PATCH 07/38] chore: add support for Python 3.12

---
 .github/workflows/docker.yml            | 7 +-
 .github/workflows/main.yml              | 4 +-
 hathor/event/storage/rocksdb_storage.py | 4 +
 hathor/util.py                          | 12 +-
 poetry.lock                             | 1525 ++++++++++++-----
 pyproject.toml                          | 7 +-
 6 files changed, 798 insertions(+), 761 deletions(-)

diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 7e121df01..ef6351562 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -22,14 +22,13 @@ jobs:
       matrix:
         python-impl:
           - python
-          - pypy
         python-version:
           - '3.10'
           - '3.11'
-        exclude:
-          # XXX: pypy-3.11 does exist yet
+          - '3.12'
+        include:
           - python-impl: pypy
-            python-version: '3.11'
+            python-version: '3.10'
     steps:
       - name: Checkout
        uses: actions/checkout@v3
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7527712e6..63c98bd26 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -23,7 +23,7 @@ jobs:
         import os
         import json
         full_matrix = {
-            'python': ['3.10', '3.11'],
+            'python': ['3.10', '3.11', '3.12'],
             # available OS's: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on
             'os': ['ubuntu-22.04', 'macos-12', 'windows-2022'],
             'include': [
@@ -33,7 +33,7 @@ jobs:
         }
         # this is the fastest one:
         reduced_matrix = {
-            'python': ['3.11'],
+            'python': ['3.12'],
             'os': ['ubuntu-22.04'],
         }
         github_repository = os.environ['GITHUB_REPOSITORY']
diff --git a/hathor/event/storage/rocksdb_storage.py b/hathor/event/storage/rocksdb_storage.py
index b1709473c..ca1f0c7fc 100644
--- a/hathor/event/storage/rocksdb_storage.py
+++ b/hathor/event/storage/rocksdb_storage.py
@@ -54,6 +54,10 @@ def iter_from_event(self, key: int) -> Iterator[BaseEvent]:
         for event_bytes in it:
             yield BaseEvent.parse_raw(event_bytes)

+        # XXX: on Python 3.12, not deleting it here can cause EXC_BAD_ACCESS if the db is released before the iterator
+        # in the garbage collector. This race condition might happen between tests.
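# A brief, self-contained sketch (not taken from this patch) of the cleanup pattern the two comment
# lines above describe; the helper name `iter_and_release` and its generic signature are illustrative
# assumptions, not part of hathor's API. Deleting the iterator inside the generator destroys it while
# the database handle is still alive, instead of leaving the destruction order to the garbage collector.
from typing import Iterator, TypeVar

T = TypeVar("T")


def iter_and_release(it: Iterator[T]) -> Iterator[T]:
    """Yield every item from `it`, then drop the reference before returning."""
    for item in it:
        yield item
    # Release the (possibly native-backed) iterator deterministically rather than waiting for GC,
    # which on Python 3.12 may otherwise release the owning database first and crash with EXC_BAD_ACCESS.
    del it

# Usage sketch: fully consuming `iter_and_release(self._db.itervalues(self._cf_event))` runs the `del`
# above before the surrounding storage object can be released.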
+ del it + def _db_get_last_event(self) -> Optional[BaseEvent]: last_element: Optional[bytes] = None it = self._db.itervalues(self._cf_event) diff --git a/hathor/util.py b/hathor/util.py index 20fd9e995..bd674f128 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -176,14 +176,14 @@ class MaxSizeOrderedDict(OrderedDict): >>> foo[3] = 'c' >>> foo[4] = 'd' >>> foo[5] = 'e' - >>> foo - MaxSizeOrderedDict([(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e')]) + >>> list(foo.items()) + [(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e')] >>> foo[6] = 'f' - >>> foo - MaxSizeOrderedDict([(2, 'b'), (3, 'c'), (4, 'd'), (5, 'e'), (6, 'f')]) + >>> list(foo.items()) + [(2, 'b'), (3, 'c'), (4, 'd'), (5, 'e'), (6, 'f')] >>> foo[7] = 'g' - >>> foo - MaxSizeOrderedDict([(3, 'c'), (4, 'd'), (5, 'e'), (6, 'f'), (7, 'g')]) + >>> list(foo.items()) + [(3, 'c'), (4, 'd'), (5, 'e'), (6, 'f'), (7, 'g')] """ # Kindly stolen from: https://stackoverflow.com/a/49274421/947511 def __init__(self, *args, max=0, **kwargs): diff --git a/poetry.lock b/poetry.lock index bed67e61f..f0609067c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,111 +2,99 @@ [[package]] name = "aiohttp" -version = "3.8.3" +version = "3.9.1" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ba71c9b4dcbb16212f334126cc3d8beb6af377f6703d9dc2d9fb3874fd667ee9"}, - {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d24b8bb40d5c61ef2d9b6a8f4528c2f17f1c5d2d31fed62ec860f6006142e83e"}, - {file = "aiohttp-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f88df3a83cf9df566f171adba39d5bd52814ac0b94778d2448652fc77f9eb491"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97decbb3372d4b69e4d4c8117f44632551c692bb1361b356a02b97b69e18a62"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309aa21c1d54b8ef0723181d430347d7452daaff93e8e2363db8e75c72c2fb2d"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad5383a67514e8e76906a06741febd9126fc7c7ff0f599d6fcce3e82b80d026f"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20acae4f268317bb975671e375493dbdbc67cddb5f6c71eebdb85b34444ac46b"}, - {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a3c31c6d7cd08c149e50dc7aa2568317f5844acd745621983380597f027a18"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6f76310355e9fae637c3162936e9504b4767d5c52ca268331e2756e54fd4ca5"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:256deb4b29fe5e47893fa32e1de2d73c3afe7407738bd3c63829874661d4822d"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5c59fcd80b9049b49acd29bd3598cada4afc8d8d69bd4160cd613246912535d7"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:059a91e88f2c00fe40aed9031b3606c3f311414f86a90d696dd982e7aec48142"}, - {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2feebbb6074cdbd1ac276dbd737b40e890a1361b3cc30b74ac2f5e24aab41f7b"}, - {file = "aiohttp-3.8.3-cp310-cp310-win32.whl", hash = "sha256:5bf651afd22d5f0c4be16cf39d0482ea494f5c88f03e75e5fef3a85177fecdeb"}, - {file = 
"aiohttp-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:653acc3880459f82a65e27bd6526e47ddf19e643457d36a2250b85b41a564715"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:86fc24e58ecb32aee09f864cb11bb91bc4c1086615001647dbfc4dc8c32f4008"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75e14eac916f024305db517e00a9252714fce0abcb10ad327fb6dcdc0d060f1d"}, - {file = "aiohttp-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d1fde0f44029e02d02d3993ad55ce93ead9bb9b15c6b7ccd580f90bd7e3de476"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab94426ddb1ecc6a0b601d832d5d9d421820989b8caa929114811369673235c"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89d2e02167fa95172c017732ed7725bc8523c598757f08d13c5acca308e1a061"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a2c72fc95d59b881cf38a4b2be9381b9527f9d328771e90f72ac76f31ad8"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7149272fb5834fc186328e2c1fa01dda3e1fa940ce18fded6d412e8f2cf76d"}, - {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:512bd5ab136b8dc0ffe3fdf2dfb0c4b4f49c8577f6cae55dca862cd37a4564e2"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7018ecc5fe97027214556afbc7c502fbd718d0740e87eb1217b17efd05b3d276"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88c70ed9da9963d5496d38320160e8eb7e5f1886f9290475a881db12f351ab5d"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:da22885266bbfb3f78218dc40205fed2671909fbd0720aedba39b4515c038091"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:e65bc19919c910127c06759a63747ebe14f386cda573d95bcc62b427ca1afc73"}, - {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:08c78317e950e0762c2983f4dd58dc5e6c9ff75c8a0efeae299d363d439c8e34"}, - {file = "aiohttp-3.8.3-cp311-cp311-win32.whl", hash = "sha256:45d88b016c849d74ebc6f2b6e8bc17cabf26e7e40c0661ddd8fae4c00f015697"}, - {file = "aiohttp-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:96372fc29471646b9b106ee918c8eeb4cca423fcbf9a34daa1b93767a88a2290"}, - {file = "aiohttp-3.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c971bf3786b5fad82ce5ad570dc6ee420f5b12527157929e830f51c55dc8af77"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff25f48fc8e623d95eca0670b8cc1469a83783c924a602e0fbd47363bb54aaca"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e381581b37db1db7597b62a2e6b8b57c3deec95d93b6d6407c5b61ddc98aca6d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db19d60d846283ee275d0416e2a23493f4e6b6028825b51290ac05afc87a6f97"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25892c92bee6d9449ffac82c2fe257f3a6f297792cdb18ad784737d61e7a9a85"}, - {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398701865e7a9565d49189f6c90868efaca21be65c725fc87fc305906be915da"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:4a4fbc769ea9b6bd97f4ad0b430a6807f92f0e5eb020f1e42ece59f3ecfc4585"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b29bfd650ed8e148f9c515474a6ef0ba1090b7a8faeee26b74a8ff3b33617502"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1e56b9cafcd6531bab5d9b2e890bb4937f4165109fe98e2b98ef0dcfcb06ee9d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ec40170327d4a404b0d91855d41bfe1fe4b699222b2b93e3d833a27330a87a6d"}, - {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2df5f139233060578d8c2c975128fb231a89ca0a462b35d4b5fcf7c501ebdbe1"}, - {file = "aiohttp-3.8.3-cp36-cp36m-win32.whl", hash = "sha256:f973157ffeab5459eefe7b97a804987876dd0a55570b8fa56b4e1954bf11329b"}, - {file = "aiohttp-3.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:437399385f2abcd634865705bdc180c8314124b98299d54fe1d4c8990f2f9494"}, - {file = "aiohttp-3.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:09e28f572b21642128ef31f4e8372adb6888846f32fecb288c8b0457597ba61a"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f3553510abdbec67c043ca85727396ceed1272eef029b050677046d3387be8d"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e168a7560b7c61342ae0412997b069753f27ac4862ec7867eff74f0fe4ea2ad9"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db4c979b0b3e0fa7e9e69ecd11b2b3174c6963cebadeecfb7ad24532ffcdd11a"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e164e0a98e92d06da343d17d4e9c4da4654f4a4588a20d6c73548a29f176abe2"}, - {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8a78079d9a39ca9ca99a8b0ac2fdc0c4d25fc80c8a8a82e5c8211509c523363"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21b30885a63c3f4ff5b77a5d6caf008b037cb521a5f33eab445dc566f6d092cc"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4b0f30372cef3fdc262f33d06e7b411cd59058ce9174ef159ad938c4a34a89da"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:8135fa153a20d82ffb64f70a1b5c2738684afa197839b34cc3e3c72fa88d302c"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ad61a9639792fd790523ba072c0555cd6be5a0baf03a49a5dd8cfcf20d56df48"}, - {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978b046ca728073070e9abc074b6299ebf3501e8dee5e26efacb13cec2b2dea0"}, - {file = "aiohttp-3.8.3-cp37-cp37m-win32.whl", hash = "sha256:0d2c6d8c6872df4a6ec37d2ede71eff62395b9e337b4e18efd2177de883a5033"}, - {file = "aiohttp-3.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:21d69797eb951f155026651f7e9362877334508d39c2fc37bd04ff55b2007091"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ca9af5f8f5812d475c5259393f52d712f6d5f0d7fdad9acdb1107dd9e3cb7eb"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d90043c1882067f1bd26196d5d2db9aa6d268def3293ed5fb317e13c9413ea4"}, - {file = "aiohttp-3.8.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d737fc67b9a970f3234754974531dc9afeea11c70791dcb7db53b0cf81b79784"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf909ea0a3fc9596e40d55d8000702a85e27fd578ff41a5500f68f20fd32e6c"}, - {file = 
"aiohttp-3.8.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5835f258ca9f7c455493a57ee707b76d2d9634d84d5d7f62e77be984ea80b849"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da37dcfbf4b7f45d80ee386a5f81122501ec75672f475da34784196690762f4b"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342"}, - {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:527b3b87b24844ea7865284aabfab08eb0faf599b385b03c2aa91fc6edd6e4b6"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5ba88df9aa5e2f806650fcbeedbe4f6e8736e92fc0e73b0400538fd25a4dd96"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7b8813be97cab8cb52b1375f41f8e6804f6507fe4660152e8ca5c48f0436017"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2dea10edfa1a54098703cb7acaa665c07b4e7568472a47f4e64e6319d3821ccf"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:713d22cd9643ba9025d33c4af43943c7a1eb8547729228de18d3e02e278472b6"}, - {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2d252771fc85e0cf8da0b823157962d70639e63cb9b578b1dec9868dd1f4f937"}, - {file = "aiohttp-3.8.3-cp38-cp38-win32.whl", hash = "sha256:66bd5f950344fb2b3dbdd421aaa4e84f4411a1a13fca3aeb2bcbe667f80c9f76"}, - {file = "aiohttp-3.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:84b14f36e85295fe69c6b9789b51a0903b774046d5f7df538176516c3e422446"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16c121ba0b1ec2b44b73e3a8a171c4f999b33929cd2397124a8c7fcfc8cd9e06"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d6aaa4e7155afaf994d7924eb290abbe81a6905b303d8cb61310a2aba1c68ba"}, - {file = "aiohttp-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43046a319664a04b146f81b40e1545d4c8ac7b7dd04c47e40bf09f65f2437346"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599418aaaf88a6d02a8c515e656f6faf3d10618d3dd95866eb4436520096c84b"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a2964319d359f494f16011e23434f6f8ef0434acd3cf154a6b7bec511e2fb7"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73a4131962e6d91109bca6536416aa067cf6c4efb871975df734f8d2fd821b37"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598adde339d2cf7d67beaccda3f2ce7c57b3b412702f29c946708f69cf8222aa"}, - {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75880ed07be39beff1881d81e4a907cafb802f306efd6d2d15f2b3c69935f6fb"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0239da9fbafd9ff82fd67c16704a7d1bccf0d107a300e790587ad05547681c8"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4e3a23ec214e95c9fe85a58470b660efe6534b83e6cbe38b3ed52b053d7cb6ad"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:47841407cc89a4b80b0c52276f3cc8138bbbfba4b179ee3acbd7d77ae33f7ac4"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:54d107c89a3ebcd13228278d68f1436d3f33f2dd2af5415e3feaeb1156e1a62c"}, - {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c37c5cce780349d4d51739ae682dec63573847a2a8dcb44381b174c3d9c8d403"}, - {file = "aiohttp-3.8.3-cp39-cp39-win32.whl", hash = "sha256:f178d2aadf0166be4df834c4953da2d7eef24719e8aec9a65289483eeea9d618"}, - {file = "aiohttp-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:88e5be56c231981428f4f506c68b6a46fa25c4123a2e86d156c58a8369d31ab7"}, - {file = "aiohttp-3.8.3.tar.gz", hash = "sha256:3828fb41b7203176b82fe5d699e0d845435f2374750a44b480ea6b930f6be269"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, + {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, + {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, + {file = 
"aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, + {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, + {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, + {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, + {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, + {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, + {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, + 
{file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, + {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, + {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, + {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, ] [package.dependencies] aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +speedups = ["Brotli", "aiodns", "brotlicffi"] [[package]] name = "aiosignal" @@ -135,48 +123,50 @@ files = [ [[package]] name = "asttokens" -version = "2.2.1" +version = "2.4.1" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ] [package.dependencies] -six = "*" +six = ">=1.12.0" [package.extras] -test = ["astroid", "pytest"] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "async-timeout" -version = "4.0.2" +version = "4.0.3" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, + {file = "async-timeout-4.0.3.tar.gz", hash = 
"sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] [[package]] name = "attrs" -version = "22.1.0" +version = "23.1.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "autobahn" @@ -251,86 +241,74 @@ tests = ["PyHamcrest (>=2.0.2)", "mypy", "pytest (>=4.6)", "pytest-benchmark", " [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + 
{file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -363,13 +341,13 @@ files = [ [[package]] name = "comm" -version = "0.2.0" +version = "0.2.1" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." optional = false python-versions = ">=3.8" files = [ - {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, - {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, + {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, + {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, ] [package.dependencies] @@ -380,13 +358,13 @@ test = ["pytest"] [[package]] name = "configargparse" -version = "1.5.3" +version = "1.5.5" description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "ConfigArgParse-1.5.3-py3-none-any.whl", hash = "sha256:18f6535a2db9f6e02bd5626cc7455eac3e96b9ab3d969d366f9aafd5c5c00fe7"}, - {file = "ConfigArgParse-1.5.3.tar.gz", hash = "sha256:1b0b3cbf664ab59dada57123c81eff3d9737e0d11d8cf79e3d6eb10823f1739f"}, + {file = "ConfigArgParse-1.5.5-py3-none-any.whl", hash = "sha256:541360ddc1b15c517f95c0d02d1fca4591266628f3667acdc5d13dccc78884ca"}, + {file = "ConfigArgParse-1.5.5.tar.gz", hash = "sha256:363d80a6d35614bd446e2f2b1b216f3b33741d03ac6d0a92803306f40e555b58"}, ] [package.extras] @@ -395,72 +373,74 @@ yaml = ["PyYAML"] [[package]] name = "constantly" -version = "15.1.0" +version = "23.10.4" description = "Symbolic constants in Python" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, - {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, + {file = "constantly-23.10.4-py3-none-any.whl", hash = "sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9"}, + {file = "constantly-23.10.4.tar.gz", hash = "sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd"}, ] [[package]] name = "coverage" -version = "6.5.0" +version = "7.4.0" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = 
"coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = 
"coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = 
"coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.dependencies] @@ -555,13 +535,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -569,31 +549,31 @@ test = ["pytest (>=6)"] [[package]] name = "execnet" -version = "1.9.0" +version = "2.0.2" description = "execnet: rapid multi-Python deployment" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, - {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, + {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, + {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, ] [package.extras] -testing = ["pre-commit"] +testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "executing" -version = "1.2.0" +version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", 
hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, ] [package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "flake8" @@ -624,85 +604,88 @@ files = [ [[package]] name = "frozenlist" -version = "1.3.3" +version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, - {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, - {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, - {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, - {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, - {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, - {file = 
"frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, - {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, - {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, - {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, - {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, - {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = 
"frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] [[package]] @@ -782,13 +765,13 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] [[package]] name = "iniconfig" -version = "1.1.1" 
-description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] @@ -806,13 +789,13 @@ sortedcontainers = ">=2.0,<3.0" [[package]] name = "ipykernel" -version = "6.27.1" +version = "6.28.0" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, - {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, + {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"}, + {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"}, ] [package.dependencies] @@ -826,7 +809,7 @@ matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" psutil = "*" -pyzmq = ">=20" +pyzmq = ">=24" tornado = ">=6.1" traitlets = ">=5.4.0" @@ -898,22 +881,22 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "jedi" -version = "0.18.2" +version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." 
optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, - {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, ] [package.dependencies] -parso = ">=0.8.0,<0.9.0" +parso = ">=0.8.3,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jupyter-client" @@ -939,13 +922,13 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.5.1" +version = "5.7.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.5.1-py3-none-any.whl", hash = "sha256:220dfb00c45f0d780ce132bb7976b58263f81a3ada6e90a9b6823785a424f739"}, - {file = "jupyter_core-5.5.1.tar.gz", hash = "sha256:1553311a97ccd12936037f36b9ab4d6ae8ceea6ad2d5c90d94a909e752178e40"}, + {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, + {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, ] [package.dependencies] @@ -995,121 +978,121 @@ files = [ [[package]] name = "multidict" -version = "6.0.3" +version = "6.0.4" description = "multidict implementation" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73009ea04205966d47e16d98686ac5c438af23a1bb30b48a2c5da3423ec9ce37"}, - {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b92a9f3ab904397a33b193000dc4de7318ea175c4c460a1e154c415f9008e3d"}, - {file = "multidict-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:578bfcb16f4b8675ef71b960c00f174b0426e0eeb796bab6737389d8288eb827"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1650ea41c408755da5eed52ac6ccbc8938ccc3e698d81e6f6a1be02ff2a0945"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d52442e7c951e4c9ee591d6047706e66923d248d83958bbf99b8b19515fffaef"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad7d66422b9cc51125509229693d27e18c08f2dea3ac9de408d821932b1b3759"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cd14e61f0da2a2cfb9fe05bfced2a1ed7063ce46a7a8cd473be4973de9a7f91"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:190626ced82d4cc567a09e7346340d380154a493bac6905e0095d8158cdf1e38"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:791458a1f7d1b4ab3bd9e93e0dcd1d59ef7ee9aa051dcd1ea030e62e49b923fd"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b46e79a9f4db53897d17bc64a39d1c7c2be3e3d4f8dba6d6730a2b13ddf0f986"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e4a095e18847c12ec20e55326ab8782d9c2d599400a3a2f174fab4796875d0e2"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fb6c3dc3d65014d2c782f5acf0b3ba14e639c6c33d3ed8932ead76b9080b3544"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3541882266247c7cd3dba78d6ef28dbe704774df60c9e4231edaa4493522e614"}, - {file = "multidict-6.0.3-cp310-cp310-win32.whl", hash = "sha256:67090b17a0a5be5704fd109f231ee73cefb1b3802d41288d6378b5df46ae89ba"}, - {file = "multidict-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:36df958b15639e40472adaa4f0c2c7828fe680f894a6b48c4ce229f59a6a798b"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b51969503709415a35754954c2763f536a70b8bf7360322b2edb0c0a44391f6"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24e8d513bfcaadc1f8b0ebece3ff50961951c54b07d5a775008a882966102418"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:d325d61cac602976a5d47b19eaa7d04e3daf4efce2164c630219885087234102"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbbe17f8a7211b623502d2bf41022a51da3025142401417c765bf9a56fed4c"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3fe591956d8841882c463f934c9f7485cfd5f763a08c0d467b513dc18ef89"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1925f78a543b94c3d46274c66a366fee8a263747060220ed0188e5f3eeea1c0"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e1ce0b187c4e93112304dcde2aa18922fdbe8fb4f13d8aa72a5657bce0563a"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e07c24018986fb00d6e7eafca8fcd6e05095649e17fcf0e33a592caaa62a78b9"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:114a4ab3e5cfbc56c4b6697686ecb92376c7e8c56893ef20547921552f8bdf57"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ccf55f28066b4f08666764a957c2b7c241c7547b0921d69c7ceab5f74fe1a45"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:9d359b0a962e052b713647ac1f13eabf2263167b149ed1e27d5c579f5c8c7d2c"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df7b4cee3ff31b3335aba602f8d70dbc641e5b7164b1e9565570c9d3c536a438"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ee9b1cae9a6c5d023e5a150f6f6b9dbb3c3bbc7887d6ee07d4c0ecb49a473734"}, - {file = "multidict-6.0.3-cp311-cp311-win32.whl", hash = "sha256:960ce1b790952916e682093788696ef7e33ac6a97482f9b983abdc293091b531"}, - {file = "multidict-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:2b66d61966b12e6bba500e5cbb2c721a35e119c30ee02495c5629bd0e91eea30"}, - {file = "multidict-6.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:526f8397fc124674b8f39748680a0ff673bd6a715fecb4866716d36e380f015f"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d5129a937af4e3c4a1d6c139f4051b7d17d43276cefdd8d442a7031f7eef2"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38d394814b39be1c36ac709006d39d50d72a884f9551acd9c8cc1ffae3fc8c4e"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99341ca1f1db9e7f47914cb2461305665a662383765ced6f843712564766956d"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5790cc603456b6dcf8a9a4765f666895a6afddc88b3d3ba7b53dea2b6e23116"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce8e51774eb03844588d3c279adb94efcd0edeccd2f97516623292445bcc01f9"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:baa96a3418e27d723064854143b2f414a422c84cc87285a71558722049bebc5a"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cb4a08f0aaaa869f189ffea0e17b86ad0237b51116d494da15ef7991ee6ad2d7"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:62db44727d0befea68e8ad2881bb87a9cfb6b87d45dd78609009627167f37b69"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:4cc5c8cd205a9810d16a5cd428cd81bac554ad1477cb87f4ad722b10992e794d"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f76109387e1ec8d8e2137c94c437b89fe002f29e0881aae8ae45529bdff92000"}, - {file = "multidict-6.0.3-cp37-cp37m-win32.whl", hash = "sha256:f8a728511c977df6f3d8af388fcb157e49f11db4a6637dd60131b8b6e40b0253"}, - {file = "multidict-6.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c2a1168e5aa7c72499fb03c850e0f03f624fa4a5c8d2e215c518d0a73872eb64"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eddf604a3de2ace3d9a4e4d491be7562a1ac095a0a1c95a9ec5781ef0273ef11"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d09daf5c6ce7fc6ed444c9339bbde5ea84e2534d1ca1cd37b60f365c77f00dea"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:12e0d396faa6dc55ff5379eee54d1df3b508243ff15bfc8295a6ec7a4483a335"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70740c2bc9ab1c99f7cdcb104f27d16c63860c56d51c5bf0ef82fc1d892a2131"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e322c94596054352f5a02771eec71563c018b15699b961aba14d6dd943367022"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4159fc1ec9ede8ab93382e0d6ba9b1b3d23c72da39a834db7a116986605c7ab4"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47defc0218682281a52fb1f6346ebb8b68b17538163a89ea24dfe4da37a8a9a3"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f9511e48bde6b995825e8d35e434fc96296cf07a25f4aae24ff9162be7eaa46"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bce9f7c30e7e3a9e683f670314c0144e8d34be6b7019e40604763bd278d84f"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:01b456046a05ff7cceefb0e1d2a9d32f05efcb1c7e0d152446304e11557639ce"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8230a39bae6c2e8a09e4da6bace5064693b00590a4a213e38f9a9366da10e7dd"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:445c0851a1cbc1f2ec3b40bc22f9c4a235edb3c9a0906122a9df6ea8d51f886c"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9aac6881454a750554ed4b280a839dcf9e2133a9d12ab4d417d673fb102289b7"}, - {file = "multidict-6.0.3-cp38-cp38-win32.whl", hash = "sha256:81c3d597591b0940e04949e4e4f79359b2d2e542a686ba0da5e25de33fec13e0"}, - {file = "multidict-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:dc4cfef5d899f5f1a15f3d2ac49f71107a01a5a2745b4dd53fa0cede1419385a"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d408172519049e36fb6d29672f060dc8461fc7174eba9883c7026041ef9bfb38"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e068dfeadbce63072b2d8096486713d04db4946aad0a0f849bd4fc300799d0d3"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8b817d4ed68fd568ec5e45dd75ddf30cc72a47a6b41b74d5bb211374c296f5e"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf5d19e12eff855aa198259c0b02fd3f5d07e1291fbd20279c37b3b0e6c9852"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e5a811aab1b4aea0b4be669363c19847a8c547510f0e18fb632956369fdbdf67"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cfda34b7cb99eacada2072e0f69c0ad3285cb6f8e480b11f2b6d6c1c6f92718"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beeca903e4270b4afcd114f371a9602240dc143f9e944edfea00f8d4ad56c40d"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd5771e8ea325f85cbb361ddbdeb9ae424a68e5dfb6eea786afdcd22e68a7d5d"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9dbab2a7e9c073bc9538824a01f5ed689194db7f55f2b8102766873e906a6c1a"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f2c0957b3e8c66c10d27272709a5299ab3670a0f187c9428f3b90d267119aedb"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:94cbe5535ef150546b8321aebea22862a3284da51e7b55f6f95b7d73e96d90ee"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0e798b072cf2aab9daceb43d97c9c527a0c7593e67a7846ad4cc6051de1e303"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a27b029caa3b555a4f3da54bc1e718eb55fcf1a11fda8bf0132147b476cf4c08"}, - {file = "multidict-6.0.3-cp39-cp39-win32.whl", hash = "sha256:018c8e3be7f161a12b3e41741b6721f9baeb2210f4ab25a6359b7d76c1017dce"}, - {file = "multidict-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5e58ec0375803526d395f6f7e730ecc45d06e15f68f7b9cdbf644a2918324e51"}, - {file = "multidict-6.0.3.tar.gz", hash = "sha256:2523a29006c034687eccd3ee70093a697129a3ffe8732535d3b2df6a4ecc279d"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = 
"multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", 
hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, ] [[package]] name = "mypy" -version = "1.5.1" +version = "1.8.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, - {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, - {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, - {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, - {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, - 
{file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, - {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, - {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, - {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, - {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, - {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, - {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, - {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, - {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, - {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, - {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, - {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, - {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, - {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, - {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -1120,6 +1103,7 @@ typing-extensions = ">=4.1.0" 
[package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -1135,17 +1119,17 @@ files = [ [[package]] name = "mypy-zope" -version = "1.0.1" +version = "1.0.3" description = "Plugin for mypy to support zope interfaces" optional = false python-versions = "*" files = [ - {file = "mypy-zope-1.0.1.tar.gz", hash = "sha256:003953896629d762d7f497135171ad549df42a8ac63c1521a230832dd6f7fc25"}, - {file = "mypy_zope-1.0.1-py3-none-any.whl", hash = "sha256:ffa291a7af9f5904ce9f0e56de44323a4476e28aaf0d68361b62b1b0e997d0b8"}, + {file = "mypy-zope-1.0.3.tar.gz", hash = "sha256:149081bd2754d947747baefac569bb1c2bc127b4a2cc1fa505492336946bb3b4"}, + {file = "mypy_zope-1.0.3-py3-none-any.whl", hash = "sha256:7a30ce1a2589173f0be66662c9a9179f75737afc40e4104df4c76fb5a8421c14"}, ] [package.dependencies] -mypy = ">=1.0.0,<1.6.0" +mypy = ">=1.0.0,<1.9.0" "zope.interface" = "*" "zope.schema" = "*" @@ -1165,13 +1149,13 @@ files = [ [[package]] name = "packaging" -version = "22.0" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, - {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -1191,13 +1175,13 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" -version = "0.11.1" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] @@ -1242,13 +1226,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.0.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1271,13 +1255,13 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.36" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, - {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ -1338,28 +1322,28 @@ tests = ["pytest"] [[package]] name = "pyasn1" -version = "0.4.8" -description = "ASN.1 types and codecs" +version = "0.5.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, ] [[package]] name = "pyasn1-modules" -version = "0.2.8" -description = "A collection of ASN.1-based protocols modules." 
+version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, - {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.5.0" +pyasn1 = ">=0.4.6,<0.6.0" [[package]] name = "pycodestyle" @@ -1374,12 +1358,12 @@ files = [ [[package]] name = "pycoin" -version = "0.92.20220529" +version = "0.92.20230326" description = "Utilities for Bitcoin and altcoin addresses and transaction manipulation." optional = false python-versions = "*" files = [ - {file = "pycoin-0.92.20220529.tar.gz", hash = "sha256:3d0396475b5e2d9da7a5057eab72be0e088505e8e44680788106236872c542a2"}, + {file = "pycoin-0.92.20230326.tar.gz", hash = "sha256:0d85f0013447c356b2f6cc0bb903ad07ee4b72805ee13b40296cd0831112c0df"}, ] [[package]] @@ -1458,17 +1442,18 @@ files = [ [[package]] name = "pygments" -version = "2.13.0" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyopenssl" @@ -1564,36 +1549,36 @@ six = ">=1.5" [[package]] name = "python-healthchecklib" -version = "0.1.0" +version = "0.1.1" description = "Opinionated healthcheck library" optional = false python-versions = ">=3.8.1,<4.0.0" files = [ - {file = "python_healthchecklib-0.1.0-py3-none-any.whl", hash = "sha256:95d94fcae7f281adf16624014ae789dfa38d1be327cc38b02ee82bad70671f2f"}, - {file = "python_healthchecklib-0.1.0.tar.gz", hash = "sha256:afa0572d37902c50232d99acf0065836082bb027109c9c98e8d5acfefd381595"}, + {file = "python_healthchecklib-0.1.1-py3-none-any.whl", hash = "sha256:51ad9e7e782145977bf322cbe2095198a8b61473b09d43e79018e47483840d15"}, + {file = "python_healthchecklib-0.1.1.tar.gz", hash = "sha256:bac6cdd9ef5825f6deb0cbe5f6d97260f3f402e111fc7fe2146444bdb77fd892"}, ] [[package]] name = "pywin32" -version = "305" +version = "306" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, - {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, - {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = 
"sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, - {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, - {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, - {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, - {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, - {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, - {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, - {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, - {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, - {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, - {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, - {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] [[package]] @@ -1805,13 +1790,13 @@ resolved_reference = "72edcfbd22f4a3ca816f94096d3ec181da41031e" [[package]] name = "sentry-sdk" -version = "1.11.1" +version = "1.39.1" description = 
"Python client for Sentry (https://sentry.io)" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.11.1.tar.gz", hash = "sha256:675f6279b6bb1fea09fd61751061f9a90dca3b5929ef631dd50dc8b3aeb245e9"}, - {file = "sentry_sdk-1.11.1-py2.py3-none-any.whl", hash = "sha256:8b4ff696c0bdcceb3f70bbb87a57ba84fd3168b1332d493fcd16c137f709578c"}, + {file = "sentry-sdk-1.39.1.tar.gz", hash = "sha256:320a55cdf9da9097a0bead239c35b7e61f53660ef9878861824fd6d9b2eaf3b5"}, + {file = "sentry_sdk-1.39.1-py2.py3-none-any.whl", hash = "sha256:81b5b9ffdd1a374e9eb0c053b5d2012155db9cbe76393a8585677b753bd5fdc1"}, ] [package.dependencies] @@ -1820,15 +1805,23 @@ urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] beam = ["apache-beam (>=2.12)"] bottle = ["bottle (>=0.12.13)"] celery = ["celery (>=3)"] chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] @@ -1837,6 +1830,7 @@ rq = ["rq (>=0.6)"] sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] tornado = ["tornado (>=5)"] [[package]] @@ -1865,83 +1859,99 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setproctitle" -version = "1.3.2" +version = "1.3.3" description = "A Python module to customize the process title" optional = false python-versions = ">=3.7" files = [ - {file = "setproctitle-1.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:288943dec88e178bb2fd868adf491197cc0fc8b6810416b1c6775e686bab87fe"}, - {file = "setproctitle-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:630f6fe5e24a619ccf970c78e084319ee8be5be253ecc9b5b216b0f474f5ef18"}, - {file = "setproctitle-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c877691b90026670e5a70adfbcc735460a9f4c274d35ec5e8a43ce3f8443005"}, - {file = "setproctitle-1.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a55fe05f15c10e8c705038777656fe45e3bd676d49ad9ac8370b75c66dd7cd7"}, - {file = "setproctitle-1.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab45146c71ca6592c9cc8b354a2cc9cc4843c33efcbe1d245d7d37ce9696552d"}, - {file = "setproctitle-1.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00c9d5c541a2713ba0e657e0303bf96ddddc412ef4761676adc35df35d7c246"}, - {file = 
"setproctitle-1.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:265ecbe2c6eafe82e104f994ddd7c811520acdd0647b73f65c24f51374cf9494"}, - {file = "setproctitle-1.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c2c46200656280a064073447ebd363937562debef329482fd7e570c8d498f806"}, - {file = "setproctitle-1.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:fa2f50678f04fda7a75d0fe5dd02bbdd3b13cbe6ed4cf626e4472a7ccf47ae94"}, - {file = "setproctitle-1.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7f2719a398e1a2c01c2a63bf30377a34d0b6ef61946ab9cf4d550733af8f1ef1"}, - {file = "setproctitle-1.3.2-cp310-cp310-win32.whl", hash = "sha256:e425be62524dc0c593985da794ee73eb8a17abb10fe692ee43bb39e201d7a099"}, - {file = "setproctitle-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:e85e50b9c67854f89635a86247412f3ad66b132a4d8534ac017547197c88f27d"}, - {file = "setproctitle-1.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a97d51c17d438cf5be284775a322d57b7ca9505bb7e118c28b1824ecaf8aeaa"}, - {file = "setproctitle-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587c7d6780109fbd8a627758063d08ab0421377c0853780e5c356873cdf0f077"}, - {file = "setproctitle-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d17c8bd073cbf8d141993db45145a70b307385b69171d6b54bcf23e5d644de"}, - {file = "setproctitle-1.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e932089c35a396dc31a5a1fc49889dd559548d14cb2237adae260382a090382e"}, - {file = "setproctitle-1.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e4f8f12258a8739c565292a551c3db62cca4ed4f6b6126664e2381acb4931bf"}, - {file = "setproctitle-1.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:570d255fd99c7f14d8f91363c3ea96bd54f8742275796bca67e1414aeca7d8c3"}, - {file = "setproctitle-1.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a8e0881568c5e6beff91ef73c0ec8ac2a9d3ecc9edd6bd83c31ca34f770910c4"}, - {file = "setproctitle-1.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4bba3be4c1fabf170595b71f3af46c6d482fbe7d9e0563999b49999a31876f77"}, - {file = "setproctitle-1.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:37ece938110cab2bb3957e3910af8152ca15f2b6efdf4f2612e3f6b7e5459b80"}, - {file = "setproctitle-1.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db684d6bbb735a80bcbc3737856385b55d53f8a44ce9b46e9a5682c5133a9bf7"}, - {file = "setproctitle-1.3.2-cp311-cp311-win32.whl", hash = "sha256:ca58cd260ea02759238d994cfae844fc8b1e206c684beb8f38877dcab8451dfc"}, - {file = "setproctitle-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:88486e6cce2a18a033013d17b30a594f1c5cb42520c49c19e6ade40b864bb7ff"}, - {file = "setproctitle-1.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:92c626edc66169a1b09e9541b9c0c9f10488447d8a2b1d87c8f0672e771bc927"}, - {file = "setproctitle-1.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:710e16fa3bade3b026907e4a5e841124983620046166f355bbb84be364bf2a02"}, - {file = "setproctitle-1.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f29b75e86260b0ab59adb12661ef9f113d2f93a59951373eb6d68a852b13e83"}, - {file = "setproctitle-1.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c8d9650154afaa86a44ff195b7b10d683c73509d085339d174e394a22cccbb9"}, - {file = 
"setproctitle-1.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0452282258dfcc01697026a8841258dd2057c4438b43914b611bccbcd048f10"}, - {file = "setproctitle-1.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e49ae693306d7624015f31cb3e82708916759d592c2e5f72a35c8f4cc8aef258"}, - {file = "setproctitle-1.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1ff863a20d1ff6ba2c24e22436a3daa3cd80be1dfb26891aae73f61b54b04aca"}, - {file = "setproctitle-1.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:55ce1e9925ce1765865442ede9dca0ba9bde10593fcd570b1f0fa25d3ec6b31c"}, - {file = "setproctitle-1.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7fe9df7aeb8c64db6c34fc3b13271a363475d77bc157d3f00275a53910cb1989"}, - {file = "setproctitle-1.3.2-cp37-cp37m-win32.whl", hash = "sha256:e5c50e164cd2459bc5137c15288a9ef57160fd5cbf293265ea3c45efe7870865"}, - {file = "setproctitle-1.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a499fff50387c1520c085a07578a000123f519e5f3eee61dd68e1d301659651f"}, - {file = "setproctitle-1.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b932c3041aa924163f4aab970c2f0e6b4d9d773f4d50326e0ea1cd69240e5c5"}, - {file = "setproctitle-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4bfc89bd33ebb8e4c0e9846a09b1f5a4a86f5cb7a317e75cc42fee1131b4f4f"}, - {file = "setproctitle-1.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcd3cf4286a60fdc95451d8d14e0389a6b4f5cebe02c7f2609325eb016535963"}, - {file = "setproctitle-1.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fb4f769c02f63fac90989711a3fee83919f47ae9afd4758ced5d86596318c65"}, - {file = "setproctitle-1.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5194b4969f82ea842a4f6af2f82cd16ebdc3f1771fb2771796e6add9835c1973"}, - {file = "setproctitle-1.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0cde41857a644b7353a0060b5f94f7ba7cf593ebde5a1094da1be581ac9a31"}, - {file = "setproctitle-1.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9124bedd8006b0e04d4e8a71a0945da9b67e7a4ab88fdad7b1440dc5b6122c42"}, - {file = "setproctitle-1.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8a09d570b39517de10ee5b718730e171251ce63bbb890c430c725c8c53d4484"}, - {file = "setproctitle-1.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8ff3c8cb26afaed25e8bca7b9dd0c1e36de71f35a3a0706b5c0d5172587a3827"}, - {file = "setproctitle-1.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:589be87172b238f839e19f146b9ea47c71e413e951ef0dc6db4218ddacf3c202"}, - {file = "setproctitle-1.3.2-cp38-cp38-win32.whl", hash = "sha256:4749a2b0c9ac52f864d13cee94546606f92b981b50e46226f7f830a56a9dc8e1"}, - {file = "setproctitle-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:e43f315c68aa61cbdef522a2272c5a5b9b8fd03c301d3167b5e1343ef50c676c"}, - {file = "setproctitle-1.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:de3a540cd1817ede31f530d20e6a4935bbc1b145fd8f8cf393903b1e02f1ae76"}, - {file = "setproctitle-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4058564195b975ddc3f0462375c533cce310ccdd41b80ac9aed641c296c3eff4"}, - {file = "setproctitle-1.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c5d5dad7c28bdd1ec4187d818e43796f58a845aa892bb4481587010dc4d362b"}, - {file = 
"setproctitle-1.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ffc61a388a5834a97953d6444a2888c24a05f2e333f9ed49f977a87bb1ad4761"}, - {file = "setproctitle-1.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fa1a0fbee72b47dc339c87c890d3c03a72ea65c061ade3204f285582f2da30f"}, - {file = "setproctitle-1.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8a988c7220c002c45347430993830666e55bc350179d91fcee0feafe64e1d4"}, - {file = "setproctitle-1.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bae283e85fc084b18ffeb92e061ff7ac5af9e183c9d1345c93e178c3e5069cbe"}, - {file = "setproctitle-1.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fed18e44711c5af4b681c2b3b18f85e6f0f1b2370a28854c645d636d5305ccd8"}, - {file = "setproctitle-1.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:b34baef93bfb20a8ecb930e395ccd2ae3268050d8cf4fe187de5e2bd806fd796"}, - {file = "setproctitle-1.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f0bed90a216ef28b9d227d8d73e28a8c9b88c0f48a082d13ab3fa83c581488f"}, - {file = "setproctitle-1.3.2-cp39-cp39-win32.whl", hash = "sha256:4d8938249a7cea45ab7e1e48b77685d0f2bab1ebfa9dde23e94ab97968996a7c"}, - {file = "setproctitle-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:a47d97a75fd2d10c37410b180f67a5835cb1d8fdea2648fd7f359d4277f180b9"}, - {file = "setproctitle-1.3.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dad42e676c5261eb50fdb16bdf3e2771cf8f99a79ef69ba88729aeb3472d8575"}, - {file = "setproctitle-1.3.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c91b9bc8985d00239f7dc08a49927a7ca1ca8a6af2c3890feec3ed9665b6f91e"}, - {file = "setproctitle-1.3.2-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8579a43eafd246e285eb3a5b939e7158073d5087aacdd2308f23200eac2458b"}, - {file = "setproctitle-1.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:2fbd8187948284293f43533c150cd69a0e4192c83c377da837dbcd29f6b83084"}, - {file = "setproctitle-1.3.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:faec934cfe5fd6ac1151c02e67156c3f526e82f96b24d550b5d51efa4a5527c6"}, - {file = "setproctitle-1.3.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1aafc91cbdacc9e5fe712c52077369168e6b6c346f3a9d51bf600b53eae56bb"}, - {file = "setproctitle-1.3.2-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b617f12c9be61e8f4b2857be4a4319754756845dbbbd9c3718f468bbb1e17bcb"}, - {file = "setproctitle-1.3.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b2c9cb2705fc84cb8798f1ba74194f4c080aaef19d9dae843591c09b97678e98"}, - {file = "setproctitle-1.3.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a149a5f7f2c5a065d4e63cb0d7a4b6d3b66e6e80f12e3f8827c4f63974cbf122"}, - {file = "setproctitle-1.3.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e3ac25bfc4a0f29d2409650c7532d5ddfdbf29f16f8a256fc31c47d0dc05172"}, - {file = "setproctitle-1.3.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65d884e22037b23fa25b2baf1a3316602ed5c5971eb3e9d771a38c3a69ce6e13"}, - {file = 
"setproctitle-1.3.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7aa0aac1711fadffc1d51e9d00a3bea61f68443d6ac0241a224e4d622489d665"}, - {file = "setproctitle-1.3.2.tar.gz", hash = "sha256:b9fb97907c830d260fa0658ed58afd48a86b2b88aac521135c352ff7fd3477fd"}, + {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:897a73208da48db41e687225f355ce993167079eda1260ba5e13c4e53be7f754"}, + {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c331e91a14ba4076f88c29c777ad6b58639530ed5b24b5564b5ed2fd7a95452"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbbd6c7de0771c84b4aa30e70b409565eb1fc13627a723ca6be774ed6b9d9fa3"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c05ac48ef16ee013b8a326c63e4610e2430dbec037ec5c5b58fcced550382b74"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1342f4fdb37f89d3e3c1c0a59d6ddbedbde838fff5c51178a7982993d238fe4f"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9617b676b95adb412bb69645d5b077d664b6882bb0d37bfdafbbb1b999568d85"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6a249415f5bb88b5e9e8c4db47f609e0bf0e20a75e8d744ea787f3092ba1f2d0"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:38da436a0aaace9add67b999eb6abe4b84397edf4a78ec28f264e5b4c9d53cd5"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:da0d57edd4c95bf221b2ebbaa061e65b1788f1544977288bdf95831b6e44e44d"}, + {file = "setproctitle-1.3.3-cp310-cp310-win32.whl", hash = "sha256:a1fcac43918b836ace25f69b1dca8c9395253ad8152b625064415b1d2f9be4fb"}, + {file = "setproctitle-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:200620c3b15388d7f3f97e0ae26599c0c378fdf07ae9ac5a13616e933cbd2086"}, + {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:334f7ed39895d692f753a443102dd5fed180c571eb6a48b2a5b7f5b3564908c8"}, + {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:950f6476d56ff7817a8fed4ab207727fc5260af83481b2a4b125f32844df513a"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:195c961f54a09eb2acabbfc90c413955cf16c6e2f8caa2adbf2237d1019c7dd8"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f05e66746bf9fe6a3397ec246fe481096664a9c97eb3fea6004735a4daf867fd"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5901a31012a40ec913265b64e48c2a4059278d9f4e6be628441482dd13fb8b5"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64286f8a995f2cd934082b398fc63fca7d5ffe31f0e27e75b3ca6b4efda4e353"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:184239903bbc6b813b1a8fc86394dc6ca7d20e2ebe6f69f716bec301e4b0199d"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:664698ae0013f986118064b6676d7dcd28fefd0d7d5a5ae9497cbc10cba48fa5"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e5119a211c2e98ff18b9908ba62a3bd0e3fabb02a29277a7232a6fb4b2560aa0"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:417de6b2e214e837827067048f61841f5d7fc27926f2e43954567094051aff18"}, + {file = "setproctitle-1.3.3-cp311-cp311-win32.whl", hash = "sha256:6a143b31d758296dc2f440175f6c8e0b5301ced3b0f477b84ca43cdcf7f2f476"}, + {file = "setproctitle-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a680d62c399fa4b44899094027ec9a1bdaf6f31c650e44183b50d4c4d0ccc085"}, + {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d4460795a8a7a391e3567b902ec5bdf6c60a47d791c3b1d27080fc203d11c9dc"}, + {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bdfd7254745bb737ca1384dee57e6523651892f0ea2a7344490e9caefcc35e64"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477d3da48e216d7fc04bddab67b0dcde633e19f484a146fd2a34bb0e9dbb4a1e"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ab2900d111e93aff5df9fddc64cf51ca4ef2c9f98702ce26524f1acc5a786ae7"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088b9efc62d5aa5d6edf6cba1cf0c81f4488b5ce1c0342a8b67ae39d64001120"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6d50252377db62d6a0bb82cc898089916457f2db2041e1d03ce7fadd4a07381"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:87e668f9561fd3a457ba189edfc9e37709261287b52293c115ae3487a24b92f6"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:287490eb90e7a0ddd22e74c89a92cc922389daa95babc833c08cf80c84c4df0a"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe1c49486109f72d502f8be569972e27f385fe632bd8895f4730df3c87d5ac8"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4a6ba2494a6449b1f477bd3e67935c2b7b0274f2f6dcd0f7c6aceae10c6c6ba3"}, + {file = "setproctitle-1.3.3-cp312-cp312-win32.whl", hash = "sha256:2df2b67e4b1d7498632e18c56722851ba4db5d6a0c91aaf0fd395111e51cdcf4"}, + {file = "setproctitle-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:f38d48abc121263f3b62943f84cbaede05749047e428409c2c199664feb6abc7"}, + {file = "setproctitle-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:816330675e3504ae4d9a2185c46b573105d2310c20b19ea2b4596a9460a4f674"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68f960bc22d8d8e4ac886d1e2e21ccbd283adcf3c43136161c1ba0fa509088e0"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e6e7adff74796ef12753ff399491b8827f84f6c77659d71bd0b35870a17d8f"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53bc0d2358507596c22b02db079618451f3bd720755d88e3cccd840bafb4c41c"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6d20f9541f5f6ac63df553b6d7a04f313947f550eab6a61aa758b45f0d5657"}, + {file = 
"setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c1c84beab776b0becaa368254801e57692ed749d935469ac10e2b9b825dbdd8e"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:507e8dc2891021350eaea40a44ddd887c9f006e6b599af8d64a505c0f718f170"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b1067647ac7aba0b44b591936118a22847bda3c507b0a42d74272256a7a798e9"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2e71f6365744bf53714e8bd2522b3c9c1d83f52ffa6324bd7cbb4da707312cd8"}, + {file = "setproctitle-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:7f1d36a1e15a46e8ede4e953abb104fdbc0845a266ec0e99cc0492a4364f8c44"}, + {file = "setproctitle-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9a402881ec269d0cc9c354b149fc29f9ec1a1939a777f1c858cdb09c7a261df"}, + {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ff814dea1e5c492a4980e3e7d094286077054e7ea116cbeda138819db194b2cd"}, + {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:accb66d7b3ccb00d5cd11d8c6e07055a4568a24c95cf86109894dcc0c134cc89"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554eae5a5b28f02705b83a230e9d163d645c9a08914c0ad921df363a07cf39b1"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a911b26264dbe9e8066c7531c0591cfab27b464459c74385b276fe487ca91c12"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2982efe7640c4835f7355fdb4da313ad37fb3b40f5c69069912f8048f77b28c8"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df3f4274b80709d8bcab2f9a862973d453b308b97a0b423a501bcd93582852e3"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:af2c67ae4c795d1674a8d3ac1988676fa306bcfa1e23fddb5e0bd5f5635309ca"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af4061f67fd7ec01624c5e3c21f6b7af2ef0e6bab7fbb43f209e6506c9ce0092"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:37a62cbe16d4c6294e84670b59cf7adcc73faafe6af07f8cb9adaf1f0e775b19"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a83ca086fbb017f0d87f240a8f9bbcf0809f3b754ee01cec928fff926542c450"}, + {file = "setproctitle-1.3.3-cp38-cp38-win32.whl", hash = "sha256:059f4ce86f8cc92e5860abfc43a1dceb21137b26a02373618d88f6b4b86ba9b2"}, + {file = "setproctitle-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ab92e51cd4a218208efee4c6d37db7368fdf182f6e7ff148fb295ecddf264287"}, + {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c7951820b77abe03d88b114b998867c0f99da03859e5ab2623d94690848d3e45"}, + {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc94cf128676e8fac6503b37763adb378e2b6be1249d207630f83fc325d9b11"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d9027eeda64d353cf21a3ceb74bb1760bd534526c9214e19f052424b37e42"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e4a8104db15d3462e29d9946f26bed817a5b1d7a47eabca2d9dc2b995991503"}, + {file = 
"setproctitle-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c32c41ace41f344d317399efff4cffb133e709cec2ef09c99e7a13e9f3b9483c"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf16381c7bf7f963b58fb4daaa65684e10966ee14d26f5cc90f07049bfd8c1e"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e18b7bd0898398cc97ce2dfc83bb192a13a087ef6b2d5a8a36460311cb09e775"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69d565d20efe527bd8a9b92e7f299ae5e73b6c0470f3719bd66f3cd821e0d5bd"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ddedd300cd690a3b06e7eac90ed4452348b1348635777ce23d460d913b5b63c3"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:415bfcfd01d1fbf5cbd75004599ef167a533395955305f42220a585f64036081"}, + {file = "setproctitle-1.3.3-cp39-cp39-win32.whl", hash = "sha256:21112fcd2195d48f25760f0eafa7a76510871bbb3b750219310cf88b04456ae3"}, + {file = "setproctitle-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:5a740f05d0968a5a17da3d676ce6afefebeeeb5ce137510901bf6306ba8ee002"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6b9e62ddb3db4b5205c0321dd69a406d8af9ee1693529d144e86bd43bcb4b6c0"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e3b99b338598de0bd6b2643bf8c343cf5ff70db3627af3ca427a5e1a1a90dd9"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ae9a02766dad331deb06855fb7a6ca15daea333b3967e214de12cfae8f0ef5"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:200ede6fd11233085ba9b764eb055a2a191fb4ffb950c68675ac53c874c22e20"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0d3a953c50776751e80fe755a380a64cb14d61e8762bd43041ab3f8cc436092f"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e08e232b78ba3ac6bc0d23ce9e2bee8fad2be391b7e2da834fc9a45129eb87"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1da82c3e11284da4fcbf54957dafbf0655d2389cd3d54e4eaba636faf6d117a"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:aeaa71fb9568ebe9b911ddb490c644fbd2006e8c940f21cb9a1e9425bd709574"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:59335d000c6250c35989394661eb6287187854e94ac79ea22315469ee4f4c244"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3ba57029c9c50ecaf0c92bb127224cc2ea9fda057b5d99d3f348c9ec2855ad3"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d876d355c53d975c2ef9c4f2487c8f83dad6aeaaee1b6571453cb0ee992f55f6"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:224602f0939e6fb9d5dd881be1229d485f3257b540f8a900d4271a2c2aa4e5f4"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:d7f27e0268af2d7503386e0e6be87fb9b6657afd96f5726b733837121146750d"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5e7266498cd31a4572378c61920af9f6b4676a73c299fce8ba93afd694f8ae7"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33c5609ad51cd99d388e55651b19148ea99727516132fb44680e1f28dd0d1de9"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:eae8988e78192fd1a3245a6f4f382390b61bce6cfcc93f3809726e4c885fa68d"}, + {file = "setproctitle-1.3.3.tar.gz", hash = "sha256:c913e151e7ea01567837ff037a23ca8740192880198b7fbb90b16d181607caae"}, ] [package.extras] @@ -1949,19 +1959,19 @@ test = ["pytest"] [[package]] name = "setuptools" -version = "65.6.3" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ 
-1987,13 +1997,13 @@ files = [ [[package]] name = "stack-data" -version = "0.6.2" +version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = false python-versions = "*" files = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, ] [package.dependencies] @@ -2068,18 +2078,18 @@ files = [ [[package]] name = "traitlets" -version = "5.6.0" +version = "5.14.0" description = "Traitlets Python configuration system" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "traitlets-5.6.0-py3-none-any.whl", hash = "sha256:1410755385d778aed847d68deb99b3ba30fbbf489e17a1e8cbb753060d5cce73"}, - {file = "traitlets-5.6.0.tar.gz", hash = "sha256:10b6ed1c9cedee83e795db70a8b9c2db157bb3778ec4587a349ecb7ef3b1033b"}, + {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, + {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "twisted" @@ -2121,34 +2131,41 @@ windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0. [[package]] name = "twisted-iocpsupport" -version = "1.0.2" +version = "1.0.4" description = "An extension for use in the twisted I/O Completion Ports reactor." 
optional = false python-versions = "*" files = [ - {file = "twisted-iocpsupport-1.0.2.tar.gz", hash = "sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9"}, - {file = "twisted_iocpsupport-1.0.2-cp310-cp310-win32.whl", hash = "sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4"}, - {file = "twisted_iocpsupport-1.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323"}, - {file = "twisted_iocpsupport-1.0.2-cp36-cp36m-win32.whl", hash = "sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f"}, - {file = "twisted_iocpsupport-1.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565"}, - {file = "twisted_iocpsupport-1.0.2-cp37-cp37m-win32.whl", hash = "sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878"}, - {file = "twisted_iocpsupport-1.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32"}, - {file = "twisted_iocpsupport-1.0.2-cp38-cp38-win32.whl", hash = "sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415"}, - {file = "twisted_iocpsupport-1.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41"}, - {file = "twisted_iocpsupport-1.0.2-cp39-cp39-win32.whl", hash = "sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d"}, - {file = "twisted_iocpsupport-1.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546"}, - {file = "twisted_iocpsupport-1.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf"}, + {file = "twisted-iocpsupport-1.0.4.tar.gz", hash = "sha256:858096c0d15e33f15ac157f455d8f86f2f2cdd223963e58c0f682a3af8362d89"}, + {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win32.whl", hash = "sha256:afa2b630797f9ed2f27f3d9f55e3f72b4244911e45a8c82756f44babbf0b243e"}, + {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:0058c963c8957bcd3deda62122e89953c9de1e867a274facc9b15dde1a9f31e8"}, + {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win32.whl", hash = "sha256:196f7c7ccad4ba4d1783b1c4e1d1b22d93c04275cd780bf7498d16c77319ad6e"}, + {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:4e5f97bcbabdd79cbaa969b63439b89801ea560f11d42b0a387634275c633623"}, + {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win32.whl", hash = "sha256:6081bd7c2f4fcf9b383dcdb3b3385d75a26a7c9d2be25b6950c3d8ea652d2d2d"}, + {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:76f7e67cec1f1d097d1f4ed7de41be3d74546e1a4ede0c7d56e775c4dce5dfb0"}, + {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win32.whl", hash = "sha256:3d306fc4d88a6bcf61ce9d572c738b918578121bfd72891625fab314549024b5"}, + {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:391ac4d6002a80e15f35adc4ad6056f4fe1c17ceb0d1f98ba01b0f4f917adfd7"}, + {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:0c1b5cf37f0b2d96cc3c9bc86fff16613b9f5d0ca565c96cf1f1fb8cfca4b81c"}, + {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:3c5dc11d72519e55f727320e3cee535feedfaee09c0f0765ed1ca7badff1ab3c"}, + {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win32.whl", hash = "sha256:cc86c2ef598c15d824a243c2541c29459881c67fc3c0adb6efe2242f8f0ec3af"}, + {file = 
"twisted_iocpsupport-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c27985e949b9b1a1fb4c20c71d315c10ea0f93fdf3ccdd4a8c158b5926edd8c8"}, + {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win32.whl", hash = "sha256:e311dfcb470696e3c077249615893cada598e62fa7c4e4ca090167bd2b7d331f"}, + {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4574eef1f3bb81501fb02f911298af3c02fe8179c31a33b361dd49180c3e644d"}, + {file = "twisted_iocpsupport-1.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:872747a3b64e2909aee59c803ccd0bceb9b75bf27915520ebd32d69687040fa2"}, + {file = "twisted_iocpsupport-1.0.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:c2712b778bacf1db434e3e065adfed3db300754186a29aecac1efae9ef4bcaff"}, + {file = "twisted_iocpsupport-1.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7c66fa0aa4236b27b3c61cb488662d85dae746a6d1c7b0d91cf7aae118445adf"}, + {file = "twisted_iocpsupport-1.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:300437af17396a945a58dcfffd77863303a8b6d9e65c6e81f1d2eed55b50d444"}, ] [[package]] name = "txaio" -version = "22.2.1" +version = "23.1.1" description = "Compatibility API between asyncio/Twisted/Trollius" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "txaio-22.2.1-py2.py3-none-any.whl", hash = "sha256:41223af4a9d5726e645a8ee82480f413e5e300dd257db94bc38ae12ea48fb2e5"}, - {file = "txaio-22.2.1.tar.gz", hash = "sha256:2e4582b70f04b2345908254684a984206c0d9b50e3074a24a4c55aba21d24d01"}, + {file = "txaio-23.1.1-py2.py3-none-any.whl", hash = "sha256:aaea42f8aad50e0ecfb976130ada140797e9dcb85fad2cf72b0f37f8cefcb490"}, + {file = "txaio-23.1.1.tar.gz", hash = "sha256:f9a9216e976e5e3246dfd112ad7ad55ca915606b60b84a757ac769bd404ff704"}, ] [package.extras] @@ -2208,13 +2225,13 @@ types-urllib3 = "<1.27" [[package]] name = "types-urllib3" -version = "1.26.25.4" +version = "1.26.25.14" description = "Typing stubs for urllib3" optional = false python-versions = "*" files = [ - {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, - {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, + {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, + {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, ] [[package]] @@ -2230,29 +2247,29 @@ files = [ [[package]] name = "urllib3" -version = "1.26.13" +version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "wcwidth" -version = "0.2.5" +version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, + {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, + {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, ] [[package]] @@ -2275,85 +2292,101 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] [[package]] name = "yarl" -version = "1.8.2" +version = "1.9.4" description = "Yet another URL library" optional = false python-versions = ">=3.7" files = [ - {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, - {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, - {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, - {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, - {file = 
"yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, - {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, - {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, - {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, - {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, - {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, - {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, - {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, - {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, - {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, - {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, - {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, - {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, - {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, - {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, - {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, - {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, - {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, - {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, - {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, - {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, - {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, - {file = 
"yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, - {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, - {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, - {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, - {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, - {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, - {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, - {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, - {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = 
"yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file 
= "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = 
"yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, ] [package.dependencies] @@ -2362,13 +2395,13 @@ multidict = ">=4.0" [[package]] name = "zope-event" -version = "4.5.0" +version = "5.0" description = "Very basic event publishing system" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "zope.event-4.5.0-py2.py3-none-any.whl", hash = "sha256:2666401939cdaa5f4e0c08cf7f20c9b21423b95e88f4675b1443973bdb080c42"}, - {file = "zope.event-4.5.0.tar.gz", hash = "sha256:5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330"}, + {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, + {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, ] [package.dependencies] @@ -2380,66 +2413,66 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "5.5.2" +version = "6.1" description = "Interfaces for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "zope.interface-5.5.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:a2ad597c8c9e038a5912ac3cf166f82926feff2f6e0dabdab956768de0a258f5"}, - {file = "zope.interface-5.5.2-cp27-cp27m-win_amd64.whl", hash = "sha256:65c3c06afee96c654e590e046c4a24559e65b0a87dbff256cd4bd6f77e1a33f9"}, - {file = "zope.interface-5.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d514c269d1f9f5cd05ddfed15298d6c418129f3f064765295659798349c43e6f"}, - {file = "zope.interface-5.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:5334e2ef60d3d9439c08baedaf8b84dc9bb9522d0dacbc10572ef5609ef8db6d"}, - {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc26c8d44472e035d59d6f1177eb712888447f5799743da9c398b0339ed90b1b"}, - {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:17ebf6e0b1d07ed009738016abf0d0a0f80388e009d0ac6e0ead26fc162b3b9c"}, - {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f98d4bd7bbb15ca701d19b93263cc5edfd480c3475d163f137385f49e5b3a3a7"}, - {file = "zope.interface-5.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:696f3d5493eae7359887da55c2afa05acc3db5fc625c49529e84bd9992313296"}, - {file = "zope.interface-5.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7579960be23d1fddecb53898035a0d112ac858c3554018ce615cefc03024e46d"}, - {file = "zope.interface-5.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:765d703096ca47aa5d93044bf701b00bbce4d903a95b41fff7c3796e747b1f1d"}, - {file = "zope.interface-5.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e945de62917acbf853ab968d8916290548df18dd62c739d862f359ecd25842a6"}, - {file = "zope.interface-5.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:655796a906fa3ca67273011c9805c1e1baa047781fca80feeb710328cdbed87f"}, - {file = "zope.interface-5.5.2-cp35-cp35m-win_amd64.whl", hash = "sha256:0fb497c6b088818e3395e302e426850f8236d8d9f4ef5b2836feae812a8f699c"}, - {file = "zope.interface-5.5.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:008b0b65c05993bb08912f644d140530e775cf1c62a072bf9340c2249e613c32"}, - {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:404d1e284eda9e233c90128697c71acffd55e183d70628aa0bbb0e7a3084ed8b"}, - {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3218ab1a7748327e08ef83cca63eea7cf20ea7e2ebcb2522072896e5e2fceedf"}, - {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d169ccd0756c15bbb2f1acc012f5aab279dffc334d733ca0d9362c5beaebe88e"}, - {file = "zope.interface-5.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:e1574980b48c8c74f83578d1e77e701f8439a5d93f36a5a0af31337467c08fcf"}, - {file = "zope.interface-5.5.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0217a9615531c83aeedb12e126611b1b1a3175013bbafe57c702ce40000eb9a0"}, - {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:311196634bb9333aa06f00fc94f59d3a9fddd2305c2c425d86e406ddc6f2260d"}, - {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6373d7eb813a143cb7795d3e42bd8ed857c82a90571567e681e1b3841a390d16"}, - {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:959697ef2757406bff71467a09d940ca364e724c534efbf3786e86eee8591452"}, - {file = "zope.interface-5.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dbaeb9cf0ea0b3bc4b36fae54a016933d64c6d52a94810a63c00f440ecb37dd7"}, - {file = "zope.interface-5.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604cdba8f1983d0ab78edc29aa71c8df0ada06fb147cea436dc37093a0100a4e"}, - {file = 
"zope.interface-5.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e74a578172525c20d7223eac5f8ad187f10940dac06e40113d62f14f3adb1e8f"}, - {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0980d44b8aded808bec5059018d64692f0127f10510eca71f2f0ace8fb11188"}, - {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6e972493cdfe4ad0411fd9abfab7d4d800a7317a93928217f1a5de2bb0f0d87a"}, - {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9d783213fab61832dbb10d385a319cb0e45451088abd45f95b5bb88ed0acca1a"}, - {file = "zope.interface-5.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:a16025df73d24795a0bde05504911d306307c24a64187752685ff6ea23897cb0"}, - {file = "zope.interface-5.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40f4065745e2c2fa0dff0e7ccd7c166a8ac9748974f960cd39f63d2c19f9231f"}, - {file = "zope.interface-5.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a2ffadefd0e7206adc86e492ccc60395f7edb5680adedf17a7ee4205c530df4"}, - {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d692374b578360d36568dd05efb8a5a67ab6d1878c29c582e37ddba80e66c396"}, - {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4087e253bd3bbbc3e615ecd0b6dd03c4e6a1e46d152d3be6d2ad08fbad742dcc"}, - {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb68d212efd057596dee9e6582daded9f8ef776538afdf5feceb3059df2d2e7b"}, - {file = "zope.interface-5.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:7e66f60b0067a10dd289b29dceabd3d0e6d68be1504fc9d0bc209cf07f56d189"}, - {file = "zope.interface-5.5.2.tar.gz", hash = "sha256:bfee1f3ff62143819499e348f5b8a7f3aa0259f9aca5e0ddae7391d059dce671"}, + {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"}, + {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9bc671626281f6045ad61d93a60f52fd5e8209b1610972cf0ef1bbe6d808e3"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe81def9cf3e46f16ce01d9bfd8bea595e06505e51b7baf45115c77352675fd"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc998f6de015723196a904045e5a2217f3590b62ea31990672e31fbc5370b41"}, + {file = "zope.interface-6.1-cp310-cp310-win_amd64.whl", hash = "sha256:239a4a08525c080ff833560171d23b249f7f4d17fcbf9316ef4159f44997616f"}, + {file = "zope.interface-6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ffdaa5290422ac0f1688cb8adb1b94ca56cee3ad11f29f2ae301df8aecba7d1"}, + {file = "zope.interface-6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c15ca9248f2e095ef2e93af2d633358c5f048c49fbfddf5fdfc47d5e263736"}, + {file = "zope.interface-6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b012d023b4fb59183909b45d7f97fb493ef7a46d2838a5e716e3155081894605"}, + {file = 
"zope.interface-6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97806e9ca3651588c1baaebb8d0c5ee3db95430b612db354c199b57378312ee8"}, + {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddbab55a2473f1d3b8833ec6b7ac31e8211b0aa608df5ab09ce07f3727326de"}, + {file = "zope.interface-6.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0da79117952a9a41253696ed3e8b560a425197d4e41634a23b1507efe3273f1"}, + {file = "zope.interface-6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8bb9c990ca9027b4214fa543fd4025818dc95f8b7abce79d61dc8a2112b561a"}, + {file = "zope.interface-6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51b64432eed4c0744241e9ce5c70dcfecac866dff720e746d0a9c82f371dfa7"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa6fd016e9644406d0a61313e50348c706e911dca29736a3266fc9e28ec4ca6d"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c8cf55261e15590065039696607f6c9c1aeda700ceee40c70478552d323b3ff"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30506bcb03de8983f78884807e4fd95d8db6e65b69257eea05d13d519b83ac0"}, + {file = "zope.interface-6.1-cp312-cp312-win_amd64.whl", hash = "sha256:e33e86fd65f369f10608b08729c8f1c92ec7e0e485964670b4d2633a4812d36b"}, + {file = "zope.interface-6.1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:2f8d89721834524a813f37fa174bac074ec3d179858e4ad1b7efd4401f8ac45d"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13b7d0f2a67eb83c385880489dbb80145e9d344427b4262c49fbf2581677c11c"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef43ee91c193f827e49599e824385ec7c7f3cd152d74cb1dfe02cb135f264d83"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e441e8b7d587af0414d25e8d05e27040d78581388eed4c54c30c0c91aad3a379"}, + {file = "zope.interface-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89b28772fc2562ed9ad871c865f5320ef761a7fcc188a935e21fe8b31a38ca9"}, + {file = "zope.interface-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70d2cef1bf529bff41559be2de9d44d47b002f65e17f43c73ddefc92f32bf00f"}, + {file = "zope.interface-6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad54ed57bdfa3254d23ae04a4b1ce405954969c1b0550cc2d1d2990e8b439de1"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef467d86d3cfde8b39ea1b35090208b0447caaabd38405420830f7fd85fbdd56"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af47f10cfc54c2ba2d825220f180cc1e2d4914d783d6fc0cd93d43d7bc1c78b"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9559138690e1bd4ea6cd0954d22d1e9251e8025ce9ede5d0af0ceae4a401e43"}, + {file = "zope.interface-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:964a7af27379ff4357dad1256d9f215047e70e93009e532d36dcb8909036033d"}, + {file = "zope.interface-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash 
= "sha256:387545206c56b0315fbadb0431d5129c797f92dc59e276b3ce82db07ac1c6179"}, + {file = "zope.interface-6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57d0a8ce40ce440f96a2c77824ee94bf0d0925e6089df7366c2272ccefcb7941"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ebc4d34e7620c4f0da7bf162c81978fce0ea820e4fa1e8fc40ee763839805f3"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a804abc126b33824a44a7aa94f06cd211a18bbf31898ba04bd0924fbe9d282d"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f294a15f7723fc0d3b40701ca9b446133ec713eafc1cc6afa7b3d98666ee1ac"}, + {file = "zope.interface-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a41f87bb93b8048fe866fa9e3d0c51e27fe55149035dcf5f43da4b56732c0a40"}, + {file = "zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"}, ] [package.dependencies] setuptools = "*" [package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface"] +docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [[package]] name = "zope-schema" -version = "6.2.1" +version = "7.0.1" description = "zope.interface extension for defining data schemas" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "zope.schema-6.2.1-py2.py3-none-any.whl", hash = "sha256:843c6fce13886333f707246f135a832f4408555ca9650f07ed6e4d429302f349"}, - {file = "zope.schema-6.2.1.tar.gz", hash = "sha256:e3b33c8bc8ba9d85a56713ab8f0a3c2615d54a8085f1e415ce0c8dfb5e540be6"}, + {file = "zope.schema-7.0.1-py3-none-any.whl", hash = "sha256:cf006c678793b00e0075ad54d55281c8785ea21e5bc1f5ec0584787719c2aab2"}, + {file = "zope.schema-7.0.1.tar.gz", hash = "sha256:ead4dbcb03354d4e410c9a3b904451eb44d90254751b1cbdedf4a61aede9fbb9"}, ] [package.dependencies] @@ -2457,4 +2490,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "608fb10f02f72ce1ad8e3d20b85ee32c725a867aadd5a5e27ea4a2faf3a06848" +content-hash = "5763f80ceef15f118b402852436bf43cdf70f1a7f95d2a5b59e40b8c3c1e24db" diff --git a/pyproject.toml b/pyproject.toml index 56bee4d19..c6e87ce42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ repository = "https://github.com/HathorNetwork/hathor-core/" classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "License :: OSI Approved :: Apache Software License", "Private :: Do Not Upload", @@ -62,15 +63,15 @@ ipython = {version = "~8.7.0", extras = ["kernel"]} mnemonic = "~0.20" prometheus_client = "~0.15.0" pyopenssl = "=22.1.0" -pycoin = "~0.92" -pywin32 = {version = "305", markers = "sys_platform == 'win32'"} +pycoin = "~0.92.20230326" +pywin32 = {version = "306", markers = "sys_platform == 'win32'"} requests = "=2.28.1" service_identity = "~21.1.0" pexpect = "~4.8.0" intervaltree = "~3.1.0" structlog = "~22.3.0" rocksdb = {git = "https://github.com/hathornetwork/python-rocksdb.git", markers = "sys_platform != 'win32'"} -aiohttp = "~3.8.3" +aiohttp = "~3.9.0" idna = "~3.4" setproctitle = 
"^1.2.2" sentry-sdk = {version = "^1.5.11", optional = true} From 1b2b955365b788ca9107f10440a691787f7f0614 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 21 Dec 2023 20:35:07 -0300 Subject: [PATCH 08/38] feat(feature-activation): decrease mainnet evaluation interval to 1 week --- hathor/conf/testnet.py | 1 + hathor/conf/testnet.yml | 1 + hathor/feature_activation/settings.py | 6 +++--- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index 792d95ce5..91aa854c1 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -55,6 +55,7 @@ cp(1_600_000, bytes.fromhex('00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2')), ], FEATURE_ACTIVATION=FeatureActivationSettings( + evaluation_interval=40_320, enable_usage=True, default_threshold=30240, features={ diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index ca799c299..b8b58c06f 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -37,6 +37,7 @@ CHECKPOINTS: 1_600_000: 00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2 FEATURE_ACTIVATION: + evaluation_interval: 40_320 enable_usage: true default_threshold: 30_240 # 30240 = 75% of evaluation_interval (40320) features: diff --git a/hathor/feature_activation/settings.py b/hathor/feature_activation/settings.py index da4849ef6..d6b713068 100644 --- a/hathor/feature_activation/settings.py +++ b/hathor/feature_activation/settings.py @@ -26,15 +26,15 @@ class Settings(BaseModel, validate_all=True): """Feature Activation settings.""" # The number of blocks in the feature activation evaluation interval. - # Equivalent to 14 days (40320 * 30 seconds = 14 days) - evaluation_interval: PositiveInt = 40320 + # Equivalent to 1 week (20160 * 30 seconds = 1 week) + evaluation_interval: PositiveInt = 20_160 # The number of bits used in the first byte of a block's version field. The 4 left-most bits are not used. max_signal_bits: int = Field(ge=1, le=8, default=4) # Specifies the default minimum number of blocks per evaluation interval required to activate a feature. # Usually calculated from a percentage of evaluation_interval. - default_threshold: NonNegativeInt = 36288 # 36288 = 90% of evaluation_interval (40320) + default_threshold: NonNegativeInt = 18_144 # 18144 = 90% of evaluation_interval (20160) # Dictionary of Feature enum to Criteria definition for all features that participate in the feature activation # process for a network, past or future, activated or not. Features should NOT be removed from this list, and From 9cf7d7955293216a61128392963a9a7d8b6ba441 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Tue, 16 Jan 2024 01:41:50 +0100 Subject: [PATCH 09/38] tests: fix CI regression on Windows + Python 3.10 --- tests/utils_modules/test_yaml.py | 28 +++++++++++++++++++++++----- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/tests/utils_modules/test_yaml.py b/tests/utils_modules/test_yaml.py index 7ef925b1d..be7a88e39 100644 --- a/tests/utils_modules/test_yaml.py +++ b/tests/utils_modules/test_yaml.py @@ -12,12 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import sys +import unittest from pathlib import Path import pytest +from structlog import get_logger from hathor.utils.yaml import dict_from_extended_yaml, dict_from_yaml +logger = get_logger() + def _get_absolute_filepath(filepath: str) -> Path: parent_dir = Path(__file__).parent @@ -101,13 +106,26 @@ def test_dict_from_extended_yaml_invalid_extends(): assert "unknown_file.yml' is not a file" in str(e.value) -def test_dict_from_extended_yaml_recursive_extends(): - filepath = _get_absolute_filepath('fixtures/self_extends.yml') +class TestWithChangesToSys(unittest.TestCase): + recursion_limit = 100 - with pytest.raises(ValueError) as e: - dict_from_extended_yaml(filepath=filepath) + def setUp(self): + self.log = logger.new() + self._old_recursion_limit = sys.getrecursionlimit() + self.log.debug('temporarily reduce recursion limit', old=self._old_recursion_limit, new=self.recursion_limit) + sys.setrecursionlimit(self.recursion_limit) + + def tearDown(self): + self.log.debug('revert recursion limit', to=self._old_recursion_limit) + sys.setrecursionlimit(self._old_recursion_limit) + + def test_dict_from_extended_yaml_recursive_extends(self): + filepath = _get_absolute_filepath('fixtures/self_extends.yml') + + with pytest.raises(ValueError) as e: + dict_from_extended_yaml(filepath=filepath) - assert str(e.value) == 'Cannot parse yaml with recursive extensions.' + assert str(e.value) == 'Cannot parse yaml with recursive extensions.' def test_dict_from_extended_yaml_valid_extends(): From 7e8ec09b5377b7d7503a48ae8ef5e08dbbbe0e3b Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 16 Jan 2024 19:39:05 -0300 Subject: [PATCH 10/38] refactor(settings): rename get_settings to get_global_settings (#919) --- hathor/builder/builder.py | 4 ++-- hathor/builder/cli_builder.py | 4 ++-- hathor/builder/resources_builder.py | 4 ++-- hathor/cli/db_export.py | 8 ++++---- hathor/cli/events_simulator/scenario.py | 8 ++++---- hathor/cli/mining.py | 4 ++-- hathor/cli/nginx_config.py | 4 ++-- hathor/cli/run_node.py | 16 ++++++++-------- hathor/conf/get_settings.py | 2 +- hathor/consensus/block_consensus.py | 4 ++-- hathor/consensus/consensus.py | 4 ++-- hathor/consensus/transaction_consensus.py | 4 ++-- hathor/crypto/util.py | 8 ++++---- hathor/graphviz.py | 4 ++-- hathor/indexes/base_index.py | 4 ++-- hathor/indexes/rocksdb_tokens_index.py | 4 ++-- hathor/indexes/rocksdb_utils.py | 6 +++--- hathor/indexes/utxo_index.py | 6 +++--- hathor/p2p/peer_id.py | 4 ++-- hathor/p2p/protocol.py | 4 ++-- hathor/p2p/resources/mining_info.py | 4 ++-- hathor/p2p/resources/status.py | 4 ++-- hathor/p2p/states/hello.py | 6 +++--- hathor/p2p/states/ready.py | 4 ++-- hathor/p2p/sync_v1/agent.py | 4 ++-- hathor/p2p/sync_v1/downloader.py | 6 +++--- hathor/p2p/sync_v2/agent.py | 4 ++-- hathor/p2p/utils.py | 6 +++--- hathor/prometheus.py | 4 ++-- hathor/simulator/miner/geometric_miner.py | 4 ++-- hathor/simulator/simulator.py | 4 ++-- hathor/simulator/tx_generator.py | 4 ++-- hathor/stratum/stratum.py | 4 ++-- hathor/transaction/base_transaction.py | 4 ++-- hathor/transaction/resources/dashboard.py | 4 ++-- hathor/transaction/resources/mempool.py | 4 ++-- hathor/transaction/resources/push_tx.py | 4 ++-- hathor/transaction/resources/transaction.py | 6 +++--- hathor/transaction/resources/utxo_search.py | 4 ++-- hathor/transaction/scripts/construct.py | 8 ++++---- hathor/transaction/scripts/opcode.py | 4 ++-- .../migrations/remove_first_nop_features.py | 4 ++-- .../transaction/storage/transaction_storage.py | 4 ++-- 
hathor/transaction/transaction_metadata.py | 4 ++-- hathor/transaction/util.py | 6 +++--- hathor/util.py | 6 +++--- hathor/version_resource.py | 4 ++-- hathor/wallet/resources/balance.py | 4 ++-- .../resources/thin_wallet/address_balance.py | 4 ++-- .../resources/thin_wallet/address_history.py | 4 ++-- .../resources/thin_wallet/address_search.py | 4 ++-- .../wallet/resources/thin_wallet/send_tokens.py | 4 ++-- .../resources/thin_wallet/token_history.py | 4 ++-- hathor/wallet/resources/thin_wallet/tokens.py | 4 ++-- hathor/wallet/util.py | 6 +++--- .../test_feature_simulation.py | 8 ++++---- tests/tx/test_block.py | 6 +++--- tests/unittest.py | 4 ++-- 58 files changed, 141 insertions(+), 141 deletions(-) diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index d8ee85522..815ed352f 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -18,7 +18,7 @@ from structlog import get_logger from hathor.checkpoint import Checkpoint -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.conf.settings import HathorSettings as HathorSettingsType from hathor.consensus import ConsensusAlgorithm from hathor.daa import DifficultyAdjustmentAlgorithm @@ -285,7 +285,7 @@ def set_peer_id(self, peer_id: PeerId) -> 'Builder': def _get_or_create_settings(self) -> HathorSettingsType: """Return the HathorSettings instance set on this builder, or a new one if not set.""" if self._settings is None: - self._settings = get_settings() + self._settings = get_global_settings() return self._settings def _get_reactor(self) -> Reactor: diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 368846ce7..9d0cc2da1 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -61,7 +61,7 @@ def check_or_raise(self, condition: bool, message: str) -> None: def create_manager(self, reactor: Reactor) -> HathorManager: import hathor - from hathor.conf.get_settings import get_settings, get_settings_source + from hathor.conf.get_settings import get_global_settings, get_settings_source from hathor.daa import TestMode from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage from hathor.event.websocket.factory import EventWebsocketFactory @@ -79,7 +79,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: ) from hathor.util import get_environment_info - settings = get_settings() + settings = get_global_settings() # only used for logging its location settings_source = get_settings_source() diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index ce92ccaa1..0e89448ab 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -77,7 +77,7 @@ def create_prometheus(self) -> PrometheusMetricsExporter: return prometheus def create_resources(self) -> server.Site: - from hathor.conf.get_settings import get_settings + from hathor.conf.get_settings import get_global_settings from hathor.debug_resources import ( DebugCrashResource, DebugLogResource, @@ -142,7 +142,7 @@ def create_resources(self) -> server.Site: ) from hathor.websocket import HathorAdminWebsocketFactory, WebsocketStatsResource - settings = get_settings() + settings = get_global_settings() cpu = get_cpu_profiler() # TODO get this from a file. How should we do with the factory? 
diff --git a/hathor/cli/db_export.py b/hathor/cli/db_export.py index 1a13afd9e..62187e021 100644 --- a/hathor/cli/db_export.py +++ b/hathor/cli/db_export.py @@ -34,8 +34,8 @@ def register_signal_handlers(self) -> None: @classmethod def create_parser(cls) -> ArgumentParser: - from hathor.conf.get_settings import get_settings - settings = get_settings() + from hathor.conf.get_settings import get_global_settings + settings = get_global_settings() def max_height(arg: str) -> Optional[int]: if arg.lower() == 'checkpoint': @@ -80,8 +80,8 @@ def prepare(self, *, register_resources: bool = True) -> None: self.skip_voided = self._args.export_skip_voided def iter_tx(self) -> Iterator['BaseTransaction']: - from hathor.conf.get_settings import get_settings - settings = get_settings() + from hathor.conf.get_settings import get_global_settings + settings = get_global_settings() soft_voided_ids = set(settings.SOFT_VOIDED_TX_IDS) for tx in self._iter_tx: diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py index 7c08a72bc..25723697a 100644 --- a/hathor/cli/events_simulator/scenario.py +++ b/hathor/cli/events_simulator/scenario.py @@ -52,10 +52,10 @@ def simulate_single_chain_one_block(simulator: 'Simulator', manager: 'HathorMana def simulate_single_chain_blocks_and_transactions(simulator: 'Simulator', manager: 'HathorManager') -> None: - from hathor.conf.get_settings import get_settings + from hathor.conf.get_settings import get_global_settings from hathor.simulator.utils import add_new_blocks, gen_new_tx - settings = get_settings() + settings = get_global_settings() assert manager.wallet is not None address = manager.wallet.get_unused_address(mark_as_used=False) @@ -97,11 +97,11 @@ def simulate_reorg(simulator: 'Simulator', manager: 'HathorManager') -> None: def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManager') -> None: - from hathor.conf.get_settings import get_settings + from hathor.conf.get_settings import get_global_settings from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_tx from hathor.util import not_none - settings = get_settings() + settings = get_global_settings() assert manager.wallet is not None address = manager.wallet.get_unused_address(mark_as_used=False) diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index 1fbd8a927..35a131640 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -135,10 +135,10 @@ def execute(args: Namespace) -> None: block.nonce, block.weight)) try: - from hathor.conf.get_settings import get_settings + from hathor.conf.get_settings import get_global_settings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.verification.verification_service import VerificationService, VertexVerifiers - settings = get_settings() + settings = get_global_settings() daa = DifficultyAdjustmentAlgorithm(settings=settings) verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa) verification_service = VerificationService(verifiers=verifiers) diff --git a/hathor/cli/nginx_config.py b/hathor/cli/nginx_config.py index 974d0f74c..5c6f2a874 100644 --- a/hathor/cli/nginx_config.py +++ b/hathor/cli/nginx_config.py @@ -114,9 +114,9 @@ def generate_nginx_config(openapi: dict[str, Any], *, out_file: TextIO, rate_k: """ from datetime import datetime - from hathor.conf.get_settings import get_settings + from hathor.conf.get_settings import get_global_settings - settings = get_settings() + settings = get_global_settings() api_prefix = settings.API_VERSION_PREFIX 
locations: dict[str, dict[str, Any]] = {} diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 30bab7fb2..7b5ac15ed 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -166,8 +166,8 @@ def prepare(self, *, register_resources: bool = True) -> None: assert self.manager.stratum_factory is not None self.reactor.listenTCP(self._args.stratum, self.manager.stratum_factory) - from hathor.conf.get_settings import get_settings - settings = get_settings() + from hathor.conf.get_settings import get_global_settings + settings = get_global_settings() if register_resources: resources_builder = ResourcesBuilder( @@ -212,8 +212,8 @@ def start_sentry_if_possible(self) -> None: sys.exit(-3) import hathor - from hathor.conf.get_settings import get_settings - settings = get_settings() + from hathor.conf.get_settings import get_global_settings + settings = get_global_settings() sentry_sdk.init( dsn=self._args.sentry_dsn, release=hathor.__version__, @@ -274,8 +274,8 @@ def check_unsafe_arguments(self) -> None: '', ] - from hathor.conf.get_settings import get_settings - settings = get_settings() + from hathor.conf.get_settings import get_global_settings + settings = get_global_settings() if self._args.unsafe_mode != settings.NETWORK_NAME: message.extend([ @@ -355,7 +355,7 @@ def check_python_version(self) -> None: def __init__(self, *, argv=None): from hathor.cli.run_node_args import RunNodeArgs from hathor.conf import TESTNET_SETTINGS_FILEPATH - from hathor.conf.get_settings import get_settings + from hathor.conf.get_settings import get_global_settings self.log = logger.new() if argv is None: @@ -373,7 +373,7 @@ def __init__(self, *, argv=None): os.environ['HATHOR_CONFIG_YAML'] = TESTNET_SETTINGS_FILEPATH try: - get_settings() + get_global_settings() except (TypeError, ValidationError) as e: from hathor.exception import PreInitializationError raise PreInitializationError( diff --git a/hathor/conf/get_settings.py b/hathor/conf/get_settings.py index c330ca795..6bdbd88b6 100644 --- a/hathor/conf/get_settings.py +++ b/hathor/conf/get_settings.py @@ -33,7 +33,7 @@ class _SettingsMetadata(NamedTuple): _settings_singleton: Optional[_SettingsMetadata] = None -def get_settings() -> Settings: +def get_global_settings() -> Settings: return HathorSettings() diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py index 9c8c0d83a..515b96a07 100644 --- a/hathor/consensus/block_consensus.py +++ b/hathor/consensus/block_consensus.py @@ -17,7 +17,7 @@ from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, Transaction, sum_weights from hathor.util import classproperty, not_none @@ -35,7 +35,7 @@ class BlockConsensusAlgorithm: """Implement the consensus algorithm for blocks.""" def __init__(self, context: 'ConsensusAlgorithmContext') -> None: - self._settings = get_settings() + self._settings = get_global_settings() self.context = context @classproperty diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py index e0a1ad5b5..34167d973 100644 --- a/hathor/consensus/consensus.py +++ b/hathor/consensus/consensus.py @@ -14,7 +14,7 @@ from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.consensus.block_consensus import BlockConsensusAlgorithmFactory from 
hathor.consensus.context import ConsensusAlgorithmContext from hathor.consensus.transaction_consensus import TransactionConsensusAlgorithmFactory @@ -56,7 +56,7 @@ class ConsensusAlgorithm: """ def __init__(self, soft_voided_tx_ids: set[bytes], pubsub: PubSubManager) -> None: - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new() self._pubsub = pubsub self.soft_voided_tx_ids = frozenset(soft_voided_tx_ids) diff --git a/hathor/consensus/transaction_consensus.py b/hathor/consensus/transaction_consensus.py index 0747c1753..1cb250679 100644 --- a/hathor/consensus/transaction_consensus.py +++ b/hathor/consensus/transaction_consensus.py @@ -16,7 +16,7 @@ from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, Transaction, TxInput, sum_weights from hathor.util import classproperty @@ -34,7 +34,7 @@ class TransactionConsensusAlgorithm: """Implement the consensus algorithm for transactions.""" def __init__(self, context: 'ConsensusAlgorithmContext') -> None: - self._settings = get_settings() + self._settings = get_global_settings() self.context = context @classproperty diff --git a/hathor/crypto/util.py b/hathor/crypto/util.py index 9bf4bad89..dbb36783b 100644 --- a/hathor/crypto/util.py +++ b/hathor/crypto/util.py @@ -27,7 +27,7 @@ load_der_private_key, ) -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.util import not_none _BACKEND = default_backend() @@ -129,7 +129,7 @@ def get_address_from_public_key_hash(public_key_hash: bytes, version_byte: Optio :return: address in bytes :rtype: bytes """ - settings = get_settings() + settings = get_global_settings() address = b'' actual_version_byte: bytes = version_byte if version_byte is not None else settings.P2PKH_VERSION_BYTE # Version byte @@ -208,7 +208,7 @@ def get_address_b58_from_redeem_script_hash(redeem_script_hash: bytes, version_b :return: address in base 58 :rtype: string """ - settings = get_settings() + settings = get_global_settings() actual_version_byte: bytes = version_byte if version_byte is not None else settings.MULTISIG_VERSION_BYTE address = get_address_from_redeem_script_hash(redeem_script_hash, actual_version_byte) return base58.b58encode(address).decode('utf-8') @@ -226,7 +226,7 @@ def get_address_from_redeem_script_hash(redeem_script_hash: bytes, version_byte: :return: address in bytes :rtype: bytes """ - settings = get_settings() + settings = get_global_settings() actual_version_byte: bytes = version_byte if version_byte is not None else settings.MULTISIG_VERSION_BYTE address = b'' # Version byte diff --git a/hathor/graphviz.py b/hathor/graphviz.py index 3e06bed8b..c75074576 100644 --- a/hathor/graphviz.py +++ b/hathor/graphviz.py @@ -18,7 +18,7 @@ from graphviz import Digraph -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.transaction import BaseTransaction from hathor.transaction.storage import TransactionStorage @@ -26,7 +26,7 @@ class GraphvizVisualizer: def __init__(self, storage: TransactionStorage, include_funds: bool = False, include_verifications: bool = False, only_blocks: bool = False): - self._settings = get_settings() + self._settings = get_global_settings() self.storage = storage # Indicate whether it should show fund edges diff --git 
a/hathor/indexes/base_index.py b/hathor/indexes/base_index.py index 22a782313..bc9195009 100644 --- a/hathor/indexes/base_index.py +++ b/hathor/indexes/base_index.py @@ -17,7 +17,7 @@ from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.indexes.scope import Scope from hathor.transaction.base_transaction import BaseTransaction @@ -34,7 +34,7 @@ class BaseIndex(ABC): created to generalize how we initialize indexes and keep track of which ones are up-to-date. """ def __init__(self) -> None: - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new() def init_start(self, indexes_manager: 'IndexesManager') -> None: diff --git a/hathor/indexes/rocksdb_tokens_index.py b/hathor/indexes/rocksdb_tokens_index.py index 575b44f37..b978d9c38 100644 --- a/hathor/indexes/rocksdb_tokens_index.py +++ b/hathor/indexes/rocksdb_tokens_index.py @@ -18,7 +18,7 @@ from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.indexes.rocksdb_utils import ( InternalUid, RocksDBIndexUtils, @@ -85,7 +85,7 @@ class RocksDBTokensIndex(TokensIndex, RocksDBIndexUtils): """ def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None) -> None: - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new() RocksDBIndexUtils.__init__(self, db, cf_name or _CF_NAME_TOKENS_INDEX) diff --git a/hathor/indexes/rocksdb_utils.py b/hathor/indexes/rocksdb_utils.py index 8ce19ba39..431bfc2f6 100644 --- a/hathor/indexes/rocksdb_utils.py +++ b/hathor/indexes/rocksdb_utils.py @@ -15,7 +15,7 @@ from collections.abc import Collection from typing import TYPE_CHECKING, Iterable, Iterator, NewType -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings if TYPE_CHECKING: # pragma: no cover import rocksdb @@ -30,7 +30,7 @@ def to_internal_token_uid(token_uid: bytes) -> InternalUid: """Normalizes a token_uid so that the native token (\x00) will have the same length as custom tokens.""" - settings = get_settings() + settings = get_global_settings() if token_uid == settings.HATHOR_TOKEN_UID: return _INTERNAL_HATHOR_TOKEN_UID assert len(token_uid) == 32 @@ -40,7 +40,7 @@ def to_internal_token_uid(token_uid: bytes) -> InternalUid: def from_internal_token_uid(token_uid: InternalUid) -> bytes: """De-normalizes the token_uid so that the native token is b'\x00' as expected""" assert len(token_uid) == 32 - settings = get_settings() + settings = get_global_settings() if token_uid == _INTERNAL_HATHOR_TOKEN_UID: return settings.HATHOR_TOKEN_UID return token_uid diff --git a/hathor/indexes/utxo_index.py b/hathor/indexes/utxo_index.py index 5b1cf34ee..5ccbf07e4 100644 --- a/hathor/indexes/utxo_index.py +++ b/hathor/indexes/utxo_index.py @@ -18,7 +18,7 @@ from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.indexes.base_index import BaseIndex from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction, Block, TxOutput @@ -60,7 +60,7 @@ def __repr__(self): @classmethod def from_tx_output(cls, tx: BaseTransaction, index: int, tx_output: TxOutput) -> 'UtxoIndexItem': assert tx.hash is not None - settings = get_settings() + settings = get_global_settings() if tx_output.is_token_authority(): raise 
ValueError('UtxoIndexItem cannot be used with a token authority output') @@ -206,7 +206,7 @@ def iter_utxos(self, *, address: str, target_amount: int, token_uid: Optional[by target_height: Optional[int] = None) -> Iterator[UtxoIndexItem]: """ Search UTXOs for a given token_uid+address+target_value, if no token_uid is given, HTR is assumed. """ - settings = get_settings() + settings = get_global_settings() actual_token_uid = token_uid if token_uid is not None else settings.HATHOR_TOKEN_UID iter_nolock = self._iter_utxos_nolock(token_uid=actual_token_uid, address=address, target_amount=target_amount) diff --git a/hathor/p2p/peer_id.py b/hathor/p2p/peer_id.py index f3122c34f..678111f1c 100644 --- a/hathor/p2p/peer_id.py +++ b/hathor/p2p/peer_id.py @@ -27,7 +27,7 @@ from twisted.internet.interfaces import ISSLTransport from twisted.internet.ssl import Certificate, CertificateOptions, TLSVersion, trustRootFromCertificates -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.p2p.utils import connection_string_to_host, discover_dns, generate_certificate from hathor.util import not_none @@ -66,7 +66,7 @@ class PeerId: flags: set[str] def __init__(self, auto_generate_keys: bool = True) -> None: - self._settings = get_settings() + self._settings = get_global_settings() self.id = None self.private_key = None self.public_key = None diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index 696ba3c07..e4cff0d5a 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -23,7 +23,7 @@ from twisted.protocols.basic import LineReceiver from twisted.python.failure import Failure -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer_id import PeerId from hathor.p2p.rate_limiter import RateLimiter @@ -93,7 +93,7 @@ class WarningFlags(str, Enum): def __init__(self, network: str, my_peer: PeerId, p2p_manager: 'ConnectionsManager', *, use_ssl: bool, inbound: bool) -> None: - self._settings = get_settings() + self._settings = get_global_settings() self.network = network self.my_peer = my_peer self.connections = p2p_manager diff --git a/hathor/p2p/resources/mining_info.py b/hathor/p2p/resources/mining_info.py index 8263ee273..180e3876e 100644 --- a/hathor/p2p/resources/mining_info.py +++ b/hathor/p2p/resources/mining_info.py @@ -16,7 +16,7 @@ from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.difficulty import Weight from hathor.util import json_dumpb @@ -30,7 +30,7 @@ class MiningInfoResource(Resource): isLeaf = True def __init__(self, manager): - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def render_GET(self, request): diff --git a/hathor/p2p/resources/status.py b/hathor/p2p/resources/status.py index 09be830ca..544484f46 100644 --- a/hathor/p2p/resources/status.py +++ b/hathor/p2p/resources/status.py @@ -15,7 +15,7 @@ import hathor from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.p2p.utils import to_serializable_best_blockchain from hathor.util 
import json_dumpb @@ -30,7 +30,7 @@ class StatusResource(Resource): isLeaf = True def __init__(self, manager): - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager self.reactor = manager.reactor diff --git a/hathor/p2p/states/hello.py b/hathor/p2p/states/hello.py index 56f514dd7..9472f140c 100644 --- a/hathor/p2p/states/hello.py +++ b/hathor/p2p/states/hello.py @@ -17,7 +17,7 @@ from structlog import get_logger import hathor -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.exception import HathorError from hathor.p2p.messages import ProtocolMessages from hathor.p2p.states.base import BaseState @@ -34,7 +34,7 @@ class HelloState(BaseState): def __init__(self, protocol: 'HathorProtocol') -> None: super().__init__(protocol) - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new(**protocol.get_logger_context()) self.cmd_map.update({ ProtocolMessages.HELLO: self.handle_hello, @@ -172,7 +172,7 @@ def handle_hello(self, payload: str) -> None: def _parse_sync_versions(hello_data: dict[str, Any]) -> set[SyncVersion]: """Versions that are not recognized will not be included.""" - settings = get_settings() + settings = get_global_settings() if settings.CAPABILITY_SYNC_VERSION in hello_data['capabilities']: if 'sync_versions' not in hello_data: raise HathorError('protocol error, expected sync_versions field') diff --git a/hathor/p2p/states/ready.py b/hathor/p2p/states/ready.py index 3f945ff02..f500aadf1 100644 --- a/hathor/p2p/states/ready.py +++ b/hathor/p2p/states/ready.py @@ -18,7 +18,7 @@ from structlog import get_logger from twisted.internet.task import LoopingCall -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.indexes.height_index import HeightInfo from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer_id import PeerId @@ -37,7 +37,7 @@ class ReadyState(BaseState): def __init__(self, protocol: 'HathorProtocol') -> None: super().__init__(protocol) - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new(**self.protocol.get_logger_context()) diff --git a/hathor/p2p/sync_v1/agent.py b/hathor/p2p/sync_v1/agent.py index cb300907c..110514a83 100644 --- a/hathor/p2p/sync_v1/agent.py +++ b/hathor/p2p/sync_v1/agent.py @@ -22,7 +22,7 @@ from twisted.internet.defer import Deferred, inlineCallbacks from twisted.internet.interfaces import IDelayedCall -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.p2p.messages import GetNextPayload, GetTipsPayload, NextPayload, ProtocolMessages, TipsPayload from hathor.p2p.sync_agent import SyncAgent from hathor.p2p.sync_v1.downloader import Downloader @@ -68,7 +68,7 @@ def __init__(self, protocol: 'HathorProtocol', downloader: Downloader, reactor: :param reactor: Reactor to schedule later calls. 
(default=twisted.internet.reactor) :type reactor: Reactor """ - self._settings = get_settings() + self._settings = get_global_settings() self.protocol = protocol self.manager = protocol.node self.downloader = downloader diff --git a/hathor/p2p/sync_v1/downloader.py b/hathor/p2p/sync_v1/downloader.py index c28863abb..670b1133a 100644 --- a/hathor/p2p/sync_v1/downloader.py +++ b/hathor/p2p/sync_v1/downloader.py @@ -20,7 +20,7 @@ from twisted.internet import defer from twisted.internet.defer import Deferred -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.transaction.storage.exceptions import TransactionDoesNotExist if TYPE_CHECKING: @@ -51,7 +51,7 @@ class TxDetails: requested_index: int def __init__(self, tx_id: bytes, deferred: Deferred, connections: list['NodeSyncTimestamp']): - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new() self.tx_id = tx_id self.deferred = deferred @@ -145,7 +145,7 @@ class Downloader: window_size: int def __init__(self, manager: 'HathorManager', window_size: int = 100): - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new() self.manager = manager diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index c93ee57eb..7fb763498 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -24,7 +24,7 @@ from twisted.internet.defer import Deferred, inlineCallbacks from twisted.internet.task import LoopingCall, deferLater -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.p2p.messages import ProtocolMessages from hathor.p2p.sync_agent import SyncAgent from hathor.p2p.sync_v2.blockchain_streaming_client import BlockchainStreamingClient, StreamingError @@ -91,7 +91,7 @@ def __init__(self, protocol: 'HathorProtocol', reactor: Reactor) -> None: :param reactor: Reactor to schedule later calls. 
(default=twisted.internet.reactor) :type reactor: Reactor """ - self._settings = get_settings() + self._settings = get_global_settings() self.protocol = protocol self.manager = protocol.node self.tx_storage: 'TransactionStorage' = protocol.node.tx_storage diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index 12509ffc4..66f1bda37 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -27,7 +27,7 @@ from cryptography.x509.oid import NameOID from twisted.internet.interfaces import IAddress -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.indexes.height_index import HeightInfo from hathor.p2p.peer_discovery import DNSPeerDiscovery from hathor.transaction.genesis import get_representation_for_all_genesis @@ -74,14 +74,14 @@ def description_to_connection_string(description: str) -> tuple[str, Optional[st def get_genesis_short_hash() -> str: """ Return the first 7 chars of the GENESIS_HASH used for validation that the genesis are the same """ - settings = get_settings() + settings = get_global_settings() return get_representation_for_all_genesis(settings).hex()[:7] def get_settings_hello_dict() -> dict[str, Any]: """ Return a dict of settings values that must be validated in the hello state """ - settings = get_settings() + settings = get_global_settings() settings_dict = {} for key in settings.P2P_SETTINGS_HASH_FIELDS: value = getattr(settings, key) diff --git a/hathor/prometheus.py b/hathor/prometheus.py index 63c1a7727..ac5ed653f 100644 --- a/hathor/prometheus.py +++ b/hathor/prometheus.py @@ -18,7 +18,7 @@ from prometheus_client import CollectorRegistry, Gauge, write_to_textfile from twisted.internet.task import LoopingCall -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.reactor import get_global_reactor if TYPE_CHECKING: @@ -77,7 +77,7 @@ def __init__(self, metrics: 'Metrics', path: str, filename: str = 'hathor.prom', :param filename: Name of the prometheus file (must end in .prom) :type filename: str """ - self._settings = get_settings() + self._settings = get_global_settings() self.metrics = metrics self.metrics_prefix = metrics_prefix diff --git a/hathor/simulator/miner/geometric_miner.py b/hathor/simulator/miner/geometric_miner.py index 2dc8209d6..a7828e015 100644 --- a/hathor/simulator/miner/geometric_miner.py +++ b/hathor/simulator/miner/geometric_miner.py @@ -15,7 +15,7 @@ import math from typing import TYPE_CHECKING, Optional -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.exception import BlockTemplateTimestampError from hathor.manager import HathorEvents from hathor.simulator.miner.abstract_miner import AbstractMiner @@ -45,7 +45,7 @@ def __init__( blocks than values provided, 0 is used. 
""" super().__init__(manager, rng) - self._settings = get_settings() + self._settings = get_global_settings() self._hashpower = hashpower self._signal_bits = signal_bits or [] diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index b6c546a3f..6155df3b8 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -21,7 +21,7 @@ from structlog import get_logger from hathor.builder import BuildArtifacts, Builder -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.feature_activation.feature_service import FeatureService @@ -54,7 +54,7 @@ def __init__(self, seed: Optional[int] = None): seed = secrets.randbits(64) self.seed = seed self.rng = Random(self.seed) - self.settings = get_settings()._replace(AVG_TIME_BETWEEN_BLOCKS=SIMULATOR_AVG_TIME_BETWEEN_BLOCKS) + self.settings = get_global_settings()._replace(AVG_TIME_BETWEEN_BLOCKS=SIMULATOR_AVG_TIME_BETWEEN_BLOCKS) self._network = 'testnet' self._clock = MemoryReactorHeapClock() self._peers: OrderedDict[str, HathorManager] = OrderedDict() diff --git a/hathor/simulator/tx_generator.py b/hathor/simulator/tx_generator.py index 347721d5b..8c977c870 100644 --- a/hathor/simulator/tx_generator.py +++ b/hathor/simulator/tx_generator.py @@ -17,7 +17,7 @@ from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.simulator.utils import NoCandidatesError, gen_new_double_spending, gen_new_tx from hathor.transaction.exceptions import RewardLocked from hathor.util import Random @@ -43,7 +43,7 @@ def __init__(self, manager: 'HathorManager', rng: Random, *, :param: rate: Number of transactions per second :param: hashpower: Number of hashes per second """ - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager # List of addresses to send tokens. 
If this list is empty, tokens will be sent to an address diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index 5d4085569..2b9dd8322 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -33,7 +33,7 @@ from twisted.protocols.basic import LineReceiver from twisted.python.failure import Failure -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import decode_address from hathor.exception import InvalidNewTransaction from hathor.p2p.utils import format_address @@ -365,7 +365,7 @@ class StratumProtocol(JSONRPC): def __init__(self, factory: 'StratumFactory', manager: 'HathorManager', address: IAddress, id_generator: Optional[Callable[[], Iterator[Union[str, int]]]] = lambda: count()): - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new(address=address) self.factory = factory self.manager = manager diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index ae65a5000..532538969 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -27,7 +27,7 @@ from structlog import get_logger from hathor.checkpoint import Checkpoint -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.transaction.exceptions import InvalidOutputValue, WeightError from hathor.transaction.transaction_metadata import TransactionMetadata from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len @@ -158,7 +158,7 @@ def __init__(self, assert signal_bits <= _ONE_BYTE, f'signal_bits {hex(signal_bits)} must not be larger than one byte' assert version <= _ONE_BYTE, f'version {hex(version)} must not be larger than one byte' - self._settings = get_settings() + self._settings = get_global_settings() self.nonce = nonce self.timestamp = timestamp or int(time.time()) self.signal_bits = signal_bits diff --git a/hathor/transaction/resources/dashboard.py b/hathor/transaction/resources/dashboard.py index c181e210a..47f366a8e 100644 --- a/hathor/transaction/resources/dashboard.py +++ b/hathor/transaction/resources/dashboard.py @@ -14,7 +14,7 @@ from hathor.api_util import Resource, get_args, get_missing_params_msg, parse_args, parse_int, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.util import json_dumpb ARGS = ['block', 'tx'] @@ -31,7 +31,7 @@ class DashboardTransactionResource(Resource): def __init__(self, manager): # Important to have the manager so we can know the tx_storage - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def render_GET(self, request): diff --git a/hathor/transaction/resources/mempool.py b/hathor/transaction/resources/mempool.py index 08340b074..90ce29010 100644 --- a/hathor/transaction/resources/mempool.py +++ b/hathor/transaction/resources/mempool.py @@ -18,7 +18,7 @@ from hathor.api_util import Resource, get_args, parse_args, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.transaction import Transaction from hathor.util import json_dumpb @@ -44,7 +44,7 @@ class MempoolResource(Resource): def __init__(self, manager: 'HathorManager'): # Important to 
have the manager so we can know the tx_storage - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def render_GET(self, request: 'Request') -> bytes: diff --git a/hathor/transaction/resources/push_tx.py b/hathor/transaction/resources/push_tx.py index 8db827edf..c550231f2 100644 --- a/hathor/transaction/resources/push_tx.py +++ b/hathor/transaction/resources/push_tx.py @@ -21,7 +21,7 @@ from hathor.api_util import Resource, get_args, parse_args, render_options, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.exception import InvalidNewTransaction from hathor.transaction import Transaction from hathor.transaction.base_transaction import tx_or_block_from_bytes @@ -46,7 +46,7 @@ class PushTxResource(Resource): def __init__(self, manager: 'HathorManager', max_output_script_size: Optional[int] = None, allow_non_standard_script: bool = False) -> None: - self._settings = get_settings() + self._settings = get_global_settings() self.log = logger.new() # Important to have the manager so we can know the tx_storage self.manager = manager diff --git a/hathor/transaction/resources/transaction.py b/hathor/transaction/resources/transaction.py index 1646e06e5..8b755ae02 100644 --- a/hathor/transaction/resources/transaction.py +++ b/hathor/transaction/resources/transaction.py @@ -24,7 +24,7 @@ validate_tx_hash, ) from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.transaction.base_transaction import BaseTransaction, TxVersion from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.util import json_dumpb @@ -49,7 +49,7 @@ def get_tx_extra_data(tx: BaseTransaction, *, detail_tokens: bool = True) -> dic assert tx.storage is not None assert tx.storage.indexes is not None - settings = get_settings() + settings = get_global_settings() serialized = tx.to_json(decode_script=True) serialized['raw'] = tx.get_struct().hex() serialized['nonce'] = str(tx.nonce) @@ -205,7 +205,7 @@ def get_list_tx(self, request): 'timestamp': int, the timestamp reference we are in the pagination 'page': 'previous' or 'next', to indicate if the user wants after or before the hash reference """ - settings = get_settings() + settings = get_global_settings() raw_args = get_args(request) parsed = parse_args(raw_args, GET_LIST_ARGS) if not parsed['success']: diff --git a/hathor/transaction/resources/utxo_search.py b/hathor/transaction/resources/utxo_search.py index 93f67e561..a7c46382a 100644 --- a/hathor/transaction/resources/utxo_search.py +++ b/hathor/transaction/resources/utxo_search.py @@ -24,7 +24,7 @@ set_cors, ) from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import decode_address from hathor.util import json_dumpb from hathor.wallet.exceptions import InvalidAddress @@ -45,7 +45,7 @@ class UtxoSearchResource(Resource): def __init__(self, manager: 'HathorManager'): # Important to have the manager so we can know the tx_storage - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def render_GET(self, request: 'Request') -> bytes: diff --git a/hathor/transaction/scripts/construct.py 
b/hathor/transaction/scripts/construct.py index 8508b5270..94eee27b7 100644 --- a/hathor/transaction/scripts/construct.py +++ b/hathor/transaction/scripts/construct.py @@ -15,7 +15,7 @@ import re from typing import TYPE_CHECKING, Any, Generator, NamedTuple, Optional, Pattern, Union -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import decode_address from hathor.transaction.exceptions import ScriptError from hathor.transaction.scripts.base_script import BaseScript @@ -84,7 +84,7 @@ def create_base_script(address: str, timelock: Optional[Any] = None) -> BaseScri """ Verifies if address is P2PKH or Multisig and return the corresponding BaseScript implementation. """ from hathor.transaction.scripts.execute import binary_to_int - settings = get_settings() + settings = get_global_settings() baddress = decode_address(address) if baddress[0] == binary_to_int(settings.P2PKH_VERSION_BYTE): from hathor.transaction.scripts import P2PKH @@ -110,7 +110,7 @@ def create_output_script(address: bytes, timelock: Optional[Any] = None) -> byte :rtype: bytes """ from hathor.transaction.scripts.execute import binary_to_int - settings = get_settings() + settings = get_global_settings() # XXX: if the address class can somehow be simplified create_base_script could be used here if address[0] == binary_to_int(settings.P2PKH_VERSION_BYTE): from hathor.transaction.scripts import P2PKH @@ -193,7 +193,7 @@ def count_sigops(data: bytes) -> int: """ from hathor.transaction.scripts import Opcode from hathor.transaction.scripts.execute import decode_opn, get_script_op - settings = get_settings() + settings = get_global_settings() n_ops: int = 0 data_len: int = len(data) pos: int = 0 diff --git a/hathor/transaction/scripts/opcode.py b/hathor/transaction/scripts/opcode.py index 3c185f5a5..460c66821 100644 --- a/hathor/transaction/scripts/opcode.py +++ b/hathor/transaction/scripts/opcode.py @@ -20,7 +20,7 @@ from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import ( get_address_b58_from_bytes, get_hash160, @@ -523,7 +523,7 @@ def op_checkmultisig(context: ScriptContext) -> None: :raises MissingStackItems: if stack is empty or it has less signatures than the minimum required :raises VerifyFailed: verification failed """ - settings = get_settings() + settings = get_global_settings() if not len(context.stack): raise MissingStackItems('OP_CHECKMULTISIG: empty stack') diff --git a/hathor/transaction/storage/migrations/remove_first_nop_features.py b/hathor/transaction/storage/migrations/remove_first_nop_features.py index c245e8d22..555bcf741 100644 --- a/hathor/transaction/storage/migrations/remove_first_nop_features.py +++ b/hathor/transaction/storage/migrations/remove_first_nop_features.py @@ -16,7 +16,7 @@ from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.transaction.storage.migrations import BaseMigration from hathor.util import progress @@ -37,7 +37,7 @@ def run(self, storage: 'TransactionStorage') -> None: """ This migration clears the Feature Activation metadata related to the first Phased Testing on testnet. 
""" - settings = get_settings() + settings = get_global_settings() log = logger.new() if settings.NETWORK_NAME != 'testnet-golf': diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index a4358c6c4..8b4d5a195 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -23,7 +23,7 @@ from intervaltree.interval import Interval from structlog import get_logger -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.indexes import IndexesManager from hathor.indexes.height_index import HeightInfo from hathor.profiler import get_cpu_profiler @@ -94,7 +94,7 @@ class TransactionStorage(ABC): _migrations: list[BaseMigration] def __init__(self) -> None: - self._settings = get_settings() + self._settings = get_global_settings() # Weakref is used to guarantee that there is only one instance of each transaction in memory. self._tx_weakref: WeakValueDictionary[bytes, BaseTransaction] = WeakValueDictionary() self._tx_weakref_disabled: bool = False diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index c223d928f..17ed326a1 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -15,7 +15,7 @@ from collections import defaultdict from typing import TYPE_CHECKING, Any, Optional -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState from hathor.transaction.validation_state import ValidationState @@ -129,7 +129,7 @@ def __init__( self.feature_activation_bit_counts = feature_activation_bit_counts - settings = get_settings() + settings = get_global_settings() # Genesis specific: if hash is not None and is_genesis(hash, settings=settings): diff --git a/hathor/transaction/util.py b/hathor/transaction/util.py index d476daeda..a7970359a 100644 --- a/hathor/transaction/util.py +++ b/hathor/transaction/util.py @@ -17,7 +17,7 @@ from math import ceil, floor from typing import Any, Callable, Optional -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings VerboseCallback = Optional[Callable[[str, Any], None]] @@ -49,12 +49,12 @@ def unpack_len(n: int, buf: bytes) -> tuple[bytes, bytes]: def get_deposit_amount(mint_amount: int) -> int: - settings = get_settings() + settings = get_global_settings() return ceil(abs(settings.TOKEN_DEPOSIT_PERCENTAGE * mint_amount)) def get_withdraw_amount(melt_amount: int) -> int: - settings = get_settings() + settings = get_global_settings() return floor(abs(settings.TOKEN_DEPOSIT_PERCENTAGE * melt_amount)) diff --git a/hathor/util.py b/hathor/util.py index bd674f128..1f409d0f1 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -29,7 +29,7 @@ from structlog import get_logger import hathor -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.types import TokenUid if TYPE_CHECKING: @@ -760,7 +760,7 @@ def is_token_uid_valid(token_uid: TokenUid) -> bool: >>> is_token_uid_valid(bytes.fromhex('000003a3b261e142d3dfd84970d3a50a93b5bc3a66a3b6ba973956148a3eb82400')) False """ - settings = get_settings() + settings = get_global_settings() if token_uid == settings.HATHOR_TOKEN_UID: return True elif len(token_uid) == 
32: @@ -784,7 +784,7 @@ def as_dict(self): def get_environment_info(args: str, peer_id: Optional[str]) -> EnvironmentInfo: - settings = get_settings() + settings = get_global_settings() environment_info = EnvironmentInfo( python_implementation=str(sys.implementation), hathor_core_args=args, diff --git a/hathor/version_resource.py b/hathor/version_resource.py index fa7f96ccb..ec64c3114 100644 --- a/hathor/version_resource.py +++ b/hathor/version_resource.py @@ -15,7 +15,7 @@ import hathor from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.util import json_dumpb @@ -29,7 +29,7 @@ class VersionResource(Resource): def __init__(self, manager): # Important to have the manager so we can have access to min_tx_weight_coefficient - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def render_GET(self, request): diff --git a/hathor/wallet/resources/balance.py b/hathor/wallet/resources/balance.py index 43122f5dd..b91adafd1 100644 --- a/hathor/wallet/resources/balance.py +++ b/hathor/wallet/resources/balance.py @@ -14,7 +14,7 @@ from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.util import json_dumpb @@ -28,7 +28,7 @@ class BalanceResource(Resource): def __init__(self, manager): # Important to have the manager so we can know the tx_storage - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def render_GET(self, request): diff --git a/hathor/wallet/resources/thin_wallet/address_balance.py b/hathor/wallet/resources/thin_wallet/address_balance.py index a80ec6355..6e12568be 100644 --- a/hathor/wallet/resources/thin_wallet/address_balance.py +++ b/hathor/wallet/resources/thin_wallet/address_balance.py @@ -19,7 +19,7 @@ from hathor.api_util import Resource, get_args, get_missing_params_msg, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import decode_address from hathor.transaction.scripts import parse_address_script from hathor.util import json_dumpb @@ -53,7 +53,7 @@ class AddressBalanceResource(Resource): isLeaf = True def __init__(self, manager): - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def has_address(self, output: 'TxOutput', requested_address: str) -> bool: diff --git a/hathor/wallet/resources/thin_wallet/address_history.py b/hathor/wallet/resources/thin_wallet/address_history.py index ef4dc323b..8e0ffc0ca 100644 --- a/hathor/wallet/resources/thin_wallet/address_history.py +++ b/hathor/wallet/resources/thin_wallet/address_history.py @@ -19,7 +19,7 @@ from hathor.api_util import Resource, get_args, get_missing_params_msg, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import decode_address from hathor.util import json_dumpb, json_loadb from hathor.wallet.exceptions import InvalidAddress @@ -34,7 +34,7 @@ class AddressHistoryResource(Resource): isLeaf = True def __init__(self, manager): - 
self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager # TODO add openapi docs for this API diff --git a/hathor/wallet/resources/thin_wallet/address_search.py b/hathor/wallet/resources/thin_wallet/address_search.py index ef63bc50f..124fb4732 100644 --- a/hathor/wallet/resources/thin_wallet/address_search.py +++ b/hathor/wallet/resources/thin_wallet/address_search.py @@ -18,7 +18,7 @@ from hathor.api_util import Resource, get_args, get_missing_params_msg, parse_int, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import decode_address from hathor.transaction.scripts import parse_address_script from hathor.util import json_dumpb @@ -37,7 +37,7 @@ class AddressSearchResource(Resource): isLeaf = True def __init__(self, manager): - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def has_token_and_address(self, tx: 'BaseTransaction', address: str, token: bytes) -> bool: diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index c9bbf10ce..6cd6badaf 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -25,7 +25,7 @@ from hathor.api_util import Resource, render_options, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.exception import InvalidNewTransaction from hathor.reactor import get_global_reactor from hathor.transaction import Transaction @@ -56,7 +56,7 @@ class SendTokensResource(Resource): def __init__(self, manager): # Important to have the manager so we can know the tx_storage - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager self.sleep_seconds = 0 self.log = logger.new() diff --git a/hathor/wallet/resources/thin_wallet/token_history.py b/hathor/wallet/resources/thin_wallet/token_history.py index 698aa94cd..a68cf4077 100644 --- a/hathor/wallet/resources/thin_wallet/token_history.py +++ b/hathor/wallet/resources/thin_wallet/token_history.py @@ -16,7 +16,7 @@ from hathor.api_util import Resource, get_args, get_missing_params_msg, parse_args, parse_int, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.util import json_dumpb ARGS = ['id', 'count'] @@ -31,7 +31,7 @@ class TokenHistoryResource(Resource): isLeaf = True def __init__(self, manager): - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def render_GET(self, request: Request) -> bytes: diff --git a/hathor/wallet/resources/thin_wallet/tokens.py b/hathor/wallet/resources/thin_wallet/tokens.py index 3190aaa5b..fcd29d476 100644 --- a/hathor/wallet/resources/thin_wallet/tokens.py +++ b/hathor/wallet/resources/thin_wallet/tokens.py @@ -18,7 +18,7 @@ from hathor.api_util import Resource, get_args, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.util import is_token_uid_valid, json_dumpb @@ -31,7 +31,7 @@ class TokenResource(Resource): isLeaf = 
True def __init__(self, manager): - self._settings = get_settings() + self._settings = get_global_settings() self.manager = manager def get_one_token_data(self, token_uid: bytes) -> dict[str, Any]: diff --git a/hathor/wallet/util.py b/hathor/wallet/util.py index 111f07ac0..b8b1aa9b4 100644 --- a/hathor/wallet/util.py +++ b/hathor/wallet/util.py @@ -19,7 +19,7 @@ from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import get_hash160, get_private_key_from_bytes from hathor.transaction.scripts import HathorScript, Opcode from hathor.transaction.transaction import Transaction @@ -39,7 +39,7 @@ def generate_multisig_redeem_script(signatures_required: int, public_key_bytes: :return: The redeem script for the multisig wallet :rtype: bytes """ - settings = get_settings() + settings = get_global_settings() if signatures_required > settings.MAX_MULTISIG_SIGNATURES: raise ValueError('Signatures required {} is over the limit'.format(signatures_required)) if len(public_key_bytes) > settings.MAX_MULTISIG_PUBKEYS: @@ -71,7 +71,7 @@ def generate_multisig_address(redeem_script: bytes, version_byte: Optional[bytes :return: The multisig address :rtype: str(base58) """ - settings = get_settings() + settings = get_global_settings() actual_version_byte: bytes = version_byte if version_byte is not None else settings.MULTISIG_VERSION_BYTE address = bytearray() diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 2e7e1f307..1a6665a1e 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -18,7 +18,7 @@ import pytest from hathor.builder import Builder -from hathor.conf.get_settings import get_settings +from hathor.conf.get_settings import get_global_settings from hathor.feature_activation import feature_service as feature_service_module from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService @@ -75,7 +75,7 @@ def test_feature(self) -> None: } ) - settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) builder = self.get_simulator_builder().set_settings(settings) artifacts = self.simulator.create_artifacts(builder) feature_service = artifacts.feature_service @@ -351,7 +351,7 @@ def test_reorg(self) -> None: } ) - settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) builder = self.get_simulator_builder().set_settings(settings) artifacts = self.simulator.create_artifacts(builder) feature_service = artifacts.feature_service @@ -566,7 +566,7 @@ def test_feature_from_existing_storage(self) -> None: } ) - settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + settings = get_global_settings()._replace(FEATURE_ACTIVATION=feature_settings) rocksdb_dir = self.get_rocksdb_directory() builder1 = self.get_simulator_builder_from_dir(rocksdb_dir).set_settings(settings) artifacts1 = self.simulator.create_artifacts(builder1) diff --git a/tests/tx/test_block.py b/tests/tx/test_block.py index a363cfb78..735351215 100644 --- a/tests/tx/test_block.py +++ b/tests/tx/test_block.py @@ -16,7 +16,7 @@ import pytest -from 
hathor.conf.get_settings import get_settings
+from hathor.conf.get_settings import get_global_settings
 from hathor.conf.settings import HathorSettings
 from hathor.feature_activation.feature import Feature
 from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService
@@ -27,7 +27,7 @@
 
 
 def test_calculate_feature_activation_bit_counts_genesis():
-    settings = get_settings()
+    settings = get_global_settings()
     storage = TransactionMemoryStorage()
     genesis_block = storage.get_transaction(settings.GENESIS_BLOCK_HASH)
     assert isinstance(genesis_block, Block)
@@ -38,7 +38,7 @@ def test_calculate_feature_activation_bit_counts_genesis():
 
 @pytest.fixture
 def block_mocks() -> list[Block]:
-    settings = get_settings()
+    settings = get_global_settings()
     blocks: list[Block] = []
     feature_activation_bits = [
         0b0000,  # 0: boundary block
diff --git a/tests/unittest.py b/tests/unittest.py
index 852f27bd8..019437e26 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -11,7 +11,7 @@
 from hathor.builder import BuildArtifacts, Builder
 from hathor.conf import HathorSettings
-from hathor.conf.get_settings import get_settings
+from hathor.conf.get_settings import get_global_settings
 from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode
 from hathor.p2p.peer_id import PeerId
 from hathor.p2p.sync_version import SyncVersion
@@ -114,7 +114,7 @@ def setUp(self):
         self.log.info('set seed', seed=self.seed)
         self.rng = Random(self.seed)
         self._pending_cleanups = []
-        self._settings = get_settings()
+        self._settings = get_global_settings()
 
     def tearDown(self):
         self.clean_tmpdirs()

From eaf76bd1fe67a72e377f3788a0dd1529eb197327 Mon Sep 17 00:00:00 2001
From: Gabriel Levcovitz
Date: Wed, 17 Jan 2024 13:04:57 -0300
Subject: [PATCH 11/38] refactor(settings): remove some calls to HathorSettings (#920)

---
 hathor/conf/settings.py                       |  4 -
 hathor/event/resources/event.py               |  6 +-
 hathor/transaction/resources/graphviz.py      |  9 +--
 tests/resources/wallet/test_search_address.py | 11 +--
 tests/resources/wallet/test_thin_wallet.py    | 24 +++---
 tests/tx/test_blockchain.py                   | 53 +++++++-------
 tests/tx/test_indexes.py                      | 73 +++++++++----------
 tests/tx/test_indexes2.py                     |  5 +-
 tests/tx/test_mining.py                       | 11 +--
 tests/tx/test_multisig.py                     |  9 +--
 tests/tx/test_reward_lock.py                  | 17 ++---
 tests/tx/test_timelock.py                     | 17 ++---
 tests/tx/test_tokens.py                       | 15 ++--
 tests/tx/test_tx_storage.py                   | 16 ++--
 tests/wallet/test_balance_update.py           | 41 +++++------
 tests/wallet/test_wallet.py                   | 29 ++++----
 tests/wallet/test_wallet_hd.py                | 13 ++--
 tests/websocket/test_websocket.py             |  5 +-
 18 files changed, 158 insertions(+), 200 deletions(-)

diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py
index be31f0dd5..09a90dd2d 100644
--- a/hathor/conf/settings.py
+++ b/hathor/conf/settings.py
@@ -395,10 +395,6 @@ def GENESIS_TX2_TIMESTAMP(self) -> int:
     # Identifier used in metadata's voided_by to mark a tx as partially validated.
     PARTIALLY_VALIDATED_ID: bytes = b'pending-validation'
 
-    EVENT_API_DEFAULT_BATCH_SIZE: int = 100
-
-    EVENT_API_MAX_BATCH_SIZE: int = 1000
-
     # Maximum number of sync running simultaneously.
     MAX_ENABLED_SYNC: int = 16
diff --git a/hathor/event/resources/event.py b/hathor/event/resources/event.py
index b3fd588ab..febc2bb62 100644
--- a/hathor/event/resources/event.py
+++ b/hathor/event/resources/event.py
@@ -19,12 +19,12 @@
 
 from hathor.api_util import Resource, set_cors
 from hathor.cli.openapi_files.register import register_resource
-from hathor.conf import HathorSettings
 from hathor.event import EventManager
 from hathor.event.model.base_event import BaseEvent
 from hathor.utils.api import ErrorResponse, QueryParams, Response
 
-settings = HathorSettings()
+EVENT_API_DEFAULT_BATCH_SIZE: int = 100
+EVENT_API_MAX_BATCH_SIZE: int = 1000
 
 
 @register_resource
@@ -66,7 +66,7 @@ def render_GET(self, request):
 
 class GetEventsParams(QueryParams):
     last_ack_event_id: Optional[NonNegativeInt]
-    size: int = Field(default=settings.EVENT_API_DEFAULT_BATCH_SIZE, ge=0, le=settings.EVENT_API_MAX_BATCH_SIZE)
+    size: int = Field(default=EVENT_API_DEFAULT_BATCH_SIZE, ge=0, le=EVENT_API_MAX_BATCH_SIZE)
 
 
 class GetEventsResponse(Response):
diff --git a/hathor/transaction/resources/graphviz.py b/hathor/transaction/resources/graphviz.py
index ee922608f..34ca47974 100644
--- a/hathor/transaction/resources/graphviz.py
+++ b/hathor/transaction/resources/graphviz.py
@@ -20,15 +20,13 @@
 
 from hathor.api_util import Resource, get_args, parse_int, set_cors, validate_tx_hash
 from hathor.cli.openapi_files.register import register_resource
-from hathor.conf import HathorSettings
+from hathor.conf.get_settings import get_global_settings
 from hathor.graphviz import GraphvizVisualizer
 from hathor.util import json_dumpb
 
 if TYPE_CHECKING:
     from hathor.manager import HathorManager  # noqa: F401
 
-settings = HathorSettings()
-
 
 class FileFormat(Enum):
     PDF = 'pdf'
@@ -59,6 +57,7 @@ def __init__(self, manager: 'HathorManager', *, format: Union[FileFormat, str]):
         # Important to have the manager so we can know the tx_storage
         self.manager = manager
         self.format: FileFormat = FileFormat(format)
+        self._settings = get_global_settings()
 
     def render_GET(self, request):
         deferred = threads.deferToThread(self._render_GET_thread, request)
@@ -238,7 +237,7 @@ def _render_GET_thread(self, request: Request) -> bytes:
             return json_dumpb({'success': False, 'message': message})
 
         graph_type = args[b'graph_type'][0].decode('utf-8')
-        max_level = parse_int(args[b'max_level'][0], cap=settings.MAX_GRAPH_LEVEL)
+        max_level = parse_int(args[b'max_level'][0], cap=self._settings.MAX_GRAPH_LEVEL)
         tx = tx_storage.get_transaction(bytes.fromhex(tx_hex))
 
         graphviz = GraphvizVisualizer(tx_storage)
@@ -319,7 +318,7 @@ def _render_GET_thread(self, request: Request) -> bytes:
             'name': 'max_level',
             'in': 'query',
             'description': ('How many levels the neighbor can appear in the graph.'
- 'Max level is {}'.format(settings.MAX_GRAPH_LEVEL)), + 'Max level is 3'), 'required': True, 'schema': { 'type': 'int' diff --git a/tests/resources/wallet/test_search_address.py b/tests/resources/wallet/test_search_address.py index 01892526d..37d6908b3 100644 --- a/tests/resources/wallet/test_search_address.py +++ b/tests/resources/wallet/test_search_address.py @@ -1,6 +1,5 @@ from twisted.internet.defer import inlineCallbacks -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction.scripts import parse_address_script @@ -9,8 +8,6 @@ from tests.resources.base_resource import StubSite, _BaseResourceTest from tests.utils import add_blocks_unlock_reward, create_tokens -settings = HathorSettings() - class BaseSearchAddressTest(_BaseResourceTest._ResourceTest): __test__ = False @@ -108,12 +105,12 @@ def test_address_balance(self): data = response.json_value() self.assertTrue(data['success']) # Genesis - token deposit + blocks mined - HTR_value = settings.GENESIS_TOKENS - 1 + (settings.INITIAL_TOKENS_PER_BLOCK * 5) + HTR_value = self._settings.GENESIS_TOKENS - 1 + (self._settings.INITIAL_TOKENS_PER_BLOCK * 5) self.assertEqual(data['total_transactions'], 6) # 5 blocks mined + token creation tx - self.assertIn(settings.HATHOR_TOKEN_UID.hex(), data['tokens_data']) + self.assertIn(self._settings.HATHOR_TOKEN_UID.hex(), data['tokens_data']) self.assertIn(self.token_uid.hex(), data['tokens_data']) - self.assertEqual(HTR_value, data['tokens_data'][settings.HATHOR_TOKEN_UID.hex()]['received']) - self.assertEqual(0, data['tokens_data'][settings.HATHOR_TOKEN_UID.hex()]['spent']) + self.assertEqual(HTR_value, data['tokens_data'][self._settings.HATHOR_TOKEN_UID.hex()]['received']) + self.assertEqual(0, data['tokens_data'][self._settings.HATHOR_TOKEN_UID.hex()]['spent']) self.assertEqual(100, data['tokens_data'][self.token_uid.hex()]['received']) self.assertEqual(0, data['tokens_data'][self.token_uid.hex()]['spent']) diff --git a/tests/resources/wallet/test_thin_wallet.py b/tests/resources/wallet/test_thin_wallet.py index f14a7733c..ed1710c7e 100644 --- a/tests/resources/wallet/test_thin_wallet.py +++ b/tests/resources/wallet/test_thin_wallet.py @@ -2,7 +2,6 @@ from twisted.internet.defer import inlineCallbacks -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput @@ -17,8 +16,6 @@ from tests.resources.base_resource import StubSite, TestDummyRequest, _BaseResourceTest from tests.utils import add_blocks_unlock_reward, add_new_tx, create_tokens -settings = HathorSettings() - class BaseSendTokensTest(_BaseResourceTest._ResourceTest): __test__ = False @@ -44,7 +41,7 @@ def test_post(self): add_blocks_unlock_reward(self.manager) blocks_tokens = [sum(txout.value for txout in blk.outputs) for blk in blocks] - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID].available, sum(blocks_tokens)) + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID].available, sum(blocks_tokens)) # Options yield self.web.options('thin_wallet/send_tokens') @@ -116,7 +113,10 @@ def test_post(self): self.clock.advance(5) # Check if tokens were really sent - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID].available, sum(blocks_tokens[:-1])) + self.assertEqual( + 
self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID].available, + sum(blocks_tokens[:-1]) + ) response_history = yield self.web_address_history.get( 'thin_wallet/address_history', { @@ -151,7 +151,7 @@ def test_post(self): # # # Making pow threads full # deferreds = [] -# for x in range(settings.MAX_POW_THREADS): +# for x in range(self._settings.MAX_POW_THREADS): # d = self.web.post('thin_wallet/send_tokens', {'tx_hex': get_new_tx_struct(50)}) # d.addErrback(lambda err: None) # deferreds.append(d) @@ -208,7 +208,7 @@ def test_history_paginate(self): new_blocks = add_new_blocks( self.manager, - settings.MAX_TX_ADDRESSES_HISTORY, + self._settings.MAX_TX_ADDRESSES_HISTORY, advance_clock=1, address=address_bytes ) @@ -222,16 +222,16 @@ def test_history_paginate(self): ) response_data = response_history.json_value() - self.assertEqual(len(response_data['history']), settings.MAX_TX_ADDRESSES_HISTORY) + self.assertEqual(len(response_data['history']), self._settings.MAX_TX_ADDRESSES_HISTORY) self.assertTrue(response_data['has_more']) self.assertEqual(response_data['first_address'], address) # Test paginate with big txs - tx_count = math.ceil(settings.MAX_INPUTS_OUTPUTS_ADDRESS_HISTORY / settings.MAX_NUM_INPUTS) + tx_count = math.ceil(self._settings.MAX_INPUTS_OUTPUTS_ADDRESS_HISTORY / self._settings.MAX_NUM_INPUTS) blocks.extend(new_blocks) new_blocks = add_new_blocks( self.manager, - tx_count*settings.MAX_NUM_INPUTS - len(blocks), + tx_count*self._settings.MAX_NUM_INPUTS - len(blocks), advance_clock=1, address=address_bytes ) @@ -240,8 +240,8 @@ def test_history_paginate(self): add_blocks_unlock_reward(self.manager) for i in range(tx_count): - start_index = i*settings.MAX_NUM_INPUTS - end_index = start_index + settings.MAX_NUM_INPUTS + start_index = i*self._settings.MAX_NUM_INPUTS + end_index = start_index + self._settings.MAX_NUM_INPUTS amount = sum([b.outputs[0].value for b in blocks[start_index:end_index]]) add_new_tx(self.manager, random_address, amount, advance_clock=1) diff --git a/tests/tx/test_blockchain.py b/tests/tx/test_blockchain.py index 02ec4cd07..2d03794ff 100644 --- a/tests/tx/test_blockchain.py +++ b/tests/tx/test_blockchain.py @@ -1,6 +1,5 @@ from itertools import chain -from hathor.conf import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode from hathor.simulator.utils import add_new_blocks from hathor.transaction import sum_weights @@ -8,8 +7,6 @@ from tests import unittest from tests.utils import add_new_transactions -settings = HathorSettings() - class BaseBlockchainTestCase(unittest.TestCase): __test__ = False @@ -31,7 +28,7 @@ def setUp(self): self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] - self.daa = DifficultyAdjustmentAlgorithm(settings=settings) + self.daa = DifficultyAdjustmentAlgorithm(settings=self._settings) def test_single_chain(self): """ All new blocks belong to case (i). 
@@ -358,20 +355,20 @@ def test_block_height(self): def test_tokens_issued_per_block(self): manager = self.create_peer('testnet', tx_storage=self.tx_storage) # this test is pretty dumb in that it test every possible height until halving has long stopped - initial_reward = settings.INITIAL_TOKENS_PER_BLOCK - final_reward = settings.MINIMUM_TOKENS_PER_BLOCK + initial_reward = self._settings.INITIAL_TOKENS_PER_BLOCK + final_reward = self._settings.MINIMUM_TOKENS_PER_BLOCK expected_reward = initial_reward height = 1 # check that there are BLOCKS_PER_HALVING with each reward, starting at the first rewardable block (height=1) - for _i_halving in range(0, settings.MAXIMUM_NUMBER_OF_HALVINGS): - for _i_block in range(0, settings.BLOCKS_PER_HALVING): + for _i_halving in range(0, self._settings.MAXIMUM_NUMBER_OF_HALVINGS): + for _i_block in range(0, self._settings.BLOCKS_PER_HALVING): reward = manager.get_tokens_issued_per_block(height) self.assertEqual(reward, expected_reward, f'reward at height {height}') height += 1 expected_reward /= 2 self.assertEqual(expected_reward, final_reward) # check that halving stops, for at least two "halving rounds" - for _i_block in range(0, 2 * settings.BLOCKS_PER_HALVING): + for _i_block in range(0, 2 * self._settings.BLOCKS_PER_HALVING): reward = manager.get_tokens_issued_per_block(height) self.assertEqual(reward, expected_reward, f'reward at height {height}') height += 1 @@ -380,7 +377,7 @@ def test_block_rewards(self): # even dumber test that only check if manager.get_tokens_issued_per_block was used correctly for a really large # number of blocks, probably not worth running all the time manager = self.create_peer('testnet', tx_storage=self.tx_storage) - block_count = (settings.MAXIMUM_NUMBER_OF_HALVINGS + 1) * settings.BLOCKS_PER_HALVING + block_count = (self._settings.MAXIMUM_NUMBER_OF_HALVINGS + 1) * self._settings.BLOCKS_PER_HALVING blocks = add_new_blocks(manager, block_count, advance_clock=block_count * 30) for block in blocks: outputs = block.outputs @@ -393,8 +390,8 @@ def test_daa_sanity(self): # sanity test the DAA manager = self.create_peer('testnet', tx_storage=self.tx_storage) manager.daa.TEST_MODE = TestMode.DISABLED - N = settings.BLOCK_DIFFICULTY_N_BLOCKS - T = settings.AVG_TIME_BETWEEN_BLOCKS + N = self._settings.BLOCK_DIFFICULTY_N_BLOCKS + T = self._settings.AVG_TIME_BETWEEN_BLOCKS manager.avg_time_between_blocks = T # stabilize weight on 2 and lower the minimum to 1, so it can vary around 2 manager.min_block_weight = 2 @@ -419,17 +416,17 @@ def test_daa_sanity(self): def test_daa_weight_decay_amount(self): self.daa.TEST_MODE = TestMode.DISABLED - amount = settings.WEIGHT_DECAY_AMOUNT + amount = self._settings.WEIGHT_DECAY_AMOUNT - for distance in range(0, settings.WEIGHT_DECAY_ACTIVATE_DISTANCE, 10): + for distance in range(0, self._settings.WEIGHT_DECAY_ACTIVATE_DISTANCE, 10): self.assertEqual(self.daa.get_weight_decay_amount(distance), 0) - distance = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE - 1 + distance = self._settings.WEIGHT_DECAY_ACTIVATE_DISTANCE - 1 self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), 0) - distance = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE + distance = self._settings.WEIGHT_DECAY_ACTIVATE_DISTANCE for k in range(1, 11): - for _ in range(settings.WEIGHT_DECAY_WINDOW_SIZE): + for _ in range(self._settings.WEIGHT_DECAY_WINDOW_SIZE): self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), k * amount) distance += 1 self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), 11 * amount) @@ 
-437,34 +434,38 @@ def test_daa_weight_decay_amount(self): def test_daa_weight_decay_blocks(self): manager = self.create_peer('testnet', tx_storage=self.tx_storage) manager.daa.TEST_MODE = TestMode.DISABLED - amount = settings.WEIGHT_DECAY_AMOUNT + amount = self._settings.WEIGHT_DECAY_AMOUNT - manager.daa.AVG_TIME_BETWEEN_BLOCKS = settings.AVG_TIME_BETWEEN_BLOCKS - manager.daa.MIN_BLOCK_WEIGHT = 2 + 2 * settings.WEIGHT_DECAY_AMOUNT - add_new_blocks(manager, 2 * settings.BLOCK_DIFFICULTY_N_BLOCKS, advance_clock=settings.AVG_TIME_BETWEEN_BLOCKS) + manager.daa.AVG_TIME_BETWEEN_BLOCKS = self._settings.AVG_TIME_BETWEEN_BLOCKS + manager.daa.MIN_BLOCK_WEIGHT = 2 + 2 * self._settings.WEIGHT_DECAY_AMOUNT + add_new_blocks( + manager, + 2 * self._settings.BLOCK_DIFFICULTY_N_BLOCKS, + advance_clock=self._settings.AVG_TIME_BETWEEN_BLOCKS + ) manager.daa.MIN_BLOCK_WEIGHT = 1 base_weight = manager.generate_mining_block().weight self.assertGreater(base_weight, manager.daa.MIN_BLOCK_WEIGHT) - add_new_blocks(manager, 20, advance_clock=settings.AVG_TIME_BETWEEN_BLOCKS) + add_new_blocks(manager, 20, advance_clock=self._settings.AVG_TIME_BETWEEN_BLOCKS) - dt = settings.AVG_TIME_BETWEEN_BLOCKS # the latest call to add_new_blocks will advance the clock - while dt < settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: + dt = self._settings.AVG_TIME_BETWEEN_BLOCKS # the latest call to add_new_blocks will advance the clock + while dt < self._settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: weight = manager.generate_mining_block().weight self.assertAlmostEqual(weight, base_weight) manager.reactor.advance(1) dt += 1 dt = 0 - while dt < settings.WEIGHT_DECAY_WINDOW_SIZE: + while dt < self._settings.WEIGHT_DECAY_WINDOW_SIZE: weight = manager.generate_mining_block().weight self.assertAlmostEqual(weight, base_weight - amount) manager.reactor.advance(1) dt += 1 dt = 0 - while dt < settings.WEIGHT_DECAY_WINDOW_SIZE: + while dt < self._settings.WEIGHT_DECAY_WINDOW_SIZE: weight = manager.generate_mining_block().weight self.assertAlmostEqual(weight, base_weight - 2*amount) manager.reactor.advance(1) diff --git a/tests/tx/test_indexes.py b/tests/tx/test_indexes.py index b26652a9a..b28a7cfc4 100644 --- a/tests/tx/test_indexes.py +++ b/tests/tx/test_indexes.py @@ -1,6 +1,5 @@ import pytest -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.graphviz import GraphvizVisualizer from hathor.simulator.utils import add_new_block, add_new_blocks @@ -11,8 +10,6 @@ from tests import unittest from tests.utils import HAS_ROCKSDB, add_blocks_unlock_reward, add_custom_tx, add_new_tx, get_genesis_key -settings = HathorSettings() - class BaseIndexesTest(unittest.TestCase): __test__ = False @@ -140,7 +137,7 @@ def test_utxo_index_genesis(self): from hathor.indexes.utxo_index import UtxoIndexItem from tests.utils import GENESIS_ADDRESS_B58 - HTR_UID = settings.HATHOR_TOKEN_UID + HTR_UID = self._settings.HATHOR_TOKEN_UID assert self.tx_storage.indexes is not None utxo_index = self.tx_storage.indexes.utxo @@ -149,35 +146,35 @@ def test_utxo_index_genesis(self): expected_genesis_utxos = [ UtxoIndexItem( token_uid=HTR_UID, - tx_id=settings.GENESIS_BLOCK_HASH, + tx_id=self._settings.GENESIS_BLOCK_HASH, index=0, address=GENESIS_ADDRESS_B58, - amount=settings.GENESIS_TOKENS, + amount=self._settings.GENESIS_TOKENS, timelock=None, - heightlock=settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=self._settings.REWARD_SPEND_MIN_BLOCKS, ), ] # height just not enough should be empty self.assertEqual( - 
list(utxo_index.iter_utxos(address=GENESIS_ADDRESS_B58, token_uid=settings.HATHOR_TOKEN_UID, - target_amount=settings.GENESIS_TOKEN_UNITS, - target_height=settings.REWARD_SPEND_MIN_BLOCKS - 1)), + list(utxo_index.iter_utxos(address=GENESIS_ADDRESS_B58, token_uid=self._settings.HATHOR_TOKEN_UID, + target_amount=self._settings.GENESIS_TOKEN_UNITS, + target_height=self._settings.REWARD_SPEND_MIN_BLOCKS - 1)), [], ) # height is now enough self.assertEqual( - list(utxo_index.iter_utxos(address=GENESIS_ADDRESS_B58, token_uid=settings.HATHOR_TOKEN_UID, - target_amount=settings.GENESIS_TOKEN_UNITS, - target_height=settings.REWARD_SPEND_MIN_BLOCKS)), + list(utxo_index.iter_utxos(address=GENESIS_ADDRESS_B58, token_uid=self._settings.HATHOR_TOKEN_UID, + target_amount=self._settings.GENESIS_TOKEN_UNITS, + target_height=self._settings.REWARD_SPEND_MIN_BLOCKS)), expected_genesis_utxos, ) # otherwise we can leave out the height and it should give the utxos self.assertEqual( - list(utxo_index.iter_utxos(address=GENESIS_ADDRESS_B58, token_uid=settings.HATHOR_TOKEN_UID, - target_amount=settings.GENESIS_TOKEN_UNITS)), + list(utxo_index.iter_utxos(address=GENESIS_ADDRESS_B58, token_uid=self._settings.HATHOR_TOKEN_UID, + target_amount=self._settings.GENESIS_TOKEN_UNITS)), expected_genesis_utxos, ) @@ -200,7 +197,7 @@ def check_utxos(*args): actual = list(utxo_index.iter_utxos(address=address, target_amount=9999999)) expected = [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=tx_id, index=index, address=address, @@ -304,13 +301,13 @@ def test_utxo_index_simple(self): list(utxo_index.iter_utxos(address=address, target_amount=1)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=b.hash, index=0, address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks[:1] ] ) @@ -319,13 +316,13 @@ def test_utxo_index_simple(self): list(utxo_index.iter_utxos(address=address, target_amount=6500)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=b.hash, index=0, address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks[4:1:-1] ] ) @@ -334,13 +331,13 @@ def test_utxo_index_simple(self): list(utxo_index.iter_utxos(address=address, target_amount=25600)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=b.hash, index=0, address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks[::-1] ] ) @@ -349,13 +346,13 @@ def test_utxo_index_simple(self): list(utxo_index.iter_utxos(address=address, target_amount=30000)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=b.hash, index=0, address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks[::-1] ] ) @@ -394,7 +391,7 @@ def test_utxo_index_limits(self): print('check target_amount =', 
target_amount) expected = [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=tx.hash, index=1, address=address, @@ -416,11 +413,11 @@ def test_utxo_index_limits(self): self.assertEqual(actual, expected) # now check that at most 255 utxos will be returned when we check for a large enough amount - max_outputs = settings.MAX_NUM_OUTPUTS + max_outputs = self._settings.MAX_NUM_OUTPUTS actual = list(utxo_index.iter_utxos(address=address, target_amount=sum(range(301)))) expected = [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=tx.hash, index=1, address=address, @@ -461,13 +458,13 @@ def test_utxo_index_after_push_tx(self): list(utxo_index.iter_utxos(address=address, target_amount=1)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=b.hash, index=0, address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks ] ) @@ -499,7 +496,7 @@ def test_utxo_index_after_push_tx(self): list(utxo_index.iter_utxos(address=address1, target_amount=6400)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=tx1.hash, index=0, address=address1, @@ -533,13 +530,13 @@ def test_utxo_index_last(self): list(utxo_index.iter_utxos(address=address, target_amount=1)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=b.hash, index=0, address=address, amount=6400, timelock=None, - heightlock=b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + heightlock=b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, ) for b in blocks ] ) @@ -571,7 +568,7 @@ def test_utxo_index_last(self): list(utxo_index.iter_utxos(address=address1, target_amount=transfer_value)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=tx1.hash, index=1, address=address1, @@ -586,7 +583,7 @@ def test_utxo_index_last(self): list(utxo_index.iter_utxos(address=address, target_amount=change_value)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=tx1.hash, index=0, address=address, @@ -603,7 +600,7 @@ def test_utxo_index_last(self): list(utxo_index.iter_utxos(address=address1, target_amount=1)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=tx1.hash, index=1, address=address1, @@ -618,7 +615,7 @@ def test_utxo_index_last(self): list(utxo_index.iter_utxos(address=address, target_amount=1)), [ UtxoIndexItem( - token_uid=settings.HATHOR_TOKEN_UID, + token_uid=self._settings.HATHOR_TOKEN_UID, tx_id=tx1.hash, index=0, address=address, @@ -659,7 +656,7 @@ def test_addresses_index_last(self): self.assertEqual(addresses_indexes.get_sorted_from_address(address), []) # XXX: since we didn't add any multisig address, this is guaranteed to be reach the tail end of the index - assert settings.P2PKH_VERSION_BYTE[0] < settings.MULTISIG_VERSION_BYTE[0] + assert self._settings.P2PKH_VERSION_BYTE[0] < self._settings.MULTISIG_VERSION_BYTE[0] # generating a multisig address: address = generate_multisig_address(generate_multisig_redeem_script(2, [ @@ -676,7 +673,7 @@ def test_height_index(self): # make height 100 H = 100 - blocks = 
add_new_blocks(self.manager, H - settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=15) + blocks = add_new_blocks(self.manager, H - self._settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=15) height_index = self.manager.tx_storage.indexes.height self.assertEqual(height_index.get_height_tip(), HeightInfo(100, blocks[-1].hash)) self.assertEqual(height_index.get_n_height_tips(1), [HeightInfo(100, blocks[-1].hash)]) diff --git a/tests/tx/test_indexes2.py b/tests/tx/test_indexes2.py index edca6c0c7..b8df4d9eb 100644 --- a/tests/tx/test_indexes2.py +++ b/tests/tx/test_indexes2.py @@ -3,15 +3,12 @@ import pytest -from hathor.conf import HathorSettings from tests import unittest from tests.utils import HAS_ROCKSDB if TYPE_CHECKING: # pragma: no cover import rocksdb -settings = HathorSettings() - class FakeTransaction(NamedTuple): hash: bytes @@ -25,7 +22,7 @@ def setUp(self): # how many transactions will be generated on the same timestamp before increasing it by 1 self.transactions = [] repetitions = [1, 1, 10, 10, 10, 2, 1, 0, 0, 5, 5, 5, 0, 1, 1, 10, 10, 10, 1, 2, 3, 1, 100, 100, 1, 100, 0, 1] - ts = settings.GENESIS_BLOCK_TIMESTAMP + ts = self._settings.GENESIS_BLOCK_TIMESTAMP for rep in repetitions: for _ in range(rep): tx = FakeTransaction(self.rng.randbytes(32), ts) diff --git a/tests/tx/test_mining.py b/tests/tx/test_mining.py index 3c55bc527..a6731dbad 100644 --- a/tests/tx/test_mining.py +++ b/tests/tx/test_mining.py @@ -1,14 +1,11 @@ from typing import Any -from hathor.conf import HathorSettings from hathor.mining import BlockTemplate from hathor.simulator.utils import add_new_blocks from hathor.transaction import Block, sum_weights from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest -settings = HathorSettings() - class BaseMiningTest(unittest.TestCase): """ @@ -45,10 +42,10 @@ def test_block_template_after_genesis(self) -> None: self.assertEqual(block_templates[0], BlockTemplate( versions={0, 3}, - reward=settings.INITIAL_TOKEN_UNITS_PER_BLOCK * 100, + reward=self._settings.INITIAL_TOKEN_UNITS_PER_BLOCK * 100, weight=1.0, timestamp_now=int(manager.reactor.seconds()), - timestamp_min=settings.GENESIS_BLOCK_TIMESTAMP + 3, + timestamp_min=self._settings.GENESIS_BLOCK_TIMESTAMP + 3, timestamp_max=timestamp_max, # no limit for next block after genesis # parents=[tx.hash for tx in self.genesis_blocks + self.genesis_txs], parents=block_templates[0].parents, @@ -68,13 +65,13 @@ def test_regular_block_template(self) -> None: self.assertEqual(len(block_templates), 1) timestamp_max = min( - blocks[-1].timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1, + blocks[-1].timestamp + self._settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1, int(manager.reactor.seconds()) + self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED ) self.assertEqual(block_templates[0], BlockTemplate( versions={0, 3}, - reward=settings.INITIAL_TOKEN_UNITS_PER_BLOCK * 100, + reward=self._settings.INITIAL_TOKEN_UNITS_PER_BLOCK * 100, weight=1.0, timestamp_now=int(manager.reactor.seconds()), timestamp_min=blocks[-1].timestamp + 1, diff --git a/tests/tx/test_multisig.py b/tests/tx/test_multisig.py index e6a56105c..22748266e 100644 --- a/tests/tx/test_multisig.py +++ b/tests/tx/test_multisig.py @@ -1,6 +1,5 @@ import base58 -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address, get_private_key_from_bytes, get_public_key_bytes_compressed from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput @@ -11,8 +10,6 @@ from tests import 
unittest from tests.utils import add_blocks_unlock_reward -settings = HathorSettings() - class BaseMultisigTestCase(unittest.TestCase): __test__ = False @@ -58,7 +55,7 @@ def test_spend_multisig(self): # Adding funds to the wallet blocks = add_new_blocks(self.manager, 2, advance_clock=15) add_blocks_unlock_reward(self.manager) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, sum(blk.outputs[0].value for blk in blocks))) first_block_amount = blocks[0].outputs[0].value @@ -77,7 +74,7 @@ def test_spend_multisig(self): self.manager.propagate_tx(tx1) self.clock.advance(10) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, first_block_amount)) # Then we create a new tx that spends this tokens from multisig wallet @@ -126,7 +123,7 @@ def test_spend_multisig(self): # Now we propagate the correct self.assertTrue(self.manager.propagate_tx(tx)) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, first_block_amount + 300)) # Testing the MultiSig class methods diff --git a/tests/tx/test_reward_lock.py b/tests/tx/test_reward_lock.py index 14f709a69..c321b5beb 100644 --- a/tests/tx/test_reward_lock.py +++ b/tests/tx/test_reward_lock.py @@ -1,6 +1,5 @@ import pytest -from hathor.conf import HathorSettings from hathor.crypto.util import get_address_from_public_key from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput @@ -11,8 +10,6 @@ from tests import unittest from tests.utils import add_blocks_unlock_reward, get_genesis_key -settings = HathorSettings() - class BaseTransactionTest(unittest.TestCase): __test__ = False @@ -41,7 +38,7 @@ def _add_reward_block(self): self.manager.cpu_mining_service.resolve(reward_block) self.assertTrue(self.manager.propagate_tx(reward_block)) # XXX: calculate unlock height AFTER adding the block so the height is correctly calculated - unlock_height = reward_block.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS + 1 + unlock_height = reward_block.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS + 1 return reward_block, unlock_height def _spend_reward_tx(self, manager, reward_block): @@ -70,7 +67,7 @@ def test_classic_reward_lock(self): reward_block, unlock_height = self._add_reward_block() # reward cannot be spent while not enough blocks are added - for _ in range(settings.REWARD_SPEND_MIN_BLOCKS): + for _ in range(self._settings.REWARD_SPEND_MIN_BLOCKS): tx = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.get_metadata().min_height, unlock_height) with self.assertRaises(RewardLocked): @@ -87,7 +84,7 @@ def test_block_with_not_enough_height(self): reward_block, unlock_height = self._add_reward_block() # add one less block than needed - add_new_blocks(self.manager, settings.REWARD_SPEND_MIN_BLOCKS - 1, advance_clock=1) + add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS - 1, advance_clock=1) # add tx bypassing reward-lock verification # XXX: this situation is impossible in practice, but we force it to test that when a block tries to confirms a @@ -105,7 +102,7 @@ def test_block_with_enough_height(self): reward_block, unlock_height = self._add_reward_block() # add just enough blocks - 
add_new_blocks(self.manager, settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) + add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) # add tx that spends the reward tx = self._spend_reward_tx(self.manager, reward_block) @@ -122,7 +119,7 @@ def test_mempool_tx_with_not_enough_height(self): reward_block, unlock_height = self._add_reward_block() # add one less block than needed - add_new_blocks(self.manager, settings.REWARD_SPEND_MIN_BLOCKS - 1, advance_clock=1) + add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS - 1, advance_clock=1) # add tx to mempool, must fail reward-lock verification tx = self._spend_reward_tx(self.manager, reward_block) @@ -137,7 +134,7 @@ def test_mempool_tx_with_enough_height(self): reward_block, unlock_height = self._add_reward_block() # add just enough blocks - add_new_blocks(self.manager, settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) + add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) # add tx that spends the reward, must not fail tx = self._spend_reward_tx(self.manager, reward_block) @@ -149,7 +146,7 @@ def test_mempool_tx_invalid_after_reorg(self): reward_block, unlock_height = self._add_reward_block() # add just enough blocks - blocks = add_new_blocks(self.manager, settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) + blocks = add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) # add tx that spends the reward, must not fail tx = self._spend_reward_tx(self.manager, reward_block) diff --git a/tests/tx/test_timelock.py b/tests/tx/test_timelock.py index 711d46cff..609248d07 100644 --- a/tests/tx/test_timelock.py +++ b/tests/tx/test_timelock.py @@ -1,4 +1,3 @@ -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction @@ -7,8 +6,6 @@ from tests import unittest from tests.utils import add_blocks_unlock_reward -settings = HathorSettings() - class BaseTimelockTransactionTestCase(unittest.TestCase): __test__ = False @@ -45,7 +42,7 @@ def test_timelock(self): self.manager.cpu_mining_service.resolve(tx1) self.manager.propagate_tx(tx1) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens) - 500)) self.clock.advance(1) @@ -64,7 +61,7 @@ def test_timelock(self): self.manager.cpu_mining_service.resolve(tx2) propagated = self.manager.propagate_tx(tx2) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens) - 500)) self.assertFalse(propagated) @@ -83,7 +80,7 @@ def test_timelock(self): tx3.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx3) propagated = self.manager.propagate_tx(tx3, False) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens) - 500 - 700)) self.assertTrue(propagated) self.clock.advance(1) @@ -103,7 +100,7 @@ def test_timelock(self): tx4.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx4) propagated = self.manager.propagate_tx(tx4, False) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + 
self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens[:3]))) self.assertTrue(propagated) @@ -111,7 +108,7 @@ def test_timelock(self): tx2.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx2) propagated = self.manager.propagate_tx(tx2, False) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, sum(blocks_tokens[:3]))) self.assertTrue(propagated) @@ -136,7 +133,7 @@ def test_choose_inputs(self): self.manager.propagate_tx(tx1) self.clock.advance(1) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(blocks_tokens[0], 0)) outputs = [WalletOutputInfo(address=decode_address(address), value=blocks_tokens[0], timelock=None)] @@ -153,7 +150,7 @@ def test_choose_inputs(self): self.manager.cpu_mining_service.resolve(tx2) self.manager.propagate_tx(tx2) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, blocks_tokens[0])) diff --git a/tests/tx/test_tokens.py b/tests/tx/test_tokens.py index f4626e3f8..f84158e24 100644 --- a/tests/tx/test_tokens.py +++ b/tests/tx/test_tokens.py @@ -2,7 +2,6 @@ import pytest -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.indexes.tokens_index import TokenUtxoInfo from hathor.transaction import Block, Transaction, TxInput, TxOutput @@ -13,8 +12,6 @@ from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_double_spending, create_tokens, get_genesis_key -settings = HathorSettings() - class BaseTokenTest(unittest.TestCase): __test__ = False @@ -453,30 +450,30 @@ def update_tx(tx): tx = create_tokens(self.manager, self.address_b58) # max token name length - tx.token_name = 'a' * settings.MAX_LENGTH_TOKEN_NAME + tx.token_name = 'a' * self._settings.MAX_LENGTH_TOKEN_NAME update_tx(tx) self.manager.verification_service.verify(tx) # max token symbol length - tx.token_symbol = 'a' * settings.MAX_LENGTH_TOKEN_SYMBOL + tx.token_symbol = 'a' * self._settings.MAX_LENGTH_TOKEN_SYMBOL update_tx(tx) self.manager.verification_service.verify(tx) # long token name - tx.token_name = 'a' * (settings.MAX_LENGTH_TOKEN_NAME + 1) + tx.token_name = 'a' * (self._settings.MAX_LENGTH_TOKEN_NAME + 1) update_tx(tx) with self.assertRaises(TransactionDataError): self.manager.verification_service.verify(tx) # long token symbol tx.token_name = 'ValidName' - tx.token_symbol = 'a' * (settings.MAX_LENGTH_TOKEN_SYMBOL + 1) + tx.token_symbol = 'a' * (self._settings.MAX_LENGTH_TOKEN_SYMBOL + 1) update_tx(tx) with self.assertRaises(TransactionDataError): self.manager.verification_service.verify(tx) # Hathor token name - tx.token_name = settings.HATHOR_TOKEN_NAME + tx.token_name = self._settings.HATHOR_TOKEN_NAME tx.token_symbol = 'TST' update_tx(tx) with self.assertRaises(TransactionDataError): @@ -484,7 +481,7 @@ def update_tx(tx): # Hathor token symbol tx.token_name = 'Test' - tx.token_symbol = settings.HATHOR_TOKEN_SYMBOL + tx.token_symbol = self._settings.HATHOR_TOKEN_SYMBOL update_tx(tx) with self.assertRaises(TransactionDataError): self.manager.verification_service.verify(tx) diff --git a/tests/tx/test_tx_storage.py b/tests/tx/test_tx_storage.py index 7d25f97bd..e12880cd8 100644 --- 
a/tests/tx/test_tx_storage.py +++ b/tests/tx/test_tx_storage.py @@ -8,7 +8,6 @@ from twisted.internet.threads import deferToThread from twisted.trial import unittest -from hathor.conf import HathorSettings from hathor.daa import TestMode from hathor.simulator.utils import add_new_blocks from hathor.transaction import Block, Transaction, TxInput, TxOutput @@ -25,8 +24,6 @@ create_tokens, ) -settings = HathorSettings() - class BaseTransactionStorageTest(unittest.TestCase): __test__ = False @@ -46,6 +43,7 @@ def setUp(self): self.pubsub = artifacts.pubsub self.manager = artifacts.manager self.tx_storage = artifacts.tx_storage + self._settings = artifacts.settings assert artifacts.wallet is not None @@ -216,7 +214,7 @@ def test_pre_save_validation_invalid_tx_1(self): self.tx_storage.save_transaction(self.tx) def test_pre_save_validation_invalid_tx_2(self): - self.tx.get_metadata().add_voided_by(settings.PARTIALLY_VALIDATED_ID) + self.tx.get_metadata().add_voided_by(self._settings.PARTIALLY_VALIDATED_ID) with self.assertRaises(AssertionError): with self.tx_storage.allow_partially_validated_context(): # XXX: avoid using validate_save because an exception could be raised for other reasons @@ -224,14 +222,14 @@ def test_pre_save_validation_invalid_tx_2(self): def test_pre_save_validation_success(self): self.tx.get_metadata().validation = ValidationState.BASIC - self.tx.get_metadata().add_voided_by(settings.PARTIALLY_VALIDATED_ID) + self.tx.get_metadata().add_voided_by(self._settings.PARTIALLY_VALIDATED_ID) with self.tx_storage.allow_partially_validated_context(): # XXX: it's good to use validate_save now since we don't expect any exceptions to be raised self.validate_save(self.tx) def test_allow_scope_get_all_transactions(self): self.tx.get_metadata().validation = ValidationState.BASIC - self.tx.get_metadata().add_voided_by(settings.PARTIALLY_VALIDATED_ID) + self.tx.get_metadata().add_voided_by(self._settings.PARTIALLY_VALIDATED_ID) with self.tx_storage.allow_partially_validated_context(): self.tx_storage.save_transaction(self.tx) only_valid_txs = list(self.tx_storage.get_all_transactions()) @@ -242,7 +240,7 @@ def test_allow_scope_get_all_transactions(self): def test_allow_scope_topological_sort_dfs(self): self.tx.get_metadata().validation = ValidationState.BASIC - self.tx.get_metadata().add_voided_by(settings.PARTIALLY_VALIDATED_ID) + self.tx.get_metadata().add_voided_by(self._settings.PARTIALLY_VALIDATED_ID) with self.tx_storage.allow_partially_validated_context(): self.tx_storage.save_transaction(self.tx) only_valid_txs = list(self.tx_storage._topological_sort_dfs()) @@ -254,7 +252,7 @@ def test_allow_scope_topological_sort_dfs(self): def test_allow_partially_validated_context(self): from hathor.transaction.storage.exceptions import TransactionNotInAllowedScopeError self.tx.get_metadata().validation = ValidationState.BASIC - self.tx.get_metadata().add_voided_by(settings.PARTIALLY_VALIDATED_ID) + self.tx.get_metadata().add_voided_by(self._settings.PARTIALLY_VALIDATED_ID) self.assertTrue(self.tx_storage.is_only_valid_allowed()) self.assertFalse(self.tx_storage.is_partially_validated_allowed()) self.assertFalse(self.tx_storage.is_invalid_allowed()) @@ -290,7 +288,7 @@ def test_allow_invalid_context(self): self.tx.get_metadata().validation = ValidationState.INVALID # XXX: should this apply to invalid too? 
note that we never save invalid transactions so using the # PARTIALLY_VALIDATED_ID marker is artificial just for testing - self.tx.get_metadata().add_voided_by(settings.PARTIALLY_VALIDATED_ID) + self.tx.get_metadata().add_voided_by(self._settings.PARTIALLY_VALIDATED_ID) self.assertTrue(self.tx_storage.is_only_valid_allowed()) self.assertFalse(self.tx_storage.is_partially_validated_allowed()) self.assertFalse(self.tx_storage.is_invalid_allowed()) diff --git a/tests/wallet/test_balance_update.py b/tests/wallet/test_balance_update.py index eb44d91ff..01bc0e337 100644 --- a/tests/wallet/test_balance_update.py +++ b/tests/wallet/test_balance_update.py @@ -1,4 +1,3 @@ -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput @@ -8,8 +7,6 @@ from tests import unittest from tests.utils import add_blocks_unlock_reward, create_tokens -settings = HathorSettings() - class BaseHathorSyncMethodsTestCase(unittest.TestCase): __test__ = False @@ -47,7 +44,7 @@ def test_balance_update1(self): # Tx2 is twin with tx1 but less acc weight, so it will get voided # Start balance - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) # Change of parents only, so it's a twin. @@ -68,7 +65,7 @@ def test_balance_update1(self): self.assertEqual(meta2.voided_by, {tx2.hash}) # Balance is the same - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) # Voided wallet history @@ -94,7 +91,7 @@ def test_balance_update2(self): # Tx2 is twin with tx1 with equal acc weight, so both will get voided # Start balance - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) # Change of parents only, so it's a twin. @@ -115,14 +112,14 @@ def test_balance_update2(self): self.assertEqual(meta2.voided_by, {tx2.hash}) # Balance changed - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, sum(self.blocks_tokens[:3]))) def test_balance_update3(self): # Tx2 is twin with tx1 with higher acc weight, so tx1 will get voided # Start balance - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) # Change of parents only, so it's a twin. 
@@ -144,7 +141,7 @@ def test_balance_update3(self): self.assertEqual(meta2.voided_by, None) # Balance is the same - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) def test_balance_update4(self): @@ -154,7 +151,7 @@ def test_balance_update4(self): self.manager.reactor.advance(1) # Start balance - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) address = self.manager.wallet.get_unused_address_bytes() @@ -199,7 +196,7 @@ def test_balance_update4(self): self.assertEqual(meta3.voided_by, {tx3.hash}) # Balance is the same - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) def test_balance_update5(self): @@ -210,7 +207,7 @@ def test_balance_update5(self): self.clock.advance(1) # Start balance - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) address = self.manager.wallet.get_unused_address_bytes() @@ -243,7 +240,7 @@ def test_balance_update5(self): self.assertEqual(meta3.twins, [self.tx1.hash]) # Balance is the same - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) def test_balance_update6(self): @@ -253,7 +250,7 @@ def test_balance_update6(self): self.manager.reactor.advance(1) # Start balance - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) # Change of parents only, so it's a twin. 
@@ -280,7 +277,7 @@ def test_balance_update6(self): self.run_to_completion() # Balance is the same - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance - 100)) def test_balance_update7(self): @@ -290,7 +287,7 @@ def test_balance_update7(self): self.manager.reactor.advance(1) # Start balance - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) address = self.manager.wallet.get_unused_address_bytes() @@ -324,12 +321,12 @@ def test_balance_update7(self): self.assertEqual(meta3.twins, [self.tx1.hash]) # Balance is the same - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) def test_balance_update_twin_tx(self): # Start balance - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) wallet_address = self.manager.wallet.get_unused_address() @@ -387,7 +384,7 @@ def test_balance_update_twin_tx(self): self.assertEqual(meta5.voided_by, {tx5.hash}) # Balance is the same - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.initial_balance)) def test_tokens_balance(self): @@ -405,7 +402,7 @@ def test_tokens_balance(self): # initial hathor balance # we don't consider HTR balance 0 because we transfer genesis tokens to this # wallet during token creation - hathor_balance = self.manager.wallet.balance[settings.HATHOR_TOKEN_UID] + hathor_balance = self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID] # transfer token to another wallet and check balance again parents = self.manager.get_new_tx_parents() @@ -435,9 +432,9 @@ def test_tokens_balance(self): # verify balance self.assertEqual(self.manager.wallet.balance[token_id], WalletBalance(0, amount - 30)) # hathor balance remains the same - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], hathor_balance) + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], hathor_balance) - balances_per_address = self.manager.wallet.get_balance_per_address(settings.HATHOR_TOKEN_UID) + balances_per_address = self.manager.wallet.get_balance_per_address(self._settings.HATHOR_TOKEN_UID) self.assertEqual(hathor_balance.available, sum(x for x in balances_per_address.values())) diff --git a/tests/wallet/test_wallet.py b/tests/wallet/test_wallet.py index bade3f519..f962b731f 100644 --- a/tests/wallet/test_wallet.py +++ b/tests/wallet/test_wallet.py @@ -4,7 +4,6 @@ from cryptography.hazmat.primitives import serialization -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address, get_address_b58_from_public_key, get_private_key_bytes from hathor.simulator.utils import add_new_block from hathor.transaction import Transaction, TxInput @@ -15,8 +14,6 @@ from tests import unittest from tests.utils import add_blocks_unlock_reward, create_tokens, get_genesis_key -settings = HathorSettings() - BLOCK_REWARD = 300 PASSWORD = b'passwd' @@ -76,9 +73,9 @@ def test_wallet_create_transaction(self): # wallet will receive genesis 
block and store in unspent_tx w.on_new_tx(genesis_block) for index in range(len(genesis_block.outputs)): - utxo = w.unspent_txs[settings.HATHOR_TOKEN_UID].get((genesis_block.hash, index)) + utxo = w.unspent_txs[self._settings.HATHOR_TOKEN_UID].get((genesis_block.hash, index)) self.assertIsNotNone(utxo) - self.assertEqual(w.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, genesis_value)) + self.assertEqual(w.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, genesis_value)) # create transaction spending this value, but sending to same wallet add_blocks_unlock_reward(self.manager) @@ -91,7 +88,7 @@ def test_wallet_create_transaction(self): self.storage.save_transaction(tx1) w.on_new_tx(tx1) self.assertEqual(len(w.spent_txs), 1) - self.assertEqual(w.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, genesis_value)) + self.assertEqual(w.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, genesis_value)) # pass inputs and outputs to prepare_transaction, but not the input keys # spend output last transaction @@ -107,7 +104,7 @@ def test_wallet_create_transaction(self): self.storage.save_transaction(tx2) w.on_new_tx(tx2) self.assertEqual(len(w.spent_txs), 2) - self.assertEqual(w.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, genesis_value)) + self.assertEqual(w.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, genesis_value)) # test keypair exception with self.assertRaises(WalletLocked): @@ -123,9 +120,9 @@ def test_block_increase_balance(self): tx = w.prepare_transaction(Transaction, inputs=[], outputs=[out]) tx.update_hash() w.on_new_tx(tx) - utxo = w.unspent_txs[settings.HATHOR_TOKEN_UID].get((tx.hash, 0)) + utxo = w.unspent_txs[self._settings.HATHOR_TOKEN_UID].get((tx.hash, 0)) self.assertIsNotNone(utxo) - self.assertEqual(w.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, BLOCK_REWARD)) + self.assertEqual(w.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, BLOCK_REWARD)) def test_locked(self): # generate a new block and check if we increase balance @@ -196,7 +193,7 @@ def test_create_token_transaction(self): address_b58 = self.manager.wallet.get_unused_address() address = decode_address(address_b58) - _, hathor_balance = self.manager.wallet.balance[settings.HATHOR_TOKEN_UID] + _, hathor_balance = self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID] # prepare tx with hathors and another token # hathor tx hathor_out = WalletOutputInfo(address, hathor_balance, None) @@ -222,7 +219,7 @@ def test_create_token_transaction(self): # make sure balance is the same and we've checked both balances did_enter = 0 for token_uid, value in token_dict.items(): - if token_uid == settings.HATHOR_TOKEN_UID: + if token_uid == self._settings.HATHOR_TOKEN_UID: self.assertEqual(value, hathor_balance) did_enter += 1 elif token_uid == token_uid: @@ -253,11 +250,11 @@ def test_maybe_spent_txs(self): self.assertEqual(len(tx1.inputs), 1) _input = tx1.inputs[0] key = (_input.tx_id, _input.index) - self.assertNotIn(key, w.unspent_txs[settings.HATHOR_TOKEN_UID]) - self.assertIn(key, w.maybe_spent_txs[settings.HATHOR_TOKEN_UID]) + self.assertNotIn(key, w.unspent_txs[self._settings.HATHOR_TOKEN_UID]) + self.assertIn(key, w.maybe_spent_txs[self._settings.HATHOR_TOKEN_UID]) self.run_to_completion() - self.assertIn(key, w.unspent_txs[settings.HATHOR_TOKEN_UID]) - self.assertEqual(0, len(w.maybe_spent_txs[settings.HATHOR_TOKEN_UID])) + self.assertIn(key, w.unspent_txs[self._settings.HATHOR_TOKEN_UID]) + self.assertEqual(0, 
len(w.maybe_spent_txs[self._settings.HATHOR_TOKEN_UID])) # when we receive the new tx it will remove from maybe_spent tx2 = w.prepare_transaction_compute_inputs(Transaction, [out], self.storage) @@ -269,7 +266,7 @@ def test_maybe_spent_txs(self): self.manager.cpu_mining_service.resolve(tx2) self.assertTrue(self.manager.on_new_tx(tx2, fails_silently=False)) self.clock.advance(2) - self.assertEqual(0, len(w.maybe_spent_txs[settings.HATHOR_TOKEN_UID])) + self.assertEqual(0, len(w.maybe_spent_txs[self._settings.HATHOR_TOKEN_UID])) class SyncV1BasicWalletTest(unittest.SyncV1Params, BaseBasicWalletTest): diff --git a/tests/wallet/test_wallet_hd.py b/tests/wallet/test_wallet_hd.py index ea2faa615..d006b18ae 100644 --- a/tests/wallet/test_wallet_hd.py +++ b/tests/wallet/test_wallet_hd.py @@ -1,4 +1,3 @@ -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_block from hathor.transaction import Transaction @@ -8,8 +7,6 @@ from tests import unittest from tests.utils import add_blocks_unlock_reward -settings = HathorSettings() - class BaseWalletHDTest(unittest.TestCase): __test__ = False @@ -33,9 +30,9 @@ def test_transaction_and_balance(self): out = WalletOutputInfo(decode_address(new_address), self.TOKENS, timelock=None) block = add_new_block(self.manager) self.manager.verification_service.verify(block) - utxo = self.wallet.unspent_txs[settings.HATHOR_TOKEN_UID].get((block.hash, 0)) + utxo = self.wallet.unspent_txs[self._settings.HATHOR_TOKEN_UID].get((block.hash, 0)) self.assertIsNotNone(utxo) - self.assertEqual(self.wallet.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, self.BLOCK_TOKENS)) + self.assertEqual(self.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.BLOCK_TOKENS)) # create transaction spending this value, but sending to same wallet add_blocks_unlock_reward(self.manager) @@ -50,9 +47,9 @@ def test_transaction_and_balance(self): self.wallet.on_new_tx(tx1) self.tx_storage.save_transaction(tx1) self.assertEqual(len(self.wallet.spent_txs), 1) - utxo = self.wallet.unspent_txs[settings.HATHOR_TOKEN_UID].get((tx1.hash, 0)) + utxo = self.wallet.unspent_txs[self._settings.HATHOR_TOKEN_UID].get((tx1.hash, 0)) self.assertIsNotNone(utxo) - self.assertEqual(self.wallet.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, self.TOKENS)) + self.assertEqual(self.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.TOKENS)) # pass inputs and outputs to prepare_transaction, but not the input keys # spend output last transaction @@ -69,7 +66,7 @@ def test_transaction_and_balance(self): self.tx_storage.save_transaction(tx2) self.wallet.on_new_tx(tx2) self.assertEqual(len(self.wallet.spent_txs), 2) - self.assertEqual(self.wallet.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, self.TOKENS)) + self.assertEqual(self.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.TOKENS)) # Test getting more unused addresses than the gap limit for i in range(3): diff --git a/tests/websocket/test_websocket.py b/tests/websocket/test_websocket.py index cc5d920c4..1583bba09 100644 --- a/tests/websocket/test_websocket.py +++ b/tests/websocket/test_websocket.py @@ -4,7 +4,6 @@ from twisted.internet.defer import inlineCallbacks from twisted.internet.testing import StringTransport -from hathor.conf import HathorSettings from hathor.pubsub import EventArguments, HathorEvents from hathor.util import json_dumpb, json_dumps, json_loadb from hathor.wallet.base_wallet import SpentTx, UnspentTx, 
WalletBalance @@ -13,8 +12,6 @@ from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -settings = HathorSettings() - class BaseWebsocketTest(_BaseResourceTest._ResourceTest): __test__ = False @@ -82,7 +79,7 @@ def test_balance(self): self.factory.connections.add(self.protocol) self.protocol.state = HathorAdminWebsocketProtocol.STATE_OPEN self.manager.pubsub.publish(HathorEvents.WALLET_BALANCE_UPDATED, - balance={settings.HATHOR_TOKEN_UID: WalletBalance(10, 20)}) + balance={self._settings.HATHOR_TOKEN_UID: WalletBalance(10, 20)}) self.run_to_completion() value = self._decode_value(self.transport.value()) self.assertEqual(value['balance']['locked'], 10) From 8725d21b92931686d7b6dbcc4948f3fa34b7985a Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 18 Jan 2024 20:37:44 -0300 Subject: [PATCH 12/38] fix(p2p): fix update whitelist handler (#885) --- hathor/p2p/manager.py | 58 ++++++++++-------------------- tests/p2p/test_whitelist.py | 71 +++++++++++++++++++++++++++++++++++-- 2 files changed, 87 insertions(+), 42 deletions(-) diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index f7c60b1bb..d180af7c8 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -21,8 +21,9 @@ from twisted.internet.task import LoopingCall from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol from twisted.python.failure import Failure +from twisted.web.client import Agent -from hathor.conf import HathorSettings +from hathor.conf.get_settings import get_global_settings from hathor.p2p.netfilter.factory import NetfilterFactory from hathor.p2p.peer_discovery import PeerDiscovery from hathor.p2p.peer_id import PeerId @@ -39,12 +40,10 @@ from hathor.util import Random if TYPE_CHECKING: - from twisted.internet.interfaces import IDelayedCall - from hathor.manager import HathorManager logger = get_logger() -settings = HathorSettings() +settings = get_global_settings() # The timeout in seconds for the whitelist GET request WHITELIST_REQUEST_TIMEOUT = 45 @@ -102,6 +101,7 @@ def __init__(self, self.log = logger.new() self.rng = rng self.manager = None + self._settings = get_global_settings() self.reactor = reactor self.my_peer = my_peer @@ -124,7 +124,7 @@ def __init__(self, self.client_factory = HathorClientFactory(self.network, self.my_peer, p2p_manager=self, use_ssl=self.use_ssl) # Global maximum number of connections. - self.max_connections: int = settings.PEER_MAX_CONNECTIONS + self.max_connections: int = self._settings.PEER_MAX_CONNECTIONS # Global rate limiter for all connections. self.rate_limiter = RateLimiter(self.reactor) @@ -168,7 +168,7 @@ def __init__(self, self._last_sync_rotate: float = 0. # A timer to try to reconnect to the disconnect known peers. 
- if settings.ENABLE_PEER_WHITELIST: + if self._settings.ENABLE_PEER_WHITELIST: self.wl_reconnect = LoopingCall(self.update_whitelist) self.wl_reconnect.clock = self.reactor @@ -185,6 +185,9 @@ def __init__(self, self._sync_factories = {} self._enabled_sync_versions = set() + # agent to perform HTTP requests + self._http_agent = Agent(self.reactor) + def add_sync_factory(self, sync_version: SyncVersion, sync_factory: SyncAgentFactory) -> None: """Add factory for the given sync version, must use a sync version that does not already exist.""" # XXX: to allow code in `set_manager` to safely use the the available sync versions, we add this restriction: @@ -273,7 +276,7 @@ def start(self) -> None: self.lc_reconnect.start(5, now=False) self.lc_sync_update.start(self.lc_sync_update_interval, now=False) - if settings.ENABLE_PEER_WHITELIST: + if self._settings.ENABLE_PEER_WHITELIST: self._start_whitelist_reconnect() for description in self.listen_addresses: @@ -519,51 +522,28 @@ def reconnect_to_all(self) -> None: self.connect_to_if_not_connected(peer, int(now)) def update_whitelist(self) -> Deferred[None]: - from twisted.web.client import Agent, readBody + from twisted.web.client import readBody from twisted.web.http_headers import Headers - assert settings.WHITELIST_URL is not None + assert self._settings.WHITELIST_URL is not None self.log.info('update whitelist') - agent = Agent(self.reactor) - d = agent.request( + d = self._http_agent.request( b'GET', - settings.WHITELIST_URL.encode(), + self._settings.WHITELIST_URL.encode(), Headers({'User-Agent': ['hathor-core']}), None) - # Twisted Agent does not have a direct way to configure the HTTP client timeout - # only a TCP connection timeout. - # In this request we need a timeout that encompasses the connection and download time. - # The callLater below is a manual client timeout that includes it and - # will cancel the deferred in case it's called - timeout_call = self.reactor.callLater(WHITELIST_REQUEST_TIMEOUT, d.cancel) - d.addBoth(self._update_whitelist_timeout, timeout_call) d.addCallback(readBody) - d.addErrback(self._update_whitelist_err) + d.addTimeout(WHITELIST_REQUEST_TIMEOUT, self.reactor) d.addCallback(self._update_whitelist_cb) - return d - - def _update_whitelist_timeout(self, param: Union[Failure, Optional[bytes]], - timeout_call: 'IDelayedCall') -> Union[Failure, Optional[bytes]]: - """ This method is always called for both cb and errback in the update whitelist get request deferred. - Because of that, the first parameter type will depend, will be a failure in case of errback - or optional bytes in case of cb (see _update_whitelist_cb). + d.addErrback(self._update_whitelist_err) - We just need to cancel the timeout call later and return the first parameter, - to continue the cb/errback sequence. 
- """ - if timeout_call.active(): - timeout_call.cancel() - return param + return d def _update_whitelist_err(self, *args: Any, **kwargs: Any) -> None: self.log.error('update whitelist failed', args=args, kwargs=kwargs) - def _update_whitelist_cb(self, body: Optional[bytes]) -> None: + def _update_whitelist_cb(self, body: bytes) -> None: assert self.manager is not None - if body is None: - self.log.warn('update whitelist got no response') - return - else: - self.log.info('update whitelist got response') + self.log.info('update whitelist got response') try: text = body.decode() new_whitelist = parse_whitelist(text) diff --git a/tests/p2p/test_whitelist.py b/tests/p2p/test_whitelist.py index 7d408e71b..e7b83fc18 100644 --- a/tests/p2p/test_whitelist.py +++ b/tests/p2p/test_whitelist.py @@ -1,11 +1,18 @@ -from unittest.mock import patch +from unittest.mock import Mock, patch -from hathor.conf import HathorSettings +from twisted.internet.defer import Deferred, TimeoutError +from twisted.python.failure import Failure +from twisted.web.client import Agent + +from hathor.conf.get_settings import get_global_settings +from hathor.conf.settings import HathorSettings +from hathor.manager import HathorManager +from hathor.p2p.manager import WHITELIST_REQUEST_TIMEOUT from hathor.p2p.sync_version import SyncVersion from hathor.simulator import FakeConnection from tests import unittest -settings = HathorSettings() +settings = get_global_settings() class WhitelistTestCase(unittest.SyncV1Params, unittest.TestCase): @@ -79,3 +86,61 @@ def test_sync_v11_whitelist_yes_yes(self): self.assertFalse(conn.tr1.disconnecting) self.assertFalse(conn.tr2.disconnecting) + + def test_update_whitelist(self) -> None: + network = 'testnet' + manager: HathorManager = self.create_peer(network) + connections_manager = manager.connections + + settings_mock = Mock(spec_set=HathorSettings) + settings_mock.WHITELIST_URL = 'some_url' + connections_manager._settings = settings_mock + + agent_mock = Mock(spec_set=Agent) + agent_mock.request = Mock() + connections_manager._http_agent = agent_mock + + with ( + patch.object(connections_manager, '_update_whitelist_cb') as _update_whitelist_cb_mock, + patch.object(connections_manager, '_update_whitelist_err') as _update_whitelist_err_mock, + patch('twisted.web.client.readBody') as read_body_mock + ): + # Test success + agent_mock.request.return_value = Deferred() + read_body_mock.return_value = b'body' + d = connections_manager.update_whitelist() + d.callback(None) + + read_body_mock.assert_called_once_with(None) + _update_whitelist_cb_mock.assert_called_once_with(b'body') + _update_whitelist_err_mock.assert_not_called() + + read_body_mock.reset_mock() + _update_whitelist_cb_mock.reset_mock() + _update_whitelist_err_mock.reset_mock() + + # Test request error + agent_mock.request.return_value = Deferred() + d = connections_manager.update_whitelist() + error = Failure('some_error') + d.errback(error) + + read_body_mock.assert_not_called() + _update_whitelist_cb_mock.assert_not_called() + _update_whitelist_err_mock.assert_called_once_with(error) + + read_body_mock.reset_mock() + _update_whitelist_cb_mock.reset_mock() + _update_whitelist_err_mock.reset_mock() + + # Test timeout + agent_mock.request.return_value = Deferred() + read_body_mock.return_value = b'body' + connections_manager.update_whitelist() + + self.clock.advance(WHITELIST_REQUEST_TIMEOUT + 1) + + read_body_mock.assert_not_called() + _update_whitelist_cb_mock.assert_not_called() + _update_whitelist_err_mock.assert_called_once() + 
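        # Note on the timeout branch: per Twisted's documented Deferred.addTimeout
        # behaviour, the still-pending request deferred is cancelled after
        # WHITELIST_REQUEST_TIMEOUT seconds and the resulting CancelledError is
        # converted into twisted.internet.defer.TimeoutError, delivered to the
        # errback wrapped in a Failure; that is what the final assertion below checks.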
assert isinstance(_update_whitelist_err_mock.call_args.args[0].value, TimeoutError) From 6a43a0586c5ecaf5f022327900fec88bf6800a50 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 18 Jan 2024 22:18:46 -0300 Subject: [PATCH 13/38] feat(feature-activation): configure NOP features on mainnet (#908) --- hathor/conf/mainnet.py | 26 ++++++++++++++++++++++++++ hathor/conf/mainnet.yml | 23 +++++++++++++++++++++++ 2 files changed, 49 insertions(+) diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index f66691cf3..7acc9a499 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -14,6 +14,9 @@ from hathor.checkpoint import Checkpoint as cp from hathor.conf.settings import HathorSettings +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.model.criteria import Criteria +from hathor.feature_activation.settings import Settings as FeatureActivationSettings SETTINGS = HathorSettings( P2PKH_VERSION_BYTE=b'\x28', @@ -196,4 +199,27 @@ '00004305882eb3eef6b45f025ff58eb7baa5ca35f7d6f42c8b085482b00474e6', '000045ecbab77c9a8d819ff6d26893b9da2774eee5539f17d8fc2394f82b758e', ])), + FEATURE_ACTIVATION=FeatureActivationSettings( + enable_usage=True, + features={ + Feature.NOP_FEATURE_1: Criteria( + bit=0, + start_height=4_213_440, # N (right now the best block is 4_161_800 on mainnet) + timeout_height=4_253_760, # N + 2 * 20160 (2 weeks after the start) + minimum_activation_height=4_273_920, # N + 3 * 20160 (3 weeks after the start) + lock_in_on_timeout=False, + version='0.59.0', + signal_support_by_default=True, + ), + Feature.NOP_FEATURE_2: Criteria( + bit=1, + start_height=4_193_280, # N (right now the best block is 4_133_220 on mainnet) + timeout_height=4_253_760, # N + 2 * 20160 (2 weeks after the start) + minimum_activation_height=0, + lock_in_on_timeout=False, + version='0.59.0', + signal_support_by_default=False, + ) + } + ) ) diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml index baa6fa16a..7a83fd8a1 100644 --- a/hathor/conf/mainnet.yml +++ b/hathor/conf/mainnet.yml @@ -179,3 +179,26 @@ SOFT_VOIDED_TX_IDS: - 000040db8e91bcdc1e65bc868e904345396a0bc4eb084694a72dbcc485555d80 - 00004305882eb3eef6b45f025ff58eb7baa5ca35f7d6f42c8b085482b00474e6 - 000045ecbab77c9a8d819ff6d26893b9da2774eee5539f17d8fc2394f82b758e + +FEATURE_ACTIVATION: + enable_usage: true + features: + #### First Phased Testing features on mainnet #### + + NOP_FEATURE_1: + bit: 0 + start_height: 4_213_440 # N (right now the best block is 4_161_800 on mainnet) + timeout_height: 4_253_760 # N + 2 * 20160 (2 weeks after the start) + minimum_activation_height: 4_273_920 # N + 3 * 20160 (3 weeks after the start) + lock_in_on_timeout: false + version: 0.59.0 + signal_support_by_default: true + + NOP_FEATURE_2: + bit: 1 + start_height: 4_193_280 # N (right now the best block is 4_133_220 on mainnet) + timeout_height: 4_253_760 # N + 2 * 20160 (2 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.59.0 + signal_support_by_default: false From ded4c79643e29f8a74ed7305edd90657767ce4d4 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 19 Jan 2024 13:22:30 -0300 Subject: [PATCH 14/38] feat(feature-activation): update NOP features config on mainnet (#929) --- hathor/conf/mainnet.py | 4 ++-- hathor/conf/mainnet.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index 7acc9a499..a49a2aec7 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -204,7 
+204,7 @@ features={ Feature.NOP_FEATURE_1: Criteria( bit=0, - start_height=4_213_440, # N (right now the best block is 4_161_800 on mainnet) + start_height=4_213_440, # N (right now the best block is 4_169_000 on mainnet) timeout_height=4_253_760, # N + 2 * 20160 (2 weeks after the start) minimum_activation_height=4_273_920, # N + 3 * 20160 (3 weeks after the start) lock_in_on_timeout=False, @@ -213,7 +213,7 @@ ), Feature.NOP_FEATURE_2: Criteria( bit=1, - start_height=4_193_280, # N (right now the best block is 4_133_220 on mainnet) + start_height=4_213_440, # N (right now the best block is 4_169_000 on mainnet) timeout_height=4_253_760, # N + 2 * 20160 (2 weeks after the start) minimum_activation_height=0, lock_in_on_timeout=False, diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml index 7a83fd8a1..0cefc6b56 100644 --- a/hathor/conf/mainnet.yml +++ b/hathor/conf/mainnet.yml @@ -187,7 +187,7 @@ FEATURE_ACTIVATION: NOP_FEATURE_1: bit: 0 - start_height: 4_213_440 # N (right now the best block is 4_161_800 on mainnet) + start_height: 4_213_440 # N (right now the best block is 4_169_000 on mainnet) timeout_height: 4_253_760 # N + 2 * 20160 (2 weeks after the start) minimum_activation_height: 4_273_920 # N + 3 * 20160 (3 weeks after the start) lock_in_on_timeout: false @@ -196,7 +196,7 @@ FEATURE_ACTIVATION: NOP_FEATURE_2: bit: 1 - start_height: 4_193_280 # N (right now the best block is 4_133_220 on mainnet) + start_height: 4_213_440 # N (right now the best block is 4_169_000 on mainnet) timeout_height: 4_253_760 # N + 2 * 20160 (2 weeks after the start) minimum_activation_height: 0 lock_in_on_timeout: false From a6533354ba3e2327b84869a8ca252a954602983c Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 19 Jan 2024 16:00:36 -0300 Subject: [PATCH 15/38] refactor(feature-activation): remove enable_usage (#909) --- hathor/cli/mining.py | 4 +++- hathor/conf/mainnet.py | 1 - hathor/conf/mainnet.yml | 1 - hathor/conf/testnet.py | 1 - hathor/conf/testnet.yml | 1 - hathor/feature_activation/settings.py | 3 --- hathor/manager.py | 2 +- hathor/verification/block_verifier.py | 7 +------ hathor/verification/vertex_verifiers.py | 4 ++-- .../test_feature_simulation.py | 3 --- tests/tx/test_block.py | 17 ----------------- tests/tx/test_genesis.py | 4 +++- tests/tx/test_tx_deserialization.py | 4 +++- 13 files changed, 13 insertions(+), 39 deletions(-) diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index 35a131640..491aff1e4 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -135,12 +135,14 @@ def execute(args: Namespace) -> None: block.nonce, block.weight)) try: + from unittest.mock import Mock + from hathor.conf.get_settings import get_global_settings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.verification.verification_service import VerificationService, VertexVerifiers settings = get_global_settings() daa = DifficultyAdjustmentAlgorithm(settings=settings) - verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa) + verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa, feature_service=Mock()) verification_service = VerificationService(verifiers=verifiers) verification_service.verify_without_storage(block) except HathorError: diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index a49a2aec7..6dd4acdc6 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -200,7 +200,6 @@ '000045ecbab77c9a8d819ff6d26893b9da2774eee5539f17d8fc2394f82b758e', ])), 
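    # The NOP feature heights configured above assume Hathor's roughly 30-second
    # average block time: 2 blocks/min * 60 * 24 * 7 = 20_160 blocks per week, so
    # start_height + 2 * 20_160 lands about two weeks after the start boundary and
    # start_height + 3 * 20_160 about three weeks after it.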
FEATURE_ACTIVATION=FeatureActivationSettings( - enable_usage=True, features={ Feature.NOP_FEATURE_1: Criteria( bit=0, diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml index 0cefc6b56..650915685 100644 --- a/hathor/conf/mainnet.yml +++ b/hathor/conf/mainnet.yml @@ -181,7 +181,6 @@ SOFT_VOIDED_TX_IDS: - 000045ecbab77c9a8d819ff6d26893b9da2774eee5539f17d8fc2394f82b758e FEATURE_ACTIVATION: - enable_usage: true features: #### First Phased Testing features on mainnet #### diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index 91aa854c1..fdfa7ab61 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -56,7 +56,6 @@ ], FEATURE_ACTIVATION=FeatureActivationSettings( evaluation_interval=40_320, - enable_usage=True, default_threshold=30240, features={ Feature.NOP_FEATURE_4: Criteria( diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index b8b58c06f..2fd1abf96 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -38,7 +38,6 @@ CHECKPOINTS: FEATURE_ACTIVATION: evaluation_interval: 40_320 - enable_usage: true default_threshold: 30_240 # 30240 = 75% of evaluation_interval (40320) features: #### Second Phased Testing features #### diff --git a/hathor/feature_activation/settings.py b/hathor/feature_activation/settings.py index d6b713068..aa4c119b4 100644 --- a/hathor/feature_activation/settings.py +++ b/hathor/feature_activation/settings.py @@ -41,9 +41,6 @@ class Settings(BaseModel, validate_all=True): # neither their values changed, to preserve history. features: dict[Feature, Criteria] = {} - # Boolean indicating whether feature activation can be used. - enable_usage: bool = False - @validator('default_threshold') def _validate_default_threshold(cls, default_threshold: int, values: dict[str, Any]) -> int: """Validates that the default_threshold is not greater than the evaluation_interval.""" diff --git a/hathor/manager.py b/hathor/manager.py index f31478232..415104d80 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -1061,7 +1061,7 @@ def tx_fully_validated(self, tx: BaseTransaction, *, quiet: bool) -> None: def _log_feature_states(self, vertex: BaseTransaction) -> None: """Log features states for a block. 
Used as part of the Feature Activation Phased Testing.""" - if not self._settings.FEATURE_ACTIVATION.enable_usage or not isinstance(vertex, Block): + if not isinstance(vertex, Block): return feature_descriptions = self._feature_service.get_bits_description(block=vertex) diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py index d919c6bd2..b1184aea5 100644 --- a/hathor/verification/block_verifier.py +++ b/hathor/verification/block_verifier.py @@ -35,7 +35,7 @@ def __init__( *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm, - feature_service: FeatureService | None = None + feature_service: FeatureService, ) -> None: self._settings = settings self._daa = daa @@ -82,11 +82,6 @@ def verify_data(self, block: Block) -> None: def verify_mandatory_signaling(self, block: Block) -> None: """Verify whether this block is missing mandatory signaling for any feature.""" - if not self._settings.FEATURE_ACTIVATION.enable_usage: - return - - assert self._feature_service is not None - signaling_state = self._feature_service.is_signaling_mandatory_features(block) match signaling_state: diff --git a/hathor/verification/vertex_verifiers.py b/hathor/verification/vertex_verifiers.py index eed2ca74f..339230acb 100644 --- a/hathor/verification/vertex_verifiers.py +++ b/hathor/verification/vertex_verifiers.py @@ -38,7 +38,7 @@ def create_defaults( *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm, - feature_service: FeatureService | None = None, + feature_service: FeatureService, ) -> 'VertexVerifiers': """ Create a VertexVerifiers instance using the default verifier for each vertex type, @@ -60,7 +60,7 @@ def create( settings: HathorSettings, vertex_verifier: VertexVerifier, daa: DifficultyAdjustmentAlgorithm, - feature_service: FeatureService | None = None, + feature_service: FeatureService, ) -> 'VertexVerifiers': """ Create a VertexVerifiers instance using a custom vertex_verifier. diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 1a6665a1e..a9b622c7a 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -59,7 +59,6 @@ def test_feature(self) -> None: method calls to make sure we're executing it in the intended, most performatic way. """ feature_settings = FeatureSettings( - enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, @@ -336,7 +335,6 @@ def test_feature(self) -> None: def test_reorg(self) -> None: feature_settings = FeatureSettings( - enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, @@ -551,7 +549,6 @@ def test_feature_from_existing_storage(self) -> None: Tests that feature states are correctly retrieved from an existing storage, so no recalculation is required. 
""" feature_settings = FeatureSettings( - enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, diff --git a/tests/tx/test_block.py b/tests/tx/test_block.py index 735351215..c5f698965 100644 --- a/tests/tx/test_block.py +++ b/tests/tx/test_block.py @@ -138,24 +138,8 @@ def test_get_feature_activation_bit_value() -> None: assert block.get_feature_activation_bit_value(3) == 0 -@pytest.mark.parametrize( - 'is_signaling_mandatory_features', - [BlockIsSignaling(), BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)] -) -def test_verify_must_signal_when_feature_activation_is_disabled(is_signaling_mandatory_features: bool) -> None: - settings = Mock(spec_set=HathorSettings) - settings.FEATURE_ACTIVATION.enable_usage = False - feature_service = Mock(spec_set=FeatureService) - feature_service.is_signaling_mandatory_features = Mock(return_value=is_signaling_mandatory_features) - verifier = BlockVerifier(settings=settings, feature_service=feature_service, daa=Mock()) - block = Block() - - verifier.verify_mandatory_signaling(block) - - def test_verify_must_signal() -> None: settings = Mock(spec_set=HathorSettings) - settings.FEATURE_ACTIVATION.enable_usage = True feature_service = Mock(spec_set=FeatureService) feature_service.is_signaling_mandatory_features = Mock( return_value=BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1) @@ -171,7 +155,6 @@ def test_verify_must_signal() -> None: def test_verify_must_not_signal() -> None: settings = Mock(spec_set=HathorSettings) - settings.FEATURE_ACTIVATION.enable_usage = True feature_service = Mock(spec_set=FeatureService) feature_service.is_signaling_mandatory_features = Mock(return_value=BlockIsSignaling()) verifier = BlockVerifier(settings=settings, feature_service=feature_service, daa=Mock()) diff --git a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index 885395fa7..a41021f8b 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -1,3 +1,5 @@ +from unittest.mock import Mock + from hathor.conf import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode from hathor.transaction.storage import TransactionMemoryStorage @@ -30,7 +32,7 @@ class GenesisTest(unittest.TestCase): def setUp(self): super().setUp() self._daa = DifficultyAdjustmentAlgorithm(settings=self._settings) - verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=self._daa) + verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=self._daa, feature_service=Mock()) self._verification_service = VerificationService(verifiers=verifiers) self.storage = TransactionMemoryStorage() diff --git a/tests/tx/test_tx_deserialization.py b/tests/tx/test_tx_deserialization.py index 4e878c802..ba19abc28 100644 --- a/tests/tx/test_tx_deserialization.py +++ b/tests/tx/test_tx_deserialization.py @@ -1,3 +1,5 @@ +from unittest.mock import Mock + from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.transaction import Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.token_creation_tx import TokenCreationTransaction @@ -11,7 +13,7 @@ class _DeserializationTest(unittest.TestCase): def setUp(self) -> None: super().setUp() daa = DifficultyAdjustmentAlgorithm(settings=self._settings) - verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=daa) + verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=daa, feature_service=Mock()) self._verification_service = VerificationService(verifiers=verifiers) def test_deserialize(self): From 
9d042eb77f3a2426102f07337fa4e3e91e38319e Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 19 Jan 2024 17:42:47 -0300 Subject: [PATCH 16/38] refactor(storage): create StorageProtocol (#922) --- hathor/daa.py | 9 +-- hathor/manager.py | 6 +- hathor/reward_lock/__init__.py | 21 ++++++ hathor/reward_lock/reward_lock.py | 69 +++++++++++++++++++ .../storage/transaction_storage.py | 12 ++++ .../storage/vertex_storage_protocol.py | 48 +++++++++++++ hathor/transaction/transaction.py | 48 ++----------- hathor/verification/transaction_verifier.py | 4 +- 8 files changed, 165 insertions(+), 52 deletions(-) create mode 100644 hathor/reward_lock/__init__.py create mode 100644 hathor/reward_lock/reward_lock.py create mode 100644 hathor/transaction/storage/vertex_storage_protocol.py diff --git a/hathor/daa.py b/hathor/daa.py index 4d8fc7413..680ef4dfc 100644 --- a/hathor/daa.py +++ b/hathor/daa.py @@ -27,10 +27,11 @@ from hathor.conf.settings import HathorSettings from hathor.profiler import get_cpu_profiler -from hathor.util import iwindows +from hathor.util import iwindows, not_none if TYPE_CHECKING: from hathor.transaction import Block, Transaction + from hathor.transaction.storage.vertex_storage_protocol import VertexStorageProtocol logger = get_logger() cpu = get_cpu_profiler() @@ -65,9 +66,9 @@ def calculate_block_difficulty(self, block: 'Block') -> float: if block.is_genesis: return self.MIN_BLOCK_WEIGHT - return self.calculate_next_weight(block.get_block_parent(), block.timestamp) + return self.calculate_next_weight(block.get_block_parent(), block.timestamp, not_none(block.storage)) - def calculate_next_weight(self, parent_block: 'Block', timestamp: int) -> float: + def calculate_next_weight(self, parent_block: 'Block', timestamp: int, storage: 'VertexStorageProtocol') -> float: """ Calculate the next block weight, aka DAA/difficulty adjustment algorithm. The algorithm used is described in [RFC 22](https://gitlab.com/HathorNetwork/rfcs/merge_requests/22). 
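The new storage parameter above leans on structural typing: calculate_next_weight only needs the few read methods declared by VertexStorageProtocol (the Protocol added later in this patch), so a RocksDB-backed TransactionStorage and an ephemeral in-memory storage can both be passed without sharing a base class. A minimal sketch of that idea, assuming hypothetical FakeBlock/FakeStorage stand-ins rather than the project's real classes:

from typing import Optional, Protocol, runtime_checkable


@runtime_checkable
class ParentLookup(Protocol):
    """Illustrative subset of what VertexStorageProtocol declares."""

    def get_parent_block(self, block: 'FakeBlock') -> 'FakeBlock':
        ...


class FakeBlock:
    def __init__(self, height: int, parent: Optional['FakeBlock'] = None) -> None:
        self.height = height
        self.parent = parent


class FakeStorage:
    """Satisfies ParentLookup structurally; no inheritance required."""

    def get_parent_block(self, block: FakeBlock) -> FakeBlock:
        assert block.parent is not None
        return block.parent


def collect_parents(tip: FakeBlock, n: int, storage: ParentLookup) -> list[FakeBlock]:
    """Walk n parents back, mirroring the loop inside calculate_next_weight."""
    blocks = [tip]
    while len(blocks) < n + 1:
        blocks.append(storage.get_parent_block(blocks[-1]))
    return blocks


chain = FakeBlock(2, FakeBlock(1, FakeBlock(0)))
assert isinstance(FakeStorage(), ParentLookup)
assert [b.height for b in collect_parents(chain, 2, FakeStorage())] == [2, 1, 0]

Because the match is structural, the fake storage needs no import from the storage package at all, which is what allows verification-time callers to hand in a lightweight storage object.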
@@ -90,7 +91,7 @@ def calculate_next_weight(self, parent_block: 'Block', timestamp: int) -> float: blocks: list['Block'] = [] while len(blocks) < N + 1: blocks.append(root) - root = root.get_block_parent() + root = storage.get_parent_block(root) assert root is not None # TODO: revise if this assertion can be safely removed diff --git a/hathor/manager.py b/hathor/manager.py index 415104d80..731e70c0a 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -51,6 +51,7 @@ from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol as Reactor +from hathor.reward_lock import is_spent_reward_locked from hathor.stratum import StratumFactory from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights from hathor.transaction.exceptions import TxValidationError @@ -802,7 +803,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur parent_block_metadata.score, 2 * self._settings.WEIGHT_TOL ) - weight = max(self.daa.calculate_next_weight(parent_block, timestamp), min_significant_weight) + weight = max(self.daa.calculate_next_weight(parent_block, timestamp, self.tx_storage), min_significant_weight) height = parent_block.get_height() + 1 parents = [parent_block.hash] + parent_txs.must_include parents_any = parent_txs.can_include @@ -889,8 +890,7 @@ def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False, if is_spending_voided_tx: raise SpendingVoidedError('Invalid transaction. At least one input is voided.') - is_spent_reward_locked = tx.is_spent_reward_locked() - if is_spent_reward_locked: + if is_spent_reward_locked(tx): raise RewardLockedError('Spent reward is locked.') # We are using here the method from lib because the property diff --git a/hathor/reward_lock/__init__.py b/hathor/reward_lock/__init__.py new file mode 100644 index 000000000..dc72928f7 --- /dev/null +++ b/hathor/reward_lock/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.reward_lock.reward_lock import get_spent_reward_locked_info, is_spent_reward_locked, iter_spent_rewards + +__all__ = [ + 'iter_spent_rewards', + 'is_spent_reward_locked', + 'get_spent_reward_locked_info', +] diff --git a/hathor/reward_lock/reward_lock.py b/hathor/reward_lock/reward_lock.py new file mode 100644 index 000000000..45f252d08 --- /dev/null +++ b/hathor/reward_lock/reward_lock.py @@ -0,0 +1,69 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING, Iterator, Optional + +from hathor.conf.get_settings import get_global_settings +from hathor.transaction import Block +from hathor.util import not_none + +if TYPE_CHECKING: + from hathor.transaction.storage.vertex_storage_protocol import VertexStorageProtocol + from hathor.transaction.transaction import RewardLockedInfo, Transaction + + +def iter_spent_rewards(tx: 'Transaction', storage: 'VertexStorageProtocol') -> Iterator[Block]: + """Iterate over all the rewards being spent, assumes tx has been verified.""" + for input_tx in tx.inputs: + spent_tx = storage.get_vertex(input_tx.tx_id) + if spent_tx.is_block: + assert isinstance(spent_tx, Block) + yield spent_tx + + +def is_spent_reward_locked(tx: 'Transaction') -> bool: + """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx + itself, and not the inherited `min_height`""" + return get_spent_reward_locked_info(tx, not_none(tx.storage)) is not None + + +def get_spent_reward_locked_info(tx: 'Transaction', storage: 'VertexStorageProtocol') -> Optional['RewardLockedInfo']: + """Check if any input block reward is locked, returning the locked information if any, or None if they are all + unlocked.""" + from hathor.transaction.transaction import RewardLockedInfo + for blk in iter_spent_rewards(tx, storage): + assert blk.hash is not None + needed_height = _spent_reward_needed_height(blk, storage) + if needed_height > 0: + return RewardLockedInfo(blk.hash, needed_height) + return None + + +def _spent_reward_needed_height(block: Block, storage: 'VertexStorageProtocol') -> int: + """ Returns height still needed to unlock this `block` reward: 0 means it's unlocked.""" + import math + + # omitting timestamp to get the current best block, this will usually hit the cache instead of being slow + tips = storage.get_best_block_tips() + assert len(tips) > 0 + best_height = math.inf + for tip in tips: + blk = storage.get_block(tip) + best_height = min(best_height, blk.get_height()) + assert isinstance(best_height, int) + spent_height = block.get_height() + spend_blocks = best_height - spent_height + settings = get_global_settings() + needed_height = settings.REWARD_SPEND_MIN_BLOCKS - spend_blocks + return max(needed_height, 0) diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 8b4d5a195..8b441ed31 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -45,6 +45,7 @@ from hathor.transaction.storage.tx_allow_scope import TxAllowScope, tx_allow_context from hathor.transaction.transaction import Transaction from hathor.transaction.transaction_metadata import TransactionMetadata +from hathor.types import VertexId from hathor.util import not_none cpu = get_cpu_profiler() @@ -1137,6 +1138,17 @@ def _construct_genesis_tx2(self) -> Transaction: assert tx2.hash == self._settings.GENESIS_TX2_HASH return tx2 + def get_parent_block(self, block: Block) -> Block: + return block.get_block_parent() + + def get_vertex(self, vertex_id: VertexId) -> BaseTransaction: + return self.get_transaction(vertex_id) + + def get_block(self, block_id: VertexId) -> Block: + block = self.get_vertex(block_id) + assert isinstance(block, Block) + return block + class BaseTransactionStorage(TransactionStorage): indexes: Optional[IndexesManager] diff --git 
a/hathor/transaction/storage/vertex_storage_protocol.py b/hathor/transaction/storage/vertex_storage_protocol.py new file mode 100644 index 000000000..a35b3cd78 --- /dev/null +++ b/hathor/transaction/storage/vertex_storage_protocol.py @@ -0,0 +1,48 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import abstractmethod +from typing import Protocol + +from hathor.transaction import BaseTransaction, Block +from hathor.types import VertexId + + +class VertexStorageProtocol(Protocol): + """ + This Protocol currently represents a subset of TransactionStorage methods. Its main use case is for verification + methods that can receive a RocksDB storage or an ephemeral simple memory storage. + + Therefore, objects returned by this protocol may or may not have an `object.storage` pointer set. + """ + + @abstractmethod + def get_vertex(self, vertex_id: VertexId) -> BaseTransaction: + """Return a vertex from the storage.""" + raise NotImplementedError + + @abstractmethod + def get_block(self, block_id: VertexId) -> Block: + """Return a block from the storage.""" + raise NotImplementedError + + @abstractmethod + def get_parent_block(self, block: Block) -> Block: + """Get the parent block of a block.""" + raise NotImplementedError + + @abstractmethod + def get_best_block_tips(self) -> list[VertexId]: + """Return a list of blocks that are heads in a best chain.""" + raise NotImplementedError diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index 37967461e..54189693d 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -15,12 +15,13 @@ import hashlib from itertools import chain from struct import pack -from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Optional +from typing import TYPE_CHECKING, Any, NamedTuple, Optional from hathor.checkpoint import Checkpoint from hathor.exception import InvalidNewTransaction from hathor.profiler import get_cpu_profiler -from hathor.transaction import BaseTransaction, Block, TxInput, TxOutput, TxVersion +from hathor.reward_lock import iter_spent_rewards +from hathor.transaction import BaseTransaction, TxInput, TxOutput, TxVersion from hathor.transaction.base_transaction import TX_HASH_SIZE from hathor.transaction.exceptions import InvalidToken from hathor.transaction.util import VerboseCallback, unpack, unpack_len @@ -135,7 +136,7 @@ def _calculate_inherited_min_height(self) -> int: def _calculate_my_min_height(self) -> int: """ Calculates min height derived from own spent block rewards""" min_height = 0 - for blk in self.iter_spent_rewards(): + for blk in iter_spent_rewards(self, not_none(self.storage)): min_height = max(min_height, blk.get_height() + self._settings.REWARD_SPEND_MIN_BLOCKS + 1) return min_height @@ -346,47 +347,6 @@ def _update_token_info_from_outputs(self, *, token_dict: dict[TokenUid, TokenInf sum_tokens = token_info.amount + tx_output.value token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) - def 
iter_spent_rewards(self) -> Iterator[Block]: - """Iterate over all the rewards being spent, assumes tx has been verified.""" - for input_tx in self.inputs: - spent_tx = self.get_spent_tx(input_tx) - if spent_tx.is_block: - assert isinstance(spent_tx, Block) - yield spent_tx - - def is_spent_reward_locked(self) -> bool: - """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx - itself, and not the inherited `min_height`""" - return self.get_spent_reward_locked_info() is not None - - def get_spent_reward_locked_info(self) -> Optional[RewardLockedInfo]: - """Check if any input block reward is locked, returning the locked information if any, or None if they are all - unlocked.""" - for blk in self.iter_spent_rewards(): - assert blk.hash is not None - needed_height = self._spent_reward_needed_height(blk) - if needed_height > 0: - return RewardLockedInfo(blk.hash, needed_height) - return None - - def _spent_reward_needed_height(self, block: Block) -> int: - """ Returns height still needed to unlock this `block` reward: 0 means it's unlocked.""" - import math - assert self.storage is not None - # omitting timestamp to get the current best block, this will usually hit the cache instead of being slow - tips = self.storage.get_best_block_tips() - assert len(tips) > 0 - best_height = math.inf - for tip in tips: - blk = self.storage.get_transaction(tip) - assert isinstance(blk, Block) - best_height = min(best_height, blk.get_height()) - assert isinstance(best_height, int) - spent_height = block.get_height() - spend_blocks = best_height - spent_height - needed_height = self._settings.REWARD_SPEND_MIN_BLOCKS - spend_blocks - return max(needed_height, 0) - def is_double_spending(self) -> bool: """ Iterate through inputs to check if they were already spent Used to prevent users from sending double spending transactions to the network diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 630c82147..2d86883c2 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -15,6 +15,7 @@ from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.profiler import get_cpu_profiler +from hathor.reward_lock import get_spent_reward_locked_info from hathor.transaction import BaseTransaction, Transaction, TxInput from hathor.transaction.exceptions import ( ConflictingInputs, @@ -36,6 +37,7 @@ from hathor.transaction.transaction import TokenInfo from hathor.transaction.util import get_deposit_amount, get_withdraw_amount from hathor.types import TokenUid, VertexId +from hathor.util import not_none cpu = get_cpu_profiler() @@ -144,7 +146,7 @@ def verify_script(self, *, tx: Transaction, input_tx: TxInput, spent_tx: BaseTra def verify_reward_locked(self, tx: Transaction) -> None: """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only the block rewards spent by this tx itself, and not the inherited `min_height`.""" - info = tx.get_spent_reward_locked_info() + info = get_spent_reward_locked_info(tx, not_none(tx.storage)) if info is not None: raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') From 0ddab4fbce62b13f9c18899d43a5208abb45f4d1 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Tue, 23 Jan 2024 17:23:23 +0100 Subject: [PATCH 17/38] chore: bump version to v0.59.0 --- hathor/cli/openapi_files/openapi_base.json | 
2 +- hathor/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/hathor/cli/openapi_files/openapi_base.json b/hathor/cli/openapi_files/openapi_base.json index 4ebc82659..a3401d9a1 100644 --- a/hathor/cli/openapi_files/openapi_base.json +++ b/hathor/cli/openapi_files/openapi_base.json @@ -7,7 +7,7 @@ ], "info": { "title": "Hathor API", - "version": "0.58.0" + "version": "0.59.0" }, "consumes": [ "application/json" diff --git a/hathor/version.py b/hathor/version.py index 1f6b94328..895f8bdd7 100644 --- a/hathor/version.py +++ b/hathor/version.py @@ -19,7 +19,7 @@ from structlog import get_logger -BASE_VERSION = '0.58.0' +BASE_VERSION = '0.59.0' DEFAULT_VERSION_SUFFIX = "local" BUILD_VERSION_FILE_PATH = "./BUILD_VERSION" diff --git a/pyproject.toml b/pyproject.toml index c6e87ce42..1c4c64487 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ [tool.poetry] name = "hathor" -version = "0.58.0" +version = "0.59.0" description = "Hathor Network full-node" authors = ["Hathor Team "] license = "Apache-2.0" From 2be371f845486f15d69cc92f1b9a6291a72eff43 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 26 Jan 2024 12:40:32 -0300 Subject: [PATCH 18/38] fix(feature-activation): fix get ancestor (#930) --- hathor/feature_activation/feature_service.py | 48 ++++++++++++------- .../test_feature_service.py | 32 +++++++------ .../test_feature_simulation.py | 13 +++-- 3 files changed, 54 insertions(+), 39 deletions(-) diff --git a/hathor/feature_activation/feature_service.py b/hathor/feature_activation/feature_service.py index 4d44dd5c2..f02195cec 100644 --- a/hathor/feature_activation/feature_service.py +++ b/hathor/feature_activation/feature_service.py @@ -98,7 +98,7 @@ def get_state(self, *, block: 'Block', feature: Feature) -> FeatureState: offset_to_previous_boundary = offset_to_boundary or self._feature_settings.evaluation_interval previous_boundary_height = height - offset_to_previous_boundary assert previous_boundary_height >= 0 - previous_boundary_block = self._get_ancestor_at_height(block=block, height=previous_boundary_height) + previous_boundary_block = self._get_ancestor_at_height(block=block, ancestor_height=previous_boundary_height) previous_boundary_state = self.get_state(block=previous_boundary_block, feature=feature) # We cache _and save_ the state of the previous boundary block that we just got. @@ -198,32 +198,44 @@ def get_bits_description(self, *, block: 'Block') -> dict[Feature, FeatureDescri for feature, criteria in self._feature_settings.features.items() } - def _get_ancestor_at_height(self, *, block: 'Block', height: int) -> 'Block': + def _get_ancestor_at_height(self, *, block: 'Block', ancestor_height: int) -> 'Block': """ - Given a block, returns its ancestor at a specific height. + Given a block, return its ancestor at a specific height. Uses the height index if the block is in the best blockchain, or search iteratively otherwise. """ - assert height < block.get_height(), ( - f"ancestor height must be lower than the block's height: {height} >= {block.get_height()}" + assert ancestor_height < block.get_height(), ( + f"ancestor height must be lower than the block's height: {ancestor_height} >= {block.get_height()}" ) - metadata = block.get_metadata() + # It's possible that this method is called before the consensus runs for this block, therefore we do not know + # if it's in the best blockchain. For this reason, we have to get the ancestor starting from our parent block. 
+ parent_block = block.get_block_parent() + parent_metadata = parent_block.get_metadata() + assert parent_metadata.validation.is_fully_connected(), 'The parent should always be fully validated.' - if not metadata.voided_by and (ancestor := self._tx_storage.get_transaction_by_height(height)): + if parent_block.get_height() == ancestor_height: + return parent_block + + if not parent_metadata.voided_by and (ancestor := self._tx_storage.get_transaction_by_height(ancestor_height)): from hathor.transaction import Block assert isinstance(ancestor, Block) return ancestor - return _get_ancestor_iteratively(block=block, ancestor_height=height) - + return self._get_ancestor_iteratively(block=parent_block, ancestor_height=ancestor_height) -def _get_ancestor_iteratively(*, block: 'Block', ancestor_height: int) -> 'Block': - """Given a block, returns its ancestor at a specific height by iterating over its ancestors. This is slow.""" - # TODO: there are further optimizations to be done here, the latest common block height could be persisted in - # metadata, so we could still use the height index if the requested height is before that height. - assert ancestor_height >= 0 - ancestor = block - while ancestor.get_height() > ancestor_height: - ancestor = ancestor.get_block_parent() + def _get_ancestor_iteratively(self, *, block: 'Block', ancestor_height: int) -> 'Block': + """ + Given a block, return its ancestor at a specific height by iterating over its ancestors. + This is slower than using the height index. + """ + # TODO: there are further optimizations to be done here, the latest common block height could be persisted in + # metadata, so we could still use the height index if the requested height is before that height. + assert ancestor_height >= 0 + assert block.get_height() - ancestor_height <= self._feature_settings.evaluation_interval, ( + 'requested ancestor is deeper than the maximum allowed' + ) + ancestor = block + while ancestor.get_height() > ancestor_height: + ancestor = ancestor.get_block_parent() - return ancestor + return ancestor diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index 4a01069d3..ebe2a118e 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -29,8 +29,9 @@ from hathor.feature_activation.model.feature_description import FeatureDescription from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings -from hathor.transaction import Block +from hathor.transaction import Block, TransactionMetadata from hathor.transaction.storage import TransactionStorage +from hathor.transaction.validation_state import ValidationState def _get_blocks_and_storage() -> tuple[list[Block], TransactionStorage]: @@ -72,19 +73,20 @@ def _get_blocks_and_storage() -> tuple[list[Block], TransactionStorage]: 0b0000, ] storage = Mock() - storage.get_metadata = Mock(return_value=None) - for i, bits in enumerate(feature_activation_bits): - block_hash = genesis_hash if i == 0 else int.to_bytes(i, length=1, byteorder='big') + for height, bits in enumerate(feature_activation_bits): + block_hash = genesis_hash if height == 0 else int.to_bytes(height, length=1, byteorder='big') block = Block(hash=block_hash, storage=storage, signal_bits=bits) blocks.append(block) - parent_hash = blocks[i - 1].hash + parent_hash = blocks[height - 1].hash assert parent_hash is not None block.parents = [parent_hash] + 
block._metadata = TransactionMetadata(height=height) + block._metadata.validation = ValidationState.FULL block_by_hash = {block.hash: block for block in blocks} storage.get_transaction = Mock(side_effect=lambda hash_bytes: block_by_hash[hash_bytes]) - storage.get_transaction_by_height = Mock(side_effect=lambda height: blocks[height]) + storage.get_transaction_by_height = Mock(side_effect=lambda h: blocks[h]) return blocks, storage @@ -597,7 +599,7 @@ def test_get_ancestor_at_height_invalid( block = block_mocks[block_height] with pytest.raises(AssertionError) as e: - service._get_ancestor_at_height(block=block, height=ancestor_height) + service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) assert str(e.value) == ( f"ancestor height must be lower than the block's height: {ancestor_height} >= {block_height}" @@ -624,21 +626,22 @@ def test_get_ancestor_at_height( ) -> None: service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) block = block_mocks[block_height] - result = service._get_ancestor_at_height(block=block, height=ancestor_height) + result = service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) assert result == block_mocks[ancestor_height] assert result.get_height() == ancestor_height - assert cast(Mock, tx_storage.get_transaction_by_height).call_count == 1 + assert cast(Mock, tx_storage.get_transaction_by_height).call_count == ( + 0 if block_height - ancestor_height <= 1 else 1 + ), 'this should only be called if the ancestor is deeper than one parent away' @pytest.mark.parametrize( ['block_height', 'ancestor_height'], [ (21, 20), - (21, 10), - (21, 0), + (21, 18), + (15, 12), (15, 10), - (15, 0), (1, 0), ] ) @@ -651,8 +654,9 @@ def test_get_ancestor_at_height_voided( ) -> None: service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) block = block_mocks[block_height] - block.get_metadata().voided_by = {b'some'} - result = service._get_ancestor_at_height(block=block, height=ancestor_height) + parent_block = block_mocks[block_height - 1] + parent_block.get_metadata().voided_by = {b'some'} + result = service._get_ancestor_at_height(block=block, ancestor_height=ancestor_height) assert result == block_mocks[ancestor_height] assert result.get_height() == ancestor_height diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index a9b622c7a..c7e8cf253 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -19,7 +19,6 @@ from hathor.builder import Builder from hathor.conf.get_settings import get_global_settings -from hathor.feature_activation import feature_service as feature_service_module from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.criteria import Criteria @@ -88,11 +87,11 @@ def test_feature(self) -> None: web_client = StubSite(feature_resource) calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) - get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) + get_ancestor_iteratively_mock = Mock(wraps=feature_service._get_ancestor_iteratively) with ( patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), - patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) + patch.object(FeatureService, '_get_ancestor_iteratively', 
get_ancestor_iteratively_mock), ): # at the beginning, the feature is DEFINED: add_new_blocks(manager, 10) @@ -578,11 +577,11 @@ def test_feature_from_existing_storage(self) -> None: web_client = StubSite(feature_resource) calculate_new_state_mock = Mock(wraps=feature_service1._calculate_new_state) - get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) + get_ancestor_iteratively_mock = Mock(wraps=feature_service1._get_ancestor_iteratively) with ( patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), - patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) + patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), ): assert artifacts1.tx_storage.get_vertices_count() == 3 # genesis vertices in the storage @@ -631,11 +630,11 @@ def test_feature_from_existing_storage(self) -> None: web_client = StubSite(feature_resource) calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) - get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) + get_ancestor_iteratively_mock = Mock(wraps=feature_service._get_ancestor_iteratively) with ( patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), - patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) + patch.object(FeatureService, '_get_ancestor_iteratively', get_ancestor_iteratively_mock), ): # the new storage starts populated assert artifacts2.tx_storage.get_vertices_count() == 67 From f651e2afa2bc6f4303eb0ec60957a3bb6ba2c477 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 26 Jan 2024 16:33:54 -0300 Subject: [PATCH 19/38] refactor(settings): remove some more calls to HathorSettings (#923) --- tests/cli/test_multisig_spend.py | 10 ++++---- tests/cli/test_twin_tx.py | 5 +--- tests/consensus/test_consensus.py | 5 +--- tests/consensus/test_soft_voided.py | 11 +++----- tests/consensus/test_soft_voided2.py | 7 ++---- tests/consensus/test_soft_voided3.py | 7 ++---- tests/consensus/test_soft_voided4.py | 3 --- tests/crypto/test_util.py | 3 --- tests/event/test_event_reorg.py | 17 ++++++------- .../test_feature_service.py | 4 +-- tests/others/test_init_manager.py | 3 --- tests/p2p/test_capabilities.py | 23 ++++++++--------- tests/p2p/test_get_best_blockchain.py | 19 ++++++-------- tests/p2p/test_peer_id.py | 25 ++++++++----------- tests/p2p/test_protocol.py | 9 +++---- tests/p2p/test_sync.py | 5 +--- tests/p2p/test_sync_rate_limiter.py | 7 ++---- tests/p2p/test_sync_v2.py | 3 --- tests/resources/test_mining_info.py | 13 ++++------ tests/resources/transaction/test_mempool.py | 7 ++---- tests/resources/transaction/test_pushtx.py | 7 ++---- .../resources/transaction/test_utxo_search.py | 11 +++----- tests/test_memory_reactor_clock.py | 2 ++ 23 files changed, 74 insertions(+), 132 deletions(-) diff --git a/tests/cli/test_multisig_spend.py b/tests/cli/test_multisig_spend.py index 0608003f7..9a152fdad 100644 --- a/tests/cli/test_multisig_spend.py +++ b/tests/cli/test_multisig_spend.py @@ -4,7 +4,6 @@ from structlog.testing import capture_logs from hathor.cli.multisig_spend import create_parser, execute -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput @@ -14,8 +13,6 @@ from tests import unittest from tests.utils import add_blocks_unlock_reward -settings 
= HathorSettings() - class BaseMultiSigSpendTest(unittest.TestCase): __test__ = False @@ -65,7 +62,10 @@ def test_spend_multisig(self): add_blocks_unlock_reward(self.manager) blocks_tokens = [sum(txout.value for txout in blk.outputs) for blk in blocks] available_tokens = sum(blocks_tokens) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, available_tokens)) + self.assertEqual( + self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], + WalletBalance(0, available_tokens) + ) # First we send tokens to a multisig address block_reward = blocks_tokens[0] @@ -80,7 +80,7 @@ def test_spend_multisig(self): self.clock.advance(10) wallet_balance = WalletBalance(0, available_tokens - block_reward) - self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], wallet_balance) + self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], wallet_balance) # Then we create a new tx that spends this tokens from multisig wallet tx = Transaction.create_from_struct(tx1.get_struct()) diff --git a/tests/cli/test_twin_tx.py b/tests/cli/test_twin_tx.py index 4f4aef7df..0ae5ee723 100644 --- a/tests/cli/test_twin_tx.py +++ b/tests/cli/test_twin_tx.py @@ -5,7 +5,6 @@ from structlog.testing import capture_logs from hathor.cli.twin_tx import create_parser, execute -from hathor.conf import HathorSettings from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TransactionMetadata from hathor.util import json_loadb @@ -19,8 +18,6 @@ run_server, ) -settings = HathorSettings() - class BaseTwinTxTest(unittest.TestCase): __test__ = False @@ -93,7 +90,7 @@ def test_twin_different(self): tx = response['tx'] # Twin different weight and parents - host = 'http://localhost:8085/{}/'.format(settings.API_VERSION_PREFIX) + host = 'http://localhost:8085/{}/'.format(self._settings.API_VERSION_PREFIX) params = ['--url', host, '--hash', tx['hash'], '--parents', '--weight', '14'] args = self.parser.parse_args(params) diff --git a/tests/consensus/test_consensus.py b/tests/consensus/test_consensus.py index 27daa916a..caa455a54 100644 --- a/tests/consensus/test_consensus.py +++ b/tests/consensus/test_consensus.py @@ -1,13 +1,10 @@ from unittest.mock import MagicMock -from hathor.conf import HathorSettings from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_tx from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions -settings = HathorSettings() - class BaseConsensusTestCase(unittest.TestCase): __test__ = False @@ -40,7 +37,7 @@ class MyError(Exception): tx2 = manager.tx_storage.get_transaction(tx.hash) meta2 = tx2.get_metadata() - self.assertEqual({settings.CONSENSUS_FAIL_ID}, meta2.voided_by) + self.assertEqual({self._settings.CONSENSUS_FAIL_ID}, meta2.voided_by) def test_revert_block_high_weight(self): """ A conflict transaction will be propagated. At first, it will be voided. 
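The change repeated across these test modules is mechanical: each file drops its module-level settings = HathorSettings() singleton and reads self._settings instead, which the shared test base class is expected to expose. A rough sketch of that pattern, assuming a hypothetical base class rather than the project's actual tests/unittest.py:

import unittest

from hathor.conf.get_settings import get_global_settings


class SettingsAwareTestCase(unittest.TestCase):
    """Hypothetical base class; the real one lives in tests/unittest.py."""

    def setUp(self) -> None:
        super().setUp()
        # Resolved at setUp time rather than at module import time.
        self._settings = get_global_settings()


class ExampleSettingsTest(SettingsAwareTestCase):
    def test_native_token_uid_is_bytes(self) -> None:
        self.assertIsInstance(self._settings.HATHOR_TOKEN_UID, bytes)

One practical effect is that the settings are resolved when the test runs rather than when the module is imported.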
diff --git a/tests/consensus/test_soft_voided.py b/tests/consensus/test_soft_voided.py index d039917ef..97bb0d562 100644 --- a/tests/consensus/test_soft_voided.py +++ b/tests/consensus/test_soft_voided.py @@ -1,4 +1,3 @@ -from hathor.conf import HathorSettings from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection, Simulator from hathor.simulator.trigger import StopAfterNTransactions @@ -7,8 +6,6 @@ from tests.simulation.base import SimulatorTestCase from tests.utils import add_custom_tx -settings = HathorSettings() - class BaseSoftVoidedTestCase(SimulatorTestCase): seed_config = 5988775361793628170 @@ -18,7 +15,7 @@ def assertNoParentsAreSoftVoided(self, tx): tx2 = tx.storage.get_transaction(h) tx2_meta = tx2.get_metadata() tx2_voided_by = tx2_meta.voided_by or set() - self.assertNotIn(settings.SOFT_VOIDED_ID, tx2_voided_by) + self.assertNotIn(self._settings.SOFT_VOIDED_ID, tx2_voided_by) def _run_test(self, simulator, soft_voided_tx_ids): manager1 = self.create_peer(soft_voided_tx_ids=soft_voided_tx_ids, simulator=simulator) @@ -57,7 +54,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): txA = manager2.tx_storage.get_transaction(txA_hash) metaA = txA.get_metadata() - self.assertEqual({settings.SOFT_VOIDED_ID, txA.hash}, metaA.voided_by) + self.assertEqual({self._settings.SOFT_VOIDED_ID, txA.hash}, metaA.voided_by) graphviz.labels[txA.hash] = 'txA' txB = add_custom_tx(manager2, [(txA, 0)]) @@ -123,8 +120,8 @@ def _run_test(self, simulator, soft_voided_tx_ids): for tx in manager1.tx_storage.get_all_transactions(): meta = tx.get_metadata() voided_by = meta.voided_by or set() - if settings.SOFT_VOIDED_ID in voided_by: - self.assertTrue({settings.SOFT_VOIDED_ID, tx.hash}.issubset(voided_by)) + if self._settings.SOFT_VOIDED_ID in voided_by: + self.assertTrue({self._settings.SOFT_VOIDED_ID, tx.hash}.issubset(voided_by)) # Uncomment lines below to visualize the DAG and the blockchain. 
# dot = graphviz.dot() diff --git a/tests/consensus/test_soft_voided2.py b/tests/consensus/test_soft_voided2.py index 584012f71..3e03de57a 100644 --- a/tests/consensus/test_soft_voided2.py +++ b/tests/consensus/test_soft_voided2.py @@ -1,4 +1,3 @@ -from hathor.conf import HathorSettings from hathor.graphviz import GraphvizVisualizer from hathor.simulator import Simulator from hathor.simulator.utils import gen_new_tx @@ -6,8 +5,6 @@ from tests.simulation.base import SimulatorTestCase from tests.utils import BURN_ADDRESS, add_custom_tx -settings = HathorSettings() - class BaseConsensusSimulatorTestCase(SimulatorTestCase): seed_config = 5988775361793628169 @@ -159,8 +156,8 @@ def _run_test(self, simulator, soft_voided_tx_ids): for tx in manager1.tx_storage.get_all_transactions(): meta = tx.get_metadata() voided_by = meta.voided_by or set() - if settings.SOFT_VOIDED_ID in voided_by: - self.assertTrue({settings.SOFT_VOIDED_ID, tx.hash}.issubset(voided_by)) + if self._settings.SOFT_VOIDED_ID in voided_by: + self.assertTrue({self._settings.SOFT_VOIDED_ID, tx.hash}.issubset(voided_by)) txF1 = self.txF1_0 txF2 = self.txF2_0 diff --git a/tests/consensus/test_soft_voided3.py b/tests/consensus/test_soft_voided3.py index 77e8d4d9a..92af7c201 100644 --- a/tests/consensus/test_soft_voided3.py +++ b/tests/consensus/test_soft_voided3.py @@ -1,4 +1,3 @@ -from hathor.conf import HathorSettings from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection, Simulator from hathor.simulator.trigger import StopAfterNTransactions @@ -7,8 +6,6 @@ from tests.simulation.base import SimulatorTestCase from tests.utils import add_custom_tx, gen_custom_tx -settings = HathorSettings() - class BaseSoftVoidedTestCase(SimulatorTestCase): seed_config = 5988775361793628169 @@ -18,7 +15,7 @@ def assertNoParentsAreSoftVoided(self, tx): tx2 = tx.storage.get_transaction(h) tx2_meta = tx2.get_metadata() tx2_voided_by = tx2_meta.voided_by or set() - self.assertNotIn(settings.SOFT_VOIDED_ID, tx2_voided_by) + self.assertNotIn(self._settings.SOFT_VOIDED_ID, tx2_voided_by) def _run_test(self, simulator, soft_voided_tx_ids): manager1 = self.create_peer(soft_voided_tx_ids=soft_voided_tx_ids, simulator=simulator) @@ -60,7 +57,7 @@ def _run_test(self, simulator, soft_voided_tx_ids): txA = manager2.tx_storage.get_transaction(txA_hash) metaA = txA.get_metadata() - self.assertEqual({settings.SOFT_VOIDED_ID, txA.hash}, metaA.voided_by) + self.assertEqual({self._settings.SOFT_VOIDED_ID, txA.hash}, metaA.voided_by) graphviz.labels[txA.hash] = 'txA' txB = add_custom_tx(manager2, [(txA, 0)]) diff --git a/tests/consensus/test_soft_voided4.py b/tests/consensus/test_soft_voided4.py index 3776c1aba..bd914b341 100644 --- a/tests/consensus/test_soft_voided4.py +++ b/tests/consensus/test_soft_voided4.py @@ -1,4 +1,3 @@ -from hathor.conf import HathorSettings from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection, Simulator from hathor.simulator.trigger import StopAfterNTransactions @@ -7,8 +6,6 @@ from tests.simulation.base import SimulatorTestCase from tests.utils import add_custom_tx -settings = HathorSettings() - class BaseSoftVoidedTestCase(SimulatorTestCase): seed_config = 5988775361793628169 diff --git a/tests/crypto/test_util.py b/tests/crypto/test_util.py index 333b81b98..e8ba0353b 100644 --- a/tests/crypto/test_util.py +++ b/tests/crypto/test_util.py @@ -3,7 +3,6 @@ from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import ec 
-from hathor.conf import HathorSettings from hathor.crypto.util import ( decode_address, get_address_b58_from_public_key, @@ -12,8 +11,6 @@ get_private_key_from_bytes, ) -settings = HathorSettings() - class CryptoUtilTestCase(unittest.TestCase): def setUp(self): diff --git a/tests/event/test_event_reorg.py b/tests/event/test_event_reorg.py index c941c9278..81648f456 100644 --- a/tests/event/test_event_reorg.py +++ b/tests/event/test_event_reorg.py @@ -1,12 +1,9 @@ -from hathor.conf import HathorSettings from hathor.event.model.event_type import EventType from hathor.event.storage import EventMemoryStorage from hathor.simulator.utils import add_new_blocks from tests import unittest from tests.utils import BURN_ADDRESS, get_genesis_key -settings = HathorSettings() - class BaseEventReorgTest(unittest.TestCase): __test__ = False @@ -27,10 +24,10 @@ def setUp(self): self.genesis_public_key = self.genesis_private_key.public_key() def test_reorg_events(self): - assert settings.REWARD_SPEND_MIN_BLOCKS == 10, 'this test was made with this hardcoded value in mind' + assert self._settings.REWARD_SPEND_MIN_BLOCKS == 10, 'this test was made with this hardcoded value in mind' # add some blocks - blocks = add_new_blocks(self.manager, settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) + blocks = add_new_blocks(self.manager, self._settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) # make a re-org self.log.debug('make reorg block') @@ -49,13 +46,13 @@ def test_reorg_events(self): expected_events = [ (EventType.LOAD_STARTED, {}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': settings.GENESIS_BLOCK_HASH.hex()}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': settings.GENESIS_TX1_HASH.hex()}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': settings.GENESIS_TX2_HASH.hex()}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': self._settings.GENESIS_BLOCK_HASH.hex()}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': self._settings.GENESIS_TX1_HASH.hex()}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': self._settings.GENESIS_TX2_HASH.hex()}), (EventType.LOAD_FINISHED, {}), (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[0].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': settings.GENESIS_TX2_HASH.hex()}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': settings.GENESIS_TX1_HASH.hex()}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': self._settings.GENESIS_TX2_HASH.hex()}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': self._settings.GENESIS_TX1_HASH.hex()}), (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[0].hash_hex}), (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[1].hash_hex}), (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[1].hash_hex}), diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index ebe2a118e..a66af95dc 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -17,7 +17,7 @@ import pytest -from hathor.conf import HathorSettings +from hathor.conf.get_settings import get_global_settings from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import ( BlockIsMissingSignal, @@ -35,7 +35,7 @@ def _get_blocks_and_storage() -> tuple[list[Block], TransactionStorage]: - settings = HathorSettings() + settings = get_global_settings() genesis_hash = settings.GENESIS_BLOCK_HASH blocks: list[Block] = [] feature_activation_bits = [ diff --git a/tests/others/test_init_manager.py b/tests/others/test_init_manager.py index 8ca7228a2..eaa96e454 100644 --- 
a/tests/others/test_init_manager.py +++ b/tests/others/test_init_manager.py @@ -1,6 +1,5 @@ from typing import Iterator -from hathor.conf import HathorSettings from hathor.pubsub import PubSubManager from hathor.simulator.utils import add_new_block, add_new_blocks from hathor.transaction import BaseTransaction @@ -9,8 +8,6 @@ from tests.unittest import TestBuilder from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions -settings = HathorSettings() - class ModifiedTransactionMemoryStorage(TransactionMemoryStorage): def __init__(self, *args, **kwargs): diff --git a/tests/p2p/test_capabilities.py b/tests/p2p/test_capabilities.py index a9bae9c02..0380abaf4 100644 --- a/tests/p2p/test_capabilities.py +++ b/tests/p2p/test_capabilities.py @@ -1,16 +1,13 @@ -from hathor.conf import HathorSettings from hathor.p2p.sync_v1.agent import NodeSyncTimestamp from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.simulator import FakeConnection from tests import unittest -settings = HathorSettings() - class SyncV1HathorCapabilitiesTestCase(unittest.SyncV1Params, unittest.TestCase): def test_capabilities(self): network = 'testnet' - manager1 = self.create_peer(network, capabilities=[settings.CAPABILITY_WHITELIST]) + manager1 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST]) manager2 = self.create_peer(network, capabilities=[]) conn = FakeConnection(manager1, manager2) @@ -26,8 +23,8 @@ def test_capabilities(self): self.assertIsInstance(conn._proto1.state.sync_agent, NodeSyncTimestamp) self.assertIsInstance(conn._proto2.state.sync_agent, NodeSyncTimestamp) - manager3 = self.create_peer(network, capabilities=[settings.CAPABILITY_WHITELIST]) - manager4 = self.create_peer(network, capabilities=[settings.CAPABILITY_WHITELIST]) + manager3 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST]) + manager4 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST]) conn2 = FakeConnection(manager3, manager4) @@ -45,9 +42,9 @@ def test_capabilities(self): class SyncV2HathorCapabilitiesTestCase(unittest.SyncV2Params, unittest.TestCase): def test_capabilities(self): network = 'testnet' - manager1 = self.create_peer(network, capabilities=[settings.CAPABILITY_WHITELIST, - settings.CAPABILITY_SYNC_VERSION]) - manager2 = self.create_peer(network, capabilities=[settings.CAPABILITY_SYNC_VERSION]) + manager1 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST, + self._settings.CAPABILITY_SYNC_VERSION]) + manager2 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_SYNC_VERSION]) conn = FakeConnection(manager1, manager2) @@ -62,10 +59,10 @@ def test_capabilities(self): self.assertIsInstance(conn._proto1.state.sync_agent, NodeBlockSync) self.assertIsInstance(conn._proto2.state.sync_agent, NodeBlockSync) - manager3 = self.create_peer(network, capabilities=[settings.CAPABILITY_WHITELIST, - settings.CAPABILITY_SYNC_VERSION]) - manager4 = self.create_peer(network, capabilities=[settings.CAPABILITY_WHITELIST, - settings.CAPABILITY_SYNC_VERSION]) + manager3 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST, + self._settings.CAPABILITY_SYNC_VERSION]) + manager4 = self.create_peer(network, capabilities=[self._settings.CAPABILITY_WHITELIST, + self._settings.CAPABILITY_SYNC_VERSION]) conn2 = FakeConnection(manager3, manager4) diff --git a/tests/p2p/test_get_best_blockchain.py b/tests/p2p/test_get_best_blockchain.py index 11e71db34..4d00ea55b 100644 
--- a/tests/p2p/test_get_best_blockchain.py +++ b/tests/p2p/test_get_best_blockchain.py @@ -1,6 +1,5 @@ from twisted.internet.defer import inlineCallbacks -from hathor.conf import HathorSettings from hathor.indexes.height_index import HeightInfo from hathor.p2p.messages import ProtocolMessages from hathor.p2p.resources import StatusResource @@ -13,8 +12,6 @@ from tests.resources.base_resource import StubSite from tests.simulation.base import SimulatorTestCase -settings = HathorSettings() - class BaseGetBestBlockchainTestCase(SimulatorTestCase): @@ -51,8 +48,8 @@ def test_get_best_blockchain(self): self.assertIsNotNone(protocol2.capabilities) # assert the protocol has the GET_BEST_BLOCKCHAIN capability - self.assertIn(settings.CAPABILITY_GET_BEST_BLOCKCHAIN, protocol1.capabilities) - self.assertIn(settings.CAPABILITY_GET_BEST_BLOCKCHAIN, protocol2.capabilities) + self.assertIn(self._settings.CAPABILITY_GET_BEST_BLOCKCHAIN, protocol1.capabilities) + self.assertIn(self._settings.CAPABILITY_GET_BEST_BLOCKCHAIN, protocol2.capabilities) # assert the protocol is in ReadyState state1 = protocol1.state @@ -81,8 +78,8 @@ def test_get_best_blockchain(self): state1.send_get_best_blockchain() state2.send_get_best_blockchain() self.simulator.run(60) - self.assertEqual(settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS, len(state1.peer_best_blockchain)) - self.assertEqual(settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS, len(state2.peer_best_blockchain)) + self.assertEqual(self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS, len(state1.peer_best_blockchain)) + self.assertEqual(self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS, len(state2.peer_best_blockchain)) self.assertIsInstance(state1.peer_best_blockchain[0], HeightInfo) self.assertIsInstance(state2.peer_best_blockchain[0], HeightInfo) @@ -211,8 +208,8 @@ def test_node_without_get_best_blockchain_capability(self): manager2 = self.create_peer() cababilities_without_get_best_blockchain = [ - settings.CAPABILITY_WHITELIST, - settings.CAPABILITY_SYNC_VERSION, + self._settings.CAPABILITY_WHITELIST, + self._settings.CAPABILITY_SYNC_VERSION, ] manager2.capabilities = cababilities_without_get_best_blockchain @@ -397,7 +394,7 @@ def test_best_blockchain_from_status_resource(self): # connected_peers # assert default peer_best_blockchain length peer_best_blockchain = connections['connected_peers'][0]['peer_best_blockchain'] - self.assertEqual(len(peer_best_blockchain), settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS) + self.assertEqual(len(peer_best_blockchain), self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS) # assert a raw_height_info can be converted to HeightInfo try: @@ -418,7 +415,7 @@ def test_best_blockchain_from_status_resource(self): # dag # assert default peer_best_blockchain length peer_best_blockchain = dag['best_blockchain'] - self.assertEqual(len(peer_best_blockchain), settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS) + self.assertEqual(len(peer_best_blockchain), self._settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS) # assert a raw_height_info can be converted to HeightInfo try: diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index b9add5faa..c3e8be202 100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -2,14 +2,11 @@ import shutil import tempfile -from hathor.conf import HathorSettings from hathor.p2p.peer_id import InvalidPeerIdException, PeerId from hathor.p2p.peer_storage import PeerStorage from tests import unittest from tests.unittest import TestBuilder -settings = HathorSettings() - class PeerIdTest(unittest.TestCase): def test_invalid_id(self): @@ 
-134,13 +131,13 @@ def test_retry_connection(self): p = PeerId() interval = p.retry_interval p.increment_retry_attempt(0) - self.assertEqual(settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER*interval, p.retry_interval) + self.assertEqual(self._settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER*interval, p.retry_interval) self.assertEqual(interval, p.retry_timestamp) # when retry_interval is already 180 - p.retry_interval = settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 10 + p.retry_interval = self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 10 p.increment_retry_attempt(0) - self.assertEqual(settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL, p.retry_interval) + self.assertEqual(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL, p.retry_interval) # reset p.reset_retry_timestamp() @@ -180,27 +177,27 @@ def test_retry_logic(self): peer.increment_retry_attempt(0) self.assertFalse(peer.can_retry(retry_interval)) - retry_interval *= settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER + retry_interval *= self._settings.PEER_CONNECTION_RETRY_INTERVAL_MULTIPLIER self.assertFalse(peer.can_retry(retry_interval - 1)) self.assertTrue(peer.can_retry(retry_interval)) self.assertTrue(peer.can_retry(retry_interval)) # Retry until we reach max retry interval. - while peer.retry_interval < settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL: + while peer.retry_interval < self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL: peer.increment_retry_attempt(0) # We need to call it once more because peer.retry_interval is always one step behind. peer.increment_retry_attempt(0) # Confirm we are at the max retry interval. - self.assertFalse(peer.can_retry(settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - 1)) - self.assertTrue(peer.can_retry(settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL)) - self.assertTrue(peer.can_retry(settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 1)) + self.assertFalse(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - 1)) + self.assertTrue(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL)) + self.assertTrue(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 1)) # It shouldn't change with another retry. peer.increment_retry_attempt(0) - self.assertFalse(peer.can_retry(settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - 1)) - self.assertTrue(peer.can_retry(settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL)) - self.assertTrue(peer.can_retry(settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 1)) + self.assertFalse(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL - 1)) + self.assertTrue(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL)) + self.assertTrue(peer.can_retry(self._settings.PEER_CONNECTION_RETRY_MAX_RETRY_INTERVAL + 1)) # Finally, reset it. 
peer.reset_retry_timestamp() diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index f643ed7d2..1aadea540 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -5,15 +5,12 @@ from twisted.internet.defer import inlineCallbacks from twisted.python.failure import Failure -from hathor.conf import HathorSettings from hathor.p2p.peer_id import PeerId from hathor.p2p.protocol import HathorLineReceiver, HathorProtocol from hathor.simulator import FakeConnection from hathor.util import json_dumps from tests import unittest -settings = HathorSettings() - class BaseHathorProtocolTestCase(unittest.TestCase): __test__ = False @@ -165,7 +162,7 @@ def test_invalid_hello5(self): # hello with clocks too far apart self.conn.tr1.clear() data = self.conn.proto2.state._get_hello_data() - data['timestamp'] = data['timestamp'] + settings.MAX_FUTURE_TIMESTAMP_ALLOWED/2 + 1 + data['timestamp'] = data['timestamp'] + self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED/2 + 1 self._send_cmd( self.conn.proto1, 'HELLO', @@ -295,7 +292,7 @@ def test_on_disconnect_after_peer_id(self): self.assertNotIn(self.peer_id2.id, self.manager1.connections.peer_storage) def test_idle_connection(self): - self.clock.advance(settings.PEER_IDLE_TIMEOUT - 10) + self.clock.advance(self._settings.PEER_IDLE_TIMEOUT - 10) self.assertIsConnected(self.conn) self.clock.advance(15) self.assertIsNotConnected(self.conn) @@ -443,7 +440,7 @@ def test_get_data(self): payload = { 'first_block_hash': missing_tx, 'last_block_hash': missing_tx, - 'start_from': [settings.GENESIS_BLOCK_HASH.hex()] + 'start_from': [self._settings.GENESIS_BLOCK_HASH.hex()] } yield self._send_cmd(self.conn.proto1, 'GET-TRANSACTIONS-BFS', json_dumps(payload)) self._check_result_only_cmd(self.conn.peek_tr1_value(), b'NOT-FOUND') diff --git a/tests/p2p/test_sync.py b/tests/p2p/test_sync.py index bad0f654f..e387bba89 100644 --- a/tests/p2p/test_sync.py +++ b/tests/p2p/test_sync.py @@ -1,7 +1,6 @@ from twisted.python.failure import Failure from hathor.checkpoint import Checkpoint as cp -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.p2p.protocol import PeerIdState from hathor.p2p.sync_version import SyncVersion @@ -10,8 +9,6 @@ from tests import unittest from tests.utils import add_blocks_unlock_reward -settings = HathorSettings() - class BaseHathorSyncMethodsTestCase(unittest.TestCase): __test__ = False @@ -425,7 +422,7 @@ def test_downloader_retry_reorder(self): # disconnect and wait for the download of tx_A to timeout but not yet the download of tx_B self.conn1.disconnect(Failure(Exception('testing'))) self.conn2.disconnect(Failure(Exception('testing'))) - self.clock.advance(settings.GET_DATA_TIMEOUT - 10.0) + self.clock.advance(self._settings.GET_DATA_TIMEOUT - 10.0) # reconnect peer_X and peer_Y self.conn1 = FakeConnection(self.manager_bug, self.manager1) diff --git a/tests/p2p/test_sync_rate_limiter.py b/tests/p2p/test_sync_rate_limiter.py index f9d21114a..9433c7ade 100644 --- a/tests/p2p/test_sync_rate_limiter.py +++ b/tests/p2p/test_sync_rate_limiter.py @@ -154,11 +154,8 @@ def test_sync_rate_limiter_delayed_calls_stop(self): sync1.send_tips() self.assertEqual(len(sync1._send_tips_call_later), 0) - from hathor.conf import HathorSettings - settings = HathorSettings() - - # add delayed calls to the the maximum - max_delayed_calls = settings.MAX_GET_TIPS_DELAYED_CALLS + # add delayed calls to the maximum + max_delayed_calls = self._settings.MAX_GET_TIPS_DELAYED_CALLS for count in 
range(max_delayed_calls): sync1.send_tips() diff --git a/tests/p2p/test_sync_v2.py b/tests/p2p/test_sync_v2.py index e393431c3..68be619de 100644 --- a/tests/p2p/test_sync_v2.py +++ b/tests/p2p/test_sync_v2.py @@ -5,7 +5,6 @@ from twisted.internet.defer import inlineCallbacks, succeed from twisted.python.failure import Failure -from hathor.conf import HathorSettings from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer_id import PeerId from hathor.p2p.sync_v2.agent import _HeightInfo @@ -21,8 +20,6 @@ from tests.simulation.base import SimulatorTestCase from tests.utils import HAS_ROCKSDB -settings = HathorSettings() - class BaseRandomSimulatorTestCase(SimulatorTestCase): __test__ = True diff --git a/tests/resources/test_mining_info.py b/tests/resources/test_mining_info.py index cde6fcc28..2f028f624 100644 --- a/tests/resources/test_mining_info.py +++ b/tests/resources/test_mining_info.py @@ -1,13 +1,10 @@ from twisted.internet.defer import inlineCallbacks -from hathor.conf import HathorSettings from hathor.p2p.resources import MiningInfoResource from hathor.simulator.utils import add_new_blocks from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -settings = HathorSettings() - class BaseGetMiningInfoTest(_BaseResourceTest._ResourceTest): __test__ = False @@ -54,15 +51,15 @@ def test_mined_tokens(self): response = yield self.web.get("mined_tokens") data = response.json_value() self.assertEqual(data['blocks'], 5) - self.assertEqual(data['mined_tokens'], 5*settings.INITIAL_TOKENS_PER_BLOCK) + self.assertEqual(data['mined_tokens'], 5*self._settings.INITIAL_TOKENS_PER_BLOCK) - add_new_blocks(self.manager, settings.BLOCKS_PER_HALVING + 15, advance_clock=1) - mined_tokens = (settings.BLOCKS_PER_HALVING * settings.INITIAL_TOKENS_PER_BLOCK + - 20 * settings.INITIAL_TOKENS_PER_BLOCK // 2) + add_new_blocks(self.manager, self._settings.BLOCKS_PER_HALVING + 15, advance_clock=1) + mined_tokens = (self._settings.BLOCKS_PER_HALVING * self._settings.INITIAL_TOKENS_PER_BLOCK + + 20 * self._settings.INITIAL_TOKENS_PER_BLOCK // 2) response = yield self.web.get("mined_tokens") data = response.json_value() - self.assertEqual(data['blocks'], settings.BLOCKS_PER_HALVING + 20) + self.assertEqual(data['blocks'], self._settings.BLOCKS_PER_HALVING + 20) self.assertEqual(data['mined_tokens'], mined_tokens) diff --git a/tests/resources/transaction/test_mempool.py b/tests/resources/transaction/test_mempool.py index c98075b13..45e29d1ab 100644 --- a/tests/resources/transaction/test_mempool.py +++ b/tests/resources/transaction/test_mempool.py @@ -1,14 +1,11 @@ from twisted.internet.defer import inlineCallbacks -from hathor.conf import HathorSettings from hathor.simulator.utils import add_new_blocks from hathor.transaction.resources import MempoolResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest from tests.utils import add_blocks_unlock_reward, add_new_transactions -settings = HathorSettings() - class BaseMempoolTest(_BaseResourceTest._ResourceTest): __test__ = False @@ -57,12 +54,12 @@ def test_get(self): add_new_blocks(self.manager, 1, advance_clock=1) # Add more than api limit and check truncated return - add_new_transactions(self.manager, settings.MEMPOOL_API_TX_LIMIT + 1, advance_clock=1) + add_new_transactions(self.manager, self._settings.MEMPOOL_API_TX_LIMIT + 1, advance_clock=1) response5 = yield self.web.get("mempool") data5 = response5.json_value() self.assertTrue(data5['success']) # default limit is 
100 - self.assertEqual(len(data5['transactions']), settings.MEMPOOL_API_TX_LIMIT) + self.assertEqual(len(data5['transactions']), self._settings.MEMPOOL_API_TX_LIMIT) class SyncV1MempoolTest(unittest.SyncV1Params, BaseMempoolTest): diff --git a/tests/resources/transaction/test_pushtx.py b/tests/resources/transaction/test_pushtx.py index 10392c214..e861283df 100644 --- a/tests/resources/transaction/test_pushtx.py +++ b/tests/resources/transaction/test_pushtx.py @@ -2,7 +2,6 @@ from twisted.internet.defer import inlineCallbacks -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput @@ -15,8 +14,6 @@ from tests.resources.base_resource import StubSite, _BaseResourceTest from tests.utils import add_blocks_unlock_reward, add_tx_with_data_script, create_tokens -settings = HathorSettings() - class BasePushTxTest(_BaseResourceTest._ResourceTest): __test__ = False @@ -184,7 +181,7 @@ def test_script_too_big(self) -> Generator: tx = self.get_tx() # Invalid tx (output script is too long) - tx.outputs[0].script = b'*' * (settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + 1) + tx.outputs[0].script = b'*' * (self._settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + 1) self.manager.cpu_mining_service.resolve(tx) tx_hex = tx.get_struct().hex() response = yield self.push_tx({'hex_tx': tx_hex}) @@ -254,7 +251,7 @@ def test_spending_voided(self) -> Generator: # Now we set this tx2 as voided and try to push a tx3 that spends tx2 tx_meta = tx2.get_metadata() - tx_meta.voided_by = {settings.SOFT_VOIDED_ID} + tx_meta.voided_by = {self._settings.SOFT_VOIDED_ID} self.manager.tx_storage.save_transaction(tx2, only_metadata=True) # Try to push again with soft voided id as voided by diff --git a/tests/resources/transaction/test_utxo_search.py b/tests/resources/transaction/test_utxo_search.py index ecd73c918..5929f7314 100644 --- a/tests/resources/transaction/test_utxo_search.py +++ b/tests/resources/transaction/test_utxo_search.py @@ -1,6 +1,5 @@ from twisted.internet.defer import inlineCallbacks -from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks from hathor.transaction.resources import UtxoSearchResource @@ -8,8 +7,6 @@ from tests.resources.base_resource import StubSite, _BaseResourceTest from tests.utils import add_blocks_unlock_reward -settings = HathorSettings() - class BaseUtxoSearchTest(_BaseResourceTest._ResourceTest): __test__ = False @@ -62,7 +59,7 @@ def test_simple_gets(self): 'index': 0, 'amount': 6400, 'timelock': None, - 'heightlock': b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + 'heightlock': b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, } for b in blocks[:1]]) # Success non-empty address with medium amount, will require more than one output @@ -75,7 +72,7 @@ def test_simple_gets(self): 'index': 0, 'amount': 6400, 'timelock': None, - 'heightlock': b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + 'heightlock': b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, } for b in blocks[4:1:-1]]) # Success non-empty address with exact amount, will require all UTXOs @@ -88,7 +85,7 @@ def test_simple_gets(self): 'index': 0, 'amount': 6400, 'timelock': None, - 'heightlock': b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + 'heightlock': b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, } for b in blocks[::-1]]) # Success non-empty 
address with excessive amount, will require all UTXOs, even if it's not enough @@ -101,7 +98,7 @@ def test_simple_gets(self): 'index': 0, 'amount': 6400, 'timelock': None, - 'heightlock': b.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS, + 'heightlock': b.get_metadata().height + self._settings.REWARD_SPEND_MIN_BLOCKS, } for b in blocks[::-1]]) diff --git a/tests/test_memory_reactor_clock.py b/tests/test_memory_reactor_clock.py index 1ba9eda12..48e8a6d48 100644 --- a/tests/test_memory_reactor_clock.py +++ b/tests/test_memory_reactor_clock.py @@ -16,6 +16,8 @@ class TestMemoryReactorClock(MemoryReactorClock): + __test__ = False + def run(self): """ We have to override MemoryReactor.run() because the original Twisted implementation weirdly calls stop() inside From b6748ee5e1f32b1437ba68913ece0574977e0925 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Mon, 29 Jan 2024 00:20:39 -0300 Subject: [PATCH 20/38] fix(feature-activation): add missing metadata migration (#927) --- hathor/transaction/base_transaction.py | 10 +++++ ..._feature_activation_bit_counts_metadata.py | 8 +--- ...feature_activation_bit_counts_metadata2.py | 41 +++++++++++++++++++ .../storage/transaction_storage.py | 4 +- 4 files changed, 56 insertions(+), 7 deletions(-) create mode 100644 hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata2.py diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 532538969..958c59c05 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -707,6 +707,7 @@ def update_initial_metadata(self, *, save: bool = True) -> None: self._update_height_metadata() self._update_parents_children_metadata() self._update_reward_lock_metadata() + self._update_feature_activation_bit_counts() if save: assert self.storage is not None self.storage.save_transaction(self, only_metadata=True) @@ -732,6 +733,15 @@ def _update_parents_children_metadata(self) -> None: metadata.children.append(self.hash) self.storage.save_transaction(parent, only_metadata=True) + def _update_feature_activation_bit_counts(self) -> None: + """Update the block's feature_activation_bit_counts.""" + if not self.is_block: + return + from hathor.transaction import Block + assert isinstance(self, Block) + # This method lazily calculates and stores the value in metadata + self.get_feature_activation_bit_counts() + def update_timestamp(self, now: int) -> None: """Update this tx's timestamp diff --git a/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py b/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py index e231fdf46..20c93b3b5 100644 --- a/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py +++ b/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py @@ -17,7 +17,6 @@ from structlog import get_logger from hathor.transaction.storage.migrations import BaseMigration -from hathor.util import progress if TYPE_CHECKING: from hathor.transaction.storage import TransactionStorage @@ -33,9 +32,6 @@ def get_db_name(self) -> str: return 'add_feature_activation_bit_counts_metadata' def run(self, storage: 'TransactionStorage') -> None: + # We can skip this migration as it will run again in `add_feature_activation_bit_counts_metadata2`. 
log = logger.new() - topological_iterator = storage.topological_iterator() - - for vertex in progress(topological_iterator, log=log, total=None): - if vertex.is_block: - vertex.update_initial_metadata() + log.info('Skipping unnecessary migration.') diff --git a/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata2.py b/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata2.py new file mode 100644 index 000000000..eb59daa6b --- /dev/null +++ b/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata2.py @@ -0,0 +1,41 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING + +from structlog import get_logger + +from hathor.transaction.storage.migrations import BaseMigration +from hathor.util import progress + +if TYPE_CHECKING: + from hathor.transaction.storage import TransactionStorage + +logger = get_logger() + + +class Migration(BaseMigration): + def skip_empty_db(self) -> bool: + return True + + def get_db_name(self) -> str: + return 'add_feature_activation_bit_counts_metadata2' + + def run(self, storage: 'TransactionStorage') -> None: + log = logger.new() + topological_iterator = storage.topological_iterator() + + for vertex in progress(topological_iterator, log=log, total=None): + if vertex.is_block: + vertex.update_initial_metadata() diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 8b441ed31..4a94f98f9 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -39,6 +39,7 @@ BaseMigration, MigrationState, add_feature_activation_bit_counts_metadata, + add_feature_activation_bit_counts_metadata2, add_min_height_metadata, remove_first_nop_features, ) @@ -89,7 +90,8 @@ class TransactionStorage(ABC): _migration_factories: list[type[BaseMigration]] = [ add_min_height_metadata.Migration, add_feature_activation_bit_counts_metadata.Migration, - remove_first_nop_features.Migration + remove_first_nop_features.Migration, + add_feature_activation_bit_counts_metadata2.Migration, ] _migrations: list[BaseMigration] From 94d29e3500ec01af74d3985f38054ccc7de79cdb Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Tue, 30 Jan 2024 01:48:39 -0300 Subject: [PATCH 21/38] chore(feature-activation): phased testing adjustments (#931) --- hathor/conf/testnet.py | 34 +----------- hathor/conf/testnet.yml | 32 +---------- hathor/feature_activation/feature.py | 5 +- hathor/manager.py | 2 +- .../migrations/remove_second_nop_features.py | 54 +++++++++++++++++++ .../storage/transaction_storage.py | 2 + 6 files changed, 62 insertions(+), 67 deletions(-) create mode 100644 hathor/transaction/storage/migrations/remove_second_nop_features.py diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index fdfa7ab61..bfde279dc 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -14,8 +14,6 @@ from hathor.checkpoint import Checkpoint as 
cp from hathor.conf.settings import HathorSettings -from hathor.feature_activation.feature import Feature -from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.settings import Settings as FeatureActivationSettings SETTINGS = HathorSettings( @@ -55,36 +53,6 @@ cp(1_600_000, bytes.fromhex('00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2')), ], FEATURE_ACTIVATION=FeatureActivationSettings( - evaluation_interval=40_320, - default_threshold=30240, - features={ - Feature.NOP_FEATURE_4: Criteria( - bit=0, - start_height=3_386_880, # N (right now the best block is 3_346_600 on testnet) - timeout_height=3_467_520, # N + 2 * 40320 (4 weeks after the start) - minimum_activation_height=3_507_840, # N + 3 * 40320 (6 weeks after the start) - lock_in_on_timeout=False, - version='0.57.0', - signal_support_by_default=True - ), - Feature.NOP_FEATURE_5: Criteria( - bit=1, - start_height=3_386_880, # N (right now the best block is 3_346_600 on testnet) - timeout_height=3_467_520, # N + 2 * 40320 (4 weeks after the start) - minimum_activation_height=0, - lock_in_on_timeout=True, - version='0.57.0', - signal_support_by_default=False - ), - Feature.NOP_FEATURE_6: Criteria( - bit=2, - start_height=3_386_880, # N (right now the best block is 3_346_600 on testnet) - timeout_height=3_467_520, # N + 2 * 40320 (4 weeks after the start) - minimum_activation_height=0, - lock_in_on_timeout=False, - version='0.57.0', - signal_support_by_default=False - ) - } + default_threshold=15_120, # 15120 = 75% of evaluation_interval (20160) ) ) diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index 2fd1abf96..d3be15dde 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -37,34 +37,4 @@ CHECKPOINTS: 1_600_000: 00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2 FEATURE_ACTIVATION: - evaluation_interval: 40_320 - default_threshold: 30_240 # 30240 = 75% of evaluation_interval (40320) - features: - #### Second Phased Testing features #### - - NOP_FEATURE_4: - bit: 0 - start_height: 3_386_880 # N (right now the best block is 3_346_600 on testnet) - timeout_height: 3_467_520 # N + 2 * 40320 (4 weeks after the start) - minimum_activation_height: 3_507_840 # N + 3 * 40320 (6 weeks after the start) - lock_in_on_timeout: false - version: 0.57.0 - signal_support_by_default: true - - NOP_FEATURE_5: - bit: 1 - start_height: 3_386_880 # N (right now the best block is 3_346_600 on testnet) - timeout_height: 3_467_520 # N + 2 * 40320 (4 weeks after the start) - minimum_activation_height: 0 - lock_in_on_timeout: true - version: 0.57.0 - signal_support_by_default: false - - NOP_FEATURE_6: - bit: 2 - start_height: 3_386_880 # N (right now the best block is 3_346_600 on testnet) - timeout_height: 3_467_520 # N + 2 * 40320 (4 weeks after the start) - minimum_activation_height: 0 - lock_in_on_timeout: false - version: 0.57.0 - signal_support_by_default: false + default_threshold: 15_120 # 15120 = 75% of evaluation_interval (20160) diff --git a/hathor/feature_activation/feature.py b/hathor/feature_activation/feature.py index c056b21d1..eb6a6e897 100644 --- a/hathor/feature_activation/feature.py +++ b/hathor/feature_activation/feature.py @@ -23,12 +23,13 @@ class Feature(Enum): should NOT be changed either, as configuration uses them for setting feature activation criteria. 
""" - # First Phased Testing features + # Mainnet Phased Testing features NOP_FEATURE_1 = 'NOP_FEATURE_1' NOP_FEATURE_2 = 'NOP_FEATURE_2' NOP_FEATURE_3 = 'NOP_FEATURE_3' - # Second Phased Testing features + # TODO: Those can be removed in a future PR + # Testnet Phased Testing features NOP_FEATURE_4 = 'NOP_FEATURE_4' NOP_FEATURE_5 = 'NOP_FEATURE_5' NOP_FEATURE_6 = 'NOP_FEATURE_6' diff --git a/hathor/manager.py b/hathor/manager.py index 731e70c0a..10af61697 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -1077,7 +1077,7 @@ def _log_feature_states(self, vertex: BaseTransaction) -> None: features_states=state_by_feature ) - features = [Feature.NOP_FEATURE_4, Feature.NOP_FEATURE_5, Feature.NOP_FEATURE_6] + features = [Feature.NOP_FEATURE_1, Feature.NOP_FEATURE_2] for feature in features: self._log_if_feature_is_active(vertex, feature) diff --git a/hathor/transaction/storage/migrations/remove_second_nop_features.py b/hathor/transaction/storage/migrations/remove_second_nop_features.py new file mode 100644 index 000000000..dd322b1f7 --- /dev/null +++ b/hathor/transaction/storage/migrations/remove_second_nop_features.py @@ -0,0 +1,54 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING + +from structlog import get_logger + +from hathor.conf.get_settings import get_global_settings +from hathor.transaction.storage.migrations import BaseMigration +from hathor.util import progress + +if TYPE_CHECKING: + from hathor.transaction.storage import TransactionStorage + +logger = get_logger() + + +class Migration(BaseMigration): + def skip_empty_db(self) -> bool: + return True + + def get_db_name(self) -> str: + return 'remove_second_nop_features' + + def run(self, storage: 'TransactionStorage') -> None: + """ + This migration clears the Feature Activation metadata related to the second Phased Testing on testnet. + """ + settings = get_global_settings() + log = logger.new() + + if settings.NETWORK_NAME != 'testnet-golf': + # If it's not testnet, we don't have to clear anything. 
+ log.info('Skipping testnet-only migration.') + return + + topological_iterator = storage.topological_iterator() + + for vertex in progress(topological_iterator, log=log, total=None): + if vertex.is_block: + meta = vertex.get_metadata() + meta.feature_states = None + storage.save_transaction(vertex, only_metadata=True) diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 4a94f98f9..5b56431cb 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -42,6 +42,7 @@ add_feature_activation_bit_counts_metadata2, add_min_height_metadata, remove_first_nop_features, + remove_second_nop_features, ) from hathor.transaction.storage.tx_allow_scope import TxAllowScope, tx_allow_context from hathor.transaction.transaction import Transaction @@ -92,6 +93,7 @@ class TransactionStorage(ABC): add_feature_activation_bit_counts_metadata.Migration, remove_first_nop_features.Migration, add_feature_activation_bit_counts_metadata2.Migration, + remove_second_nop_features.Migration, ] _migrations: list[BaseMigration] From a9a643dd4865468d605d90be1b2e7a25328676a2 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Fri, 5 Jan 2024 20:21:25 -0300 Subject: [PATCH 22/38] feat(merged-mining): configure new max merkle path length on testnet --- hathor/conf/settings.py | 4 ++ hathor/conf/testnet.py | 16 +++++ hathor/conf/testnet.yml | 12 ++++ hathor/feature_activation/feature.py | 2 + hathor/merged_mining/coordinator.py | 2 +- hathor/transaction/aux_pow.py | 24 +++---- .../merge_mined_block_verifier.py | 21 ++++++- hathor/verification/vertex_verifiers.py | 2 +- tests/tx/test_tx.py | 62 ++++++++++++++++--- 9 files changed, 123 insertions(+), 22 deletions(-) diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index 09a90dd2d..bdd441f3e 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -419,6 +419,10 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: # Time in seconds to request the best blockchain from peers. BEST_BLOCKCHAIN_INTERVAL: int = 5 # seconds + # Merged mining settings. The old value is going to be replaced by the new value through Feature Activation. + OLD_MAX_MERKLE_PATH_LENGTH: int = 12 + NEW_MAX_MERKLE_PATH_LENGTH: int = 20 + @classmethod def from_yaml(cls, *, filepath: str) -> 'HathorSettings': """Takes a filepath to a yaml file and returns a validated HathorSettings instance.""" diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index bfde279dc..c956c48c5 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -14,6 +14,8 @@ from hathor.checkpoint import Checkpoint as cp from hathor.conf.settings import HathorSettings +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.settings import Settings as FeatureActivationSettings SETTINGS = HathorSettings( @@ -54,5 +56,19 @@ ], FEATURE_ACTIVATION=FeatureActivationSettings( default_threshold=15_120, # 15120 = 75% of evaluation_interval (20160) + features={ + Feature.INCREASE_MAX_MERKLE_PATH_LENGTH: Criteria( + bit=3, + # N = 3_548_160 + # Expected to be reached around Sunday, 2024-02-04. + # Right now the best block is 3_521_000 on testnet (2024-01-26). 
+ start_height=3_548_160, + timeout_height=3_588_480, # N + 2 * 20160 (2 weeks after the start) + minimum_activation_height=0, + lock_in_on_timeout=False, + version='0.59.0', + signal_support_by_default=True, + ) + } ) ) diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index d3be15dde..554df5247 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -38,3 +38,15 @@ CHECKPOINTS: FEATURE_ACTIVATION: default_threshold: 15_120 # 15120 = 75% of evaluation_interval (20160) + features: + INCREASE_MAX_MERKLE_PATH_LENGTH: + bit: 3 + # N = 3_548_160 + # Expected to be reached around Sunday, 2024-02-04. + # Right now the best block is 3_521_000 on testnet (2024-01-26). + start_height: 3_548_160 + timeout_height: 3_588_480 + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.59.0 + signal_support_by_default: true diff --git a/hathor/feature_activation/feature.py b/hathor/feature_activation/feature.py index eb6a6e897..56082def8 100644 --- a/hathor/feature_activation/feature.py +++ b/hathor/feature_activation/feature.py @@ -33,3 +33,5 @@ class Feature(Enum): NOP_FEATURE_4 = 'NOP_FEATURE_4' NOP_FEATURE_5 = 'NOP_FEATURE_5' NOP_FEATURE_6 = 'NOP_FEATURE_6' + + INCREASE_MAX_MERKLE_PATH_LENGTH = 'INCREASE_MAX_MERKLE_PATH_LENGTH' diff --git a/hathor/merged_mining/coordinator.py b/hathor/merged_mining/coordinator.py index 61c9c2a65..1a9ac39ff 100644 --- a/hathor/merged_mining/coordinator.py +++ b/hathor/merged_mining/coordinator.py @@ -624,7 +624,7 @@ def handle_submit(self, params: list[Any], msgid: Optional[str]) -> None: try: aux_pow = job.build_aux_pow(work) - aux_pow.verify(block_base_hash) + aux_pow.verify_magic_number(block_base_hash) except TxValidationError as e: self.log.warn('invalid work', job_id=work.job_id, error=e) self.send_error(INVALID_SOLUTION, data={'message': 'Job has invalid work.'}) diff --git a/hathor/transaction/aux_pow.py b/hathor/transaction/aux_pow.py index 0a18ee2aa..c6772ac88 100644 --- a/hathor/transaction/aux_pow.py +++ b/hathor/transaction/aux_pow.py @@ -19,9 +19,6 @@ logger = get_logger() -MAX_MERKLE_PATH_LENGTH: int = 12 - - class BitcoinAuxPow(NamedTuple): header_head: bytes # 36 bytes coinbase_head: bytes # variable length (at least 47 bytes) @@ -44,23 +41,28 @@ def calculate_hash(self, base_block_hash: bytes) -> bytes: merkle_root = bytes(reversed(build_merkle_root_from_path([coinbase_tx_hash] + self.merkle_path))) return sha256d_hash(self.header_head + merkle_root + self.header_tail) - def verify(self, _base_block_hash: bytes) -> None: + def verify(self, _base_block_hash: bytes, max_merkle_path_length: int) -> None: """ Check for inconsistencies, raises instance of TxValidationError on error. 
""" + self.verify_magic_number(_base_block_hash) + self.verify_merkle_path(_base_block_hash, max_merkle_path_length) + + def verify_magic_number(self, _base_block_hash: bytes) -> None: + """Check that the `MAGIC_NUMBER` is present and in the correct index.""" from hathor.merged_mining import MAGIC_NUMBER - from hathor.transaction.exceptions import ( - AuxPowLongMerklePathError, - AuxPowNoMagicError, - AuxPowUnexpectedMagicError, - ) + from hathor.transaction.exceptions import AuxPowNoMagicError, AuxPowUnexpectedMagicError magic_index = self.coinbase_head.find(MAGIC_NUMBER) if magic_index == -1: raise AuxPowNoMagicError('cannot find MAGIC_NUMBER') if magic_index < len(self.coinbase_head) - len(MAGIC_NUMBER): raise AuxPowUnexpectedMagicError('unexpected MAGIC_NUMBER') + + def verify_merkle_path(self, _base_block_hash: bytes, max_merkle_path_length: int) -> None: + """Check that the merkle path length is smaller than the maximum limit.""" + from hathor.transaction.exceptions import AuxPowLongMerklePathError merkle_path_length = len(self.merkle_path) - if merkle_path_length > MAX_MERKLE_PATH_LENGTH: - raise AuxPowLongMerklePathError(f'merkle_path too long: {merkle_path_length} > {MAX_MERKLE_PATH_LENGTH}') + if merkle_path_length > max_merkle_path_length: + raise AuxPowLongMerklePathError(f'merkle_path too long: {merkle_path_length} > {max_merkle_path_length}') def __bytes__(self) -> bytes: """ Convert to byte representation. diff --git a/hathor/verification/merge_mined_block_verifier.py b/hathor/verification/merge_mined_block_verifier.py index 9314fbb2a..60bfb42da 100644 --- a/hathor/verification/merge_mined_block_verifier.py +++ b/hathor/verification/merge_mined_block_verifier.py @@ -12,14 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. +from hathor.conf.settings import HathorSettings +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import FeatureService from hathor.transaction import MergeMinedBlock class MergeMinedBlockVerifier: - __slots__ = () + __slots__ = ('_settings', '_feature_service',) + + def __init__(self, *, settings: HathorSettings, feature_service: FeatureService): + self._settings = settings + self._feature_service = feature_service def verify_aux_pow(self, block: MergeMinedBlock) -> None: """ Verify auxiliary proof-of-work (for merged mining). """ assert block.aux_pow is not None - block.aux_pow.verify(block.get_base_hash()) + + is_feature_active = self._feature_service.is_feature_active( + block=block, + feature=Feature.INCREASE_MAX_MERKLE_PATH_LENGTH + ) + max_merkle_path_length = ( + self._settings.NEW_MAX_MERKLE_PATH_LENGTH if is_feature_active + else self._settings.OLD_MAX_MERKLE_PATH_LENGTH + ) + + block.aux_pow.verify(block.get_base_hash(), max_merkle_path_length) diff --git a/hathor/verification/vertex_verifiers.py b/hathor/verification/vertex_verifiers.py index 339230acb..98477c397 100644 --- a/hathor/verification/vertex_verifiers.py +++ b/hathor/verification/vertex_verifiers.py @@ -66,7 +66,7 @@ def create( Create a VertexVerifiers instance using a custom vertex_verifier. 
""" block_verifier = BlockVerifier(settings=settings, daa=daa, feature_service=feature_service) - merge_mined_block_verifier = MergeMinedBlockVerifier() + merge_mined_block_verifier = MergeMinedBlockVerifier(settings=settings, feature_service=feature_service) tx_verifier = TransactionVerifier(settings=settings, daa=daa) token_creation_tx_verifier = TokenCreationTransactionVerifier(settings=settings) diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index fd802c7f5..9ebf999bd 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -1,9 +1,12 @@ import base64 import hashlib from math import isinf, isnan +from unittest.mock import patch from hathor.crypto.util import decode_address, get_address_from_public_key, get_private_key_from_bytes from hathor.daa import TestMode +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import FeatureService from hathor.simulator.utils import add_new_blocks from hathor.transaction import MAX_OUTPUT_VALUE, Block, Transaction, TxInput, TxOutput from hathor.transaction.exceptions import ( @@ -222,15 +225,19 @@ def test_merge_mined_no_magic(self): from hathor.transaction.exceptions import AuxPowNoMagicError from hathor.transaction.merge_mined_block import MergeMinedBlock - parents = [tx.hash for tx in self.genesis] + parent_block = self.genesis_blocks[0].hash + parent_txs = [tx.hash for tx in self.genesis_txs] + parents = [parent_block, *parent_txs] address = decode_address(self.get_address(1)) outputs = [TxOutput(100, P2PKH.create_output_script(address))] b = MergeMinedBlock( + hash=b'some_hash', timestamp=self.genesis_blocks[0].timestamp + 1, weight=1, outputs=outputs, parents=parents, + storage=self.tx_storage, aux_pow=BitcoinAuxPow( b'\x00' * 32, b'\x00' * 42, # no MAGIC_NUMBER @@ -253,7 +260,9 @@ def test_merge_mined_multiple_magic(self): from hathor.transaction.exceptions import AuxPowUnexpectedMagicError from hathor.transaction.merge_mined_block import MergeMinedBlock - parents = [tx.hash for tx in self.genesis] + parent_block = self.genesis_blocks[0].hash + parent_txs = [tx.hash for tx in self.genesis_txs] + parents = [parent_block, *parent_txs] address1 = decode_address(self.get_address(1)) address2 = decode_address(self.get_address(2)) assert address1 != address2 @@ -261,17 +270,21 @@ def test_merge_mined_multiple_magic(self): outputs2 = [TxOutput(100, P2PKH.create_output_script(address2))] b1 = MergeMinedBlock( + hash=b'some_hash1', timestamp=self.genesis_blocks[0].timestamp + 1, weight=1, outputs=outputs1, parents=parents, + storage=self.tx_storage, ) b2 = MergeMinedBlock( + hash=b'some_hash2', timestamp=self.genesis_blocks[0].timestamp + 1, weight=1, outputs=outputs2, parents=parents, + storage=self.tx_storage, ) assert b1.get_base_hash() != b2.get_base_hash() @@ -321,6 +334,16 @@ def test_merge_mined_long_merkle_path(self): address = decode_address(self.get_address(1)) outputs = [TxOutput(100, P2PKH.create_output_script(address))] + patch_path = 'hathor.feature_activation.feature_service.FeatureService.is_feature_active' + + def is_feature_active_false(self: FeatureService, *, block: Block, feature: Feature) -> bool: + assert feature == Feature.INCREASE_MAX_MERKLE_PATH_LENGTH + return False + + def is_feature_active_true(self: FeatureService, *, block: Block, feature: Feature) -> bool: + assert feature == Feature.INCREASE_MAX_MERKLE_PATH_LENGTH + return True + b = MergeMinedBlock( timestamp=self.genesis_blocks[0].timestamp + 1, weight=1, @@ -330,17 +353,42 @@ def 
test_merge_mined_long_merkle_path(self): b'\x00' * 32, b'\x00' * 42 + MAGIC_NUMBER, b'\x00' * 18, - [b'\x00' * 32] * 13, # 1 too long + [b'\x00' * 32] * (self._settings.OLD_MAX_MERKLE_PATH_LENGTH + 1), # 1 too long b'\x00' * 12, ) ) - with self.assertRaises(AuxPowLongMerklePathError): + # Test with the INCREASE_MAX_MERKLE_PATH_LENGTH feature disabled + with patch(patch_path, is_feature_active_false): + with self.assertRaises(AuxPowLongMerklePathError): + self._verifiers.merge_mined_block.verify_aux_pow(b) + + # removing one path makes it work + b.aux_pow.merkle_path.pop() self._verifiers.merge_mined_block.verify_aux_pow(b) - # removing one path makes it work - b.aux_pow.merkle_path.pop() - self._verifiers.merge_mined_block.verify_aux_pow(b) + b2 = MergeMinedBlock( + timestamp=self.genesis_blocks[0].timestamp + 1, + weight=1, + outputs=outputs, + parents=parents, + aux_pow=BitcoinAuxPow( + b'\x00' * 32, + b'\x00' * 42 + MAGIC_NUMBER, + b'\x00' * 18, + [b'\x00' * 32] * (self._settings.NEW_MAX_MERKLE_PATH_LENGTH + 1), # 1 too long + b'\x00' * 12, + ) + ) + + # Test with the INCREASE_MAX_MERKLE_PATH_LENGTH feature enabled + with patch(patch_path, is_feature_active_true): + with self.assertRaises(AuxPowLongMerklePathError): + self._verifiers.merge_mined_block.verify_aux_pow(b2) + + # removing one path makes it work + b2.aux_pow.merkle_path.pop() + self._verifiers.merge_mined_block.verify_aux_pow(b2) def test_block_outputs(self): from hathor.transaction.exceptions import TooManyOutputs From 94696f2e90b9a99774296b93851e003a80f254d8 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Fri, 26 Jan 2024 19:32:43 +0100 Subject: [PATCH 23/38] feat(cli): mark sync-v2 parameters as safe and deprecate --x-* ones --- hathor/builder/cli_builder.py | 13 +++++++++++-- hathor/cli/run_node.py | 5 +++++ hathor/cli/run_node_args.py | 2 ++ 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 9d0cc2da1..1ee82f9bb 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -158,8 +158,17 @@ def create_manager(self, reactor: Reactor) -> HathorManager: hostname = self.get_hostname() network = settings.NETWORK_NAME - enable_sync_v1 = not self._args.x_sync_v2_only - enable_sync_v2 = self._args.x_sync_v2_only or self._args.x_sync_bridge + + arg_sync_v2_only = self._args.x_sync_v2_only or self._args.sync_v2_only + if self._args.x_sync_v2_only: + self.log.warn('--x-sync-v2-only is deprecated and will be removed, use --sync-v2-only instead') + + arg_sync_bridge = self._args.x_sync_bridge or self._args.sync_bridge + if self._args.x_sync_bridge: + self.log.warn('--x-sync-bridge is deprecated and will be removed, use --sync-bridge instead') + + enable_sync_v1 = not arg_sync_v2_only + enable_sync_v2 = arg_sync_v2_only or arg_sync_bridge pubsub = PubSubManager(reactor) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 7b5ac15ed..42415688c 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -108,6 +108,11 @@ def create_parser(cls) -> ArgumentParser: help='Enable support for running both sync protocols. DO NOT ENABLE, IT WILL BREAK.') v2args.add_argument('--x-sync-v2-only', action='store_true', help='Disable support for running sync-v1. 
DO NOT ENABLE, IT WILL BREAK.') + # XXX: new safe arguments along side the unsafe --x- arguments so transition is easier + v2args.add_argument('--sync-bridge', action='store_true', + help='Enable support for running both sync protocols.') + v2args.add_argument('--sync-v2-only', action='store_true', + help='Disable support for running sync-v1.') parser.add_argument('--x-localhost-only', action='store_true', help='Only connect to peers on localhost') parser.add_argument('--x-rocksdb-indexes', action='store_true', help=SUPPRESS) parser.add_argument('--x-enable-event-queue', action='store_true', help='Enable event queue mechanism') diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index ca581bfed..3beca2b0f 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -65,6 +65,8 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): enable_crash_api: bool x_sync_bridge: bool x_sync_v2_only: bool + sync_bridge: bool + sync_v2_only: bool x_localhost_only: bool x_rocksdb_indexes: bool x_enable_event_queue: bool From 0f5ff700cf021723bb7c5a9da49c1034c875c52e Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Tue, 6 Feb 2024 15:30:09 -0600 Subject: [PATCH 24/38] chore(deps): Upgrade hathorlib to v0.5.2 --- poetry.lock | 18 +++++++++--------- pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/poetry.lock b/poetry.lock index f0609067c..475743517 100644 --- a/poetry.lock +++ b/poetry.lock @@ -706,22 +706,22 @@ test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>= [[package]] name = "hathorlib" -version = "0.3.0" +version = "0.5.2" description = "Hathor Network base objects library" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.9,<4" files = [ - {file = "hathorlib-0.3.0-py3-none-any.whl", hash = "sha256:079c2adbe0a28052e1db224324ca0cb8edbe6c3ced6ee5de09bb289cbed4c4e2"}, - {file = "hathorlib-0.3.0.tar.gz", hash = "sha256:0d268666504c9bd92369de889ebc292c077ab37b12b18b3383dfddb3d8b14741"}, + {file = "hathorlib-0.5.2-py3-none-any.whl", hash = "sha256:bf50853efff592a90fd10d9ef3988c62029bd728418ec88600cd00e21c075240"}, + {file = "hathorlib-0.5.2.tar.gz", hash = "sha256:565958f66cfbebdb159450855b7218ab0b3a6fdcb4f6f5ca4e8294e0c018e913"}, ] [package.dependencies] -base58 = ">=2.1.0" -cryptography = ">=38.0.3" -pycoin = ">=0.92.20220529,<0.93.0" +base58 = ">=2.1.1,<2.2.0" +cryptography = ">=38.0.3,<38.1.0" +pycoin = ">=0.92,<0.93" [package.extras] -client = ["aiohttp (>=3.7.0)", "structlog (>=20.0.0)"] +client = ["aiohttp (>=3.8.3,<3.9.0)", "structlog (>=22.3.0,<22.4.0)"] [[package]] name = "hyperlink" @@ -2490,4 +2490,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "5763f80ceef15f118b402852436bf43cdf70f1a7f95d2a5b59e40b8c3c1e24db" +content-hash = "cce7b9832ae2d13cc56fb572af82face7a824307ddd6953387737a27d6e7088a" diff --git a/pyproject.toml b/pyproject.toml index 1c4c64487..e27cfb609 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ idna = "~3.4" setproctitle = "^1.2.2" sentry-sdk = {version = "^1.5.11", optional = true} structlog-sentry = {version = "^1.4.0", optional = true} -hathorlib = "0.3.0" +hathorlib = "^0.5.2" pydantic = "~1.10.13" pyyaml = "^6.0.1" typing-extensions = "~4.8.0" From 8b13de760cf04173d393d6638dd723f97f594841 Mon Sep 17 00:00:00 2001 From: Luis Helder Date: Thu, 8 Feb 2024 10:20:34 -0300 Subject: [PATCH 25/38] fix: health endpoint when using --x-asyncio-reactor 
(#940) --- hathor/healthcheck/resources/healthcheck.py | 54 +++++++++++++++---- .../resources/healthcheck/test_healthcheck.py | 53 +++++++++++++++++- 2 files changed, 96 insertions(+), 11 deletions(-) diff --git a/hathor/healthcheck/resources/healthcheck.py b/hathor/healthcheck/resources/healthcheck.py index 5e9afcb9f..eb1de7eed 100644 --- a/hathor/healthcheck/resources/healthcheck.py +++ b/hathor/healthcheck/resources/healthcheck.py @@ -1,6 +1,16 @@ import asyncio -from healthcheck import Healthcheck, HealthcheckCallbackResponse, HealthcheckInternalComponent, HealthcheckStatus +from healthcheck import ( + Healthcheck, + HealthcheckCallbackResponse, + HealthcheckInternalComponent, + HealthcheckResponse, + HealthcheckStatus, +) +from twisted.internet.defer import Deferred, succeed +from twisted.python.failure import Failure +from twisted.web.http import Request +from twisted.web.server import NOT_DONE_YET from hathor.api_util import Resource, get_arg_default, get_args from hathor.cli.openapi_files.register import register_resource @@ -24,6 +34,28 @@ class HealthcheckResource(Resource): def __init__(self, manager: HathorManager): self.manager = manager + def _render_error(self, failure: Failure, request: Request) -> None: + request.setResponseCode(500) + request.write(json_dumpb({ + 'status': 'fail', + 'reason': f'Internal Error: {failure.getErrorMessage()}', + 'traceback': failure.getTraceback() + })) + request.finish() + + def _render_success(self, result: HealthcheckResponse, request: Request) -> None: + raw_args = get_args(request) + strict_status_code = get_arg_default(raw_args, 'strict_status_code', '0') == '1' + + if strict_status_code: + request.setResponseCode(200) + else: + status_code = result.get_http_status_code() + request.setResponseCode(status_code) + + request.write(json_dumpb(result.to_json())) + request.finish() + def render_GET(self, request): """ GET request /health/ Returns the health status of the fullnode @@ -34,24 +66,26 @@ def render_GET(self, request): :rtype: string (json) """ - raw_args = get_args(request) - strict_status_code = get_arg_default(raw_args, 'strict_status_code', '0') == '1' - sync_component = HealthcheckInternalComponent( name='sync', ) sync_component.add_healthcheck(lambda: sync_healthcheck(self.manager)) healthcheck = Healthcheck(name='hathor-core', components=[sync_component]) - status = asyncio.get_event_loop().run_until_complete(healthcheck.run()) - if strict_status_code: - request.setResponseCode(200) + # The asyncio loop will be running in case the option --x-asyncio-reactor is used + # XXX: We should remove this if when the asyncio reactor becomes the default and the only option + if asyncio.get_event_loop().is_running(): + future = asyncio.ensure_future(healthcheck.run()) + deferred = Deferred.fromFuture(future) else: - status_code = status.get_http_status_code() - request.setResponseCode(status_code) + status = asyncio.get_event_loop().run_until_complete(healthcheck.run()) + deferred = succeed(status) + + deferred.addCallback(self._render_success, request) + deferred.addErrback(self._render_error, request) - return json_dumpb(status.to_json()) + return NOT_DONE_YET HealthcheckResource.openapi = { diff --git a/tests/resources/healthcheck/test_healthcheck.py b/tests/resources/healthcheck/test_healthcheck.py index e40fb2a76..c616d3a03 100644 --- a/tests/resources/healthcheck/test_healthcheck.py +++ b/tests/resources/healthcheck/test_healthcheck.py @@ -1,6 +1,7 @@ +import asyncio from unittest.mock import ANY -from twisted.internet.defer 
import inlineCallbacks +from twisted.internet.defer import Deferred, inlineCallbacks from hathor.healthcheck.resources.healthcheck import HealthcheckResource from hathor.manager import HathorManager @@ -39,6 +40,56 @@ def test_get_no_recent_activity(self): } }) + def test_with_running_asyncio_loop(self): + """Test with a running asyncio loop. + + This is a simulation of how this endpoint should behave in production when the + --x-asyncio-reactor is provided to hathor-core, because this causes the reactor to run + an asyncio loop. + """ + # This deferred will be used solely to make sure the test doesn't finish before the async code + done = Deferred() + + def set_done(_): + done.callback(None) + + def set_done_fail(failure): + done.errback(failure) + + # This will be called from inside the async method to perform the web request + # while a running asyncio loop is present + @inlineCallbacks + def get_health(): + response = yield self.web.get('/health') + return response.json_value() + + async def run(): + data = get_health() + # When the request is done, we make sure the response is as expected + data.addCallback(self.assertEqual, { + 'status': 'fail', + 'description': ANY, + 'checks': { + 'sync': [{ + 'componentType': 'internal', + 'componentName': 'sync', + 'status': 'fail', + 'output': HathorManager.UnhealthinessReason.NO_RECENT_ACTIVITY, + 'time': ANY + }] + } + }) + # We succeed the "done" deferred if everything is ok + data.addCallback(set_done) + # We fail the "done" deferred if something goes wrong. This includes the assertion above failing. + data.addErrback(set_done_fail) + + # This will make sure we have a running asyncio loop + asyncio.get_event_loop().run_until_complete(run()) + + # Return the deferred so the test doesn't finish before the async code + return done + @inlineCallbacks def test_strict_status_code(self): """Make sure the 'strict_status_code' parameter is working. 
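A note on the pattern introduced by this patch: when hathor-core runs with --x-asyncio-reactor the asyncio loop is already running, so the resource can no longer block on run_until_complete(); it wraps the coroutine in a Deferred, finishes the request from a callback, and returns NOT_DONE_YET. A minimal sketch of that bridge, assuming an illustrative coroutine check() and resource class SketchHealthResource (neither name is from the codebase):

import asyncio

from twisted.internet.defer import Deferred, succeed
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET


async def check() -> bytes:
    # placeholder for an asynchronous healthcheck
    await asyncio.sleep(0)
    return b'{"status": "pass"}'


class SketchHealthResource(Resource):
    isLeaf = True

    def render_GET(self, request):
        if asyncio.get_event_loop().is_running():
            # asyncio reactor: schedule the coroutine and adapt the future to a Deferred
            deferred = Deferred.fromFuture(asyncio.ensure_future(check()))
        else:
            # default reactor: the loop is idle, so it is safe to drive it to completion
            result = asyncio.get_event_loop().run_until_complete(check())
            deferred = succeed(result)

        def on_done(body: bytes) -> None:
            request.write(body)
            request.finish()

        deferred.addCallback(on_done)
        return NOT_DONE_YET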
From 6e4bfff808f48514cd8cd64dac87245afa03e2d1 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Thu, 8 Feb 2024 12:54:46 -0600 Subject: [PATCH 26/38] feat(sync-v2): Improve logging when unexpected TRANSACTIONS-END or BLOCKS-END are received --- hathor/p2p/sync_v2/agent.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index 7fb763498..08ff31ce5 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -721,7 +721,7 @@ def handle_blocks_end(self, payload: str) -> None: assert self.protocol.connections is not None if self.state is not PeerState.SYNCING_BLOCKS: - self.log.error('unexpected BLOCKS-END', state=self.state) + self.log.error('unexpected BLOCKS-END', state=self.state, response_code=response_code.name) self.protocol.send_error_and_close_connection('Not expecting to receive BLOCKS-END message') return @@ -978,7 +978,7 @@ def handle_transactions_end(self, payload: str) -> None: assert self.protocol.connections is not None if self.state is not PeerState.SYNCING_TRANSACTIONS: - self.log.error('unexpected TRANSACTIONS-END', state=self.state) + self.log.error('unexpected TRANSACTIONS-END', state=self.state, response_code=response_code.name) self.protocol.send_error_and_close_connection('Not expecting to receive TRANSACTIONS-END message') return From 08f1bd423def08cbd6e34e3db9e7bdd380e1e333 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Thu, 8 Feb 2024 15:30:21 +0100 Subject: [PATCH 27/38] refactor(cli): improve handling of sync version from arguments --- hathor/builder/cli_builder.py | 41 +++++++++++++++++++++++++------- hathor/cli/run_node.py | 16 +++++-------- tests/others/test_cli_builder.py | 15 ++++++++++++ 3 files changed, 53 insertions(+), 19 deletions(-) diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 1ee82f9bb..f1303774f 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -17,6 +17,7 @@ import os import platform import sys +from enum import Enum, auto from typing import Any, Optional from structlog import get_logger @@ -45,6 +46,12 @@ logger = get_logger() +class SyncChoice(Enum): + V1_ONLY = auto() + V2_ONLY = auto() + BRIDGE = auto() + + class CliBuilder: """CliBuilder builds the core objects from args. 
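The hunk that follows resolves the four CLI flags into a single SyncChoice before mapping it to the enable_sync_v1/enable_sync_v2 pair: the new --sync-* flags take precedence, the deprecated --x-* spellings still work but emit a warning, and sync-v1 remains the default. As a reference, a hedged standalone sketch of the same decision table (resolve_sync_flags and its plain boolean parameters are illustrative, not part of the codebase):

from enum import Enum, auto


class SyncChoice(Enum):
    V1_ONLY = auto()
    V2_ONLY = auto()
    BRIDGE = auto()


def resolve_sync_flags(
    sync_bridge: bool,
    sync_v2_only: bool,
    x_sync_bridge: bool,
    x_sync_v2_only: bool,
) -> tuple[bool, bool]:
    """Return (enable_sync_v1, enable_sync_v2) following the precedence used in the hunk below."""
    if sync_bridge:
        choice = SyncChoice.BRIDGE
    elif sync_v2_only:
        choice = SyncChoice.V2_ONLY
    elif x_sync_bridge:  # deprecated spelling, kept working during the transition
        choice = SyncChoice.BRIDGE
    elif x_sync_v2_only:  # deprecated spelling, kept working during the transition
        choice = SyncChoice.V2_ONLY
    else:
        choice = SyncChoice.V1_ONLY  # default

    match choice:
        case SyncChoice.V1_ONLY:
            return True, False
        case SyncChoice.V2_ONLY:
            return False, True
        case SyncChoice.BRIDGE:
            return True, True
    raise AssertionError('unreachable')


assert resolve_sync_flags(False, False, False, False) == (True, False)
assert resolve_sync_flags(False, False, True, False) == (True, True)
assert resolve_sync_flags(False, True, False, False) == (False, True)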
@@ -159,16 +166,32 @@ def create_manager(self, reactor: Reactor) -> HathorManager: hostname = self.get_hostname() network = settings.NETWORK_NAME - arg_sync_v2_only = self._args.x_sync_v2_only or self._args.sync_v2_only - if self._args.x_sync_v2_only: - self.log.warn('--x-sync-v2-only is deprecated and will be removed, use --sync-v2-only instead') - - arg_sync_bridge = self._args.x_sync_bridge or self._args.sync_bridge - if self._args.x_sync_bridge: + sync_choice: SyncChoice + if self._args.sync_bridge: + sync_choice = SyncChoice.BRIDGE + elif self._args.sync_v2_only: + sync_choice = SyncChoice.V2_ONLY + elif self._args.x_sync_bridge: self.log.warn('--x-sync-bridge is deprecated and will be removed, use --sync-bridge instead') - - enable_sync_v1 = not arg_sync_v2_only - enable_sync_v2 = arg_sync_v2_only or arg_sync_bridge + sync_choice = SyncChoice.BRIDGE + elif self._args.x_sync_v2_only: + self.log.warn('--x-sync-v2-only is deprecated and will be removed, use --sync-v2-only instead') + sync_choice = SyncChoice.V2_ONLY + else: # default + sync_choice = SyncChoice.V1_ONLY + + enable_sync_v1: bool + enable_sync_v2: bool + match sync_choice: + case SyncChoice.V1_ONLY: + enable_sync_v1 = True + enable_sync_v2 = False + case SyncChoice.V2_ONLY: + enable_sync_v1 = False + enable_sync_v2 = True + case SyncChoice.BRIDGE: + enable_sync_v1 = True + enable_sync_v2 = True pubsub = PubSubManager(reactor) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 42415688c..b30c3a666 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -103,16 +103,12 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--sentry-dsn', help='Sentry DSN') parser.add_argument('--enable-debug-api', action='store_true', help='Enable _debug/* endpoints') parser.add_argument('--enable-crash-api', action='store_true', help='Enable _crash/* endpoints') - v2args = parser.add_mutually_exclusive_group() - v2args.add_argument('--x-sync-bridge', action='store_true', - help='Enable support for running both sync protocols. DO NOT ENABLE, IT WILL BREAK.') - v2args.add_argument('--x-sync-v2-only', action='store_true', - help='Disable support for running sync-v1. 
DO NOT ENABLE, IT WILL BREAK.') - # XXX: new safe arguments along side the unsafe --x- arguments so transition is easier - v2args.add_argument('--sync-bridge', action='store_true', - help='Enable support for running both sync protocols.') - v2args.add_argument('--sync-v2-only', action='store_true', - help='Disable support for running sync-v1.') + sync_args = parser.add_mutually_exclusive_group() + sync_args.add_argument('--sync-bridge', action='store_true', + help='Enable running both sync protocols.') + sync_args.add_argument('--sync-v2-only', action='store_true', help='Disable support for running sync-v1.') + sync_args.add_argument('--x-sync-v2-only', action='store_true', help=SUPPRESS) # old argument + sync_args.add_argument('--x-sync-bridge', action='store_true', help=SUPPRESS) # old argument parser.add_argument('--x-localhost-only', action='store_true', help='Only connect to peers on localhost') parser.add_argument('--x-rocksdb-indexes', action='store_true', help=SUPPRESS) parser.add_argument('--x-enable-event-queue', action='store_true', help='Enable event queue mechanism') diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py index 3aabf4b3d..1b3f6a53d 100644 --- a/tests/others/test_cli_builder.py +++ b/tests/others/test_cli_builder.py @@ -101,16 +101,31 @@ def test_memory_storage(self): def test_memory_storage_with_rocksdb_indexes(self): self._build_with_error(['--memory-storage', '--x-rocksdb-indexes'], 'RocksDB indexes require RocksDB data') + def test_sync_default(self): + manager = self._build(['--memory-storage']) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + def test_sync_bridge(self): manager = self._build(['--memory-storage', '--x-sync-bridge']) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + def test_sync_bridge2(self): + manager = self._build(['--memory-storage', '--sync-bridge']) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + def test_sync_v2_only(self): manager = self._build(['--memory-storage', '--x-sync-v2-only']) self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + def test_sync_v2_only2(self): + manager = self._build(['--memory-storage', '--sync-v2-only']) + self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + def test_keypair_wallet(self): manager = self._build(['--memory-storage', '--wallet', 'keypair']) self.assertIsInstance(manager.wallet, Wallet) From 3ce172f9f0d3c45838b58fbeee66205b12264f2f Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Thu, 8 Feb 2024 15:18:36 -0600 Subject: [PATCH 28/38] feat(sync-v2): Add mempool sync status --- hathor/p2p/sync_v2/agent.py | 28 +++++++++++++++++++++++++--- hathor/p2p/sync_v2/mempool.py | 15 ++++++++++----- 2 files changed, 35 insertions(+), 8 deletions(-) diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index 08ff31ce5..8382cdefc 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -42,7 +42,7 @@ from hathor.transaction.base_transaction import tx_or_block_from_bytes from 
hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.types import VertexId -from hathor.util import not_none +from hathor.util import collect_n, not_none if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol @@ -51,6 +51,7 @@ logger = get_logger() MAX_GET_TRANSACTIONS_BFS_LEN: int = 8 +MAX_MEMPOOL_STATUS_TIPS: int = 20 class _HeightInfo(NamedTuple): @@ -132,6 +133,9 @@ def __init__(self, protocol: 'HathorProtocol', reactor: Reactor) -> None: # Notice that this flag ignores the mempool. self._synced = False + # Whether the mempool is synced or not. + self._synced_mempool = False + # Indicate whether the sync manager has been started. self._started: bool = False @@ -162,12 +166,22 @@ def __init__(self, protocol: 'HathorProtocol', reactor: Reactor) -> None: def get_status(self) -> dict[str, Any]: """ Return the status of the sync. """ + assert self.tx_storage.indexes is not None + assert self.tx_storage.indexes.mempool_tips is not None + tips = self.tx_storage.indexes.mempool_tips.get() + tips_limited, tips_has_more = collect_n(iter(tips), MAX_MEMPOOL_STATUS_TIPS) res = { 'is_enabled': self.is_sync_enabled(), 'peer_best_block': self.peer_best_block.to_json() if self.peer_best_block else None, 'synced_block': self.synced_block.to_json() if self.synced_block else None, 'synced': self._synced, 'state': self.state.value, + 'mempool': { + 'tips_count': len(tips), + 'tips': [x.hex() for x in tips_limited], + 'has_more': tips_has_more, + 'is_synced': self._synced_mempool, + } } return res @@ -263,6 +277,9 @@ def handle_error(self, payload: str) -> None: def update_synced(self, synced: bool) -> None: self._synced = synced + def update_synced_mempool(self, value: bool) -> None: + self._synced_mempool = value + def watchdog(self) -> None: """Close connection if sync is stale.""" if not self._is_running: @@ -308,8 +325,13 @@ def _run_sync(self) -> Generator[Any, Any, None]: is_block_synced = yield self.run_sync_blocks() if is_block_synced: # our blocks are synced, so sync the mempool - self.state = PeerState.SYNCING_MEMPOOL - yield self.mempool_manager.run() + yield self.run_sync_mempool() + + @inlineCallbacks + def run_sync_mempool(self) -> Generator[Any, Any, None]: + self.state = PeerState.SYNCING_MEMPOOL + is_mempool_synced = yield self.mempool_manager.run() + self.update_synced_mempool(is_mempool_synced) def get_my_best_block(self) -> _HeightInfo: """Return my best block info.""" diff --git a/hathor/p2p/sync_v2/mempool.py b/hathor/p2p/sync_v2/mempool.py index e27796fc5..b914804e9 100644 --- a/hathor/p2p/sync_v2/mempool.py +++ b/hathor/p2p/sync_v2/mempool.py @@ -39,7 +39,7 @@ def __init__(self, sync_agent: 'NodeBlockSync'): self.tx_storage = self.manager.tx_storage self.reactor = self.sync_agent.reactor - self._deferred: Optional[Deferred[None]] = None + self._deferred: Optional[Deferred[bool]] = None # Set of tips we know but couldn't add to the DAG yet. 
self.missing_tips: set[bytes] = set() @@ -54,7 +54,7 @@ def is_running(self) -> bool: """Whether the sync-mempool is currently running.""" return self._is_running - def run(self) -> Deferred[None]: + def run(self) -> Deferred[bool]: """Starts _run in, won't start again if already running.""" if self.is_running(): self.log.warn('already started') @@ -71,17 +71,18 @@ def run(self) -> Deferred[None]: @inlineCallbacks def _run(self) -> Generator[Deferred, Any, None]: + is_synced = False try: - yield self._unsafe_run() + is_synced = yield self._unsafe_run() finally: # sync_agent.run_sync will start it again when needed self._is_running = False assert self._deferred is not None - self._deferred.callback(None) + self._deferred.callback(is_synced) self._deferred = None @inlineCallbacks - def _unsafe_run(self) -> Generator[Deferred, Any, None]: + def _unsafe_run(self) -> Generator[Deferred, Any, bool]: """Run a single loop of the sync-v2 mempool.""" if not self.missing_tips: # No missing tips? Let's get them! @@ -97,6 +98,10 @@ def _unsafe_run(self) -> Generator[Deferred, Any, None]: self.log.debug('start mempool DSF', tx=tx.hash_hex) yield self._dfs(deque([tx])) + if not self.missing_tips: + return True + return False + @inlineCallbacks def _dfs(self, stack: deque[BaseTransaction]) -> Generator[Deferred, Any, None]: """DFS method.""" From a7cf1bacbc6913eec854986f6566ef1d1ed385d1 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Wed, 1 Mar 2023 00:56:08 -0600 Subject: [PATCH 29/38] feat(cli): Add feature flag and CLI parameters for Nano Contracts --- hathor/builder/cli_builder.py | 6 +++++ hathor/cli/run_node.py | 10 ++++++-- hathor/cli/run_node_args.py | 1 + hathor/conf/__init__.py | 2 ++ hathor/conf/nano_testnet.py | 38 ++++++++++++++++++++++++++++ hathor/conf/nano_testnet.yml | 20 +++++++++++++++ hathor/conf/settings.py | 26 ++++++++++++++++++- hathor/conf/unittests.py | 3 ++- hathor/conf/unittests.yml | 1 + tests/others/test_hathor_settings.py | 12 ++++++++- 10 files changed, 114 insertions(+), 5 deletions(-) create mode 100644 hathor/conf/nano_testnet.py create mode 100644 hathor/conf/nano_testnet.yml diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index f1303774f..af820f4c5 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -110,6 +110,12 @@ def create_manager(self, reactor: Reactor) -> HathorManager: reactor_type=type(reactor).__name__, ) + # XXX Remove this protection after Nano Contracts are launched. + if settings.NETWORK_NAME not in {'nano-testnet-alpha', 'unittests'}: + # Add protection to prevent enabling Nano Contracts due to misconfigurations. + self.check_or_raise(not settings.ENABLE_NANO_CONTRACTS, + 'configuration error: NanoContracts can only be enabled on localnets for now') + tx_storage: TransactionStorage event_storage: EventStorage indexes: IndexesManager diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index b30c3a666..363814a20 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -52,7 +52,11 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--auto-hostname', action='store_true', help='Try to discover the hostname automatically') parser.add_argument('--unsafe-mode', help='Enable unsafe parameters. 
**NEVER USE IT IN PRODUCTION ENVIRONMENT**') - parser.add_argument('--testnet', action='store_true', help='Connect to Hathor testnet') + + netargs = parser.add_mutually_exclusive_group() + netargs.add_argument('--nano-testnet', action='store_true', help='Connect to Hathor nano-testnet') + netargs.add_argument('--testnet', action='store_true', help='Connect to Hathor testnet') + parser.add_argument('--test-mode-tx-weight', action='store_true', help='Reduces tx weight to 1 for testing purposes') parser.add_argument('--dns', action='append', help='Seed DNS') @@ -355,7 +359,7 @@ def check_python_version(self) -> None: def __init__(self, *, argv=None): from hathor.cli.run_node_args import RunNodeArgs - from hathor.conf import TESTNET_SETTINGS_FILEPATH + from hathor.conf import NANO_TESTNET_SETTINGS_FILEPATH, TESTNET_SETTINGS_FILEPATH from hathor.conf.get_settings import get_global_settings self.log = logger.new() @@ -372,6 +376,8 @@ def __init__(self, *, argv=None): os.environ['HATHOR_CONFIG_YAML'] = self._args.config_yaml elif self._args.testnet: os.environ['HATHOR_CONFIG_YAML'] = TESTNET_SETTINGS_FILEPATH + elif self._args.nano_testnet: + os.environ['HATHOR_CONFIG_YAML'] = NANO_TESTNET_SETTINGS_FILEPATH try: get_global_settings() diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index 3beca2b0f..fadc3521c 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -76,3 +76,4 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): signal_not_support: set[Feature] x_asyncio_reactor: bool x_ipython_kernel: bool + nano_testnet: bool diff --git a/hathor/conf/__init__.py b/hathor/conf/__init__.py index 99dde297c..36fecbae5 100644 --- a/hathor/conf/__init__.py +++ b/hathor/conf/__init__.py @@ -20,11 +20,13 @@ MAINNET_SETTINGS_FILEPATH = str(parent_dir / 'mainnet.yml') TESTNET_SETTINGS_FILEPATH = str(parent_dir / 'testnet.yml') +NANO_TESTNET_SETTINGS_FILEPATH = str(parent_dir / 'nano_testnet.yml') UNITTESTS_SETTINGS_FILEPATH = str(parent_dir / 'unittests.yml') __all__ = [ 'MAINNET_SETTINGS_FILEPATH', 'TESTNET_SETTINGS_FILEPATH', + 'NANO_TESTNET_SETTINGS_FILEPATH', 'UNITTESTS_SETTINGS_FILEPATH', 'HathorSettings', ] diff --git a/hathor/conf/nano_testnet.py b/hathor/conf/nano_testnet.py new file mode 100644 index 000000000..32f7ab7c9 --- /dev/null +++ b/hathor/conf/nano_testnet.py @@ -0,0 +1,38 @@ +# Copyright 2022 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.conf.settings import HathorSettings + +SETTINGS = HathorSettings( + P2PKH_VERSION_BYTE=b'\x49', + MULTISIG_VERSION_BYTE=b'\x87', + NETWORK_NAME='nano-testnet-alpha', + BOOTSTRAP_DNS=[], + # Genesis stuff + GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a91478e804bf8aa68332c6c1ada274ac598178b972bf88ac'), + GENESIS_BLOCK_TIMESTAMP=1677601898, + GENESIS_BLOCK_NONCE=7881594, + GENESIS_BLOCK_HASH=bytes.fromhex('000003472f6a17c2199e24c481a4326c217d07376acd9598651f8413c008554d'), + GENESIS_TX1_NONCE=110, + GENESIS_TX1_HASH=bytes.fromhex('0008f0e9dbe6e4bbc3a85fce7494fee70011b9c7e72f5276daa2a235355ac013'), + GENESIS_TX2_NONCE=180, + GENESIS_TX2_HASH=bytes.fromhex('008d81d9d58a43fd9649f33483d804a4417247b4d4e4e01d64406c4177fee0c2'), + # tx weight parameters. With these settings, tx weight is always 8 + MIN_TX_WEIGHT_K=0, + MIN_TX_WEIGHT_COEFFICIENT=0, + MIN_TX_WEIGHT=8, + CHECKPOINTS=[], + ENABLE_NANO_CONTRACTS=True, + BLUEPRINTS={}, +) diff --git a/hathor/conf/nano_testnet.yml b/hathor/conf/nano_testnet.yml new file mode 100644 index 000000000..ece2b1b87 --- /dev/null +++ b/hathor/conf/nano_testnet.yml @@ -0,0 +1,20 @@ +P2PKH_VERSION_BYTE: x49 +MULTISIG_VERSION_BYTE: x87 +NETWORK_NAME: nano-testnet-alpha +BOOTSTRAP_DNS: [] + +# Genesis stuff +GENESIS_OUTPUT_SCRIPT: 76a91478e804bf8aa68332c6c1ada274ac598178b972bf88ac +GENESIS_BLOCK_TIMESTAMP: 1677601898 +GENESIS_BLOCK_NONCE: 7881594 +GENESIS_BLOCK_HASH: 000003472f6a17c2199e24c481a4326c217d07376acd9598651f8413c008554d +GENESIS_TX1_NONCE: 110 +GENESIS_TX1_HASH: 0008f0e9dbe6e4bbc3a85fce7494fee70011b9c7e72f5276daa2a235355ac013 +GENESIS_TX2_NONCE: 180 +GENESIS_TX2_HASH: 008d81d9d58a43fd9649f33483d804a4417247b4d4e4e01d64406c4177fee0c2 + +# tx weight parameters. With these settings tx weight is always 8 +MIN_TX_WEIGHT_K: 0 +MIN_TX_WEIGHT_COEFFICIENT: 0 +MIN_TX_WEIGHT: 8 +ENABLE_NANO_CONTRACTS: true diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index bdd441f3e..62718bf2a 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -423,6 +423,15 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: OLD_MAX_MERKLE_PATH_LENGTH: int = 12 NEW_MAX_MERKLE_PATH_LENGTH: int = 20 + # Used to enable nano contracts. + # + # This should NEVER be enabled for mainnet and testnet, since both networks will + # activate Nano Contracts through the Feature Activation. + ENABLE_NANO_CONTRACTS: bool = False + + # List of enabled blueprints. 
+ BLUEPRINTS: dict[bytes, 'str'] = {} + @classmethod def from_yaml(cls, *, filepath: str) -> 'HathorSettings': """Takes a filepath to a yaml file and returns a validated HathorSettings instance.""" @@ -449,6 +458,17 @@ def _parse_checkpoints(checkpoints: Union[dict[int, str], list[Checkpoint]]) -> return checkpoints +def _parse_blueprints(blueprints_raw: dict[str, str]) -> dict[bytes, str]: + """Parse dict[str, str] into dict[bytes, str].""" + blueprints: dict[bytes, str] = {} + for _id_str, _name in blueprints_raw.items(): + _id = bytes.fromhex(_id_str) + if _id in blueprints: + raise TypeError(f'Duplicate blueprint id: {_id_str}') + blueprints[_id] = _name + return blueprints + + def _parse_hex_str(hex_str: Union[str, bytes]) -> bytes: """Parse a raw hex string into bytes.""" if isinstance(hex_str, str): @@ -480,5 +500,9 @@ def _parse_hex_str(hex_str: Union[str, bytes]) -> bytes: _parse_checkpoints=pydantic.validator( 'CHECKPOINTS', pre=True - )(_parse_checkpoints) + )(_parse_checkpoints), + _parse_blueprints=pydantic.validator( + 'BLUEPRINTS', + pre=True + )(_parse_blueprints) ) diff --git a/hathor/conf/unittests.py b/hathor/conf/unittests.py index 0b9d46144..39e0b67e0 100644 --- a/hathor/conf/unittests.py +++ b/hathor/conf/unittests.py @@ -39,5 +39,6 @@ evaluation_interval=4, max_signal_bits=4, default_threshold=3 - ) + ), + ENABLE_NANO_CONTRACTS=True, ) diff --git a/hathor/conf/unittests.yml b/hathor/conf/unittests.yml index ebd25657d..abab9ae90 100644 --- a/hathor/conf/unittests.yml +++ b/hathor/conf/unittests.yml @@ -17,6 +17,7 @@ GENESIS_TX2_HASH: 33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e8 REWARD_SPEND_MIN_BLOCKS: 10 SLOW_ASSERTS: true MAX_TX_WEIGHT_DIFF_ACTIVATION: 0.0 +ENABLE_NANO_CONTRACTS: true FEATURE_ACTIVATION: evaluation_interval: 4 diff --git a/tests/others/test_hathor_settings.py b/tests/others/test_hathor_settings.py index 12040a0ca..3994e2a42 100644 --- a/tests/others/test_hathor_settings.py +++ b/tests/others/test_hathor_settings.py @@ -18,8 +18,14 @@ from pydantic import ValidationError from hathor.checkpoint import Checkpoint -from hathor.conf import MAINNET_SETTINGS_FILEPATH, TESTNET_SETTINGS_FILEPATH, UNITTESTS_SETTINGS_FILEPATH +from hathor.conf import ( + MAINNET_SETTINGS_FILEPATH, + NANO_TESTNET_SETTINGS_FILEPATH, + TESTNET_SETTINGS_FILEPATH, + UNITTESTS_SETTINGS_FILEPATH, +) from hathor.conf.mainnet import SETTINGS as MAINNET_SETTINGS +from hathor.conf.nano_testnet import SETTINGS as NANO_TESTNET_SETTINGS from hathor.conf.settings import HathorSettings from hathor.conf.testnet import SETTINGS as TESTNET_SETTINGS from hathor.conf.unittests import SETTINGS as UNITTESTS_SETTINGS @@ -115,3 +121,7 @@ def test_testnet_settings_migration(): def test_unittests_settings_migration(): assert UNITTESTS_SETTINGS == HathorSettings.from_yaml(filepath=UNITTESTS_SETTINGS_FILEPATH) + + +def test_nano_testnet_settings_migration(): + assert NANO_TESTNET_SETTINGS == HathorSettings.from_yaml(filepath=NANO_TESTNET_SETTINGS_FILEPATH) From f4d6a28edcf51384ce1dd132893206eec6a88202 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Fri, 9 Feb 2024 12:40:07 -0600 Subject: [PATCH 30/38] feat(sysctl): Add p2p.kill_connection to sysctl --- hathor/sysctl/p2p/manager.py | 18 ++++++++++++++++++ hathor/sysctl/sysctl.py | 4 ++++ tests/sysctl/test_p2p.py | 30 ++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+) diff --git a/hathor/sysctl/p2p/manager.py b/hathor/sysctl/p2p/manager.py index 2cfe291a6..09e4ad407 100644 --- a/hathor/sysctl/p2p/manager.py +++ 
b/hathor/sysctl/p2p/manager.py @@ -98,6 +98,11 @@ def __init__(self, connections: ConnectionsManager) -> None: self.get_enabled_sync_versions, self.set_enabled_sync_versions, ) + self.register( + 'kill_connection', + None, + self.set_kill_connection, + ) def set_force_sync_rotate(self) -> None: """Force a sync rotate.""" @@ -196,3 +201,16 @@ def _enable_sync_version(self, sync_version: SyncVersion) -> None: def _disable_sync_version(self, sync_version: SyncVersion) -> None: """Disable the given sync version.""" self.connections.disable_sync_version(sync_version) + + def set_kill_connection(self, peer_id: str, force: bool = False) -> None: + """Kill connection with peer_id or kill all connections if peer_id == '*'.""" + if peer_id == '*': + self.log.warn('Killing all connections') + self.connections.disconnect_all_peers(force=force) + return + + conn = self.connections.connected_peers.get(peer_id, None) + if conn is None: + self.log.warn('Killing connection', peer_id=peer_id) + raise SysctlException('peer-id is not connected') + conn.disconnect(force=force) diff --git a/hathor/sysctl/sysctl.py b/hathor/sysctl/sysctl.py index f9a805af8..1a045d0e3 100644 --- a/hathor/sysctl/sysctl.py +++ b/hathor/sysctl/sysctl.py @@ -15,12 +15,15 @@ from typing import Any, Callable, Iterator, NamedTuple, Optional from pydantic import validate_arguments +from structlog import get_logger from hathor.sysctl.exception import SysctlEntryNotFound, SysctlReadOnlyEntry, SysctlWriteOnlyEntry Getter = Callable[[], Any] Setter = Callable[..., None] +logger = get_logger() + class SysctlCommand(NamedTuple): getter: Optional[Getter] @@ -33,6 +36,7 @@ class Sysctl: def __init__(self) -> None: self._children: dict[str, 'Sysctl'] = {} self._commands: dict[str, SysctlCommand] = {} + self.log = logger.new() def put_child(self, path: str, sysctl: 'Sysctl') -> None: """Add a child to the tree.""" diff --git a/tests/sysctl/test_p2p.py b/tests/sysctl/test_p2p.py index 726e0d78a..f8598b91d 100644 --- a/tests/sysctl/test_p2p.py +++ b/tests/sysctl/test_p2p.py @@ -151,6 +151,36 @@ def test_enabled_sync_versions(self): sysctl.set('enabled_sync_versions', ['v1']) self.assertEqual(sysctl.get('enabled_sync_versions'), ['v1']) + def test_kill_all_connections(self): + manager = self.create_peer() + p2p_manager = manager.connections + sysctl = ConnectionsManagerSysctl(p2p_manager) + + p2p_manager.disconnect_all_peers = MagicMock() + self.assertEqual(p2p_manager.disconnect_all_peers.call_count, 0) + sysctl.set('kill_connection', '*') + self.assertEqual(p2p_manager.disconnect_all_peers.call_count, 1) + + def test_kill_one_connection(self): + manager = self.create_peer() + p2p_manager = manager.connections + sysctl = ConnectionsManagerSysctl(p2p_manager) + + peer_id = 'my-peer-id' + conn = MagicMock() + p2p_manager.connected_peers[peer_id] = conn + self.assertEqual(conn.disconnect.call_count, 0) + sysctl.set('kill_connection', peer_id) + self.assertEqual(conn.disconnect.call_count, 1) + + def test_kill_connection_unknown_peer_id(self): + manager = self.create_peer() + p2p_manager = manager.connections + sysctl = ConnectionsManagerSysctl(p2p_manager) + + with self.assertRaises(SysctlException): + sysctl.set('kill_connection', 'unknown-peer-id') + class SyncV1RandomSimulatorTestCase(unittest.SyncV1Params, BaseRandomSimulatorTestCase): __test__ = True From 1f68ec7d7ea8be533d258b411e2ce83042c50d04 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Fri, 9 Feb 2024 13:21:18 -0600 Subject: [PATCH 31/38] feat(sysctl): Add core module with 
profiler commands to sysctl --- hathor/builder/sysctl_builder.py | 3 +- hathor/manager.py | 15 ++++++-- hathor/sysctl/__init__.py | 2 + hathor/sysctl/core/__init__.py | 13 +++++++ hathor/sysctl/core/manager.py | 63 ++++++++++++++++++++++++++++++++ tests/sysctl/test_core.py | 36 ++++++++++++++++++ 6 files changed, 127 insertions(+), 5 deletions(-) create mode 100644 hathor/sysctl/core/__init__.py create mode 100644 hathor/sysctl/core/manager.py create mode 100644 tests/sysctl/test_core.py diff --git a/hathor/builder/sysctl_builder.py b/hathor/builder/sysctl_builder.py index 46c189ebd..e34cd4879 100644 --- a/hathor/builder/sysctl_builder.py +++ b/hathor/builder/sysctl_builder.py @@ -13,7 +13,7 @@ # limitations under the License. from hathor.builder import BuildArtifacts -from hathor.sysctl import ConnectionsManagerSysctl, Sysctl, WebsocketManagerSysctl +from hathor.sysctl import ConnectionsManagerSysctl, HathorManagerSysctl, Sysctl, WebsocketManagerSysctl class SysctlBuilder: @@ -25,6 +25,7 @@ def __init__(self, artifacts: BuildArtifacts) -> None: def build(self) -> Sysctl: """Build the sysctl tree.""" root = Sysctl() + root.put_child('core', HathorManagerSysctl(self.artifacts.manager)) root.put_child('p2p', ConnectionsManagerSysctl(self.artifacts.p2p_manager)) ws_factory = self.artifacts.manager.metrics.websocket_factory diff --git a/hathor/manager.py b/hathor/manager.py index 10af61697..566a7c936 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -15,8 +15,9 @@ import datetime import sys import time +from cProfile import Profile from enum import Enum -from typing import Any, Iterator, NamedTuple, Optional, Union +from typing import Iterator, NamedTuple, Optional, Union from hathorlib.base_transaction import tx_or_block_from_bytes as lib_tx_or_block_from_bytes from structlog import get_logger @@ -144,7 +145,11 @@ def __init__(self, add_system_event_trigger('after', 'shutdown', self.stop) self.state: Optional[HathorManager.NodeState] = None - self.profiler: Optional[Any] = None + + # Profiler info + self.profiler: Optional[Profile] = None + self.is_profiler_running: bool = False + self.profiler_last_start_time: float = 0 # Hostname, used to be accessed by other peers. self.hostname = hostname @@ -356,9 +361,10 @@ def start_profiler(self, *, reset: bool = False) -> None: Start profiler. It can be activated from a web resource, as well. """ if reset or not self.profiler: - import cProfile - self.profiler = cProfile.Profile() + self.profiler = Profile() self.profiler.enable() + self.is_profiler_running = True + self.profiler_last_start_time = self.reactor.seconds() def stop_profiler(self, save_to: Optional[str] = None) -> None: """ @@ -369,6 +375,7 @@ def stop_profiler(self, save_to: Optional[str] = None) -> None: """ assert self.profiler is not None self.profiler.disable() + self.is_profiler_running = False if save_to: self.profiler.dump_stats(save_to) diff --git a/hathor/sysctl/__init__.py b/hathor/sysctl/__init__.py index dc2ef2a67..af9d30e17 100644 --- a/hathor/sysctl/__init__.py +++ b/hathor/sysctl/__init__.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from hathor.sysctl.core.manager import HathorManagerSysctl from hathor.sysctl.p2p.manager import ConnectionsManagerSysctl from hathor.sysctl.sysctl import Sysctl from hathor.sysctl.websocket.manager import WebsocketManagerSysctl @@ -19,5 +20,6 @@ __all__ = [ 'Sysctl', 'ConnectionsManagerSysctl', + 'HathorManagerSysctl', 'WebsocketManagerSysctl', ] diff --git a/hathor/sysctl/core/__init__.py b/hathor/sysctl/core/__init__.py new file mode 100644 index 000000000..cd58c6521 --- /dev/null +++ b/hathor/sysctl/core/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/hathor/sysctl/core/manager.py b/hathor/sysctl/core/manager.py new file mode 100644 index 000000000..5cd25ad5d --- /dev/null +++ b/hathor/sysctl/core/manager.py @@ -0,0 +1,63 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.manager import HathorManager +from hathor.sysctl.sysctl import Sysctl + + +class HathorManagerSysctl(Sysctl): + def __init__(self, manager: HathorManager) -> None: + super().__init__() + + self.manager = manager + self.register( + 'profiler.status', + self.get_profiler_status, + None + ) + self.register( + 'profiler.start', + None, + self.set_profiler_start, + ) + self.register( + 'profiler.stop', + None, + self.set_profiler_stop, + ) + + def get_profiler_status(self) -> tuple[int, float]: + """Return (enabled, duration) as a profiler status. + + enabled: 0 means disabled / 1 means enabled. + duration: time in seconds since the profiler has been started. + """ + if not self.manager.is_profiler_running: + return (0, 0) + now = self.manager.reactor.seconds() + duration = now - self.manager.profiler_last_start_time + return (1, duration) + + def set_profiler_start(self, reset: bool) -> None: + """Start the profiler. One can safely call start multiple times to reset it.""" + self.manager.start_profiler(reset=reset) + + def set_profiler_stop(self, save_to: str | None) -> None: + """Stop the profiler and optionally dump the statistics to a file. + + An empty save_to will skip the dump. 
+ """ + if not save_to: + save_to = None + self.manager.stop_profiler(save_to=save_to) diff --git a/tests/sysctl/test_core.py b/tests/sysctl/test_core.py new file mode 100644 index 000000000..d0dd31d64 --- /dev/null +++ b/tests/sysctl/test_core.py @@ -0,0 +1,36 @@ +from unittest.mock import MagicMock, Mock, call + +from hathor.sysctl import HathorManagerSysctl +from tests.simulation.base import SimulatorTestCase + + +class HathorManagerSysctlTestCase(SimulatorTestCase): + __test__ = True + _enable_sync_v1 = True + _enable_sync_v2 = True + + def test_profiler(self): + manager = self.create_peer() + sysctl = HathorManagerSysctl(manager) + + status = sysctl.get('profiler.status') + self.assertEqual(status, (0, 0)) + + manager.start_profiler = Mock(wraps=manager.start_profiler) + self.assertEqual(manager.start_profiler.call_count, 0) + sysctl.set('profiler.start', False) + self.assertEqual(manager.start_profiler.call_count, 1) + + manager.reactor.advance(100) + status = sysctl.get('profiler.status') + self.assertEqual(status, (1, 100)) + + manager.stop_profiler = Mock(wraps=manager.stop_profiler) + manager.profiler = MagicMock() # prevents a call to profiler.dump_stats() + self.assertEqual(manager.stop_profiler.call_count, 0) + sysctl.set('profiler.stop', '/path/to/dump') + self.assertEqual(manager.stop_profiler.call_count, 1) + self.assertEqual(manager.stop_profiler.call_args, call(save_to='/path/to/dump',)) + + status = sysctl.get('profiler.status') + self.assertEqual(status, (0, 0)) From b33f03eec86ca72a7c4318b0049cbafe4d284eab Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Fri, 9 Feb 2024 12:06:19 -0600 Subject: [PATCH 32/38] feat(cli): Use SIGUSR2 to run sysctl commands --- hathor/cli/run_node.py | 112 ++++++++++++++++++++++++++++++--- hathor/sysctl/p2p/manager.py | 5 +- hathor/sysctl/runner.py | 19 ++++-- hathor/sysctl/sysctl.py | 22 +++++-- tests/sysctl/test_p2p.py | 46 +++++++------- tests/sysctl/test_sysctl.py | 28 ++++----- tests/sysctl/test_websocket.py | 16 ++--- 7 files changed, 182 insertions(+), 66 deletions(-) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 363814a20..43e861ee9 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -14,8 +14,10 @@ import os import sys +import tempfile from argparse import SUPPRESS, ArgumentParser, Namespace -from typing import TYPE_CHECKING, Any, Callable, Optional +from contextlib import contextmanager +from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, TextIO from pydantic import ValidationError from structlog import get_logger @@ -25,6 +27,25 @@ if TYPE_CHECKING: from hathor.cli.run_node_args import RunNodeArgs + from hathor.sysctl.runner import SysctlRunner + + +@contextmanager +def temp_fifo(filename: str, tempdir: str | None) -> Iterator[TextIO]: + """Context Manager for creating named pipes.""" + mkfifo = getattr(os, 'mkfifo', None) + if mkfifo is None: + raise AttributeError('mkfifo is not available') + + mkfifo(filename, mode=0o666) + fp = open(filename, 'r') + try: + yield fp + finally: + fp.close() + os.unlink(filename) + if tempdir is not None: + os.rmdir(tempdir) class RunNode: @@ -236,12 +257,79 @@ def register_signal_handlers(self) -> None: if sigusr1 is not None: # USR1 is available in this OS. signal.signal(sigusr1, self.signal_usr1_handler) + sigusr2 = getattr(signal, 'SIGUSR2', None) + if sigusr2 is not None: + # USR1 is available in this OS. 
+ signal.signal(sigusr2, self.signal_usr2_handler) def signal_usr1_handler(self, sig: int, frame: Any) -> None: """Called when USR1 signal is received.""" - self.log.warn('USR1 received. Killing all connections...') - if self.manager and self.manager.connections: - self.manager.connections.disconnect_all_peers(force=True) + try: + self.log.warn('USR1 received. Killing all connections...') + if self.manager and self.manager.connections: + self.manager.connections.disconnect_all_peers(force=True) + except Exception: + # see: https://docs.python.org/3/library/signal.html#note-on-signal-handlers-and-exceptions + self.log.error('prevented exception from escaping the signal handler', exc_info=True) + + def signal_usr2_handler(self, sig: int, frame: Any) -> None: + """Called when USR2 signal is received.""" + try: + self.log.warn('USR2 received.') + self.run_sysctl_from_signal() + except Exception: + # see: https://docs.python.org/3/library/signal.html#note-on-signal-handlers-and-exceptions + self.log.error('prevented exception from escaping the signal handler', exc_info=True) + + def run_sysctl_from_signal(self) -> None: + """Block the main loop, get commands from a named pipe and execute then using sysctl.""" + from hathor.sysctl.exception import ( + SysctlEntryNotFound, + SysctlException, + SysctlReadOnlyEntry, + SysctlRunnerException, + SysctlWriteOnlyEntry, + ) + + runner = self.get_sysctl_runner() + + if self._args.data is not None: + basedir = self._args.data + tempdir = None + else: + basedir = tempfile.mkdtemp() + tempdir = basedir + + filename = os.path.join(basedir, f'SIGUSR2-{os.getpid()}.pipe') + if os.path.exists(filename): + self.log.warn('[USR2] Pipe already exists.', pipe=filename) + return + + with temp_fifo(filename, tempdir) as fp: + self.log.warn('[USR2] Main loop paused, awaiting command to proceed.', pipe=filename) + lines = fp.readlines() + for cmd in lines: + cmd = cmd.strip() + self.log.warn('[USR2] Command received ', cmd=cmd) + + try: + output = runner.run(cmd, require_signal_handler_safe=True) + self.log.warn('[USR2] Output', output=output) + except SysctlEntryNotFound: + path, _, _ = runner.get_line_parts(cmd) + self.log.warn('[USR2] Error', errmsg=f'{path} not found') + except SysctlReadOnlyEntry: + path, _, _ = runner.get_line_parts(cmd) + self.log.warn('[USR2] Error', errmsg=f'cannot write to {path}') + except SysctlWriteOnlyEntry: + path, _, _ = runner.get_line_parts(cmd) + self.log.warn('[USR2] Error', errmsg=f'cannot read from {path}') + except SysctlException as e: + self.log.warn('[USR2] Error', errmsg=str(e)) + except ValidationError as e: + self.log.warn('[USR2] Error', errmsg=str(e)) + except SysctlRunnerException as e: + self.log.warn('[USR2] Error', errmsg=str(e)) def check_unsafe_arguments(self) -> None: unsafe_args_found = [] @@ -392,6 +480,16 @@ def __init__(self, *, argv=None): if self._args.sysctl: self.init_sysctl(self._args.sysctl, self._args.sysctl_init_file) + def get_sysctl_runner(self) -> 'SysctlRunner': + """Create and return a SysctlRunner.""" + from hathor.builder.sysctl_builder import SysctlBuilder + from hathor.sysctl.runner import SysctlRunner + + builder = SysctlBuilder(self.artifacts) + root = builder.build() + runner = SysctlRunner(root) + return runner + def init_sysctl(self, description: str, sysctl_init_file: Optional[str] = None) -> None: """Initialize sysctl, listen for connections and apply settings from config file if required. 
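For reference, the operator-side counterpart of the handler added above: send SIGUSR2 to the node, wait for it to create the SIGUSR2-<pid>.pipe FIFO inside the data directory, then write one sysctl command per line. A hedged sketch of a small driver script (the pid, data directory, and the example command are illustrative; only setters marked as signal-handler safe will be accepted by the node):

import os
import signal
import time

pid = 12345                      # PID of the running hathor-core process (illustrative)
data_dir = '/path/to/data'       # same value passed to --data (illustrative)
pipe_path = os.path.join(data_dir, f'SIGUSR2-{pid}.pipe')

# Ask the node to pause its main loop and open the command pipe.
os.kill(pid, signal.SIGUSR2)

# The FIFO is created by the signal handler, so wait for it to show up.
while not os.path.exists(pipe_path):
    time.sleep(0.1)

# Write one command per line; the node logs each command and its output.
with open(pipe_path, 'w') as fp:
    fp.write('p2p.max_enabled_sync=8\n')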
@@ -406,14 +504,10 @@ def init_sysctl(self, description: str, sysctl_init_file: Optional[str] = None) """ from twisted.internet.endpoints import serverFromString - from hathor.builder.sysctl_builder import SysctlBuilder from hathor.sysctl.factory import SysctlFactory from hathor.sysctl.init_file_loader import SysctlInitFileLoader - from hathor.sysctl.runner import SysctlRunner - builder = SysctlBuilder(self.artifacts) - root = builder.build() - runner = SysctlRunner(root) + runner = self.get_sysctl_runner() if sysctl_init_file: init_file_loader = SysctlInitFileLoader(runner, sysctl_init_file) diff --git a/hathor/sysctl/p2p/manager.py b/hathor/sysctl/p2p/manager.py index 09e4ad407..ab6ef5902 100644 --- a/hathor/sysctl/p2p/manager.py +++ b/hathor/sysctl/p2p/manager.py @@ -17,7 +17,7 @@ from hathor.p2p.manager import ConnectionsManager from hathor.p2p.sync_version import SyncVersion from hathor.sysctl.exception import SysctlException -from hathor.sysctl.sysctl import Sysctl +from hathor.sysctl.sysctl import Sysctl, signal_handler_safe def parse_text(text: str) -> list[str]: @@ -162,6 +162,7 @@ def get_max_enabled_sync(self) -> int: """Return the maximum number of peers running sync simultaneously.""" return self.connections.MAX_ENABLED_SYNC + @signal_handler_safe def set_max_enabled_sync(self, value: int) -> None: """Change the maximum number of peers running sync simultaneously.""" if value < 0: @@ -179,6 +180,7 @@ def get_enabled_sync_versions(self) -> list[str]: """Return the list of ENABLED sync versions.""" return sorted(map(pretty_sync_version, self.connections.get_enabled_sync_versions())) + @signal_handler_safe def set_enabled_sync_versions(self, sync_versions: list[str]) -> None: """Set the list of ENABLED sync versions.""" new_sync_versions = set(map(parse_sync_version, sync_versions)) @@ -202,6 +204,7 @@ def _disable_sync_version(self, sync_version: SyncVersion) -> None: """Disable the given sync version.""" self.connections.disable_sync_version(sync_version) + @signal_handler_safe def set_kill_connection(self, peer_id: str, force: bool = False) -> None: """Kill connection with peer_id or kill all connections if peer_id == '*'.""" if peer_id == '*': diff --git a/hathor/sysctl/runner.py b/hathor/sysctl/runner.py index 6ee32cb7b..5bb181ed9 100644 --- a/hathor/sysctl/runner.py +++ b/hathor/sysctl/runner.py @@ -28,7 +28,7 @@ class SysctlRunner: def __init__(self, root: 'Sysctl') -> None: self.root = root - def run(self, line: str) -> bytes: + def run(self, line: str, *, require_signal_handler_safe: bool = False) -> bytes: """Receives a string line, parses, interprets, acts over the Sysctl, and returns an UTF-8 encoding data as feedback. 
""" @@ -37,23 +37,32 @@ def run(self, line: str) -> bytes: head, separator, tail = self.get_line_parts(line) if separator == '=': - return self._set(head, tail) + return self._set(head, tail, require_signal_handler_safe=require_signal_handler_safe) else: return self._get(head) - def _set(self, path: str, value_str: str) -> bytes: + def _set(self, path: str, value_str: str, *, require_signal_handler_safe: bool) -> bytes: """Run a `set` command in sysctl, and return and empty feedback.""" try: value = self.deserialize(value_str) except json.JSONDecodeError: raise SysctlRunnerException('value: wrong format') - self.root.set(path, value) + setter = self.root.get_setter(path) + if require_signal_handler_safe: + if not hasattr(setter, '_signal_handler_safe'): + raise SysctlRunnerException('setter: not safe for signal handling') + + if isinstance(value, tuple): + setter(*value) + else: + setter(value) return b'' def _get(self, path: str) -> bytes: """Run a `get` command in sysctl.""" - value = self.root.get(path) + getter = self.root.get_getter(path) + value = getter() return self.serialize(value).encode('utf-8') def get_line_parts(self, line: str) -> tuple[str, ...]: diff --git a/hathor/sysctl/sysctl.py b/hathor/sysctl/sysctl.py index 1a045d0e3..79bf3c5b0 100644 --- a/hathor/sysctl/sysctl.py +++ b/hathor/sysctl/sysctl.py @@ -25,6 +25,15 @@ logger = get_logger() +def signal_handler_safe(f): + """Decorator to mark methods as signal handler safe. + + It should only be used if that method can be executed during a signal handling. + Notice that a signal handling can pause the code execution at any point and the execution will resume after.""" + f._signal_handler_safe = True + return f + + class SysctlCommand(NamedTuple): getter: Optional[Getter] setter: Optional[Setter] @@ -64,14 +73,14 @@ def get_command(self, path: str) -> SysctlCommand: return child.get_command(tail) raise SysctlEntryNotFound(path) - def _get_getter(self, path: str) -> Getter: + def get_getter(self, path: str) -> Getter: """Return the getter method of a path.""" cmd = self.get_command(path) if cmd.getter is None: raise SysctlWriteOnlyEntry(path) return cmd.getter - def _get_setter(self, path: str) -> Setter: + def get_setter(self, path: str) -> Setter: """Return the setter method of a path.""" cmd = self.get_command(path) if cmd.setter is None: @@ -80,12 +89,13 @@ def _get_setter(self, path: str) -> Setter: def get(self, path: str) -> Any: """Run a get in sysctl.""" - getter = self._get_getter(path) + getter = self.get_getter(path) return getter() - def set(self, path: str, value: Any) -> None: - """Run a set in sysctl.""" - setter = self._get_setter(path) + def unsafe_set(self, path: str, value: Any) -> None: + """Run a set in sysctl. You should use a runner instead of calling this method directly. 
+ Should not be called unless you know it's safe.""" + setter = self.get_setter(path) if isinstance(value, tuple): setter(*value) else: diff --git a/tests/sysctl/test_p2p.py b/tests/sysctl/test_p2p.py index f8598b91d..6a3980c5e 100644 --- a/tests/sysctl/test_p2p.py +++ b/tests/sysctl/test_p2p.py @@ -17,28 +17,28 @@ def test_max_enabled_sync(self): connections._sync_rotate_if_needed = MagicMock() self.assertEqual(connections._sync_rotate_if_needed.call_count, 0) - sysctl.set('max_enabled_sync', 10) + sysctl.unsafe_set('max_enabled_sync', 10) self.assertEqual(connections._sync_rotate_if_needed.call_count, 1) self.assertEqual(connections.MAX_ENABLED_SYNC, 10) self.assertEqual(sysctl.get('max_enabled_sync'), 10) - sysctl.set('max_enabled_sync', 10) + sysctl.unsafe_set('max_enabled_sync', 10) self.assertEqual(connections._sync_rotate_if_needed.call_count, 1) self.assertEqual(connections.MAX_ENABLED_SYNC, 10) self.assertEqual(sysctl.get('max_enabled_sync'), 10) - sysctl.set('max_enabled_sync', 5) + sysctl.unsafe_set('max_enabled_sync', 5) self.assertEqual(connections._sync_rotate_if_needed.call_count, 2) self.assertEqual(connections.MAX_ENABLED_SYNC, 5) self.assertEqual(sysctl.get('max_enabled_sync'), 5) - sysctl.set('max_enabled_sync', 0) + sysctl.unsafe_set('max_enabled_sync', 0) self.assertEqual(connections._sync_rotate_if_needed.call_count, 3) self.assertEqual(connections.MAX_ENABLED_SYNC, 0) self.assertEqual(sysctl.get('max_enabled_sync'), 0) with self.assertRaises(SysctlException): - sysctl.set('max_enabled_sync', -1) + sysctl.unsafe_set('max_enabled_sync', -1) def test_global_rate_limiter_send_tips(self): manager = self.create_peer() @@ -47,29 +47,29 @@ def test_global_rate_limiter_send_tips(self): path = 'rate_limit.global.send_tips' - sysctl.set(path, (10, 4)) + sysctl.unsafe_set(path, (10, 4)) limit = connections.rate_limiter.get_limit(connections.GlobalRateLimiter.SEND_TIPS) self.assertEqual(limit, (10, 4)) self.assertEqual(sysctl.get(path), (10, 4)) - sysctl.set(path, (15, 5)) + sysctl.unsafe_set(path, (15, 5)) limit = connections.rate_limiter.get_limit(connections.GlobalRateLimiter.SEND_TIPS) self.assertEqual(limit, (15, 5)) self.assertEqual(sysctl.get(path), (15, 5)) - sysctl.set(path, (0, 0)) + sysctl.unsafe_set(path, (0, 0)) limit = connections.rate_limiter.get_limit(connections.GlobalRateLimiter.SEND_TIPS) self.assertEqual(limit, None) self.assertEqual(sysctl.get(path), (0, 0)) with self.assertRaises(SysctlException): - sysctl.set(path, (-1, 1)) + sysctl.unsafe_set(path, (-1, 1)) with self.assertRaises(SysctlException): - sysctl.set(path, (1, -1)) + sysctl.unsafe_set(path, (1, -1)) with self.assertRaises(SysctlException): - sysctl.set(path, (-1, -1)) + sysctl.unsafe_set(path, (-1, -1)) def test_force_sync_rotate(self): manager = self.create_peer() @@ -79,7 +79,7 @@ def test_force_sync_rotate(self): connections._sync_rotate_if_needed = MagicMock() self.assertEqual(connections._sync_rotate_if_needed.call_count, 0) - sysctl.set('force_sync_rotate', ()) + sysctl.unsafe_set('force_sync_rotate', ()) self.assertEqual(connections._sync_rotate_if_needed.call_count, 1) self.assertEqual(connections._sync_rotate_if_needed.call_args.kwargs, {'force': True}) @@ -88,23 +88,23 @@ def test_sync_update_interval(self): connections = manager.connections sysctl = ConnectionsManagerSysctl(connections) - sysctl.set('sync_update_interval', 10) + sysctl.unsafe_set('sync_update_interval', 10) self.assertEqual(connections.lc_sync_update_interval, 10) self.assertEqual(sysctl.get('sync_update_interval'), 
10) with self.assertRaises(SysctlException): - sysctl.set('sync_update_interval', -1) + sysctl.unsafe_set('sync_update_interval', -1) def test_always_enable_sync(self): manager = self.create_peer() connections = manager.connections sysctl = ConnectionsManagerSysctl(connections) - sysctl.set('always_enable_sync', ['peer-1', 'peer-2']) + sysctl.unsafe_set('always_enable_sync', ['peer-1', 'peer-2']) self.assertEqual(connections.always_enable_sync, {'peer-1', 'peer-2'}) self.assertEqual(set(sysctl.get('always_enable_sync')), {'peer-1', 'peer-2'}) - sysctl.set('always_enable_sync', []) + sysctl.unsafe_set('always_enable_sync', []) self.assertEqual(connections.always_enable_sync, set()) self.assertEqual(sysctl.get('always_enable_sync'), []) @@ -119,7 +119,7 @@ def test_always_enable_sync(self): fp.write('\n'.join(content)) fp.close() - sysctl.set('always_enable_sync.readtxt', file_path) + sysctl.unsafe_set('always_enable_sync.readtxt', file_path) self.assertEqual(connections.always_enable_sync, set(content)) self.assertEqual(set(sysctl.get('always_enable_sync')), set(content)) @@ -144,11 +144,11 @@ def test_enabled_sync_versions(self): sysctl = ConnectionsManagerSysctl(connections) self.assertEqual(sysctl.get('enabled_sync_versions'), self._default_enabled_sync_versions()) - sysctl.set('enabled_sync_versions', ['v1', 'v2']) + sysctl.unsafe_set('enabled_sync_versions', ['v1', 'v2']) self.assertEqual(sysctl.get('enabled_sync_versions'), ['v1', 'v2']) - sysctl.set('enabled_sync_versions', ['v2']) + sysctl.unsafe_set('enabled_sync_versions', ['v2']) self.assertEqual(sysctl.get('enabled_sync_versions'), ['v2']) - sysctl.set('enabled_sync_versions', ['v1']) + sysctl.unsafe_set('enabled_sync_versions', ['v1']) self.assertEqual(sysctl.get('enabled_sync_versions'), ['v1']) def test_kill_all_connections(self): @@ -158,7 +158,7 @@ def test_kill_all_connections(self): p2p_manager.disconnect_all_peers = MagicMock() self.assertEqual(p2p_manager.disconnect_all_peers.call_count, 0) - sysctl.set('kill_connection', '*') + sysctl.unsafe_set('kill_connection', '*') self.assertEqual(p2p_manager.disconnect_all_peers.call_count, 1) def test_kill_one_connection(self): @@ -170,7 +170,7 @@ def test_kill_one_connection(self): conn = MagicMock() p2p_manager.connected_peers[peer_id] = conn self.assertEqual(conn.disconnect.call_count, 0) - sysctl.set('kill_connection', peer_id) + sysctl.unsafe_set('kill_connection', peer_id) self.assertEqual(conn.disconnect.call_count, 1) def test_kill_connection_unknown_peer_id(self): @@ -179,7 +179,7 @@ def test_kill_connection_unknown_peer_id(self): sysctl = ConnectionsManagerSysctl(p2p_manager) with self.assertRaises(SysctlException): - sysctl.set('kill_connection', 'unknown-peer-id') + sysctl.unsafe_set('kill_connection', 'unknown-peer-id') class SyncV1RandomSimulatorTestCase(unittest.SyncV1Params, BaseRandomSimulatorTestCase): diff --git a/tests/sysctl/test_sysctl.py b/tests/sysctl/test_sysctl.py index 9ae554b9e..d629d4230 100644 --- a/tests/sysctl/test_sysctl.py +++ b/tests/sysctl/test_sysctl.py @@ -97,34 +97,34 @@ def test_get_writeonly(self) -> None: ############## def test_set_int(self) -> None: - self.root.set('net.max_connections', 3) - setter = cast(MagicMock, self.root._get_setter('net.max_connections')) + self.root.unsafe_set('net.max_connections', 3) + setter = cast(MagicMock, self.root.get_setter('net.max_connections')) self.assertEqual(1, setter.call_count) self.assertEqual((3,), setter.call_args.args) def test_set_str(self) -> None: - self.root.set('core.loglevel', 
'debug') - setter = cast(MagicMock, self.root._get_setter('core.loglevel')) + self.root.unsafe_set('core.loglevel', 'debug') + setter = cast(MagicMock, self.root.get_setter('core.loglevel')) self.assertEqual(1, setter.call_count) self.assertEqual(('debug',), setter.call_args.args) def test_set_readonly(self) -> None: with self.assertRaises(SysctlReadOnlyEntry): - self.root.set('net.readonly', 0.50) + self.root.unsafe_set('net.readonly', 0.50) def test_set_tuple(self) -> None: - self.root.set('net.rate_limit', (8, 2)) - setter = cast(MagicMock, self.root._get_setter('net.rate_limit')) + self.root.unsafe_set('net.rate_limit', (8, 2)) + setter = cast(MagicMock, self.root.get_setter('net.rate_limit')) self.assertEqual(1, setter.call_count) self.assertEqual((8, 2), setter.call_args.args) def test_set_unknown(self) -> None: with self.assertRaises(SysctlEntryNotFound): - self.root.set('net.unknown', 1) + self.root.unsafe_set('net.unknown', 1) def test_set_writeonly(self) -> None: - self.root.set('core.writeonly', 1) - setter = cast(MagicMock, self.root._get_setter('core.writeonly')) + self.root.unsafe_set('core.writeonly', 1) + setter = cast(MagicMock, self.root.get_setter('core.writeonly')) self.assertEqual(1, setter.call_count) self.assertEqual((1,), setter.call_args.args) @@ -193,13 +193,13 @@ def test_proto_get_writeonly(self) -> None: def test_proto_set_int(self) -> None: self.proto.lineReceived(b'net.max_connections=3') - setter = cast(MagicMock, self.root._get_setter('net.max_connections')) + setter = cast(MagicMock, self.root.get_setter('net.max_connections')) self.assertEqual(1, setter.call_count) self.assertEqual((3,), setter.call_args.args) def test_proto_set_str(self) -> None: self.proto.lineReceived(b'core.loglevel="debug"') - setter = cast(MagicMock, self.root._get_setter('core.loglevel')) + setter = cast(MagicMock, self.root.get_setter('core.loglevel')) self.assertEqual(1, setter.call_count) self.assertEqual(('debug',), setter.call_args.args) @@ -213,13 +213,13 @@ def test_proto_set_unknown(self) -> None: def test_proto_set_tuple(self) -> None: self.proto.lineReceived(b'net.rate_limit=8, 2') - setter = cast(MagicMock, self.root._get_setter('net.rate_limit')) + setter = cast(MagicMock, self.root.get_setter('net.rate_limit')) self.assertEqual(1, setter.call_count) self.assertEqual((8, 2), setter.call_args.args) def test_proto_set_writeonly(self) -> None: self.proto.lineReceived(b'core.writeonly=1') - setter = cast(MagicMock, self.root._get_setter('core.writeonly')) + setter = cast(MagicMock, self.root.get_setter('core.writeonly')) self.assertEqual(1, setter.call_count) self.assertEqual((1,), setter.call_args.args) diff --git a/tests/sysctl/test_websocket.py b/tests/sysctl/test_websocket.py index f8dc44f08..3c7749f3e 100644 --- a/tests/sysctl/test_websocket.py +++ b/tests/sysctl/test_websocket.py @@ -9,36 +9,36 @@ def test_max_subs_addrs_conn(self): ws_factory = HathorAdminWebsocketFactory() sysctl = WebsocketManagerSysctl(ws_factory) - sysctl.set('max_subs_addrs_conn', 10) + sysctl.unsafe_set('max_subs_addrs_conn', 10) self.assertEqual(ws_factory.max_subs_addrs_conn, 10) self.assertEqual(sysctl.get('max_subs_addrs_conn'), 10) - sysctl.set('max_subs_addrs_conn', 0) + sysctl.unsafe_set('max_subs_addrs_conn', 0) self.assertEqual(ws_factory.max_subs_addrs_conn, 0) self.assertEqual(sysctl.get('max_subs_addrs_conn'), 0) - sysctl.set('max_subs_addrs_conn', -1) + sysctl.unsafe_set('max_subs_addrs_conn', -1) self.assertIsNone(ws_factory.max_subs_addrs_conn) 
self.assertEqual(sysctl.get('max_subs_addrs_conn'), -1) with self.assertRaises(SysctlException): - sysctl.set('max_subs_addrs_conn', -2) + sysctl.unsafe_set('max_subs_addrs_conn', -2) def test_max_subs_addrs_empty(self): ws_factory = HathorAdminWebsocketFactory() sysctl = WebsocketManagerSysctl(ws_factory) - sysctl.set('max_subs_addrs_empty', 10) + sysctl.unsafe_set('max_subs_addrs_empty', 10) self.assertEqual(ws_factory.max_subs_addrs_empty, 10) self.assertEqual(sysctl.get('max_subs_addrs_empty'), 10) - sysctl.set('max_subs_addrs_empty', 0) + sysctl.unsafe_set('max_subs_addrs_empty', 0) self.assertEqual(ws_factory.max_subs_addrs_empty, 0) self.assertEqual(sysctl.get('max_subs_addrs_empty'), 0) - sysctl.set('max_subs_addrs_empty', -1) + sysctl.unsafe_set('max_subs_addrs_empty', -1) self.assertIsNone(ws_factory.max_subs_addrs_empty) self.assertEqual(sysctl.get('max_subs_addrs_empty'), -1) with self.assertRaises(SysctlException): - sysctl.set('max_subs_addrs_empty', -2) + sysctl.unsafe_set('max_subs_addrs_empty', -2) From d885d7f42f849a964b0db9eefa7b831686a4264e Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Thu, 15 Feb 2024 17:07:53 +0100 Subject: [PATCH 33/38] fix: test broke after #951 --- tests/sysctl/test_core.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/sysctl/test_core.py b/tests/sysctl/test_core.py index d0dd31d64..4c3c8543b 100644 --- a/tests/sysctl/test_core.py +++ b/tests/sysctl/test_core.py @@ -18,7 +18,7 @@ def test_profiler(self): manager.start_profiler = Mock(wraps=manager.start_profiler) self.assertEqual(manager.start_profiler.call_count, 0) - sysctl.set('profiler.start', False) + sysctl.unsafe_set('profiler.start', False) self.assertEqual(manager.start_profiler.call_count, 1) manager.reactor.advance(100) @@ -28,7 +28,7 @@ def test_profiler(self): manager.stop_profiler = Mock(wraps=manager.stop_profiler) manager.profiler = MagicMock() # prevents a call to profiler.dump_stats() self.assertEqual(manager.stop_profiler.call_count, 0) - sysctl.set('profiler.stop', '/path/to/dump') + sysctl.unsafe_set('profiler.stop', '/path/to/dump') self.assertEqual(manager.stop_profiler.call_count, 1) self.assertEqual(manager.stop_profiler.call_args, call(save_to='/path/to/dump',)) From 00967a7f414321e4c76e7d25a8c8bfd775ffdec1 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Fri, 16 Feb 2024 10:54:15 -0600 Subject: [PATCH 34/38] fix(cli): Fix issue when running sysctl through USR2 in macOS --- hathor/cli/run_node.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 43e861ee9..7f99cc864 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -17,7 +17,7 @@ import tempfile from argparse import SUPPRESS, ArgumentParser, Namespace from contextlib import contextmanager -from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, TextIO +from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional from pydantic import ValidationError from structlog import get_logger @@ -31,18 +31,16 @@ @contextmanager -def temp_fifo(filename: str, tempdir: str | None) -> Iterator[TextIO]: +def temp_fifo(filename: str, tempdir: str | None) -> Iterator[None]: """Context Manager for creating named pipes.""" mkfifo = getattr(os, 'mkfifo', None) if mkfifo is None: raise AttributeError('mkfifo is not available') mkfifo(filename, mode=0o666) - fp = open(filename, 'r') try: - yield fp + yield None finally: - fp.close() os.unlink(filename) if tempdir is not None: 
os.rmdir(tempdir) @@ -305,9 +303,15 @@ def run_sysctl_from_signal(self) -> None: self.log.warn('[USR2] Pipe already exists.', pipe=filename) return - with temp_fifo(filename, tempdir) as fp: + with temp_fifo(filename, tempdir): self.log.warn('[USR2] Main loop paused, awaiting command to proceed.', pipe=filename) - lines = fp.readlines() + + fp = open(filename, 'r') + try: + lines = fp.readlines() + finally: + fp.close() + for cmd in lines: cmd = cmd.strip() self.log.warn('[USR2] Command received ', cmd=cmd) From 7876fa54e5877c823df0ab3b9b8114eb76977648 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Wed, 14 Feb 2024 16:45:58 -0600 Subject: [PATCH 35/38] feat(sysctl): Add sysctl tools for debugging --- hathor/sysctl/core/manager.py | 177 +++++++++++++++++++++++++++++++++- 1 file changed, 176 insertions(+), 1 deletion(-) diff --git a/hathor/sysctl/core/manager.py b/hathor/sysctl/core/manager.py index 5cd25ad5d..00bcd1615 100644 --- a/hathor/sysctl/core/manager.py +++ b/hathor/sysctl/core/manager.py @@ -12,8 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +import io +import os +import sys +from typing import IO, Any, Optional + from hathor.manager import HathorManager -from hathor.sysctl.sysctl import Sysctl +from hathor.sysctl.sysctl import Sysctl, signal_handler_safe class HathorManagerSysctl(Sysctl): @@ -36,6 +41,31 @@ def __init__(self, manager: HathorManager) -> None: None, self.set_profiler_stop, ) + self.register( + 'pudb.set_trace.attach_tty', + None, + self.set_pudb_set_trace_attach_tty, + ) + self.register( + 'pudb.set_trace.create_tty', + None, + self.set_pudb_set_trace_create_tty, + ) + self.register( + 'pudb.status', + self.get_pudb_status, + None + ) + self.register( + 'pudb.stop', + None, + self.set_pudb_stop, + ) + self.register( + 'ipython.run.attach_tty', + None, + self.set_ipython_run, + ) def get_profiler_status(self) -> tuple[int, float]: """Return (enabled, duration) as a profiler status. @@ -49,10 +79,12 @@ def get_profiler_status(self) -> tuple[int, float]: duration = now - self.manager.profiler_last_start_time return (1, duration) + @signal_handler_safe def set_profiler_start(self, reset: bool) -> None: """Start the profiler. One can safely call start multiple times to reset it.""" self.manager.start_profiler(reset=reset) + @signal_handler_safe def set_profiler_stop(self, save_to: str | None) -> None: """Stop the profiler and optionally dump the statistics to a file. @@ -61,3 +93,146 @@ def set_profiler_stop(self, save_to: str | None) -> None: if not save_to: save_to = None self.manager.stop_profiler(save_to=save_to) + + @signal_handler_safe + def set_pudb_set_trace_attach_tty(self, tty: str) -> None: + """Stop execution and open pudb in a given tty. + + ATTENTION: This command can be destructive and the full node might not work after running it. + + Open a new terminal. First, you need to get the path of the tty of the + terminal you want to debug from. To do that, use the standard unix + command `tty`. It will print something like `/dev/pts/3`. + + Then you need to make sure that your terminal doesn't have a shell actively + reading and possibly capturing some of the input that should go to pudb. + To do that run a placeholder command that does nothing, such as `perl -MPOSIX -e pause`. 
+ """ + fp = open(tty, 'r+b', buffering=0) + term_size = os.get_terminal_size(fp.fileno()) + self._run_pudb_set_trace(tty, fp, term_size=term_size) + + @signal_handler_safe + def set_pudb_set_trace_create_tty(self, cols: int, rows: int) -> None: + """Stop execution and open pudb for debugging in a newly created tty. + + ATTENTION: This command can be destructive and the full node might not work after running it. + + You must provide the terminal size (cols, rows). + + The newly created tty name will be printed in the logs. After you check the logs and get the name, + you can connect to it using `screen `. + """ + import fcntl + import struct + import termios + + # Some of these methods are not available in some operating system (e.g. Windows). + # This is a way to avoid mypy errors. + openpty = getattr(os, 'openpty', None) + ioctl = getattr(fcntl, 'ioctl', None) + ttyname = getattr(os, 'ttyname', None) + TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', None) + + if openpty is None or ioctl is None or ttyname is None or TIOCSWINSZ is None: + self.log.warn('Error opening pudb. You can try to attach to a tty using `set_pudb_set_trace_attach_tty`.', + openpty=openpty, + ioctl=ioctl, + ttyname=ttyname, + TIOCSWINSZ=TIOCSWINSZ) + return + + (term_master, term_slave) = openpty() + + term_size = (cols, rows) + term_size_bytes = struct.pack("HHHH", term_size[1], term_size[0], 0, 0) + ioctl(term_master, TIOCSWINSZ, term_size_bytes) + + tty_name = ttyname(term_slave) + fp = os.fdopen(term_master, 'wb+', buffering=0) + + self._run_pudb_set_trace(tty_name, fp, term_size=term_size) + + def _run_pudb_set_trace(self, tty: str, fp: IO[bytes], *, term_size: Optional[tuple[int, int]] = None) -> None: + from pudb.debugger import Debugger + + self.log.warn('main loop paused; pudb.set_trace running', tty=tty) + + tty_file = io.TextIOWrapper(fp) + kwargs: dict[str, Any] = { + 'stdin': tty_file, + 'stdout': tty_file, + } + if term_size is not None: + kwargs['term_size'] = term_size + + if Debugger._current_debugger: + Debugger._current_debugger.pop() + + dbg = Debugger(**kwargs) + dbg.set_trace(sys._getframe().f_back, paused=True) + + def get_pudb_status(self) -> str: + """Return whether the pudb is running or not.""" + from pudb.debugger import Debugger + + if not Debugger._current_debugger: + return 'not running' + + dbg = Debugger._current_debugger[0] + if dbg.ui.quit_event_loop: + return 'not running' + + return 'running' + + @signal_handler_safe + def set_pudb_stop(self) -> None: + """Stop pudb if it is running.""" + from pudb.debugger import Debugger + + if not Debugger._current_debugger: + return + + dbg = Debugger._current_debugger[0] + dbg.set_quit() + dbg.ui.quit_event_loop = True + + @signal_handler_safe + def set_ipython_run(self, tty: str) -> None: + """Stop execution and open an ipython shell in a given tty. + + ATTENTION: This command can be destructive and the full node might not work after running it. + + Open a new terminal. First, you need to get the path of the tty of the + terminal you want to debug from. To do that, use the standard unix + command `tty`. It will print something like `/dev/pts/3`. + + Then you need to make sure that your terminal doesn't have a shell actively + reading and possibly capturing some of the input that should go to pudb. + To do that run a placeholder command that does nothing, such as `perl -MPOSIX -e pause`. 
+ """ + fp = open(tty, 'r+b', buffering=0) + tty_file = io.TextIOWrapper(fp) + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + + sys.stdin = tty_file + sys.stdout = tty_file + sys.stderr = tty_file + + self.log.warn('main loop paused; ipython running', tty=tty) + + from IPython import start_ipython + user_ns: dict[str, Any] = { + 'manager': self.manager, + 'tx_storage': self.manager.tx_storage, + } + start_ipython(argv=[], user_ns=user_ns) + + sys.stdin = old_stdin + sys.stdout = old_stdout + sys.stderr = old_stderr + + self.log.warn('main loop resumed') From d5ed0fa5113d3cded2e11de3284295985c3c8a9c Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Thu, 22 Feb 2024 20:44:46 -0300 Subject: [PATCH 36/38] feat(feature-activation): configure new NOP features on mainnet (#958) --- hathor/conf/mainnet.py | 24 +++++++++++++++++++++--- hathor/conf/mainnet.yml | 24 ++++++++++++++++++++++-- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index 6dd4acdc6..e88ac4cf3 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -203,7 +203,7 @@ features={ Feature.NOP_FEATURE_1: Criteria( bit=0, - start_height=4_213_440, # N (right now the best block is 4_169_000 on mainnet) + start_height=4_213_440, # N timeout_height=4_253_760, # N + 2 * 20160 (2 weeks after the start) minimum_activation_height=4_273_920, # N + 3 * 20160 (3 weeks after the start) lock_in_on_timeout=False, @@ -212,13 +212,31 @@ ), Feature.NOP_FEATURE_2: Criteria( bit=1, - start_height=4_213_440, # N (right now the best block is 4_169_000 on mainnet) + start_height=4_213_440, # N timeout_height=4_253_760, # N + 2 * 20160 (2 weeks after the start) minimum_activation_height=0, lock_in_on_timeout=False, version='0.59.0', signal_support_by_default=False, - ) + ), + Feature.NOP_FEATURE_3: Criteria( + bit=2, + start_height=4_273_920, # N (on 2024/02/22, the best block is 4_251_000 on mainnet) + timeout_height=4_475_520, # N + 10 * 20160 (10 weeks after the start) + minimum_activation_height=4_495_680, # N + 11 * 20160 (11 weeks after the start) + lock_in_on_timeout=False, + version='0.59.0', + signal_support_by_default=True, + ), + Feature.NOP_FEATURE_4: Criteria( + bit=3, + start_height=4_273_920, # N (on 2024/02/22, the best block is 4_251_000 on mainnet) + timeout_height=4_475_520, # N + 10 * 20160 (10 weeks after the start) + minimum_activation_height=0, + lock_in_on_timeout=False, + version='0.59.0', + signal_support_by_default=False, + ), } ) ) diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml index 650915685..9c51cde42 100644 --- a/hathor/conf/mainnet.yml +++ b/hathor/conf/mainnet.yml @@ -186,7 +186,7 @@ FEATURE_ACTIVATION: NOP_FEATURE_1: bit: 0 - start_height: 4_213_440 # N (right now the best block is 4_169_000 on mainnet) + start_height: 4_213_440 # N timeout_height: 4_253_760 # N + 2 * 20160 (2 weeks after the start) minimum_activation_height: 4_273_920 # N + 3 * 20160 (3 weeks after the start) lock_in_on_timeout: false @@ -195,9 +195,29 @@ FEATURE_ACTIVATION: NOP_FEATURE_2: bit: 1 - start_height: 4_213_440 # N (right now the best block is 4_169_000 on mainnet) + start_height: 4_213_440 # N timeout_height: 4_253_760 # N + 2 * 20160 (2 weeks after the start) minimum_activation_height: 0 lock_in_on_timeout: false version: 0.59.0 signal_support_by_default: false + + #### Second Phased Testing features on mainnet #### + + NOP_FEATURE_3: + bit: 2 + start_height: 4_273_920 # N (on 2024/02/22, the best block is 4_251_000 on 
mainnet) + timeout_height: 4_475_520 # N + 10 * 20160 (10 weeks after the start) + minimum_activation_height: 4_495_680 # N + 11 * 20160 (11 weeks after the start) + lock_in_on_timeout: false + version: 0.59.0 + signal_support_by_default: true + + NOP_FEATURE_4: + bit: 3 + start_height: 4_273_920 # N (on 2024/02/22, the best block is 4_251_000 on mainnet) + timeout_height: 4_475_520 # N + 10 * 20160 (10 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.59.0 + signal_support_by_default: false From 20476d153c4a7ddbbbb93b63c0975cd4443acb15 Mon Sep 17 00:00:00 2001 From: Marcelo Salhab Brogliato Date: Wed, 21 Feb 2024 12:10:22 -0600 Subject: [PATCH 37/38] fix: Fix slow tx processing on some edge cases --- hathor/consensus/transaction_consensus.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/hathor/consensus/transaction_consensus.py b/hathor/consensus/transaction_consensus.py index 1cb250679..17a32202d 100644 --- a/hathor/consensus/transaction_consensus.py +++ b/hathor/consensus/transaction_consensus.py @@ -235,7 +235,12 @@ def update_voided_info(self, tx: Transaction) -> None: conflict_tx = cast(Transaction, tx.storage.get_transaction(h)) conflict_tx_meta = conflict_tx.get_metadata() if conflict_tx_meta.voided_by: - self.mark_as_voided(conflict_tx) + if conflict_tx_meta.first_block is not None: + # do nothing + assert bool(self.context.consensus.soft_voided_tx_ids & conflict_tx_meta.voided_by) + self.log.info('skipping soft voided conflict', conflict_tx=conflict_tx.hash_hex) + else: + self.mark_as_voided(conflict_tx) # Finally, check our conflicts. meta = tx.get_metadata() From d3f29933b3a433f7cb16d0fbaaf36c97b32cb819 Mon Sep 17 00:00:00 2001 From: Jan Segre Date: Fri, 23 Feb 2024 00:47:18 +0100 Subject: [PATCH 38/38] chore(cli): make sync-bridge the default --- hathor/builder/cli_builder.py | 7 +++++-- hathor/cli/run_node.py | 1 + hathor/cli/run_node_args.py | 1 + tests/others/test_cli_builder.py | 9 +++++++-- 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index af820f4c5..8c1f41fff 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -174,7 +174,10 @@ def create_manager(self, reactor: Reactor) -> HathorManager: sync_choice: SyncChoice if self._args.sync_bridge: + self.log.warn('--sync-bridge is the default, this parameter has no effect') sync_choice = SyncChoice.BRIDGE + elif self._args.sync_v1_only: + sync_choice = SyncChoice.V1_ONLY elif self._args.sync_v2_only: sync_choice = SyncChoice.V2_ONLY elif self._args.x_sync_bridge: @@ -183,8 +186,8 @@ def create_manager(self, reactor: Reactor) -> HathorManager: elif self._args.x_sync_v2_only: self.log.warn('--x-sync-v2-only is deprecated and will be removed, use --sync-v2-only instead') sync_choice = SyncChoice.V2_ONLY - else: # default - sync_choice = SyncChoice.V1_ONLY + else: + sync_choice = SyncChoice.BRIDGE enable_sync_v1: bool enable_sync_v2: bool diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 7f99cc864..89f4e37de 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -129,6 +129,7 @@ def create_parser(cls) -> ArgumentParser: sync_args = parser.add_mutually_exclusive_group() sync_args.add_argument('--sync-bridge', action='store_true', help='Enable running both sync protocols.') + sync_args.add_argument('--sync-v1-only', action='store_true', help='Disable support for running sync-v2.') sync_args.add_argument('--sync-v2-only', 
action='store_true', help='Disable support for running sync-v1.') sync_args.add_argument('--x-sync-v2-only', action='store_true', help=SUPPRESS) # old argument sync_args.add_argument('--x-sync-bridge', action='store_true', help=SUPPRESS) # old argument diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index fadc3521c..4df46f009 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -66,6 +66,7 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): x_sync_bridge: bool x_sync_v2_only: bool sync_bridge: bool + sync_v1_only: bool sync_v2_only: bool x_localhost_only: bool x_rocksdb_indexes: bool diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py index 1b3f6a53d..64e95e208 100644 --- a/tests/others/test_cli_builder.py +++ b/tests/others/test_cli_builder.py @@ -58,7 +58,7 @@ def test_all_default(self): self.assertIsNone(manager.wallet) self.assertEqual('unittests', manager.network) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) - self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) self.assertFalse(self.resources_builder._built_prometheus) self.assertFalse(self.resources_builder._built_status) self.assertFalse(manager._enable_event_queue) @@ -104,7 +104,7 @@ def test_memory_storage_with_rocksdb_indexes(self): def test_sync_default(self): manager = self._build(['--memory-storage']) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) - self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) def test_sync_bridge(self): manager = self._build(['--memory-storage', '--x-sync-bridge']) @@ -126,6 +126,11 @@ def test_sync_v2_only2(self): self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + def test_sync_v1_only(self): + manager = self._build(['--memory-storage', '--sync-v1-only']) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V2)) + def test_keypair_wallet(self): manager = self._build(['--memory-storage', '--wallet', 'keypair']) self.assertIsInstance(manager.wallet, Wallet)
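
A note on the sysctl changes above (the signal_handler_safe decorator, the get_getter/get_setter accessors, and the set -> unsafe_set rename): the safety check is nothing more than a marker attribute that the decorator sets on the function object and that the runner inspects before invoking a setter. Below is a minimal, self-contained sketch of that pattern, assuming nothing beyond the standard library; it is not the project's code, and ToySysctl, set_loglevel and set_db_path are invented names used only for illustration.

from typing import Any, Callable


def signal_handler_safe(f: Callable[..., Any]) -> Callable[..., Any]:
    """Mark a callable as safe to run while a signal is being handled."""
    f._signal_handler_safe = True  # marker attribute, mirroring the decorator in the patch
    return f


class ToySysctl:
    """Invented stand-in for the real sysctl tree, just to show the check."""

    def __init__(self) -> None:
        self._setters: dict[str, Callable[..., None]] = {}

    def register_setter(self, path: str, setter: Callable[..., None]) -> None:
        self._setters[path] = setter

    def run_set(self, path: str, value: Any, *, require_signal_handler_safe: bool) -> None:
        setter = self._setters[path]
        if require_signal_handler_safe and not hasattr(setter, '_signal_handler_safe'):
            raise RuntimeError(f'{path}: setter is not safe for signal handling')
        if isinstance(value, tuple):
            setter(*value)  # tuple values are spread into positional args, as in the runner
        else:
            setter(value)


@signal_handler_safe
def set_loglevel(level: str) -> None:
    print(f'loglevel set to {level}')


def set_db_path(path: str) -> None:  # deliberately left unmarked
    print(f'db path set to {path}')


sysctl = ToySysctl()
sysctl.register_setter('core.loglevel', set_loglevel)
sysctl.register_setter('core.db_path', set_db_path)

sysctl.run_set('core.loglevel', 'debug', require_signal_handler_safe=True)       # allowed
try:
    sysctl.run_set('core.db_path', '/tmp/db', require_signal_handler_safe=True)  # rejected
except RuntimeError as exc:
    print(exc)

Because the marker is an ordinary attribute, it survives being stored in a plain mapping and costs nothing at call time; setters that are not explicitly decorated can then be rejected whenever the caller asks the runner to enforce signal-handler safety, which is what the require_signal_handler_safe flag in the runner above is used for.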
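
On [PATCH 34/38] (fix(cli): Fix issue when running sysctl through USR2 in macOS): the detail worth spelling out is that opening a named pipe for reading blocks until some process opens the writing end, which appears to be why the patch now logs 'Main loop paused, awaiting command to proceed.' before calling open() instead of opening the file inside temp_fifo. The sketch below reproduces that blocking behaviour with the standard library only; it is POSIX-only, the pipe path is invented, and 'profiler.start=false' is just an illustrative command, not a statement about the node's actual pipe name or command syntax.

import os
import tempfile
import threading
import time

pipe_dir = tempfile.mkdtemp()
pipe_path = os.path.join(pipe_dir, 'sysctl.pipe')  # invented name, not the node's real pipe
os.mkfifo(pipe_path, mode=0o666)


def operator() -> None:
    """Pretend to be the operator echoing a command into the pipe a bit later."""
    time.sleep(0.5)
    with open(pipe_path, 'w') as fp:
        fp.write('profiler.start=false\n')  # illustrative command only


threading.Thread(target=operator, daemon=True).start()

print('Main loop paused, awaiting command to proceed.')  # log first...
with open(pipe_path, 'r') as fp:  # ...because this open() blocks until a writer connects
    for line in fp:
        print('Command received', line.strip())

os.unlink(pipe_path)
os.rmdir(pipe_dir)

The writer thread stands in for the operator; in the real flow the node reads the queued commands, closes the pipe, and then unlinks it, as the diff above does.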