diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 83c5c6c36..203485172 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -13,7 +13,7 @@ # limitations under the License. from enum import Enum -from typing import Any, NamedTuple, Optional +from typing import Any, Callable, NamedTuple, Optional from structlog import get_logger @@ -63,6 +63,7 @@ class BuildArtifacts(NamedTuple): pubsub: PubSubManager consensus: ConsensusAlgorithm tx_storage: TransactionStorage + feature_service: FeatureService indexes: Optional[IndexesManager] wallet: Optional[BaseWallet] rocksdb_storage: Optional[RocksDBStorage] @@ -103,6 +104,7 @@ def __init__(self) -> None: self._bit_signaling_service: Optional[BitSignalingService] = None self._vertex_verifiers: Optional[VertexVerifiers] = None + self._vertex_verifiers_builder: Callable[[HathorSettingsType, FeatureService], VertexVerifiers] | None = None self._verification_service: Optional[VerificationService] = None self._rocksdb_path: Optional[str] = None @@ -158,9 +160,9 @@ def build(self) -> BuildArtifacts: wallet = self._get_or_create_wallet() event_manager = self._get_or_create_event_manager() indexes = self._get_or_create_indexes_manager() - tx_storage = self._get_or_create_tx_storage(indexes) - feature_service = self._get_or_create_feature_service(tx_storage) - bit_signaling_service = self._get_or_create_bit_signaling_service(tx_storage) + tx_storage = self._get_or_create_tx_storage() + feature_service = self._get_or_create_feature_service() + bit_signaling_service = self._get_or_create_bit_signaling_service() verification_service = self._get_or_create_verification_service() if self._enable_address_index: @@ -221,6 +223,7 @@ def build(self) -> BuildArtifacts: wallet=wallet, rocksdb_storage=self._rocksdb_storage, stratum_factory=stratum_factory, + feature_service=feature_service, ) return self.artifacts @@ -265,6 +268,7 @@ def set_peer_id(self, peer_id: PeerId) -> 'Builder': return self def _get_or_create_settings(self) -> HathorSettingsType: + """Return the HathorSettings instance set on this builder, or a new one if not set.""" if self._settings is None: self._settings = get_settings() return self._settings @@ -352,7 +356,9 @@ def _get_or_create_indexes_manager(self) -> IndexesManager: return self._indexes_manager - def _get_or_create_tx_storage(self, indexes: IndexesManager) -> TransactionStorage: + def _get_or_create_tx_storage(self) -> TransactionStorage: + indexes = self._get_or_create_indexes_manager() + if self._tx_storage is not None: # If a tx storage is provided, set the indexes manager to it. 
self._tx_storage.indexes = indexes @@ -415,9 +421,11 @@ def _get_or_create_event_manager(self) -> EventManager: return self._event_manager - def _get_or_create_feature_service(self, tx_storage: TransactionStorage) -> FeatureService: + def _get_or_create_feature_service(self) -> FeatureService: + """Return the FeatureService instance set on this builder, or a new one if not set.""" if self._feature_service is None: settings = self._get_or_create_settings() + tx_storage = self._get_or_create_tx_storage() self._feature_service = FeatureService( feature_settings=settings.FEATURE_ACTIVATION, tx_storage=tx_storage @@ -425,12 +433,14 @@ def _get_or_create_feature_service(self, tx_storage: TransactionStorage) -> Feat return self._feature_service - def _get_or_create_bit_signaling_service(self, tx_storage: TransactionStorage) -> BitSignalingService: + def _get_or_create_bit_signaling_service(self) -> BitSignalingService: if self._bit_signaling_service is None: settings = self._get_or_create_settings() + tx_storage = self._get_or_create_tx_storage() + feature_service = self._get_or_create_feature_service() self._bit_signaling_service = BitSignalingService( feature_settings=settings.FEATURE_ACTIVATION, - feature_service=self._get_or_create_feature_service(tx_storage), + feature_service=feature_service, tx_storage=tx_storage, support_features=self._support_features, not_support_features=self._not_support_features, @@ -448,7 +458,15 @@ def _get_or_create_verification_service(self) -> VerificationService: def _get_or_create_vertex_verifiers(self) -> VertexVerifiers: if self._vertex_verifiers is None: settings = self._get_or_create_settings() - self._vertex_verifiers = VertexVerifiers.create_defaults(settings=settings) + feature_service = self._get_or_create_feature_service() + + if self._vertex_verifiers_builder: + self._vertex_verifiers = self._vertex_verifiers_builder(settings, feature_service) + else: + self._vertex_verifiers = VertexVerifiers.create_defaults( + settings=settings, + feature_service=feature_service + ) return self._vertex_verifiers @@ -554,6 +572,14 @@ def set_vertex_verifiers(self, vertex_verifiers: VertexVerifiers) -> 'Builder': self._vertex_verifiers = vertex_verifiers return self + def set_vertex_verifiers_builder( + self, + builder: Callable[[HathorSettingsType, FeatureService], VertexVerifiers] + ) -> 'Builder': + self.check_if_can_modify() + self._vertex_verifiers_builder = builder + return self + def set_reactor(self, reactor: Reactor) -> 'Builder': self.check_if_can_modify() self._reactor = reactor diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 5c1759c10..db9997dd8 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -207,7 +207,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: not_support_features=self._args.signal_not_support ) - vertex_verifiers = VertexVerifiers.create_defaults(settings=settings) + vertex_verifiers = VertexVerifiers.create_defaults(settings=settings, feature_service=self.feature_service) verification_service = VerificationService(verifiers=vertex_verifiers) p2p_manager = ConnectionsManager( diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 00ab40956..d2d75058c 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -191,6 +191,7 @@ def prepare(self, *, register_resources: bool = True) -> None: wallet=self.manager.wallet, rocksdb_storage=getattr(builder, 'rocksdb_storage', None), stratum_factory=self.manager.stratum_factory, + 
feature_service=self.manager._feature_service ) def start_sentry_if_possible(self) -> None: diff --git a/hathor/feature_activation/feature_service.py b/hathor/feature_activation/feature_service.py index 9d3d82c28..4d44dd5c2 100644 --- a/hathor/feature_activation/feature_service.py +++ b/hathor/feature_activation/feature_service.py @@ -12,28 +12,75 @@ # See the License for the specific language governing permissions and # limitations under the License. +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypeAlias + from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_description import FeatureDescription from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings -from hathor.transaction import Block -from hathor.transaction.storage import TransactionStorage + +if TYPE_CHECKING: + from hathor.transaction import Block + from hathor.transaction.storage import TransactionStorage + + +@dataclass(frozen=True, slots=True) +class BlockIsSignaling: + """Represent that a block is correctly signaling support for all currently mandatory features.""" + pass + + +@dataclass(frozen=True, slots=True) +class BlockIsMissingSignal: + """Represent that a block is not signaling support for at least one currently mandatory feature.""" + feature: Feature + + +BlockSignalingState: TypeAlias = BlockIsSignaling | BlockIsMissingSignal class FeatureService: __slots__ = ('_feature_settings', '_tx_storage') - def __init__(self, *, feature_settings: FeatureSettings, tx_storage: TransactionStorage) -> None: + def __init__(self, *, feature_settings: FeatureSettings, tx_storage: 'TransactionStorage') -> None: self._feature_settings = feature_settings self._tx_storage = tx_storage - def is_feature_active(self, *, block: Block, feature: Feature) -> bool: + def is_feature_active(self, *, block: 'Block', feature: Feature) -> bool: """Returns whether a Feature is active at a certain block.""" state = self.get_state(block=block, feature=feature) return state == FeatureState.ACTIVE - def get_state(self, *, block: Block, feature: Feature) -> FeatureState: + def is_signaling_mandatory_features(self, block: 'Block') -> BlockSignalingState: + """ + Return whether a block is signaling features that are mandatory, that is, any feature currently in the + MUST_SIGNAL phase. + """ + bit_counts = block.get_feature_activation_bit_counts() + height = block.get_height() + offset_to_boundary = height % self._feature_settings.evaluation_interval + remaining_blocks = self._feature_settings.evaluation_interval - offset_to_boundary - 1 + descriptions = self.get_bits_description(block=block) + + must_signal_features = ( + feature for feature, description in descriptions.items() + if description.state is FeatureState.MUST_SIGNAL + ) + + for feature in must_signal_features: + criteria = self._feature_settings.features[feature] + threshold = criteria.get_threshold(self._feature_settings) + count = bit_counts[criteria.bit] + missing_signals = threshold - count + + if missing_signals > remaining_blocks: + return BlockIsMissingSignal(feature=feature) + + return BlockIsSignaling() + + def get_state(self, *, block: 'Block', feature: Feature) -> FeatureState: """Returns the state of a feature at a certain block. 
Uses block metadata to cache states.""" # per definition, the genesis block is in the DEFINED state for all features @@ -54,6 +101,9 @@ def get_state(self, *, block: Block, feature: Feature) -> FeatureState: previous_boundary_block = self._get_ancestor_at_height(block=block, height=previous_boundary_height) previous_boundary_state = self.get_state(block=previous_boundary_block, feature=feature) + # We cache _and save_ the state of the previous boundary block that we just got. + previous_boundary_block.set_feature_state(feature=feature, state=previous_boundary_state, save=True) + if offset_to_boundary != 0: return previous_boundary_state @@ -63,14 +113,16 @@ def get_state(self, *, block: Block, feature: Feature) -> FeatureState: previous_state=previous_boundary_state ) - block.update_feature_state(feature=feature, state=new_state) + # We cache the just calculated state of the current block _without saving it_, as it may still be unverified, + # so we cannot persist its metadata. That's why we cache and save the previous boundary block above. + block.set_feature_state(feature=feature, state=new_state) return new_state def _calculate_new_state( self, *, - boundary_block: Block, + boundary_block: 'Block', feature: Feature, previous_state: FeatureState ) -> FeatureState: @@ -136,7 +188,7 @@ def _calculate_new_state( raise ValueError(f'Unknown previous state: {previous_state}') - def get_bits_description(self, *, block: Block) -> dict[Feature, FeatureDescription]: + def get_bits_description(self, *, block: 'Block') -> dict[Feature, FeatureDescription]: """Returns the criteria definition and feature state for all features at a certain block.""" return { feature: FeatureDescription( @@ -146,7 +198,7 @@ def get_bits_description(self, *, block: Block) -> dict[Feature, FeatureDescript for feature, criteria in self._feature_settings.features.items() } - def _get_ancestor_at_height(self, *, block: Block, height: int) -> Block: + def _get_ancestor_at_height(self, *, block: 'Block', height: int) -> 'Block': """ Given a block, returns its ancestor at a specific height. Uses the height index if the block is in the best blockchain, or search iteratively otherwise. @@ -158,13 +210,14 @@ def _get_ancestor_at_height(self, *, block: Block, height: int) -> Block: metadata = block.get_metadata() if not metadata.voided_by and (ancestor := self._tx_storage.get_transaction_by_height(height)): + from hathor.transaction import Block assert isinstance(ancestor, Block) return ancestor return _get_ancestor_iteratively(block=block, ancestor_height=height) -def _get_ancestor_iteratively(*, block: Block, ancestor_height: int) -> Block: +def _get_ancestor_iteratively(*, block: 'Block', ancestor_height: int) -> 'Block': """Given a block, returns its ancestor at a specific height by iterating over its ancestors. This is slow.""" # TODO: there are further optimizations to be done here, the latest common block height could be persisted in # metadata, so we could still use the height index if the requested height is before that height. 
diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index a0f7a22a9..d27e4aa82 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -22,7 +22,9 @@ from hathor.builder import BuildArtifacts, Builder from hathor.conf.get_settings import get_settings +from hathor.conf.settings import HathorSettings from hathor.daa import TestMode, _set_test_mode +from hathor.feature_activation.feature_service import FeatureService from hathor.manager import HathorManager from hathor.p2p.peer_id import PeerId from hathor.simulator.clock import HeapClock, MemoryReactorHeapClock @@ -168,18 +170,11 @@ def create_artifacts(self, builder: Optional[Builder] = None) -> BuildArtifacts: wallet = HDWallet(gap_limit=2) wallet._manually_initialize() - vertex_verifiers = VertexVerifiers( - block=SimulatorBlockVerifier(settings=self.settings), - merge_mined_block=SimulatorMergeMinedBlockVerifier(settings=self.settings), - tx=SimulatorTransactionVerifier(settings=self.settings), - token_creation_tx=SimulatorTokenCreationTransactionVerifier(settings=self.settings), - ) - artifacts = builder \ .set_reactor(self._clock) \ .set_rng(Random(self.rng.getrandbits(64))) \ .set_wallet(wallet) \ - .set_vertex_verifiers(vertex_verifiers) \ + .set_vertex_verifiers_builder(_build_vertex_verifiers) \ .build() artifacts.manager.start() @@ -303,3 +298,18 @@ def run(self, if trigger is not None: return False return True + + +def _build_vertex_verifiers(settings: HathorSettings, feature_service: FeatureService) -> VertexVerifiers: + """ + A custom VertexVerifiers builder to be used by the simulator. + """ + return VertexVerifiers( + block=SimulatorBlockVerifier(settings=settings, feature_service=feature_service), + merge_mined_block=SimulatorMergeMinedBlockVerifier( + settings=settings, + feature_service=feature_service + ), + tx=SimulatorTransactionVerifier(settings=settings), + token_creation_tx=SimulatorTokenCreationTransactionVerifier(settings=settings), + ) diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 02de172a7..5247d3874 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -672,12 +672,6 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) # happens include generating new mining blocks and some tests height = self.calculate_height() if self.storage else None score = self.weight if self.is_genesis else 0 - kwargs: dict[str, Any] = {} - - if self.is_block: - from hathor.transaction import Block - assert isinstance(self, Block) - kwargs['feature_activation_bit_counts'] = self.calculate_feature_activation_bit_counts() metadata = TransactionMetadata( hash=self.hash, @@ -685,7 +679,6 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) height=height, score=score, min_height=0, - **kwargs ) self._metadata = metadata if not metadata.hash: @@ -769,7 +762,6 @@ def update_initial_metadata(self, *, save: bool = True) -> None: self._update_height_metadata() self._update_parents_children_metadata() self._update_reward_lock_metadata() - self._update_feature_activation_bit_counts_metadata() if save: assert self.storage is not None self.storage.save_transaction(self, only_metadata=True) @@ -795,16 +787,6 @@ def _update_parents_children_metadata(self) -> None: metadata.children.append(self.hash) self.storage.save_transaction(parent, only_metadata=True) - def _update_feature_activation_bit_counts_metadata(self) -> None: - """Update the block 
feature_activation_bit_counts metadata.""" - if not self.is_block: - return - - from hathor.transaction import Block - assert isinstance(self, Block) - metadata = self.get_metadata() - metadata.feature_activation_bit_counts = self.calculate_feature_activation_bit_counts() - def update_timestamp(self, now: int) -> None: """Update this tx's timestamp diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index b416d9fd3..c5666b6a7 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -110,21 +110,27 @@ def calculate_min_height(self) -> int: return min_height - def calculate_feature_activation_bit_counts(self) -> list[int]: + def get_feature_activation_bit_counts(self) -> list[int]: """ - Calculates the feature_activation_bit_counts metadata attribute, which is a list of feature activation bit - counts. + Lazily calculates the feature_activation_bit_counts metadata attribute, which is a list of feature activation + bit counts. After it's calculated for the first time, it's persisted in block metadata and must not be changed. Each list index corresponds to a bit position, and its respective value is the rolling count of active bits from the previous boundary block up to this block, including it. LSB is on the left. """ + metadata = self.get_metadata() + + if metadata.feature_activation_bit_counts is not None: + return metadata.feature_activation_bit_counts + previous_counts = self._get_previous_feature_activation_bit_counts() bit_list = self._get_feature_activation_bit_list() count_and_bit_pairs = zip_longest(previous_counts, bit_list, fillvalue=0) updated_counts = starmap(add, count_and_bit_pairs) + metadata.feature_activation_bit_counts = list(updated_counts) - return list(updated_counts) + return metadata.feature_activation_bit_counts def _get_previous_feature_activation_bit_counts(self) -> list[int]: """ @@ -338,13 +344,6 @@ def get_height(self) -> int: assert meta.height is not None return meta.height - def get_feature_activation_bit_counts(self) -> list[int]: - """Returns the block's feature_activation_bit_counts metadata attribute.""" - metadata = self.get_metadata() - assert metadata.feature_activation_bit_counts is not None, 'Blocks must always have this attribute set.' - - return metadata.feature_activation_bit_counts - def _get_feature_activation_bit_list(self) -> list[int]: """ Extracts feature activation bits from the signal bits, as a list where each index corresponds to the bit @@ -372,15 +371,30 @@ def get_feature_state(self, *, feature: Feature) -> Optional[FeatureState]: return feature_states.get(feature) - def update_feature_state(self, *, feature: Feature, state: FeatureState) -> None: - """Updates the state of a feature in metadata and persists it.""" + def set_feature_state(self, *, feature: Feature, state: FeatureState, save: bool = False) -> None: + """ + Set the state of a feature in metadata, if it's not set. Fails if it's set and the value is different. + + Args: + feature: the feature to set the state of. + state: the state to set. + save: whether to save this block's metadata in storage. 
+ """ + previous_state = self.get_feature_state(feature=feature) + + if state == previous_state: + return + + assert previous_state is None assert self.storage is not None + metadata = self.get_metadata() feature_states = metadata.feature_states or {} feature_states[feature] = state metadata.feature_states = feature_states - self.storage.save_transaction(self, only_metadata=True) + if save: + self.storage.save_transaction(self, only_metadata=True) def get_feature_activation_bit_value(self, bit: int) -> int: """Get the feature activation bit value for a specific bit position.""" diff --git a/hathor/transaction/exceptions.py b/hathor/transaction/exceptions.py index 6c1a3eb56..25e61596c 100644 --- a/hathor/transaction/exceptions.py +++ b/hathor/transaction/exceptions.py @@ -146,6 +146,10 @@ class CheckpointError(BlockError): """Block hash does not match checkpoint hash for its height""" +class BlockMustSignalError(BlockError): + """Block does not signal support for a feature during mandatory signaling.""" + + class ScriptError(HathorError): """Base class for script evaluation errors""" diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index c7bbbaf72..c223d928f 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -53,7 +53,8 @@ class TransactionMetadata: feature_activation_bit_counts: Optional[list[int]] # A dict of features in the feature activation process and their respective state. Must only be used by Blocks, - # is None otherwise. + # is None otherwise. This is only used for caching, so it can be safely cleared up, as it would be recalculated + # when necessary. feature_states: Optional[dict[Feature, FeatureState]] = None # It must be a weakref. _tx_ref: Optional['ReferenceType[BaseTransaction]'] diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py index d49a6a869..9bc23c802 100644 --- a/hathor/verification/block_verifier.py +++ b/hathor/verification/block_verifier.py @@ -13,9 +13,12 @@ # limitations under the License. from hathor import daa +from hathor.conf.settings import HathorSettings +from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block from hathor.transaction.exceptions import ( + BlockMustSignalError, BlockWithInputs, BlockWithTokensError, InvalidBlockReward, @@ -29,7 +32,11 @@ class BlockVerifier(VertexVerifier): - __slots__ = () + __slots__ = ('_feature_service', ) + + def __init__(self, *, settings: HathorSettings, feature_service: FeatureService | None = None) -> None: + super().__init__(settings=settings) + self._feature_service = feature_service def verify_basic(self, block: Block, *, skip_block_weight_verification: bool = False) -> None: """Partially run validations, the ones that need parents/inputs are skipped.""" @@ -45,6 +52,7 @@ def verify(self, block: Block) -> None: (3) creates the correct amount of tokens in the output (done in HathorManager) (4) all parents must exist and have timestamp smaller than ours (5) data field must contain at most BLOCK_DATA_MAX_SIZE bytes + (6) whether this block must signal feature support """ # TODO Should we validate a limit of outputs? 
if block.is_genesis: @@ -58,6 +66,8 @@ def verify(self, block: Block) -> None: self.verify_height(block) + self.verify_mandatory_signaling(block) + def verify_without_storage(self, block: Block) -> None: """ Run all verifications that do not need a storage. """ @@ -107,3 +117,23 @@ def verify_outputs(self, block: BaseTransaction) -> None: def verify_data(self, block: Block) -> None: if len(block.data) > self._settings.BLOCK_DATA_MAX_SIZE: raise TransactionDataError('block data has {} bytes'.format(len(block.data))) + + def verify_mandatory_signaling(self, block: Block) -> None: + """Verify whether this block is missing mandatory signaling for any feature.""" + if not self._settings.FEATURE_ACTIVATION.enable_usage: + return + + assert self._feature_service is not None + + signaling_state = self._feature_service.is_signaling_mandatory_features(block) + + match signaling_state: + case BlockIsSignaling(): + return + case BlockIsMissingSignal(feature): + raise BlockMustSignalError( + f"Block must signal support for feature '{feature.value}' during MUST_SIGNAL phase." + ) + case _: + # TODO: This will be changed to assert_never() so mypy can check it. + raise NotImplementedError diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 408cbb35f..48f4e2c98 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -15,6 +15,7 @@ from typing import NamedTuple from hathor.conf.settings import HathorSettings +from hathor.feature_activation.feature_service import FeatureService from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.exceptions import TxValidationError from hathor.transaction.token_creation_tx import TokenCreationTransaction @@ -33,14 +34,19 @@ class VertexVerifiers(NamedTuple): token_creation_tx: TokenCreationTransactionVerifier @classmethod - def create_defaults(cls, *, settings: HathorSettings) -> 'VertexVerifiers': + def create_defaults( + cls, + *, + settings: HathorSettings, + feature_service: FeatureService | None = None + ) -> 'VertexVerifiers': """ Create a VertexVerifiers instance using the default verifier for each vertex type, from all required dependencies. 
""" return VertexVerifiers( - block=BlockVerifier(settings=settings), - merge_mined_block=MergeMinedBlockVerifier(settings=settings), + block=BlockVerifier(settings=settings, feature_service=feature_service), + merge_mined_block=MergeMinedBlockVerifier(settings=settings, feature_service=feature_service), tx=TransactionVerifier(settings=settings), token_creation_tx=TokenCreationTransactionVerifier(settings=settings), ) diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index 4cc781095..4a01069d3 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -19,7 +19,12 @@ from hathor.conf import HathorSettings from hathor.feature_activation.feature import Feature -from hathor.feature_activation.feature_service import FeatureService +from hathor.feature_activation.feature_service import ( + BlockIsMissingSignal, + BlockIsSignaling, + BlockSignalingState, + FeatureService, +) from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.model.feature_description import FeatureDescription from hathor.feature_activation.model.feature_state import FeatureState @@ -43,8 +48,8 @@ def _get_blocks_and_storage() -> tuple[list[Block], TransactionStorage]: 0b0011, 0b0001, - 0b0000, # 8: boundary block - 0b0000, + 0b0010, # 8: boundary block + 0b0110, 0b0000, 0b0000, @@ -475,10 +480,11 @@ def test_caching_mechanism(block_mocks: list[Block], tx_storage: TransactionStor assert result1 == FeatureState.ACTIVE assert calculate_new_state_mock.call_count == 4 + calculate_new_state_mock.reset_mock() result2 = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) assert result2 == FeatureState.ACTIVE - assert calculate_new_state_mock.call_count == 4 + assert calculate_new_state_mock.call_count == 0 @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) @@ -651,3 +657,58 @@ def test_get_ancestor_at_height_voided( assert result == block_mocks[ancestor_height] assert result.get_height() == ancestor_height assert cast(Mock, tx_storage.get_transaction_by_height).call_count == 0 + + +@pytest.mark.parametrize( + ['bit', 'threshold', 'block_height', 'signaling_state'], + [ + (0, 4, 0, BlockIsSignaling()), + (0, 4, 3, BlockIsSignaling()), + (0, 4, 7, BlockIsSignaling()), + (0, 4, 8, BlockIsSignaling()), + (0, 4, 11, BlockIsSignaling()), + (0, 4, 12, BlockIsSignaling()), + + (1, 4, 0, BlockIsSignaling()), + (1, 4, 3, BlockIsSignaling()), + (1, 4, 7, BlockIsSignaling()), + (1, 4, 8, BlockIsSignaling()), + (1, 4, 9, BlockIsSignaling()), + (1, 4, 10, BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)), + (1, 4, 11, BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)), + (1, 4, 12, BlockIsSignaling()), + + (2, 2, 8, BlockIsSignaling()), + (2, 2, 9, BlockIsSignaling()), + (2, 2, 10, BlockIsSignaling()), + (2, 2, 11, BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)), + (2, 2, 12, BlockIsSignaling()), + ] +) +def test_check_must_signal( + tx_storage: TransactionStorage, + block_mocks: list[Block], + bit: int, + threshold: int, + block_height: int, + signaling_state: BlockSignalingState +) -> None: + feature_settings = FeatureSettings( + evaluation_interval=4, + default_threshold=threshold, + features={ + Feature.NOP_FEATURE_1: Criteria( + bit=bit, + start_height=0, + timeout_height=12, + lock_in_on_timeout=True, + version='0.0.0' + ) + } + ) + service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + block = block_mocks[block_height] + + 
result = service.is_signaling_mandatory_features(block) + + assert result == signaling_state diff --git a/tests/feature_activation/test_feature_simulation.py index 5b5f0b475..6dc74305a 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -18,6 +18,7 @@ import pytest from hathor.builder import Builder +from hathor.conf.get_settings import get_settings from hathor.feature_activation import feature_service as feature_service_module from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService @@ -25,15 +26,17 @@ from hathor.feature_activation.resources.feature import FeatureResource from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.simulator import FakeConnection -from hathor.simulator.trigger import StopAfterNMinedBlocks +from hathor.transaction.exceptions import BlockMustSignalError from tests import unittest from tests.resources.base_resource import StubSite from tests.simulation.base import SimulatorTestCase -from tests.utils import HAS_ROCKSDB +from tests.utils import HAS_ROCKSDB, add_new_blocks class BaseFeatureSimulationTest(SimulatorTestCase): - builder: Builder + def get_simulator_builder(self) -> Builder: + """Return a pre-configured builder to be used in tests.""" + raise NotImplementedError @staticmethod def _get_result(web_client: StubSite) -> dict[str, Any]: @@ -46,20 +49,17 @@ def _get_result(web_client: StubSite) -> dict[str, Any]: return result @staticmethod - def _get_state_mock_block_height_calls(get_state_mock: Mock) -> list[int]: - """Returns the heights of blocks that get_state_mock was called with.""" - return [call.kwargs['block'].get_height() for call in get_state_mock.call_args_list] + def _calculate_new_state_mock_block_height_calls(calculate_new_state_mock: Mock) -> list[int]: + """Return the heights of blocks that calculate_new_state_mock was called with.""" + return [call.kwargs['boundary_block'].get_height() for call in calculate_new_state_mock.call_args_list] def test_feature(self) -> None: """ Tests that a feature goes through all possible states in the correct block heights, and also assert internal - method call counts and args to make sure we're executing it in the most performatic way. + method calls to make sure we're executing it in the intended, most performant way. 
""" - artifacts = self.simulator.create_artifacts(self.builder) - manager = artifacts.manager - manager.allow_mining_without_peers() - feature_settings = FeatureSettings( + enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, @@ -75,10 +75,12 @@ def test_feature(self) -> None: } ) - feature_service = FeatureService( - feature_settings=feature_settings, - tx_storage=artifacts.tx_storage - ) + settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + builder = self.get_simulator_builder().set_settings(settings) + artifacts = self.simulator.create_artifacts(builder) + feature_service = artifacts.feature_service + manager = artifacts.manager + feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -86,19 +88,16 @@ def test_feature(self) -> None: ) web_client = StubSite(feature_resource) - miner = self.simulator.create_miner(manager, hashpower=1e6) - miner.start() - - get_state_mock = Mock(wraps=feature_service.get_state) + calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) with ( - patch.object(FeatureService, 'get_state', get_state_mock), + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) ): # at the beginning, the feature is DEFINED: - trigger = StopAfterNMinedBlocks(miner, quantity=10) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 10) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=10, @@ -116,15 +115,15 @@ def test_feature(self) -> None: ) ] ) - # so we query states all the way down to genesis: - assert self._get_state_mock_block_height_calls(get_state_mock) == [10, 8, 4, 0] + # so we calculate states all the way down to the first evaluation boundary (after genesis): + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 4 # no blocks are voided, so we only use the height index, and not get_ancestor_iteratively: assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 19, the feature is DEFINED, just before becoming STARTED: - trigger = StopAfterNMinedBlocks(miner, quantity=9) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 9) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=19, @@ -142,14 +141,14 @@ def test_feature(self) -> None: ) ] ) - # so we query states from block 19 to 8, as it's cached: - assert self._get_state_mock_block_height_calls(get_state_mock) == [19, 16, 12, 8] + # so we calculate states down to block 12, as block 8's state is saved: + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 12 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 20, the feature becomes STARTED: - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=20, @@ -167,13 +166,16 @@ def test_feature(self) -> None: ) ] ) - assert self._get_state_mock_block_height_calls(get_state_mock) == [20, 16] + assert 
min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 20 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + + # we add one block before resetting the mock, just to make sure block 20 gets a chance to be saved + add_new_blocks(manager, 1) + calculate_new_state_mock.reset_mock() # at block 55, the feature is STARTED, just before becoming MUST_SIGNAL: - trigger = StopAfterNMinedBlocks(miner, quantity=35) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 34) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=55, @@ -191,15 +193,13 @@ def test_feature(self) -> None: ) ] ) - assert ( - self._get_state_mock_block_height_calls(get_state_mock) == [55, 52, 48, 44, 40, 36, 32, 28, 24, 20] - ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 24 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 56, the feature becomes MUST_SIGNAL: - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=56, @@ -217,13 +217,26 @@ def test_feature(self) -> None: ) ] ) - assert self._get_state_mock_block_height_calls(get_state_mock) == [56, 52] + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 56 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + + # we add one block before resetting the mock, just to make sure block 56 gets a chance to be saved + add_new_blocks(manager, 1, signal_bits=0b1) + calculate_new_state_mock.reset_mock() + + # if we try to propagate a non-signaling block, it is not accepted + non_signaling_block = manager.generate_mining_block() + non_signaling_block.resolve() + non_signaling_block.signal_bits = 0b10 + + with pytest.raises(BlockMustSignalError): + manager.verification_service.verify(non_signaling_block) + + assert not manager.propagate_tx(non_signaling_block) # at block 59, the feature is MUST_SIGNAL, just before becoming LOCKED_IN: - trigger = StopAfterNMinedBlocks(miner, quantity=3) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, num_blocks=2, signal_bits=0b1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=59, @@ -231,7 +244,7 @@ def test_feature(self) -> None: dict( name='NOP_FEATURE_1', state='MUST_SIGNAL', - acceptance=0, + acceptance=0.75, threshold=0.75, start_height=20, timeout_height=60, @@ -241,15 +254,14 @@ def test_feature(self) -> None: ) ] ) - assert ( - self._get_state_mock_block_height_calls(get_state_mock) == [59, 56] - ) + # we don't need to calculate any new state, as block 56's state is saved: + assert len(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 0 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 60, the feature becomes LOCKED_IN: - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=60, @@ -267,13 +279,16 @@ def test_feature(self) -> None: ) ] ) - assert self._get_state_mock_block_height_calls(get_state_mock) == 
[60, 56] + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 60 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + + # we add one block before resetting the mock, just to make sure block 60 gets a chance to be saved + add_new_blocks(manager, 1) + calculate_new_state_mock.reset_mock() # at block 71, the feature is LOCKED_IN, just before becoming ACTIVE: - trigger = StopAfterNMinedBlocks(miner, quantity=11) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 10) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=71, @@ -291,15 +306,13 @@ def test_feature(self) -> None: ) ] ) - assert ( - self._get_state_mock_block_height_calls(get_state_mock) == [71, 68, 64, 60] - ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 64 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 72, the feature becomes ACTIVE, forever: - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=72, @@ -317,16 +330,13 @@ def test_feature(self) -> None: ) ] ) - assert self._get_state_mock_block_height_calls(get_state_mock) == [72, 68] + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 72 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() def test_reorg(self) -> None: - artifacts = self.simulator.create_artifacts(self.builder) - manager = artifacts.manager - manager.allow_mining_without_peers() - feature_settings = FeatureSettings( + enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, @@ -340,10 +350,13 @@ def test_reorg(self) -> None: ) } ) - feature_service = FeatureService( - feature_settings=feature_settings, - tx_storage=artifacts.tx_storage - ) + + settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + builder = self.get_simulator_builder().set_settings(settings) + artifacts = self.simulator.create_artifacts(builder) + feature_service = artifacts.feature_service + manager = artifacts.manager + feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -351,19 +364,8 @@ def test_reorg(self) -> None: ) web_client = StubSite(feature_resource) - # 4 blocks per evaluation interval, and the genesis is skipped - signal_bits = [ - 0b0000, 0b0000, 0b0000, # 0% acceptance - 0b0000, 0b0000, 0b0010, 0b0000, # 25% acceptance - 0b0010, 0b0000, 0b0010, 0b0010, # 75% acceptance - ] - - miner = self.simulator.create_miner(manager, hashpower=1e6, signal_bits=signal_bits) - miner.start() - # at the beginning, the feature is DEFINED: - trigger = StopAfterNMinedBlocks(miner, quantity=0) - self.simulator.run(36000, trigger=trigger) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=0, @@ -383,8 +385,8 @@ def test_reorg(self) -> None: ) # at block 4, the feature becomes STARTED with 0% acceptance - trigger = StopAfterNMinedBlocks(miner, quantity=4) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 4) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=4, @@ -403,9 +405,10 @@ def 
test_reorg(self) -> None: ] ) - # at block 7, acceptance was 25% - trigger = StopAfterNMinedBlocks(miner, quantity=3) - self.simulator.run(36000, trigger=trigger) + # at block 7, acceptance is 25% (we're signaling 1 block out of 4) + add_new_blocks(manager, 2) + add_new_blocks(manager, 1, signal_bits=0b10) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=7, @@ -424,9 +427,11 @@ def test_reorg(self) -> None: ] ) - # at block 11, acceptance was 75%, so the feature will be locked-in in the next block - trigger = StopAfterNMinedBlocks(miner, quantity=4) - self.simulator.run(36000, trigger=trigger) + # at block 11, acceptance is 75% (we're signaling 3 blocks out of 4), + # so the feature will be locked-in in the next block + add_new_blocks(manager, 1) + add_new_blocks(manager, 3, signal_bits=0b10) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=11, @@ -446,8 +451,8 @@ def test_reorg(self) -> None: ) # at block 12, the feature is locked-in - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=12, @@ -467,8 +472,8 @@ def test_reorg(self) -> None: ) # at block 16, the feature is activated - trigger = StopAfterNMinedBlocks(miner, quantity=4) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 4) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=16, @@ -487,19 +492,14 @@ def test_reorg(self) -> None: ] ) - miner.stop() - - # We then create a new manager with a miner that mines one more block (17 vs 16), so its blockchain wins when + # We then create a new manager with one more block (17 vs 16), so its blockchain wins when # both managers are connected. This causes a reorg and the feature goes back to the STARTED state. 
- manager2 = self.simulator.create_peer() - manager2.allow_mining_without_peers() - - miner2 = self.simulator.create_miner(manager2, hashpower=1e6) + builder2 = self.get_simulator_builder().set_settings(settings) + artifacts2 = self.simulator.create_artifacts(builder2) + manager2 = artifacts2.manager - miner2.start() - trigger = StopAfterNMinedBlocks(miner2, quantity=17) - self.simulator.run(36000, trigger=trigger) - miner2.stop() + add_new_blocks(manager2, 17) + self.simulator.run(60) connection = FakeConnection(manager, manager2) self.simulator.add_connection(connection) @@ -525,33 +525,33 @@ def test_reorg(self) -> None: class BaseMemoryStorageFeatureSimulationTest(BaseFeatureSimulationTest): - def setUp(self): - super().setUp() - self.builder = self.simulator.get_default_builder() + def get_simulator_builder(self) -> Builder: + return self.simulator.get_default_builder() @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') class BaseRocksDBStorageFeatureSimulationTest(BaseFeatureSimulationTest): - def setUp(self): - super().setUp() + def get_rocksdb_directory(self) -> str: import tempfile + tmp_dir = tempfile.mkdtemp() + self.tmpdirs.append(tmp_dir) + return tmp_dir - self.rocksdb_directory = tempfile.mkdtemp() - self.tmpdirs.append(self.rocksdb_directory) - - self.builder = self.simulator.get_default_builder() \ - .use_rocksdb(path=self.rocksdb_directory) \ + def get_simulator_builder_from_dir(self, rocksdb_directory: str) -> Builder: + return self.simulator.get_default_builder() \ + .use_rocksdb(path=rocksdb_directory) \ .disable_full_verification() + def get_simulator_builder(self) -> Builder: + rocksdb_directory = self.get_rocksdb_directory() + return self.get_simulator_builder_from_dir(rocksdb_directory) + def test_feature_from_existing_storage(self) -> None: """ Tests that feature states are correctly retrieved from an existing storage, so no recalculation is required. 
""" - artifacts1 = self.simulator.create_artifacts(self.builder) - manager1 = artifacts1.manager - manager1.allow_mining_without_peers() - feature_settings = FeatureSettings( + enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, @@ -566,31 +566,33 @@ def test_feature_from_existing_storage(self) -> None: } ) - feature_service = FeatureService( - feature_settings=feature_settings, - tx_storage=artifacts1.tx_storage - ) + settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + rocksdb_dir = self.get_rocksdb_directory() + builder1 = self.get_simulator_builder_from_dir(rocksdb_dir).set_settings(settings) + artifacts1 = self.simulator.create_artifacts(builder1) + feature_service1 = artifacts1.feature_service + manager1 = artifacts1.manager + feature_resource = FeatureResource( feature_settings=feature_settings, - feature_service=feature_service, + feature_service=feature_service1, tx_storage=artifacts1.tx_storage ) web_client = StubSite(feature_resource) - miner = self.simulator.create_miner(manager1, hashpower=1e6) - miner.start() - - get_state_mock = Mock(wraps=feature_service.get_state) + calculate_new_state_mock = Mock(wraps=feature_service1._calculate_new_state) get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) with ( - patch.object(FeatureService, 'get_state', get_state_mock), + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) ): assert artifacts1.tx_storage.get_vertices_count() == 3 # genesis vertices in the storage - trigger = StopAfterNMinedBlocks(miner, quantity=64) - self.simulator.run(36000, trigger=trigger) + # we add 64 blocks so the feature becomes active. It would be active by timeout anyway, + # we just set signal bits to conform with the MUST_SIGNAL phase. 
+ add_new_blocks(manager1, 64, signal_bits=0b1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=64, @@ -608,28 +610,22 @@ def test_feature_from_existing_storage(self) -> None: ) ] ) - # feature states have to be calculated for all blocks in evaluation interval boundaries, as this is the - # first run: - assert self._get_state_mock_block_height_calls(get_state_mock) == list(range(64, -4, -4)) + # feature states have to be calculated for all blocks in evaluation interval boundaries, + # down to the first one (after genesis), as this is the first run: + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 4 # no blocks are voided, so we only use the height index: assert get_ancestor_iteratively_mock.call_count == 0 assert artifacts1.tx_storage.get_vertices_count() == 67 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() - miner.stop() manager1.stop() artifacts1.rocksdb_storage.close() - builder = self.simulator.get_default_builder() \ .use_rocksdb(path=self.rocksdb_directory) \ .disable_full_verification() - artifacts2 = self.simulator.create_artifacts(builder) + # new builder is created with the same storage from the previous manager + builder2 = self.get_simulator_builder_from_dir(rocksdb_dir).set_settings(settings) + artifacts2 = self.simulator.create_artifacts(builder2) + feature_service = artifacts2.feature_service - # new feature_service is created with the same storage generated above - feature_service = FeatureService( - feature_settings=feature_settings, - tx_storage=artifacts2.tx_storage - ) feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -637,19 +633,20 @@ def test_feature_from_existing_storage(self) -> None: ) web_client = StubSite(feature_resource) - get_state_mock = Mock(wraps=feature_service.get_state) + calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) with ( - patch.object(FeatureService, 'get_state', get_state_mock), + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) ): # the new storage starts populated assert artifacts2.tx_storage.get_vertices_count() == 67 - self.simulator.run(3600) + self.simulator.run(60) result = self._get_result(web_client) + # the result should be the same as before assert result == dict( block_height=64, features=[ @@ -666,11 +663,11 @@ def test_feature_from_existing_storage(self) -> None: ) ] ) - # features states are not queried for previous blocks, as they have it cached: - assert self._get_state_mock_block_height_calls(get_state_mock) == [64] + # feature states are not calculated for any block, as they're all saved: + assert len(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 0 assert get_ancestor_iteratively_mock.call_count == 0 assert artifacts2.tx_storage.get_vertices_count() == 67 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() class SyncV1MemoryStorageFeatureSimulationTest(unittest.SyncV1Params, BaseMemoryStorageFeatureSimulationTest): diff --git a/tests/resources/transaction/test_mining.py index 0981794bd..c559d6f65 100644 --- a/tests/resources/transaction/test_mining.py +++ b/tests/resources/transaction/test_mining.py @@ -39,7
+39,7 @@ def test_get_block_template_with_address(self): 'height': 1, 'min_height': 0, 'first_block': None, - 'feature_activation_bit_counts': [0, 0, 0, 0] + 'feature_activation_bit_counts': None }, 'tokens': [], 'data': '', @@ -72,7 +72,7 @@ def test_get_block_template_without_address(self): 'height': 1, 'min_height': 0, 'first_block': None, - 'feature_activation_bit_counts': [0, 0, 0, 0] + 'feature_activation_bit_counts': None }, 'tokens': [], 'data': '', diff --git a/tests/tx/test_block.py b/tests/tx/test_block.py index a7b362dfe..eeaf89ec2 100644 --- a/tests/tx/test_block.py +++ b/tests/tx/test_block.py @@ -16,10 +16,14 @@ import pytest -from hathor.conf import HathorSettings from hathor.conf.get_settings import get_settings +from hathor.conf.settings import HathorSettings +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService from hathor.transaction import Block, TransactionMetadata +from hathor.transaction.exceptions import BlockMustSignalError from hathor.transaction.storage import TransactionMemoryStorage, TransactionStorage +from hathor.verification.block_verifier import BlockVerifier def test_calculate_feature_activation_bit_counts_genesis(): @@ -27,13 +31,14 @@ def test_calculate_feature_activation_bit_counts_genesis(): storage = TransactionMemoryStorage() genesis_block = storage.get_transaction(settings.GENESIS_BLOCK_HASH) assert isinstance(genesis_block, Block) - result = genesis_block.calculate_feature_activation_bit_counts() + result = genesis_block.get_feature_activation_bit_counts() assert result == [0, 0, 0, 0] @pytest.fixture def block_mocks() -> list[Block]: + settings = get_settings() blocks: list[Block] = [] feature_activation_bits = [ 0b0000, # 0: boundary block @@ -51,7 +56,6 @@ def block_mocks() -> list[Block]: ] for i, bits in enumerate(feature_activation_bits): - settings = HathorSettings() genesis_hash = settings.GENESIS_BLOCK_HASH block_hash = genesis_hash if i == 0 else b'some_hash' @@ -88,7 +92,7 @@ def test_calculate_feature_activation_bit_counts( expected_counts: list[int] ) -> None: block = block_mocks[block_height] - result = block.calculate_feature_activation_bit_counts() + result = block.get_feature_activation_bit_counts() assert result == expected_counts @@ -132,3 +136,45 @@ def test_get_feature_activation_bit_value() -> None: assert block.get_feature_activation_bit_value(1) == 0 assert block.get_feature_activation_bit_value(2) == 1 assert block.get_feature_activation_bit_value(3) == 0 + + +@pytest.mark.parametrize( + 'is_signaling_mandatory_features', + [BlockIsSignaling(), BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)] +) +def test_verify_must_signal_when_feature_activation_is_disabled(is_signaling_mandatory_features: bool) -> None: + settings = Mock(spec_set=HathorSettings) + settings.FEATURE_ACTIVATION.enable_usage = False + feature_service = Mock(spec_set=FeatureService) + feature_service.is_signaling_mandatory_features = Mock(return_value=is_signaling_mandatory_features) + verifier = BlockVerifier(settings=settings, feature_service=feature_service) + block = Block() + + verifier.verify_mandatory_signaling(block) + + +def test_verify_must_signal() -> None: + settings = Mock(spec_set=HathorSettings) + settings.FEATURE_ACTIVATION.enable_usage = True + feature_service = Mock(spec_set=FeatureService) + feature_service.is_signaling_mandatory_features = Mock( + return_value=BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1) + ) + verifier 
= BlockVerifier(settings=settings, feature_service=feature_service) + block = Block() + + with pytest.raises(BlockMustSignalError) as e: + verifier.verify_mandatory_signaling(block) + + assert str(e.value) == "Block must signal support for feature 'NOP_FEATURE_1' during MUST_SIGNAL phase." + + +def test_verify_must_not_signal() -> None: + settings = Mock(spec_set=HathorSettings) + settings.FEATURE_ACTIVATION.enable_usage = True + feature_service = Mock(spec_set=FeatureService) + feature_service.is_signaling_mandatory_features = Mock(return_value=BlockIsSignaling()) + verifier = BlockVerifier(settings=settings, feature_service=feature_service) + block = Block() + + verifier.verify_mandatory_signaling(block) diff --git a/tests/tx/test_verification.py b/tests/tx/test_verification.py index 4e570a01f..d2e8f2583 100644 --- a/tests/tx/test_verification.py +++ b/tests/tx/test_verification.py @@ -160,6 +160,7 @@ def test_block_verify(self) -> None: verify_sigops_output_wrapped = Mock(wraps=verifier.verify_sigops_output) verify_parents_wrapped = Mock(wraps=verifier.verify_parents) verify_height_wrapped = Mock(wraps=verifier.verify_height) + verify_mandatory_signaling_wrapped = Mock(wraps=verifier.verify_mandatory_signaling) with ( patch.object(BlockVerifier, 'verify_pow', verify_pow_wrapped), @@ -170,6 +171,7 @@ def test_block_verify(self) -> None: patch.object(BlockVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), patch.object(BlockVerifier, 'verify_parents', verify_parents_wrapped), patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), + patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): self.manager.verification_service.verify(block) @@ -182,6 +184,7 @@ def test_block_verify(self) -> None: verify_sigops_output_wrapped.assert_called_once() verify_parents_wrapped.assert_called_once() verify_height_wrapped.assert_called_once() + verify_mandatory_signaling_wrapped.assert_called_once() def test_block_validate_basic(self) -> None: verifier = self.manager.verification_service.verifiers.block @@ -214,6 +217,7 @@ def test_block_validate_full(self) -> None: verify_height_wrapped = Mock(wraps=verifier.verify_height) verify_weight_wrapped = Mock(wraps=verifier.verify_weight) verify_reward_wrapped = Mock(wraps=verifier.verify_reward) + verify_mandatory_signaling_wrapped = Mock(wraps=verifier.verify_mandatory_signaling) with ( patch.object(BlockVerifier, 'verify_pow', verify_pow_wrapped), @@ -226,6 +230,7 @@ def test_block_validate_full(self) -> None: patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), + patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): self.manager.verification_service.validate_full(block) @@ -240,6 +245,7 @@ def test_block_validate_full(self) -> None: verify_height_wrapped.assert_called_once() verify_weight_wrapped.assert_called_once() verify_reward_wrapped.assert_called_once() + verify_mandatory_signaling_wrapped.assert_called_once() def test_merge_mined_block_verify_basic(self) -> None: verifier = self.manager.verification_service.verifiers.merge_mined_block @@ -305,6 +311,7 @@ def test_merge_mined_block_verify(self) -> None: verify_sigops_output_wrapped = Mock(wraps=verifier.verify_sigops_output) verify_parents_wrapped = Mock(wraps=verifier.verify_parents) verify_height_wrapped = Mock(wraps=verifier.verify_height) 
+ verify_mandatory_signaling_wrapped = Mock(wraps=verifier.verify_mandatory_signaling) verify_aux_pow_wrapped = Mock(wraps=verifier.verify_aux_pow) @@ -318,6 +325,7 @@ def test_merge_mined_block_verify(self) -> None: patch.object(MergeMinedBlockVerifier, 'verify_parents', verify_parents_wrapped), patch.object(MergeMinedBlockVerifier, 'verify_height', verify_height_wrapped), patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), + patch.object(MergeMinedBlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): self.manager.verification_service.verify(block) @@ -330,6 +338,7 @@ def test_merge_mined_block_verify(self) -> None: verify_sigops_output_wrapped.assert_called_once() verify_parents_wrapped.assert_called_once() verify_height_wrapped.assert_called_once() + verify_mandatory_signaling_wrapped.assert_called_once() # MergeMinedBlock methods verify_pow_wrapped.assert_called_once() @@ -365,6 +374,7 @@ def test_merge_mined_block_validate_full(self) -> None: verify_height_wrapped = Mock(wraps=verifier.verify_height) verify_weight_wrapped = Mock(wraps=verifier.verify_weight) verify_reward_wrapped = Mock(wraps=verifier.verify_reward) + verify_mandatory_signaling_wrapped = Mock(wraps=verifier.verify_mandatory_signaling) verify_aux_pow_wrapped = Mock(wraps=verifier.verify_aux_pow) @@ -380,6 +390,7 @@ def test_merge_mined_block_validate_full(self) -> None: patch.object(MergeMinedBlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(MergeMinedBlockVerifier, 'verify_reward', verify_reward_wrapped), patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), + patch.object(MergeMinedBlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): self.manager.verification_service.validate_full(block) @@ -394,6 +405,7 @@ def test_merge_mined_block_validate_full(self) -> None: verify_height_wrapped.assert_called_once() verify_weight_wrapped.assert_called_once() verify_reward_wrapped.assert_called_once() + verify_mandatory_signaling_wrapped.assert_called_once() # MergeMinedBlock methods verify_pow_wrapped.assert_called_once() diff --git a/tests/utils.py b/tests/utils.py index 43803bef2..8935f7a18 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -250,7 +250,7 @@ def add_new_transactions(manager, num_txs, advance_clock=None, propagate=True): def add_new_block(manager, advance_clock=None, *, parent_block_hash=None, - data=b'', weight=None, address=None, propagate=True): + data=b'', weight=None, address=None, propagate=True, signal_bits=None): """ Create, resolve and propagate a new block :param manager: Manager object to handle the creation @@ -262,6 +262,8 @@ def add_new_block(manager, advance_clock=None, *, parent_block_hash=None, block = manager.generate_mining_block(parent_block_hash=parent_block_hash, data=data, address=address) if weight is not None: block.weight = weight + if signal_bits is not None: + block.signal_bits = signal_bits block.resolve() manager.verification_service.validate_full(block) if propagate: @@ -272,7 +274,7 @@ def add_new_block(manager, advance_clock=None, *, parent_block_hash=None, def add_new_blocks(manager, num_blocks, advance_clock=None, *, parent_block_hash=None, - block_data=b'', weight=None, address=None): + block_data=b'', weight=None, address=None, signal_bits=None): """ Create, resolve and propagate some blocks :param manager: Manager object to handle the creation @@ -288,7 +290,7 @@ def add_new_blocks(manager, num_blocks, advance_clock=None, *, 
parent_block_hash for _ in range(num_blocks): blocks.append( add_new_block(manager, advance_clock, parent_block_hash=parent_block_hash, - data=block_data, weight=weight, address=address) + data=block_data, weight=weight, address=address, signal_bits=signal_bits) ) if parent_block_hash: parent_block_hash = blocks[-1].hash