Skip to content

Commit 3c084df

Browse files
committed
refactor(settings): change HathorManager to use injected settings
1 parent 54a1a66 commit 3c084df

File tree

4 files changed

+44
-30
lines changed

4 files changed

+44
-30
lines changed

hathor/builder/builder.py

+1
Original file line numberDiff line numberDiff line change
@@ -181,6 +181,7 @@ def build(self) -> BuildArtifacts:
181181

182182
manager = HathorManager(
183183
reactor,
184+
settings=settings,
184185
network=self._network,
185186
pubsub=pubsub,
186187
consensus_algorithm=consensus_algorithm,

hathor/builder/cli_builder.py

+1
Original file line numberDiff line numberDiff line change
@@ -220,6 +220,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager:
220220

221221
self.manager = HathorManager(
222222
reactor,
223+
settings=settings,
223224
network=network,
224225
hostname=hostname,
225226
pubsub=pubsub,

hathor/manager.py

+40-28
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727

2828
from hathor import daa
2929
from hathor.checkpoint import Checkpoint
30-
from hathor.conf import HathorSettings
30+
from hathor.conf.settings import HathorSettings
3131
from hathor.consensus import ConsensusAlgorithm
3232
from hathor.event.event_manager import EventManager
3333
from hathor.exception import (
@@ -60,18 +60,10 @@
6060
from hathor.verification.verification_service import VerificationService
6161
from hathor.wallet import BaseWallet
6262

63-
settings = HathorSettings()
6463
logger = get_logger()
6564
cpu = get_cpu_profiler()
6665

6766

68-
DEFAULT_CAPABILITIES = [
69-
settings.CAPABILITY_WHITELIST,
70-
settings.CAPABILITY_SYNC_VERSION,
71-
settings.CAPABILITY_GET_BEST_BLOCKCHAIN
72-
]
73-
74-
7567
class HathorManager:
7668
""" HathorManager manages the node with the help of other specialized classes.
7769
@@ -95,6 +87,7 @@ class UnhealthinessReason(str, Enum):
9587
def __init__(self,
9688
reactor: Reactor,
9789
*,
90+
settings: HathorSettings,
9891
pubsub: PubSubManager,
9992
consensus_algorithm: ConsensusAlgorithm,
10093
peer_id: PeerId,
@@ -130,6 +123,7 @@ def __init__(self,
130123
'Either enable it, or use the reset-event-queue CLI command to remove all event-related data'
131124
)
132125

126+
self.settings = settings
133127
self._cmd_path: Optional[str] = None
134128

135129
self.log = logger.new()
@@ -223,7 +217,7 @@ def __init__(self,
223217
if capabilities is not None:
224218
self.capabilities = capabilities
225219
else:
226-
self.capabilities = DEFAULT_CAPABILITIES
220+
self.capabilities = self.get_default_capabilities()
227221

228222
# This is included in some logs to provide more context
229223
self.environment_info = environment_info
@@ -233,6 +227,13 @@ def __init__(self,
233227
self.lc_check_sync_state.clock = self.reactor
234228
self.lc_check_sync_state_interval = self.CHECK_SYNC_STATE_INTERVAL
235229

230+
def get_default_capabilities(self) -> list[str]:
231+
return [
232+
self.settings.CAPABILITY_WHITELIST,
233+
self.settings.CAPABILITY_SYNC_VERSION,
234+
self.settings.CAPABILITY_GET_BEST_BLOCKCHAIN
235+
]
236+
236237
def start(self) -> None:
237238
""" A factory must be started only once. And it is usually automatically started.
238239
"""
@@ -443,7 +444,7 @@ def _initialize_components_full_verification(self) -> None:
443444
# It's safe to skip block weight verification during initialization because
444445
# we trust the difficulty stored in metadata
445446
skip_block_weight_verification = True
446-
if block_count % settings.VERIFY_WEIGHT_EVERY_N_BLOCKS == 0:
447+
if block_count % self.settings.VERIFY_WEIGHT_EVERY_N_BLOCKS == 0:
447448
skip_block_weight_verification = False
448449

449450
try:
@@ -628,14 +629,14 @@ def _verify_soft_voided_txs(self) -> None:
628629
soft_voided_meta = soft_voided_tx.get_metadata()
629630
voided_set = soft_voided_meta.voided_by or set()
630631
# If the tx is not marked as soft voided, then we can't continue the initialization
631-
if settings.SOFT_VOIDED_ID not in voided_set:
632+
if self.settings.SOFT_VOIDED_ID not in voided_set:
632633
self.log.error(
633634
'Error initializing node. Your database is not compatible with the current version of the'
634635
' full node. You must use the latest available snapshot or sync from the beginning.'
635636
)
636637
sys.exit(-1)
637638

638-
assert {soft_voided_id, settings.SOFT_VOIDED_ID}.issubset(voided_set)
639+
assert {soft_voided_id, self.settings.SOFT_VOIDED_ID}.issubset(voided_set)
639640

640641
def _verify_checkpoints(self) -> None:
641642
""" Method to verify if all checkpoints that exist in the database have the correct hash and are winners.
@@ -774,7 +775,7 @@ def make_block_template(self, parent_block_hash: VertexId, timestamp: Optional[i
774775
"""
775776
parent_block = self.tx_storage.get_transaction(parent_block_hash)
776777
assert isinstance(parent_block, Block)
777-
parent_txs = self.generate_parent_txs(parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS)
778+
parent_txs = self.generate_parent_txs(parent_block.timestamp + self.settings.MAX_DISTANCE_BETWEEN_BLOCKS)
778779
if timestamp is None:
779780
current_timestamp = int(max(self.tx_storage.latest_timestamp, self.reactor.seconds()))
780781
else:
@@ -810,7 +811,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
810811
timestamp_abs_min = parent_block.timestamp + 1
811812
# and absolute maximum limited by max time between blocks
812813
if not parent_block.is_genesis:
813-
timestamp_abs_max = parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1
814+
timestamp_abs_max = parent_block.timestamp + self.settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1
814815
else:
815816
timestamp_abs_max = 0xffffffff
816817
assert timestamp_abs_max > timestamp_abs_min
@@ -819,12 +820,12 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
819820
timestamp_min = max(timestamp_abs_min, parent_txs.max_timestamp + 1)
820821
assert timestamp_min <= timestamp_abs_max
821822
# when we have weight decay, the max timestamp will be when the next decay happens
822-
if with_weight_decay and settings.WEIGHT_DECAY_ENABLED:
823+
if with_weight_decay and self.settings.WEIGHT_DECAY_ENABLED:
823824
# we either have passed the first decay or not, the range will vary depending on that
824-
if timestamp_min > timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE:
825-
timestamp_max_decay = timestamp_min + settings.WEIGHT_DECAY_WINDOW_SIZE
825+
if timestamp_min > timestamp_abs_min + self.settings.WEIGHT_DECAY_ACTIVATE_DISTANCE:
826+
timestamp_max_decay = timestamp_min + self.settings.WEIGHT_DECAY_WINDOW_SIZE
826827
else:
827-
timestamp_max_decay = timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE
828+
timestamp_max_decay = timestamp_abs_min + self.settings.WEIGHT_DECAY_ACTIVATE_DISTANCE
828829
timestamp_max = min(timestamp_abs_max, timestamp_max_decay)
829830
else:
830831
timestamp_max = timestamp_abs_max
@@ -833,7 +834,10 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
833834
# this is the min weight to cause an increase of twice the WEIGHT_TOL, we make sure to generate a template with
834835
# at least this weight (note that the user of the API can set its own weight, the block submit API will also
835836
# protect against a weight that is too small but using WEIGHT_TOL instead of 2*WEIGHT_TOL)
836-
min_significant_weight = calculate_min_significant_weight(parent_block_metadata.score, 2 * settings.WEIGHT_TOL)
837+
min_significant_weight = calculate_min_significant_weight(
838+
parent_block_metadata.score,
839+
2 * self.settings.WEIGHT_TOL
840+
)
837841
weight = max(daa.calculate_next_weight(parent_block, timestamp), min_significant_weight)
838842
height = parent_block.get_height() + 1
839843
parents = [parent_block.hash] + parent_txs.must_include
@@ -898,15 +902,21 @@ def submit_block(self, blk: Block, fails_silently: bool = True) -> bool:
898902
parent_block = self.tx_storage.get_transaction(parent_hash)
899903
parent_block_metadata = parent_block.get_metadata()
900904
# this is the smallest weight that won't cause the score to increase, anything equal or smaller is bad
901-
min_insignificant_weight = calculate_min_significant_weight(parent_block_metadata.score, settings.WEIGHT_TOL)
905+
min_insignificant_weight = calculate_min_significant_weight(
906+
parent_block_metadata.score,
907+
self.settings.WEIGHT_TOL
908+
)
902909
if blk.weight <= min_insignificant_weight:
903910
self.log.warn('submit_block(): insignificant weight? accepted anyway', blk=blk.hash_hex, weight=blk.weight)
904911
return self.propagate_tx(blk, fails_silently=fails_silently)
905912

906913
def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False,
907-
max_output_script_size: int = settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE) -> None:
914+
max_output_script_size: int | None = None) -> None:
908915
"""Used by all APIs that accept a new transaction (like push_tx)
909916
"""
917+
if max_output_script_size is None:
918+
max_output_script_size = self.settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE
919+
910920
is_double_spending = tx.is_double_spending()
911921
if is_double_spending:
912922
raise DoubleSpendingError('Invalid transaction. At least one of your inputs has already been spent.')
@@ -968,7 +978,7 @@ def on_new_tx(self, tx: BaseTransaction, *, conn: Optional[HathorProtocol] = Non
968978
self.tx_storage.compare_bytes_with_local_tx(tx)
969979
already_exists = True
970980

971-
if tx.timestamp - self.reactor.seconds() > settings.MAX_FUTURE_TIMESTAMP_ALLOWED:
981+
if tx.timestamp - self.reactor.seconds() > self.settings.MAX_FUTURE_TIMESTAMP_ALLOWED:
972982
if not fails_silently:
973983
raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={})'.format(
974984
tx.hash_hex, tx.timestamp))
@@ -1117,7 +1127,7 @@ def tx_fully_validated(self, tx: BaseTransaction, *, quiet: bool) -> None:
11171127

11181128
def _log_feature_states(self, vertex: BaseTransaction) -> None:
11191129
"""Log features states for a block. Used as part of the Feature Activation Phased Testing."""
1120-
if not settings.FEATURE_ACTIVATION.enable_usage or not isinstance(vertex, Block):
1130+
if not self.settings.FEATURE_ACTIVATION.enable_usage or not isinstance(vertex, Block):
11211131
return
11221132

11231133
feature_descriptions = self._feature_service.get_bits_description(block=vertex)
@@ -1153,10 +1163,10 @@ def listen(self, description: str, use_ssl: Optional[bool] = None) -> None:
11531163
self.my_peer.entrypoints.append(address)
11541164

11551165
def has_sync_version_capability(self) -> bool:
1156-
return settings.CAPABILITY_SYNC_VERSION in self.capabilities
1166+
return self.settings.CAPABILITY_SYNC_VERSION in self.capabilities
11571167

11581168
def add_peer_to_whitelist(self, peer_id):
1159-
if not settings.ENABLE_PEER_WHITELIST:
1169+
if not self.settings.ENABLE_PEER_WHITELIST:
11601170
return
11611171

11621172
if peer_id in self.peers_whitelist:
@@ -1165,7 +1175,7 @@ def add_peer_to_whitelist(self, peer_id):
11651175
self.peers_whitelist.append(peer_id)
11661176

11671177
def remove_peer_from_whitelist_and_disconnect(self, peer_id: str) -> None:
1168-
if not settings.ENABLE_PEER_WHITELIST:
1178+
if not self.settings.ENABLE_PEER_WHITELIST:
11691179
return
11701180

11711181
if peer_id in self.peers_whitelist:
@@ -1179,7 +1189,9 @@ def has_recent_activity(self) -> bool:
11791189

11801190
# We use the avg time between blocks as a basis to know how much time we should use to consider the fullnode
11811191
# as not synced.
1182-
maximum_timestamp_delta = settings.P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER * settings.AVG_TIME_BETWEEN_BLOCKS
1192+
maximum_timestamp_delta = (
1193+
self.settings.P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER * self.settings.AVG_TIME_BETWEEN_BLOCKS
1194+
)
11831195

11841196
if current_timestamp - latest_blockchain_timestamp > maximum_timestamp_delta:
11851197
return False

tests/p2p/test_get_best_blockchain.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
from hathor.conf import HathorSettings
44
from hathor.indexes.height_index import HeightInfo
5-
from hathor.manager import DEFAULT_CAPABILITIES
65
from hathor.p2p.messages import ProtocolMessages
76
from hathor.p2p.resources import StatusResource
87
from hathor.p2p.states import ReadyState
@@ -229,7 +228,8 @@ def test_node_without_get_best_blockchain_capability(self):
229228
protocol2 = connected_peers1[0]
230229
self.assertTrue(protocol2.capabilities.issuperset(set(cababilities_without_get_best_blockchain)))
231230
protocol1 = connected_peers2[0]
232-
self.assertTrue(protocol1.capabilities.issuperset(set(DEFAULT_CAPABILITIES)))
231+
default_capabilities = manager2.get_default_capabilities()
232+
self.assertTrue(protocol1.capabilities.issuperset(set(default_capabilities)))
233233

234234
# assert the peers don't engage in get_best_blockchain messages
235235
state2 = protocol2.state

0 commit comments

Comments
 (0)