diff --git a/configs/hive.yaml b/configs/hive.yaml
new file mode 100644
index 0000000000..3ac969c0bc
--- /dev/null
+++ b/configs/hive.yaml
@@ -0,0 +1,95 @@
+# Hive config
+
+# Extends the hive preset
+PRESET_BASE: 'hive'
+
+# Free-form short name of the network that this configuration applies to - known
+# canonical network names include:
+# * 'mainnet' - there can be only one
+# * 'prater' - testnet
+# Must match the regex: [a-z0-9\-]
+CONFIG_NAME: 'hive'
+
+# Transition
+# ---------------------------------------------------------------
+# Estimated on Sept 15, 2022
+TERMINAL_TOTAL_DIFFICULTY: 58750000000000000000000
+# By default, don't use these params
+TERMINAL_BLOCK_HASH: 0x0000000000000000000000000000000000000000000000000000000000000000
+TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH: 18446744073709551615
+
+
+
+# Genesis
+# ---------------------------------------------------------------
+# `2**14` (= 16,384)
+MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 16384
+# Dec 1, 2020, 12pm UTC
+MIN_GENESIS_TIME: 1606824000
+# Hive initial fork version
+GENESIS_FORK_VERSION: 0x0000000a
+# 604800 seconds (7 days)
+GENESIS_DELAY: 604800
+
+
+# Forking
+# ---------------------------------------------------------------
+# Some forks are disabled for now:
+#  - These may be re-assigned to another fork-version later
+#  - Temporarily set to max uint64 value: 2**64 - 1
+
+# Altair
+ALTAIR_FORK_VERSION: 0x0100000a
+ALTAIR_FORK_EPOCH: 74240  # Oct 27, 2021, 10:56:23am UTC
+# Bellatrix
+BELLATRIX_FORK_VERSION: 0x0200000a
+BELLATRIX_FORK_EPOCH: 144896  # Sept 6, 2022, 11:34:47am UTC
+# Capella
+CAPELLA_FORK_VERSION: 0x0300000a
+CAPELLA_FORK_EPOCH: 194048  # April 12, 2023, 10:27:35pm UTC
+# Deneb
+DENEB_FORK_VERSION: 0x0400000a
+DENEB_FORK_EPOCH: 18446744073709551615
+
+
+
+
+# Time parameters
+# ---------------------------------------------------------------
+# 12 seconds
+SECONDS_PER_SLOT: 12
+# 14 (estimate from Eth1 mainnet)
+SECONDS_PER_ETH1_BLOCK: 14
+# 2**8 (= 256) epochs ~27 hours
+MIN_VALIDATOR_WITHDRAWABILITY_DELAY: 256
+# 2**8 (= 256) epochs ~27 hours
+SHARD_COMMITTEE_PERIOD: 256
+# 2**11 (= 2,048) Eth1 blocks ~8 hours
+ETH1_FOLLOW_DISTANCE: 2048
+
+
+# Validator cycle
+# ---------------------------------------------------------------
+# 2**2 (= 4)
+INACTIVITY_SCORE_BIAS: 4
+# 2**4 (= 16)
+INACTIVITY_SCORE_RECOVERY_RATE: 16
+# 2**4 * 10**9 (= 16,000,000,000) Gwei
+EJECTION_BALANCE: 16000000000
+# 2**2 (= 4)
+MIN_PER_EPOCH_CHURN_LIMIT: 4
+# 2**16 (= 65,536)
+CHURN_LIMIT_QUOTIENT: 65536
+
+
+# Fork choice
+# ---------------------------------------------------------------
+# 40%
+PROPOSER_SCORE_BOOST: 40
+
+# Deposit contract
+# ---------------------------------------------------------------
+# Ethereum PoW Mainnet
+DEPOSIT_CHAIN_ID: 1
+DEPOSIT_NETWORK_ID: 1
+DEPOSIT_CONTRACT_ADDRESS: 0x00000000219ab540356cBB839Cbe05303d7705Fa
diff --git a/setup.py b/setup.py
index f053412b5c..f8b758e927 100644
--- a/setup.py
+++ b/setup.py
@@ -164,6 +164,10 @@ def _load_kzg_trusted_setups(preset_name):
     'mainnet': _load_kzg_trusted_setups('mainnet')
 }
 
+EQUIVALENT_KZG_SETUPS = {
+    'hive': 'mainnet',
+}
+
 ETH2_SPEC_COMMENT_PREFIX = "eth2spec:"
 
 
@@ -193,6 +197,8 @@ def _parse_value(name: str, typed_value: str, type_hint: Optional[str]=None) ->
 
 def _update_constant_vars_with_kzg_setups(constant_vars, preset_name):
     comment = "noqa: E501"
+    if preset_name in EQUIVALENT_KZG_SETUPS:
+        preset_name = EQUIVALENT_KZG_SETUPS[preset_name]
     kzg_setups = ALL_KZG_SETUPS[preset_name]
     constant_vars['KZG_SETUP_G1'] = VariableDefinition(constant_vars['KZG_SETUP_G1'].value, str(kzg_setups[0]), comment, None)
     constant_vars['KZG_SETUP_G2'] = VariableDefinition(constant_vars['KZG_SETUP_G2'].value, str(kzg_setups[1]), comment, None)
@@ -210,6 +216,9 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str], pr
     ssz_objects: Dict[str, str] = {}
     dataclasses: Dict[str, str] = {}
     custom_types: Dict[str, str] = {}
+    preset_base = config.get('PRESET_BASE')
+    if preset_base is not None:
+        preset_base = preset_base.strip("'")
 
     with open(file_name) as source_file:
         document = gfm.parse(source_file.read())
@@ -291,7 +300,7 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str], pr
 
     # Load KZG trusted setup from files
     if any('KZG_SETUP' in name for name in constant_vars):
-        _update_constant_vars_with_kzg_setups(constant_vars, preset_name)
+        _update_constant_vars_with_kzg_setups(constant_vars, preset_name if preset_base is None else preset_base)
 
     return SpecObject(
         functions=functions,
@@ -692,6 +701,7 @@ def is_byte_vector(value: str) -> bool:
 
 
 def objects_to_spec(preset_name: str,
+                    preset_base: Optional[str],
                     spec_object: SpecObject,
                     builder: SpecBuilder,
                     ordered_class_objects: Dict[str, str]) -> str:
@@ -744,7 +754,7 @@ def format_config_var(name: str, vardef: VariableDefinition) -> str:
     config_spec += '\n'.join(f'    {k}: {v.type_name if v.type_name is not None else "int"}'
                              for k, v in spec_object.config_vars.items())
     config_spec += '\n\n\nconfig = Configuration(\n'
-    config_spec += f'    PRESET_BASE="{preset_name}",\n'
+    config_spec += f'    PRESET_BASE="{preset_name if preset_base is None else preset_base}",\n'
     config_spec += '\n'.join('    ' + format_config_var(k, v) for k, v in spec_object.config_vars.items())
     config_spec += '\n)\n'
 
@@ -936,8 +946,10 @@ def _build_spec(preset_name: str, fork: str,
     while OrderedDict(new_objects) != OrderedDict(class_objects):
         new_objects = copy.deepcopy(class_objects)
         dependency_order_class_objects(class_objects, spec_object.custom_types)
-
-    return objects_to_spec(preset_name, spec_object, spec_builders[fork], class_objects)
+    preset_base = config.get('PRESET_BASE')
+    if preset_base is not None:
+        preset_base = preset_base.strip("'")
+    return objects_to_spec(preset_name, preset_base, spec_object, spec_builders[fork], class_objects)
 
 
 class BuildTarget(NamedTuple):
@@ -975,6 +987,7 @@ def initialize_options(self):
         self.build_targets = """
                 minimal:presets/minimal:configs/minimal.yaml
                 mainnet:presets/mainnet:configs/mainnet.yaml
+                hive:presets/mainnet:configs/hive.yaml
         """
 
     def finalize_options(self):
diff --git a/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/gen.py b/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/gen.py
index b951a6a85c..76d9ec3522 100644
--- a/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/gen.py
+++ b/tests/core/pyspec/eth2spec/gen_helpers/gen_from_tests/gen.py
@@ -3,7 +3,7 @@
 from typing import Any, Callable, Dict, Iterable, Optional, List, Union
 
 from eth2spec.utils import bls
-from eth2spec.test.helpers.constants import ALL_PRESETS, TESTGEN_FORKS
+from eth2spec.test.helpers.constants import ALL_PRESETS, HIVE, TESTGEN_FORKS
 from eth2spec.test.helpers.typing import SpecForkName, PresetBaseName
 from eth2spec.gen_helpers.gen_base import gen_runner
 
@@ -96,10 +96,13 @@ def cases_fn() -> Iterable[TestCase]:
 def run_state_test_generators(runner_name: str,
                               all_mods: Dict[str, Dict[str, str]],
                               presets: Iterable[PresetBaseName] = ALL_PRESETS,
-                              forks: Iterable[SpecForkName] = TESTGEN_FORKS) -> None:
+                              forks: Iterable[SpecForkName] = TESTGEN_FORKS,
+                              is_hive: bool = False) -> None:
     """
     Generate all available state tests of `TESTGEN_FORKS` forks of `ALL_PRESETS` presets of the given runner.
     """
+    if is_hive:
+        presets = [HIVE]
     for preset_name in presets:
         for fork_name in forks:
             if fork_name in all_mods:
diff --git a/tests/core/pyspec/eth2spec/test/capella/api/__init__.py b/tests/core/pyspec/eth2spec/test/capella/api/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/core/pyspec/eth2spec/test/capella/api/test_api.py b/tests/core/pyspec/eth2spec/test/capella/api/test_api.py
new file mode 100644
index 0000000000..7105c3ab28
--- /dev/null
+++ b/tests/core/pyspec/eth2spec/test/capella/api/test_api.py
@@ -0,0 +1,86 @@
+from eth2spec.test.helpers.hive import (
+    StateID,
+    Eth2BeaconChainRequestBeaconBlocksByRange,
+    EthV1BeaconStatesFinalityCheckpoints,
+    EthV1BeaconStatesFork,
+)
+from eth2spec.test.context import (
+    with_capella_and_later,
+    spec_state_test_with_matching_config,
+    hive_state,
+)
+from eth2spec.test.helpers.state import (
+    state_transition_and_sign_block,
+    next_slot,
+)
+from eth2spec.test.helpers.block import (
+    build_empty_block_for_next_slot,
+)
+from eth2spec.test.helpers.withdrawals import (
+    prepare_expected_withdrawals,
+)
+
+
+def _perform_valid_withdrawal(spec, state):
+    fully_withdrawable_indices, partial_withdrawals_indices = prepare_expected_withdrawals(
+        spec, state, num_partial_withdrawals=spec.MAX_WITHDRAWALS_PER_PAYLOAD * 2,
+        num_full_withdrawals=spec.MAX_WITHDRAWALS_PER_PAYLOAD * 2)
+
+    next_slot(spec, state)
+    pre_next_withdrawal_index = state.next_withdrawal_index
+
+    expected_withdrawals = spec.get_expected_withdrawals(state)
+
+    pre_state = state.copy()
+
+    # Block 1
+    block = build_empty_block_for_next_slot(spec, state)
+    signed_block_1 = state_transition_and_sign_block(spec, state, block)
+
+    withdrawn_indices = [withdrawal.validator_index for withdrawal in expected_withdrawals]
+    fully_withdrawable_indices = list(set(fully_withdrawable_indices).difference(set(withdrawn_indices)))
+    partial_withdrawals_indices = list(set(partial_withdrawals_indices).difference(set(withdrawn_indices)))
+    assert state.next_withdrawal_index == pre_next_withdrawal_index + spec.MAX_WITHDRAWALS_PER_PAYLOAD
+
+    return pre_state, signed_block_1, pre_next_withdrawal_index
+
+
+@with_capella_and_later
+@spec_state_test_with_matching_config
+@hive_state()
+def test_debug_beacon_state_v2(spec, state):
+    _, signed_block_1, pre_next_withdrawal_index = (_perform_valid_withdrawal(spec, state))
+
+    # Block 2
+    block = build_empty_block_for_next_slot(spec, state)
+    signed_block_2 = state_transition_and_sign_block(spec, state, block)
+
+    assert state.next_withdrawal_index == pre_next_withdrawal_index + spec.MAX_WITHDRAWALS_PER_PAYLOAD * 2
+
+    yield 'blocks', [signed_block_1, signed_block_2]
+    yield 'post', state
+
+    yield 'hive', [
+        (
+            EthV1BeaconStatesFinalityCheckpoints(id=StateID.Head(), finalized=False).
+            from_state(state)
+        ),
+        (
+            EthV1BeaconStatesFork(id=StateID.Head(), finalized=False).
+            from_state(state)
+        ),
+        (
+            Eth2BeaconChainRequestBeaconBlocksByRange(
+                start_slot=1,
+                count=2,  # Slot 2 is empty
+                expected_roots=[signed_block_1.hash_tree_root()]
+            )
+        ),
+        (
+            Eth2BeaconChainRequestBeaconBlocksByRange(
+                start_slot=1,
+                count=3,
+                expected_roots=[block.hash_tree_root() for block in [signed_block_1, signed_block_2]],
+            )
+        ),
+    ]
diff --git a/tests/core/pyspec/eth2spec/test/context.py b/tests/core/pyspec/eth2spec/test/context.py
index 626ffc1dbc..94f7bb16ea 100644
--- a/tests/core/pyspec/eth2spec/test/context.py
+++ b/tests/core/pyspec/eth2spec/test/context.py
@@ -3,11 +3,11 @@
 from dataclasses import dataclass
 import importlib
 
-from eth2spec.phase0 import mainnet as spec_phase0_mainnet, minimal as spec_phase0_minimal
-from eth2spec.altair import mainnet as spec_altair_mainnet, minimal as spec_altair_minimal
-from eth2spec.bellatrix import mainnet as spec_bellatrix_mainnet, minimal as spec_bellatrix_minimal
-from eth2spec.capella import mainnet as spec_capella_mainnet, minimal as spec_capella_minimal
-from eth2spec.deneb import mainnet as spec_deneb_mainnet, minimal as spec_deneb_minimal
+from eth2spec.phase0 import mainnet as spec_phase0_mainnet, minimal as spec_phase0_minimal, hive as spec_phase0_hive
+from eth2spec.altair import mainnet as spec_altair_mainnet, minimal as spec_altair_minimal, hive as spec_altair_hive
+from eth2spec.bellatrix import mainnet as spec_bellatrix_mainnet, minimal as spec_bellatrix_minimal, hive as spec_bellatrix_hive
+from eth2spec.capella import mainnet as spec_capella_mainnet, minimal as spec_capella_minimal, hive as spec_capella_hive
+from eth2spec.deneb import mainnet as spec_deneb_mainnet, minimal as spec_deneb_minimal, hive as spec_deneb_hive
 from eth2spec.eip6110 import mainnet as spec_eip6110_mainnet, minimal as spec_eip6110_minimal
 
 from eth2spec.utils import bls
@@ -15,7 +15,7 @@
 from .helpers.constants import (
     PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB, EIP6110,
-    MINIMAL, MAINNET,
+    MINIMAL, MAINNET, HIVE,
     ALL_PHASES,
     ALL_FORK_UPGRADES,
 )
@@ -91,6 +91,13 @@ class ForkMeta:
         DENEB: spec_deneb_mainnet,
         EIP6110: spec_eip6110_mainnet,
     },
+    HIVE: {
+        PHASE0: spec_phase0_hive,
+        ALTAIR: spec_altair_hive,
+        BELLATRIX: spec_bellatrix_hive,
+        CAPELLA: spec_capella_hive,
+        DENEB: spec_deneb_hive,
+    },
 }
 
 
@@ -744,3 +751,62 @@ def post_tag(obj):
             return wrapper
         return decorator
+
+
+#
+# Hive state modifiers
+#
+
+def hive_state(**decorator_kwargs):
+    def decorator(fn):
+        """
+        Makes necessary changes to the state in order for the client to accept it in hive mode.
+ """ + def wrapper(*args, **kwargs): + if 'state' not in kwargs or 'spec' not in kwargs: + raise Exception("hive_state decorator requires state and spec") + state = kwargs['state'] + spec = kwargs['spec'] + + # Increase genesis time to min genesis time + state.genesis_time = spec.config.MIN_GENESIS_TIME + kwargs['state'] = state + + res = fn(*args, **kwargs) + if res is not None: + yield 'genesis', state + yield from res + + # Also yield extra configuration that is for the client in hive mode + + # Time is the next slot after test ends + time = state.genesis_time + ((state.slot + 1) * spec.config.SECONDS_PER_SLOT) + if "time" in decorator_kwargs: + time = decorator_kwargs["time"] + elif "slot_time" in decorator_kwargs: + time = state.genesis_time + (decorator_kwargs["slot_time"] * spec.config.SECONDS_PER_SLOT) + + head = state + head_epoch = spec.compute_epoch_at_slot(head.slot) + fork_version = spec.compute_fork_version(head_epoch) + fork_digest = spec.compute_fork_digest(fork_version, state.genesis_validators_root) + finalized_checkpoint = state.finalized_checkpoint + + yield 'hive_config', { + 'genesis_time': int(state.genesis_time), + 'genesis_validators_root': state.genesis_validators_root.hex(), + 'time': int(time), + 'fork_version': fork_version.hex(), + 'fork_digest': fork_digest.hex(), + 'finalized_checkpoint': { + 'epoch': int(finalized_checkpoint.epoch), + 'root': finalized_checkpoint.root.hex(), + }, + 'head': { + 'epoch': int(head_epoch), + 'root': head.hash_tree_root().hex(), + }, + 'head_slot': int(head.slot), + } + return wrapper + return decorator diff --git a/tests/core/pyspec/eth2spec/test/helpers/constants.py b/tests/core/pyspec/eth2spec/test/helpers/constants.py index 2140c96e45..419ee55273 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/constants.py +++ b/tests/core/pyspec/eth2spec/test/helpers/constants.py @@ -50,6 +50,7 @@ # MAINNET = PresetBaseName('mainnet') MINIMAL = PresetBaseName('minimal') +HIVE = PresetBaseName('hive') ALL_PRESETS = (MINIMAL, MAINNET) diff --git a/tests/core/pyspec/eth2spec/test/helpers/hive.py b/tests/core/pyspec/eth2spec/test/helpers/hive.py new file mode 100644 index 0000000000..8914e862eb --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/helpers/hive.py @@ -0,0 +1,413 @@ +from ruamel.yaml import YAML +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional +yaml = YAML() + +# +# Beacon API actions/verifications +# + + +class StateID(str): + + @classmethod + def Root(cls, root): + return cls(f"root:{root.hex()}") + + @classmethod + def Slot(cls, slot): + return cls(f"slot:{slot}") + + @classmethod + def Head(cls): + return cls("head") + + @classmethod + def Genesis(cls): + return cls("genesis") + + @classmethod + def Finalized(cls): + return cls("finalized") + + @classmethod + def Justified(cls): + return cls("justified") + + +@dataclass +class EthV2DebugBeaconStates: + id: StateID = StateID.Head() + fields: Dict = field(default_factory=dict) + + def __post_init__(self): + self.id = str(self.id) + + # Common Attributes + def state_root(self, state_root): + self.fields["state_root"] = state_root.hex() + return self + + def slot(self, slot): + self.fields["slot"] = int(slot) + return self + + def genesis_time(self, genesis_time): + self.fields["genesis_time"] = int(genesis_time) + return self + + def genesis_validators_root(self, genesis_validators_root): + self.fields["genesis_validators_root"] = str(genesis_validators_root) + return self + + def fork(self, fork): + self.fields["fork"] = { + 
"previous_version": fork.previous_version.hex(), + "current_version": fork.current_version.hex(), + "epoch": int(fork.epoch), + } + return self + + def latest_block_header(self, latest_block_header): + self.fields["latest_block_header"] = { + "slot": int(latest_block_header.slot), + "proposer_index": int(latest_block_header.proposer_index), + "parent_root": latest_block_header.parent_root.hex(), + "state_root": latest_block_header.state_root.hex(), + "body_root": latest_block_header.body_root.hex(), + } + return self + + def block_roots(self, block_roots): + self.fields["block_roots"] = [root.hex() for root in block_roots] + return self + + def state_roots(self, state_roots): + self.fields["state_roots"] = [root.hex() for root in state_roots] + return self + + def historical_roots(self, historical_roots): + self.fields["historical_roots"] = [root.hex() for root in historical_roots] + return self + + # Eth1 + def eth1_data(self, eth1_data): + self.fields["eth1_data"] = { + "deposit_root": eth1_data.deposit_root.hex(), + "deposit_count": int(eth1_data.deposit_count), + "block_hash": eth1_data.block_hash.hex(), + } + return self + + def eth1_data_votes(self, eth1_data_votes): + self.fields["eth1_data_votes"] = [{ + "deposit_root": eth1_data.deposit_root.hex(), + "deposit_count": int(eth1_data.deposit_count), + "block_hash": eth1_data.block_hash.hex(), + } for eth1_data in eth1_data_votes] + return self + + def eth1_deposit_index(self, eth1_deposit_index): + self.fields["eth1_deposit_index"] = int(eth1_deposit_index) + return self + + # Registry + def validators(self, validators): + self.fields["validators"] = [{ + "pubkey": validator.pubkey.hex(), + "withdrawal_credentials": validator.withdrawal_credentials.hex(), + "effective_balance": int(validator.effective_balance), + "slashed": bool(validator.slashed), + "activation_eligibility_epoch": int(validator.activation_eligibility_epoch), + "activation_epoch": int(validator.activation_epoch), + "exit_epoch": int(validator.exit_epoch), + "withdrawable_epoch": int(validator.withdrawable_epoch), + } for validator in validators] + return self + + def balances(self, balances): + self.fields["balances"] = [int(balance) for balance in balances] + return self + + # Randomness + def randao_mixes(self, randao_mixes): + self.fields["randao_mixes"] = [mix.hex() for mix in randao_mixes] + return self + + # Slashings + def slashings(self, slashings): + self.fields["slashings"] = [int(slash) for slash in slashings] + return self + + # Attestations + def previous_epoch_attestations(self, previous_epoch_attestations): + self.fields["previous_epoch_attestations"] = [{ + "aggregation_bits": attestation.aggregation_bits.hex(), + "data": { + "slot": int(attestation.data.slot), + "index": int(attestation.data.index), + "beacon_block_root": attestation.data.beacon_block_root.hex(), + "source": { + "epoch": int(attestation.data.source.epoch), + "root": attestation.data.source.root.hex(), + }, + "target": { + "epoch": int(attestation.data.target.epoch), + "root": attestation.data.target.root.hex(), + }, + }, + "inclusion_delay": int(attestation.inclusion_delay), + "proposer_index": int(attestation.proposer_index), + } for attestation in previous_epoch_attestations] + return self + + def current_epoch_attestations(self, current_epoch_attestations): + self.fields["current_epoch_attestations"] = [{ + # TODO: this is incorrect + "aggregation_bits": attestation.aggregation_bits.hex(), + "data": { + "slot": int(attestation.data.slot), + "index": int(attestation.data.index), + 
"beacon_block_root": attestation.data.beacon_block_root.hex(), + "source": { + "epoch": int(attestation.data.source.epoch), + "root": attestation.data.source.root.hex(), + }, + "target": { + "epoch": int(attestation.data.target.epoch), + "root": attestation.data.target.root.hex(), + }, + }, + "inclusion_delay": int(attestation.inclusion_delay), + "proposer_index": int(attestation.proposer_index), + } for attestation in current_epoch_attestations] + return self + + def previous_epoch_participation(self, previous_epoch_participation): + self.fields["previous_epoch_participation"] = [ + int(participation) + for participation in previous_epoch_participation + ] + return self + + def current_epoch_participation(self, current_epoch_participation): + self.fields["current_epoch_participation"] = [ + int(participation) + for participation in current_epoch_participation + ] + return self + + # Finality + """ + TODO + def justification_bits(self, justification_bits): + self.fields["justification_bits"] = justification_bits.hex() + return self + """ + + def previous_justified_checkpoint(self, previous_justified_checkpoint): + self.fields["previous_justified_checkpoint"] = { + "epoch": int(previous_justified_checkpoint.epoch), + "root": previous_justified_checkpoint.root.hex(), + } + return self + + def current_justified_checkpoint(self, current_justified_checkpoint): + self.fields["current_justified_checkpoint"] = { + "epoch": int(current_justified_checkpoint.epoch), + "root": current_justified_checkpoint.root.hex(), + } + return self + + def finalized_checkpoint(self, finalized_checkpoint): + self.fields["finalized_checkpoint"] = { + "epoch": int(finalized_checkpoint.epoch), + "root": finalized_checkpoint.root.hex(), + } + return self + + # Altair + def inactivity_scores(self, inactivity_scores): + self.fields["inactivity_scores"] = [int(score) for score in inactivity_scores] + return self + + def current_sync_committee(self, current_sync_committee): + self.fields["current_sync_committee"] = { + "pubkeys": [pubkey.hex() for pubkey in current_sync_committee.pubkeys], + "aggregate_pubkey": current_sync_committee.aggregate_pubkey.hex(), + } + return self + + def next_sync_committee(self, next_sync_committee): + self.fields["next_sync_committee"] = { + "pubkeys": [pubkey.hex() for pubkey in next_sync_committee.pubkeys], + "aggregate_pubkey": next_sync_committee.aggregate_pubkey.hex(), + } + return self + + def from_state(self, state): + """ + Constructs a full state verification object from the given ``state``. + Might be too expensive because it checks every single field, and the + produced yaml file is too big. 
+ """ + # Common Attributes + self.state_root(state.hash_tree_root()) + self.slot(state.slot) + self.genesis_time(state.genesis_time) + self.genesis_validators_root(state.genesis_validators_root) + self.fork(state.fork) + self.latest_block_header(state.latest_block_header) + self.block_roots(state.block_roots) + self.state_roots(state.state_roots) + self.historical_roots(state.historical_roots) + + # Eth1 + self.eth1_data(state.eth1_data) + self.eth1_data_votes(state.eth1_data_votes) + self.eth1_deposit_index(state.eth1_deposit_index) + + # Registry + self.validators(state.validators) + self.balances(state.balances) + + # Randomness + self.randao_mixes(state.randao_mixes) + + # Slashings + self.slashings(state.slashings) + + # Attestations / Participation + if hasattr(state, "previous_epoch_attestations"): + self.previous_epoch_attestations(state.previous_epoch_attestations) + else: + self.previous_epoch_participation(state.previous_epoch_participation) + if hasattr(state, "current_epoch_attestations"): + self.current_epoch_attestations(state.current_epoch_attestations) + else: + self.current_epoch_participation(state.current_epoch_participation) + + # Finality + # TODO: self.justification_bits(state.justification_bits) + self.previous_justified_checkpoint(state.previous_justified_checkpoint) + self.current_justified_checkpoint(state.current_justified_checkpoint) + self.finalized_checkpoint(state.finalized_checkpoint) + + # Altair + if hasattr(state, "inactivity_scores"): + self.inactivity_scores(state.inactivity_scores) + if hasattr(state, "current_sync_committee"): + self.current_sync_committee(state.current_sync_committee) + if hasattr(state, "next_sync_committee"): + self.next_sync_committee(state.next_sync_committee) + + return self + + # TODO: Bellatrix, Capella, Deneb fields + + +def CheckpointToDict(checkpoint): + return { + "epoch": int(checkpoint.epoch), + "root": checkpoint.root.hex(), + } + + +@dataclass +class EthV1BeaconStatesFinalityCheckpoints: + id: StateID + finalized: bool + execution_optimistic: Optional[bool] = None + data: Dict = field(default_factory=dict) + + def __post_init__(self): + self.id = str(self.id) + + def previous_justified_checkpoint(self, checkpoint): + self.data["previous_justified"] = { + "epoch": int(checkpoint.epoch), + "root": checkpoint.root.hex(), + } + return self + + def current_justified_checkpoint(self, checkpoint): + self.data["current_justified"] = { + "epoch": int(checkpoint.epoch), + "root": checkpoint.root.hex(), + } + return self + + def finalized_checkpoint(self, checkpoint): + self.data["finalized"] = { + "epoch": int(checkpoint.epoch), + "root": checkpoint.root.hex(), + } + return self + + def from_state(self, state): + """ + Constructs a finality checkpoint verification object from a given + state. 
+ """ + self.previous_justified_checkpoint(state.previous_justified_checkpoint) + self.current_justified_checkpoint(state.current_justified_checkpoint) + self.finalized_checkpoint(state.finalized_checkpoint) + return self + + +@dataclass +class EthV1BeaconStatesFork: + id: StateID + finalized: bool + execution_optimistic: Optional[bool] = None + data: Dict = field(default_factory=dict) + + def __post_init__(self): + self.id = str(self.id) + + def previous_version(self, version): + self.data["previous_version"] = version.hex() + return self + + def current_version(self, version): + self.data["current_version"] = version.hex() + return self + + def epoch(self, epoch): + self.data["epoch"] = int(epoch) + return self + + def from_state(self, state): + """ + Constructs a finality checkpoint verification object from a given + state. + """ + self.previous_version(state.fork.previous_version) + self.current_version(state.fork.current_version) + self.epoch(state.fork.epoch) + return self + + +yaml.register_class(EthV2DebugBeaconStates) +yaml.register_class(EthV1BeaconStatesFinalityCheckpoints) +yaml.register_class(EthV1BeaconStatesFork) + + +# +# Beacon P2P actions/verifications +# + + +@dataclass +class Eth2BeaconChainRequestBeaconBlocksByRange: + start_slot: int + count: int + expected_roots: List[Any] = field(default_factory=list) + step: int = 1 + version: int = 2 + + def __post_init__(self): + self.expected_roots = [root.hex() for root in self.expected_roots] + + +yaml.register_class(Eth2BeaconChainRequestBeaconBlocksByRange) diff --git a/tests/formats/api/README.md b/tests/formats/api/README.md new file mode 100644 index 0000000000..20b36208a4 --- /dev/null +++ b/tests/formats/api/README.md @@ -0,0 +1,7 @@ +# Sanity tests + +The aim of the sanity tests is to set a base-line on what really needs to pass, i.e. the essentials. + +There are two handlers, documented individually: +- [`slots`](./slots.md): transitions of one or more slots (and epoch transitions within) +- [`blocks`](./blocks.md): transitions triggered by one or more blocks diff --git a/tests/formats/api/blocks.md b/tests/formats/api/blocks.md new file mode 100644 index 0000000000..4e25ba809f --- /dev/null +++ b/tests/formats/api/blocks.md @@ -0,0 +1,41 @@ +# Beacon API testing + +Sanity tests to cover a series of one or more blocks being processed, aiming to cover common changes. + +## Test case format + +### `hive.yaml` + +TBD. + + +### `meta.yaml` + +```yaml +description: string -- Optional. Description of test case, purely for debugging purposes. +bls_setting: int -- see general test-format spec. +reveal_deadlines_setting: int -- see general test-format spec. +blocks_count: int -- the number of blocks processed in this test. +``` + + +### `genesis.ssz_snappy` + +An SSZ-snappy encoded `BeaconState` of the Beacon chain genesis. + + +### `blocks_.ssz_snappy` + +A series of files, with `` in range `[0, blocks_count)`. Blocks need to be processed in order, + following the main transition function (i.e. process slot and epoch transitions in between blocks as normal) + +Each file is a SSZ-snappy encoded `SignedBeaconBlock`. + +### `post.ssz_snappy` + +An SSZ-snappy encoded `BeaconState`, the state after applying the block transitions. + + +## Condition + +The beacon API verifications have to match the expected result after the post-state has been applied. 
diff --git a/tests/generators/api/README.md b/tests/generators/api/README.md
new file mode 100644
index 0000000000..bd17ab2d29
--- /dev/null
+++ b/tests/generators/api/README.md
@@ -0,0 +1,8 @@
+# Beacon API tests
+
+Beacon API tests, which need to be run using the `beacon/api` hive simulator.
+
+Information on the format of the tests can be found in the [api test formats documentation](../../formats/api/README.md).
+
+
+
diff --git a/tests/generators/api/main.py b/tests/generators/api/main.py
new file mode 100644
index 0000000000..9d4e420e4b
--- /dev/null
+++ b/tests/generators/api/main.py
@@ -0,0 +1,38 @@
+from eth2spec.test.helpers.constants import PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB
+from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators, combine_mods
+
+
+if __name__ == "__main__":
+    phase_0_mods = {key: 'eth2spec.test.phase0.api.test_' + key for key in [
+        # 'api', TODO
+    ]}
+
+    _new_altair_mods = {key: 'eth2spec.test.altair.api.test_' + key for key in [
+        # 'api', TODO
+    ]}
+    altair_mods = combine_mods(_new_altair_mods, phase_0_mods)
+
+    _new_bellatrix_mods = {key: 'eth2spec.test.bellatrix.api.test_' + key for key in [
+        # 'api', TODO
+    ]}
+    bellatrix_mods = combine_mods(_new_bellatrix_mods, altair_mods)
+
+    _new_capella_mods = {key: 'eth2spec.test.capella.api.test_' + key for key in [
+        'api',
+    ]}
+    capella_mods = combine_mods(_new_capella_mods, bellatrix_mods)
+
+    _new_deneb_mods = {key: 'eth2spec.test.deneb.api.test_' + key for key in [
+        # 'api', TODO
+    ]}
+    deneb_mods = combine_mods(_new_deneb_mods, capella_mods)
+
+    all_mods = {
+        PHASE0: phase_0_mods,
+        ALTAIR: altair_mods,
+        BELLATRIX: bellatrix_mods,
+        CAPELLA: capella_mods,
+        DENEB: deneb_mods,
+    }
+
+    run_state_test_generators(runner_name="api", all_mods=all_mods, is_hive=True)
diff --git a/tests/generators/api/requirements.txt b/tests/generators/api/requirements.txt
new file mode 100644
index 0000000000..1822486863
--- /dev/null
+++ b/tests/generators/api/requirements.txt
@@ -0,0 +1,2 @@
+pytest>=4.4
+../../../[generator]