Commit 6b5513b

Merge branch 'dev' into engine-versioned-hashes

hwwhww committed May 24, 2023
2 parents 289d814 + 738b981
Showing 28 changed files with 296 additions and 144 deletions.
30 changes: 30 additions & 0 deletions configs/mainnet.yaml
@@ -94,3 +94,33 @@ PROPOSER_SCORE_BOOST: 40
DEPOSIT_CHAIN_ID: 1
DEPOSIT_NETWORK_ID: 1
DEPOSIT_CONTRACT_ADDRESS: 0x00000000219ab540356cBB839Cbe05303d7705Fa


# Networking
# ---------------------------------------------------------------
# `2**20` (= 1048576, 1 MiB)
GOSSIP_MAX_SIZE: 1048576
# `2**10` (= 1024)
MAX_REQUEST_BLOCKS: 1024
# `2**8` (= 256)
EPOCHS_PER_SUBNET_SUBSCRIPTION: 256
# `MIN_VALIDATOR_WITHDRAWABILITY_DELAY + CHURN_LIMIT_QUOTIENT // 2` (= 33024, ~5 months)
MIN_EPOCHS_FOR_BLOCK_REQUESTS: 33024
# `2**20` (=1048576, 1 MiB)
MAX_CHUNK_SIZE: 1048576
# 5s
TTFB_TIMEOUT: 5
# 10s
RESP_TIMEOUT: 10
ATTESTATION_PROPAGATION_SLOT_RANGE: 32
# 500ms
MAXIMUM_GOSSIP_CLOCK_DISPARITY: 500
MESSAGE_DOMAIN_INVALID_SNAPPY: 0x00000000
MESSAGE_DOMAIN_VALID_SNAPPY: 0x01000000
# 2 subnets per node
SUBNETS_PER_NODE: 2
# `2**6` (= 64)
ATTESTATION_SUBNET_COUNT: 64
ATTESTATION_SUBNET_EXTRA_BITS: 0
# ceillog2(ATTESTATION_SUBNET_COUNT) + ATTESTATION_SUBNET_EXTRA_BITS
ATTESTATION_SUBNET_PREFIX_BITS: 6
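
The two derived values above can be sanity-checked against the formulas in their comments. A minimal sketch, assuming the mainnet preset values `MIN_VALIDATOR_WITHDRAWABILITY_DELAY = 256` and `CHURN_LIMIT_QUOTIENT = 65536` (neither appears in this diff):

```python
# Sanity-check sketch for the derived mainnet values; preset constants are assumed, not part of this diff.
MIN_VALIDATOR_WITHDRAWABILITY_DELAY = 256  # assumed mainnet preset
CHURN_LIMIT_QUOTIENT = 65536               # assumed mainnet preset
ATTESTATION_SUBNET_COUNT = 64
ATTESTATION_SUBNET_EXTRA_BITS = 0

def ceillog2(x: int) -> int:
    # Smallest n such that 2**n >= x (spec-style helper).
    return (x - 1).bit_length()

assert MIN_VALIDATOR_WITHDRAWABILITY_DELAY + CHURN_LIMIT_QUOTIENT // 2 == 33024
assert ceillog2(ATTESTATION_SUBNET_COUNT) + ATTESTATION_SUBNET_EXTRA_BITS == 6
```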
30 changes: 30 additions & 0 deletions configs/minimal.yaml
@@ -95,3 +95,33 @@ DEPOSIT_CHAIN_ID: 5
DEPOSIT_NETWORK_ID: 5
# Configured on a per testnet basis
DEPOSIT_CONTRACT_ADDRESS: 0x1234567890123456789012345678901234567890


# Networking
# ---------------------------------------------------------------
# `2**20` (= 1048576, 1 MiB)
GOSSIP_MAX_SIZE: 1048576
# `2**10` (= 1024)
MAX_REQUEST_BLOCKS: 1024
# `2**8` (= 256)
EPOCHS_PER_SUBNET_SUBSCRIPTION: 256
# [customized] `MIN_VALIDATOR_WITHDRAWABILITY_DELAY + CHURN_LIMIT_QUOTIENT // 2` (= 272)
MIN_EPOCHS_FOR_BLOCK_REQUESTS: 272
# `2**20` (=1048576, 1 MiB)
MAX_CHUNK_SIZE: 1048576
# 5s
TTFB_TIMEOUT: 5
# 10s
RESP_TIMEOUT: 10
ATTESTATION_PROPAGATION_SLOT_RANGE: 32
# 500ms
MAXIMUM_GOSSIP_CLOCK_DISPARITY: 500
MESSAGE_DOMAIN_INVALID_SNAPPY: 0x00000000
MESSAGE_DOMAIN_VALID_SNAPPY: 0x01000000
# 2 subnets per node
SUBNETS_PER_NODE: 2
# `2**6` (= 64)
ATTESTATION_SUBNET_COUNT: 64
ATTESTATION_SUBNET_EXTRA_BITS: 0
# ceillog2(ATTESTATION_SUBNET_COUNT) + ATTESTATION_SUBNET_EXTRA_BITS
ATTESTATION_SUBNET_PREFIX_BITS: 6
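
The `[customized]` value follows the same formula under the minimal preset; a short sketch, assuming the minimal preset values `MIN_VALIDATOR_WITHDRAWABILITY_DELAY = 256` and `CHURN_LIMIT_QUOTIENT = 32` (not part of this diff):

```python
# Minimal-preset sanity check; preset constants are assumed, not part of this diff.
MIN_VALIDATOR_WITHDRAWABILITY_DELAY = 256  # assumed minimal preset
CHURN_LIMIT_QUOTIENT = 32                  # assumed minimal preset
assert MIN_VALIDATOR_WITHDRAWABILITY_DELAY + CHURN_LIMIT_QUOTIENT // 2 == 272
```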
3 changes: 2 additions & 1 deletion setup.py
@@ -279,7 +279,7 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str], pr
elif name in config:
config_vars[name] = VariableDefinition(value_def.type_name, config[name], value_def.comment, None)
else:
if name == 'ENDIANNESS':
if name in ('ENDIANNESS', 'KZG_ENDIANNESS'):
# Deal with mypy Literal typing check
value_def = _parse_value(name, value, type_hint='Final')
constant_vars[name] = value_def
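
For context on the `ENDIANNESS`/`KZG_ENDIANNESS` special case: parsing these string constants with a `Final` type hint lets mypy infer a `Literal` type instead of plain `str`, which is what the "mypy Literal typing check" comment refers to. A standalone sketch of the pattern (hypothetical names, not the pyspec build code):

```python
from typing import Final, Literal

PLAIN_ENDIANNESS = 'big'          # mypy infers `str`
FINAL_ENDIANNESS: Final = 'big'   # mypy infers `Literal['big']`

def to_fixed_bytes(value: int, byteorder: Literal['little', 'big']) -> bytes:
    # Hypothetical helper standing in for any parameter typed with a byteorder Literal.
    return value.to_bytes(8, byteorder)

to_fixed_bytes(1, FINAL_ENDIANNESS)    # accepted by mypy
# to_fixed_bytes(1, PLAIN_ENDIANNESS)  # rejected by mypy: `str` is not assignable to the Literal
```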
@@ -1051,6 +1051,7 @@ def finalize_options(self):
specs/phase0/fork-choice.md
specs/phase0/validator.md
specs/phase0/weak-subjectivity.md
specs/phase0/p2p-interface.md
"""
if self.spec_fork in (ALTAIR, BELLATRIX, CAPELLA, DENEB, EIP6110):
self.md_doc_paths += """
19 changes: 2 additions & 17 deletions specs/_features/eip4788/validator.md
@@ -64,27 +64,12 @@ parameter to the `PayloadAttributes`.

```python
def prepare_execution_payload(state: BeaconState,
pow_chain: Dict[Hash32, PowBlock],
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
suggested_fee_recipient: ExecutionAddress,
execution_engine: ExecutionEngine) -> Optional[PayloadId]:
if not is_merge_transition_complete(state):
is_terminal_block_hash_set = TERMINAL_BLOCK_HASH != Hash32()
is_activation_epoch_reached = get_current_epoch(state) >= TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH
if is_terminal_block_hash_set and not is_activation_epoch_reached:
# Terminal block hash is set but activation epoch is not yet reached, no prepare payload call is needed
return None

terminal_pow_block = get_terminal_pow_block(pow_chain)
if terminal_pow_block is None:
# Pre-merge, no prepare payload call is needed
return None
# Signify merge via producing on top of the terminal PoW block
parent_hash = terminal_pow_block.block_hash
else:
# Post-merge, normal payload
parent_hash = state.latest_execution_payload_header.block_hash
# Verify consistency of the parent hash with respect to the previous execution payload header
parent_hash = state.latest_execution_payload_header.block_hash

# Set the forkchoice head and initiate the payload build process
payload_attributes = PayloadAttributes(
11 changes: 4 additions & 7 deletions specs/_features/eip6110/beacon-chain.md
@@ -176,9 +176,8 @@ class BeaconState(Container):
```python
def process_block(state: BeaconState, block: BeaconBlock) -> None:
process_block_header(state, block)
if is_execution_enabled(state, block.body):
process_withdrawals(state, block.body.execution_payload)
process_execution_payload(state, block.body, EXECUTION_ENGINE) # [Modified in EIP6110]
process_withdrawals(state, block.body.execution_payload)
process_execution_payload(state, block.body, EXECUTION_ENGINE) # [Modified in EIP6110]
process_randao(state, block.body)
process_eth1_data(state, block.body)
process_operations(state, block.body) # [Modified in EIP6110]
@@ -211,8 +210,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None:
for_ops(body.bls_to_execution_changes, process_bls_to_execution_change)

# [New in EIP6110]
if is_execution_enabled(state, body):
for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt)
for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt)
```

#### New `process_deposit_receipt`
@@ -241,8 +239,7 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi
payload = body.execution_payload

# Verify consistency of the parent hash with respect to the previous execution payload header
if is_merge_transition_complete(state):
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
# Verify prev_randao
assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state))
# Verify timestamp
6 changes: 2 additions & 4 deletions specs/_features/sharding/beacon-chain.md
@@ -236,8 +236,7 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None:
process_block_header(state, block)
verify_builder_block_bid(state, block)
process_sharded_data(state, block)
if is_execution_enabled(state, block.body):
process_execution_payload(state, block, EXECUTION_ENGINE)
process_execution_payload(state, block, EXECUTION_ENGINE)

if not is_builder_block_slot(block.slot):
process_randao(state, block.body)
@@ -371,8 +370,7 @@ def process_execution_payload(state: BeaconState, block: BeaconBlock, execution_
payload = block.body.payload_data.value.execution_payload

# Verify consistency of the parent hash with respect to the previous execution payload header
if is_merge_transition_complete(state):
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
# Verify random
assert payload.random == get_randao_mix(state, get_current_epoch(state))
# Verify timestamp
5 changes: 3 additions & 2 deletions specs/bellatrix/validator.md
@@ -127,12 +127,13 @@ To obtain an execution payload, a block proposer building a block on top of a `s

```python
def prepare_execution_payload(state: BeaconState,
pow_chain: Dict[Hash32, PowBlock],
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
suggested_fee_recipient: ExecutionAddress,
execution_engine: ExecutionEngine) -> Optional[PayloadId]:
execution_engine: ExecutionEngine,
pow_chain: Optional[Dict[Hash32, PowBlock]]=None) -> Optional[PayloadId]:
if not is_merge_transition_complete(state):
assert pow_chain is not None
is_terminal_block_hash_set = TERMINAL_BLOCK_HASH != Hash32()
is_activation_epoch_reached = get_current_epoch(state) >= TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH
if is_terminal_block_hash_set and not is_activation_epoch_reached:
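
The reordering above turns `pow_chain` into an optional trailing argument, so post-merge callers can omit it while pre-merge callers must still supply it (enforced by the new `assert`). A toy sketch of that calling pattern, using stand-in types rather than the spec objects:

```python
from typing import Dict, Optional

# Toy stand-ins for spec types, for illustration only.
Hash32 = bytes
PowBlock = dict
PayloadId = bytes

def prepare_payload_stub(merge_complete: bool,
                         safe_block_hash: Hash32,
                         finalized_block_hash: Hash32,
                         pow_chain: Optional[Dict[Hash32, PowBlock]] = None) -> Optional[PayloadId]:
    # Mirrors the new shape: `pow_chain` is last, defaults to None, and is only needed pre-merge.
    if not merge_complete:
        assert pow_chain is not None  # pre-merge callers must still pass a PoW chain view
        return None                   # pre-merge branch elided in this toy
    return b'\x00' * 8                # post-merge: payload build proceeds without the PoW chain

# Post-merge call sites can now drop the `pow_chain` argument entirely.
assert prepare_payload_stub(True, b'\x11' * 32, b'\x22' * 32) is not None
assert prepare_payload_stub(False, b'\x11' * 32, b'\x22' * 32, pow_chain={}) is None
```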
14 changes: 7 additions & 7 deletions specs/capella/beacon-chain.md
@@ -331,9 +331,9 @@ def process_historical_summaries_update(state: BeaconState) -> None:
```python
def process_block(state: BeaconState, block: BeaconBlock) -> None:
process_block_header(state, block)
if is_execution_enabled(state, block.body):
process_withdrawals(state, block.body.execution_payload) # [New in Capella]
process_execution_payload(state, block.body, EXECUTION_ENGINE) # [Modified in Capella]
# [Modified in Capella] Removed `is_execution_enabled` check in Capella
process_withdrawals(state, block.body.execution_payload) # [New in Capella]
process_execution_payload(state, block.body, EXECUTION_ENGINE) # [Modified in Capella]
process_randao(state, block.body)
process_eth1_data(state, block.body)
process_operations(state, block.body) # [Modified in Capella]
@@ -404,15 +404,15 @@ def process_withdrawals(state: BeaconState, payload: ExecutionPayload) -> None:

#### Modified `process_execution_payload`

*Note*: The function `process_execution_payload` is modified to use the new `ExecutionPayloadHeader` type.
*Note*: The function `process_execution_payload` is modified to use the new `ExecutionPayloadHeader` type
and no longer performs the `is_merge_transition_complete` check.

```python
def process_execution_payload(state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine) -> None:
payload = body.execution_payload

# [Modified in Capella] Removed `is_merge_transition_complete` check in Capella
# Verify consistency of the parent hash with respect to the previous execution payload header
if is_merge_transition_complete(state):
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
# Verify prev_randao
assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state))
# Verify timestamp
55 changes: 55 additions & 0 deletions specs/capella/fork-choice.md
@@ -14,6 +14,8 @@
- [`notify_forkchoice_updated`](#notify_forkchoice_updated)
- [Helpers](#helpers)
- [Extended `PayloadAttributes`](#extended-payloadattributes)
- [Updated fork-choice handlers](#updated-fork-choice-handlers)
- [`on_block`](#on_block)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->
@@ -60,3 +62,56 @@ class PayloadAttributes(object):
suggested_fee_recipient: ExecutionAddress
withdrawals: Sequence[Withdrawal] # [New in Capella]
```

## Updated fork-choice handlers

### `on_block`

*Note*: The only modification is the deletion of the verification of merge transition block conditions.

```python
def on_block(store: Store, signed_block: SignedBeaconBlock) -> None:
"""
Run ``on_block`` upon receiving a new block.
"""
block = signed_block.message
# Parent block must be known
assert block.parent_root in store.block_states
# Make a copy of the state to avoid mutability issues
pre_state = copy(store.block_states[block.parent_root])
# Blocks cannot be in the future. If they are, their consideration must be delayed until they are in the past.
assert get_current_slot(store) >= block.slot

# Check that block is later than the finalized epoch slot (optimization to reduce calls to get_ancestor)
finalized_slot = compute_start_slot_at_epoch(store.finalized_checkpoint.epoch)
assert block.slot > finalized_slot
# Check block is a descendant of the finalized block at the checkpoint finalized slot
finalized_checkpoint_block = get_checkpoint_block(
store,
block.parent_root,
store.finalized_checkpoint.epoch,
)
assert store.finalized_checkpoint.root == finalized_checkpoint_block

# Check the block is valid and compute the post-state
state = pre_state.copy()
block_root = hash_tree_root(block)
state_transition(state, signed_block, True)

# Add new block to the store
store.blocks[block_root] = block
# Add new state for this block to the store
store.block_states[block_root] = state

# Add proposer score boost if the block is timely
time_into_slot = (store.time - store.genesis_time) % SECONDS_PER_SLOT
is_before_attesting_interval = time_into_slot < SECONDS_PER_SLOT // INTERVALS_PER_SLOT
if get_current_slot(store) == block.slot and is_before_attesting_interval:
store.proposer_boost_root = hash_tree_root(block)

# Update checkpoints in store if necessary
update_checkpoints(store, state.current_justified_checkpoint, state.finalized_checkpoint)

# Eagerly compute unrealized justification and finality.
compute_pulled_up_tip(store, block_root)
```
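
For intuition on the proposer-boost timing check near the end of `on_block`: with the mainnet values `SECONDS_PER_SLOT = 12` and `INTERVALS_PER_SLOT = 3` (assumed here, they are not part of this diff), a block only earns the boost if it arrives within the first 4 seconds of its slot. A small sketch:

```python
# Timeliness check from `on_block`, with assumed mainnet timing constants.
SECONDS_PER_SLOT = 12   # assumed mainnet preset
INTERVALS_PER_SLOT = 3  # assumed constant

def is_before_attesting_interval(time: int, genesis_time: int) -> bool:
    time_into_slot = (time - genesis_time) % SECONDS_PER_SLOT
    return time_into_slot < SECONDS_PER_SLOT // INTERVALS_PER_SLOT

assert is_before_attesting_interval(time=1_000_003, genesis_time=1_000_000)      # 3s into the slot: boosted
assert not is_before_attesting_interval(time=1_000_005, genesis_time=1_000_000)  # 5s into the slot: no boost
```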
19 changes: 2 additions & 17 deletions specs/capella/validator.md
@@ -79,27 +79,12 @@ That is, `state` is the `previous_state` processed through any empty slots up to

```python
def prepare_execution_payload(state: BeaconState,
pow_chain: Dict[Hash32, PowBlock],
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
suggested_fee_recipient: ExecutionAddress,
execution_engine: ExecutionEngine) -> Optional[PayloadId]:
if not is_merge_transition_complete(state):
is_terminal_block_hash_set = TERMINAL_BLOCK_HASH != Hash32()
is_activation_epoch_reached = get_current_epoch(state) >= TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH
if is_terminal_block_hash_set and not is_activation_epoch_reached:
# Terminal block hash is set but activation epoch is not yet reached, no prepare payload call is needed
return None

terminal_pow_block = get_terminal_pow_block(pow_chain)
if terminal_pow_block is None:
# Pre-merge, no prepare payload call is needed
return None
# Signify merge via producing on top of the terminal PoW block
parent_hash = terminal_pow_block.block_hash
else:
# Post-merge, normal payload
parent_hash = state.latest_execution_payload_header.block_hash
# [Modified in Capella] Removed `is_merge_transition_complete` check in Capella
parent_hash = state.latest_execution_payload_header.block_hash

# Set the forkchoice head and initiate the payload build process
payload_attributes = PayloadAttributes(
3 changes: 1 addition & 2 deletions specs/deneb/beacon-chain.md
@@ -216,8 +216,7 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi
payload = body.execution_payload

# Verify consistency of the parent hash with respect to the previous execution payload header
if is_merge_transition_complete(state):
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
assert payload.parent_hash == state.latest_execution_payload_header.block_hash
# Verify prev_randao
assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state))
# Verify timestamp
4 changes: 0 additions & 4 deletions specs/deneb/fork-choice.md
@@ -98,10 +98,6 @@ def on_block(store: Store, signed_block: SignedBeaconBlock) -> None:
block_root = hash_tree_root(block)
state_transition(state, signed_block, True)

# Check the merge transition
if is_merge_transition_block(pre_state, block.body):
validate_merge_block(block)

# Add new block to the store
store.blocks[block_root] = block
# Add new state for this block to the store
17 changes: 9 additions & 8 deletions specs/deneb/polynomial-commitments.md
@@ -77,6 +77,7 @@ Public functions MUST accept raw bytes as input and perform the required cryptog
| `BYTES_PER_FIELD_ELEMENT` | `uint64(32)` | Bytes used to encode a BLS scalar field element |
| `BYTES_PER_BLOB` | `uint64(BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB)` | The number of bytes in a blob |
| `G1_POINT_AT_INFINITY` | `Bytes48(b'\xc0' + b'\x00' * 47)` | Serialized form of the point at infinity on the G1 group |
| `KZG_ENDIANNESS` | `'big'` | The endianness of the field elements including blobs |


## Preset
@@ -161,7 +162,7 @@ def hash_to_bls_field(data: bytes) -> BLSFieldElement:
The output is not uniform over the BLS field.
"""
hashed_data = hash(data)
return BLSFieldElement(int.from_bytes(hashed_data, ENDIANNESS) % BLS_MODULUS)
return BLSFieldElement(int.from_bytes(hashed_data, KZG_ENDIANNESS) % BLS_MODULUS)
```

#### `bytes_to_bls_field`
@@ -172,7 +173,7 @@ def bytes_to_bls_field(b: Bytes32) -> BLSFieldElement:
Convert untrusted bytes to a trusted and validated BLS scalar field element.
This function does not accept inputs greater than the BLS modulus.
"""
field_element = int.from_bytes(b, ENDIANNESS)
field_element = int.from_bytes(b, KZG_ENDIANNESS)
assert field_element < BLS_MODULUS
return BLSFieldElement(field_element)
```
@@ -237,7 +238,7 @@ def compute_challenge(blob: Blob,
"""

# Append the degree of the polynomial as a domain separator
degree_poly = int.to_bytes(FIELD_ELEMENTS_PER_BLOB, 16, ENDIANNESS)
degree_poly = int.to_bytes(FIELD_ELEMENTS_PER_BLOB, 16, KZG_ENDIANNESS)
data = FIAT_SHAMIR_PROTOCOL_DOMAIN + degree_poly

data += blob
@@ -406,15 +407,15 @@ def verify_kzg_proof_batch(commitments: Sequence[KZGCommitment],

# Compute a random challenge. Note that it does not have to be computed from a hash,
# r just has to be random.
degree_poly = int.to_bytes(FIELD_ELEMENTS_PER_BLOB, 8, ENDIANNESS)
num_commitments = int.to_bytes(len(commitments), 8, ENDIANNESS)
degree_poly = int.to_bytes(FIELD_ELEMENTS_PER_BLOB, 8, KZG_ENDIANNESS)
num_commitments = int.to_bytes(len(commitments), 8, KZG_ENDIANNESS)
data = RANDOM_CHALLENGE_KZG_BATCH_DOMAIN + degree_poly + num_commitments

# Append all inputs to the transcript before we hash
for commitment, z, y, proof in zip(commitments, zs, ys, proofs):
data += commitment \
+ int.to_bytes(z, BYTES_PER_FIELD_ELEMENT, ENDIANNESS) \
+ int.to_bytes(y, BYTES_PER_FIELD_ELEMENT, ENDIANNESS) \
+ int.to_bytes(z, BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS) \
+ int.to_bytes(y, BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS) \
+ proof

r = hash_to_bls_field(data)
@@ -451,7 +452,7 @@ def compute_kzg_proof(blob: Blob, z_bytes: Bytes32) -> Tuple[KZGProof, Bytes32]:
assert len(z_bytes) == BYTES_PER_FIELD_ELEMENT
polynomial = blob_to_polynomial(blob)
proof, y = compute_kzg_proof_impl(polynomial, bytes_to_bls_field(z_bytes))
return proof, y.to_bytes(BYTES_PER_FIELD_ELEMENT, ENDIANNESS)
return proof, y.to_bytes(BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS)
```
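
The `ENDIANNESS` → `KZG_ENDIANNESS` substitutions above pin the KZG serialization of field elements to big-endian. A minimal round-trip sketch, assuming the BLS12-381 scalar field modulus (its value does not appear in this diff):

```python
# Standalone sketch of big-endian field-element (de)serialization under KZG_ENDIANNESS.
KZG_ENDIANNESS = 'big'
BYTES_PER_FIELD_ELEMENT = 32
# Assumed BLS12-381 scalar field modulus, not shown in this diff.
BLS_MODULUS = 52435875175126190479447740508185965837690552500527637822603658699938581184513

def bytes_to_bls_field(b: bytes) -> int:
    # Mirrors the spec helper: big-endian decode, reject values >= the modulus.
    field_element = int.from_bytes(b, KZG_ENDIANNESS)
    assert field_element < BLS_MODULUS
    return field_element

def bls_field_to_bytes(x: int) -> bytes:
    return x.to_bytes(BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS)

assert bytes_to_bls_field(bls_field_to_bytes(12345)) == 12345
```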

#### `compute_quotient_eval_within_domain`
