diff --git a/src/lean_spec/subspecs/networking/config.py b/src/lean_spec/subspecs/networking/config.py index 46d79f45..e09054f9 100644 --- a/src/lean_spec/subspecs/networking/config.py +++ b/src/lean_spec/subspecs/networking/config.py @@ -80,3 +80,6 @@ Per Ethereum spec, message IDs are the first 20 bytes of SHA256(domain + topic_len + topic + data). """ + +MAX_ERROR_MESSAGE_SIZE: Final[int] = 256 +"""Maximum error message size in bytes per Ethereum P2P spec (ErrorMessage: List[byte, 256]).""" diff --git a/src/lean_spec/subspecs/networking/reqresp/__init__.py b/src/lean_spec/subspecs/networking/reqresp/__init__.py index 5bba3609..b51d92a3 100644 --- a/src/lean_spec/subspecs/networking/reqresp/__init__.py +++ b/src/lean_spec/subspecs/networking/reqresp/__init__.py @@ -1,14 +1,10 @@ """ReqResp specs for the Lean Ethereum consensus specification.""" from .codec import ( - CONTEXT_BYTES_LENGTH, CodecError, - ForkDigestMismatchError, ResponseCode, decode_request, encode_request, - prepend_context_bytes, - validate_context_bytes, ) from .handler import ( REQRESP_PROTOCOL_IDS, @@ -17,7 +13,7 @@ ReqRespServer, RequestHandler, ResponseStream, - YamuxResponseStream, + StreamResponseAdapter, ) from .message import ( BLOCKS_BY_ROOT_PROTOCOL_V1, @@ -38,19 +34,14 @@ "Status", # Codec "CodecError", - "ForkDigestMismatchError", "ResponseCode", "encode_request", "decode_request", - # Context bytes - "CONTEXT_BYTES_LENGTH", - "prepend_context_bytes", - "validate_context_bytes", # Inbound handlers "BlockLookup", "DefaultRequestHandler", "RequestHandler", "ReqRespServer", "ResponseStream", - "YamuxResponseStream", + "StreamResponseAdapter", ] diff --git a/src/lean_spec/subspecs/networking/reqresp/codec.py b/src/lean_spec/subspecs/networking/reqresp/codec.py index 3caa7ad1..33a3cbd9 100644 --- a/src/lean_spec/subspecs/networking/reqresp/codec.py +++ b/src/lean_spec/subspecs/networking/reqresp/codec.py @@ -92,78 +92,6 @@ class CodecError(Exception): """ -class ForkDigestMismatchError(CodecError): - """Raised when context bytes (fork_digest) do not match expected value. - - Context bytes are 4 bytes prepended to each response chunk in - protocols that return fork-specific data (BlocksByRange, BlobSidecars). - """ - - def __init__(self, expected: bytes, actual: bytes) -> None: - """Initialize with expected and actual fork digests.""" - self.expected = expected - self.actual = actual - super().__init__(f"Fork digest mismatch: expected {expected.hex()}, got {actual.hex()}") - - -CONTEXT_BYTES_LENGTH: int = 4 -"""Length of context bytes (fork_digest) in responses.""" - - -def validate_context_bytes(data: bytes, expected_fork_digest: bytes) -> bytes: - """ - Validate and strip context bytes from a response chunk. - - Some req/resp protocols prepend a 4-byte fork_digest (context bytes) - to each response chunk. This allows clients to verify they're receiving - data for the expected fork. - - Args: - data: Response data with context bytes prepended. - expected_fork_digest: Expected 4-byte fork_digest. - - Returns: - Response data with context bytes stripped. - - Raises: - CodecError: If data is too short to contain context bytes. - ForkDigestMismatchError: If context bytes don't match expected. 
- """ - if len(data) < CONTEXT_BYTES_LENGTH: - raise CodecError( - f"Response too short for context bytes: {len(data)} < {CONTEXT_BYTES_LENGTH}" - ) - - context_bytes = data[:CONTEXT_BYTES_LENGTH] - if context_bytes != expected_fork_digest: - raise ForkDigestMismatchError(expected_fork_digest, context_bytes) - - return data[CONTEXT_BYTES_LENGTH:] - - -def prepend_context_bytes(data: bytes, fork_digest: bytes) -> bytes: - """ - Prepend context bytes (fork_digest) to a response chunk. - - Used when sending responses for protocols that require context bytes. - - Args: - data: Response data to send. - fork_digest: 4-byte fork_digest to prepend. - - Returns: - Response data with context bytes prepended. - - Raises: - ValueError: If fork_digest is not exactly 4 bytes. - """ - if len(fork_digest) != CONTEXT_BYTES_LENGTH: - raise ValueError( - f"Fork digest must be {CONTEXT_BYTES_LENGTH} bytes, got {len(fork_digest)}" - ) - return fork_digest + data - - def encode_request(ssz_data: bytes) -> bytes: """ Encode an SSZ-serialized request for transmission. diff --git a/src/lean_spec/subspecs/networking/reqresp/handler.py b/src/lean_spec/subspecs/networking/reqresp/handler.py index 22c3bbbc..c32c29f4 100644 --- a/src/lean_spec/subspecs/networking/reqresp/handler.py +++ b/src/lean_spec/subspecs/networking/reqresp/handler.py @@ -26,7 +26,7 @@ This design provides three benefits: 1. Testability: Unit tests provide mock streams without network I/O. -2. Flexibility: Different transports (yamux, memory, etc.) work with the same handlers. +2. Flexibility: Different transports work with the same handlers. 3. Clarity: Handlers focus on protocol logic, not wire format encoding. The ResponseStream translates high-level operations (send success, send error) into the @@ -75,13 +75,14 @@ import logging from abc import ABC, abstractmethod from collections.abc import Awaitable, Callable -from dataclasses import dataclass, field +from dataclasses import dataclass from typing import Protocol -from lean_spec.snappy import frame_decompress +from lean_spec.snappy import SnappyDecompressionError, frame_decompress from lean_spec.subspecs.containers import SignedBlockWithAttestation +from lean_spec.subspecs.networking.config import MAX_ERROR_MESSAGE_SIZE from lean_spec.subspecs.networking.transport.connection.types import Stream -from lean_spec.subspecs.networking.varint import decode_varint +from lean_spec.subspecs.networking.varint import VarintError, decode_varint from lean_spec.types import Bytes32 from .codec import ResponseCode @@ -94,9 +95,6 @@ logger = logging.getLogger(__name__) -REQUEST_TIMEOUT_SECONDS: float = 10.0 -"""Default timeout for processing inbound requests.""" - class ResponseStream(Protocol): """ @@ -143,68 +141,34 @@ async def finish(self) -> None: @dataclass(slots=True) -class YamuxResponseStream: - """ - ResponseStream implementation wrapping a yamux stream. +class StreamResponseAdapter: + """Adapts a transport Stream to the ResponseStream protocol. Encodes responses using the wire format from codec.py and writes them to the underlying stream. """ _stream: Stream - """Underlying yamux stream.""" + """Underlying transport stream.""" async def send_success(self, ssz_data: bytes) -> None: - """ - Send a SUCCESS response chunk. + """Send a SUCCESS response chunk. Args: ssz_data: SSZ-encoded response payload. """ - # Encode the response using the protocol wire format. - # - # ResponseCode.SUCCESS (0x00) tells the peer this chunk contains valid data. - # The encode method handles: - # - # 1. 
Prepending the response code byte - # 2. Adding the varint length prefix - # 3. Compressing with Snappy framing encoded = ResponseCode.SUCCESS.encode(ssz_data) - - # Write using sync write + async drain for compatibility with both - # raw QUIC streams (async write) and wrapper streams (sync write + drain). - write_result = self._stream.write(encoded) - if hasattr(write_result, "__await__"): - await write_result - drain = getattr(self._stream, "drain", None) - if drain is not None: - await drain() + await self._stream.write(encoded) async def send_error(self, code: ResponseCode, message: str) -> None: - """ - Send an error response. + """Send an error response. Args: code: Error code. message: Human-readable error description. """ - # Error messages must be UTF-8 encoded per the Ethereum P2P spec. - # - # The spec mandates UTF-8 for interoperability across clients. - # Common error codes: - # - # - INVALID_REQUEST (1): Malformed request, bad SSZ, protocol violation - # - SERVER_ERROR (2): Internal failure, handler exception - # - RESOURCE_UNAVAILABLE (3): Block/blob not found - encoded = code.encode(message.encode("utf-8")) - - # Write using sync write + async drain for compatibility. - write_result = self._stream.write(encoded) - if hasattr(write_result, "__await__"): - await write_result - drain = getattr(self._stream, "drain", None) - if drain is not None: - await drain() + encoded = code.encode(message.encode("utf-8")[:MAX_ERROR_MESSAGE_SIZE]) + await self._stream.write(encoded) async def finish(self) -> None: """Close the stream gracefully.""" @@ -425,13 +389,6 @@ class ReqRespServer: handler: RequestHandler """Handler for processing requests.""" - _pending_data: dict[int, bytearray] = field(default_factory=dict) - """Buffer for accumulating request data by stream ID. - - Request data may arrive in multiple chunks. We accumulate until - the stream closes, then process the complete request. - """ - async def handle_stream(self, stream: Stream, protocol_id: str) -> None: """ Handle an incoming ReqResp stream. @@ -439,10 +396,10 @@ async def handle_stream(self, stream: Stream, protocol_id: str) -> None: Reads the request, decodes it, and dispatches to the appropriate handler. Args: - stream: Incoming yamux stream. + stream: Incoming transport stream. protocol_id: Negotiated protocol ID. """ - response = YamuxResponseStream(_stream=stream) + response = StreamResponseAdapter(_stream=stream) try: # Step 1: Read and decode the request. @@ -523,20 +480,27 @@ async def _read_request(self, stream: Stream) -> bytes: # Try to decode the varint try: declared_length, varint_size = decode_varint(bytes(buffer)) - except Exception: + except VarintError: # Need more data for varint continue + # Guard against unbounded compressed data. + # Snappy's worst-case expansion is ~(n + n/6 + 1024). 
+ max_compressed = declared_length + declared_length // 6 + 1024 + # Now read until we can successfully decompress compressed_data = buffer[varint_size:] while True: + if len(compressed_data) > max_compressed: + return b"" + try: decompressed = frame_decompress(bytes(compressed_data)) if len(decompressed) == declared_length: return decompressed # Length mismatch - need more data - except Exception: + except SnappyDecompressionError: # Decompression failed - need more data pass @@ -545,7 +509,7 @@ async def _read_request(self, stream: Stream) -> bytes: # Stream closed, try one more decompress try: return frame_decompress(bytes(compressed_data)) - except Exception: + except SnappyDecompressionError: return bytes(buffer) compressed_data.extend(chunk) diff --git a/src/lean_spec/subspecs/networking/reqresp/message.py b/src/lean_spec/subspecs/networking/reqresp/message.py index 4c84d6f7..fc5b04c2 100644 --- a/src/lean_spec/subspecs/networking/reqresp/message.py +++ b/src/lean_spec/subspecs/networking/reqresp/message.py @@ -14,8 +14,6 @@ from ..config import MAX_REQUEST_BLOCKS from ..types import ProtocolId -# --- Status v1 --- - STATUS_PROTOCOL_V1: ProtocolId = "/leanconsensus/req/status/1/ssz_snappy" """The protocol ID for the Status v1 request/response message.""" @@ -42,8 +40,6 @@ class Status(Container): """The client's current head checkpoint.""" -# --- BlocksByRoot v1 --- - BLOCKS_BY_ROOT_PROTOCOL_V1: ProtocolId = "/leanconsensus/req/blocks_by_root/1/ssz_snappy" """The protocol ID for the BlocksByRoot v1 request/response message.""" diff --git a/tests/lean_spec/subspecs/networking/reqresp/test_handler.py b/tests/lean_spec/subspecs/networking/reqresp/test_handler.py index 5393df3d..95c38433 100644 --- a/tests/lean_spec/subspecs/networking/reqresp/test_handler.py +++ b/tests/lean_spec/subspecs/networking/reqresp/test_handler.py @@ -7,17 +7,16 @@ from lean_spec.subspecs.containers import Checkpoint, SignedBlockWithAttestation from lean_spec.subspecs.containers.slot import Slot +from lean_spec.subspecs.networking.config import MAX_ERROR_MESSAGE_SIZE from lean_spec.subspecs.networking.reqresp.codec import ( ResponseCode, encode_request, ) from lean_spec.subspecs.networking.reqresp.handler import ( REQRESP_PROTOCOL_IDS, - REQUEST_TIMEOUT_SECONDS, - BlockLookup, DefaultRequestHandler, ReqRespServer, - YamuxResponseStream, + StreamResponseAdapter, ) from lean_spec.subspecs.networking.reqresp.message import ( BLOCKS_BY_ROOT_PROTOCOL_V1, @@ -26,6 +25,7 @@ RequestedBlockRoots, Status, ) +from lean_spec.subspecs.networking.varint import encode_varint from lean_spec.types import Bytes32 from tests.lean_spec.helpers import make_test_block, make_test_status @@ -110,13 +110,13 @@ async def finish(self) -> None: self.finished = True -class TestYamuxResponseStream: - """Tests for YamuxResponseStream wire format encoding.""" +class TestStreamResponseAdapter: + """Tests for StreamResponseAdapter wire format encoding.""" async def test_send_success_encodes_correctly(self) -> None: """Success response uses SUCCESS code and encodes SSZ data.""" stream = MockStream() - response = YamuxResponseStream(_stream=stream) + response = StreamResponseAdapter(_stream=stream) ssz_data = b"\x01\x02\x03\x04" await response.send_success(ssz_data) @@ -137,7 +137,7 @@ async def test_send_success_encodes_correctly(self) -> None: async def test_send_error_encodes_correctly(self) -> None: """Error response uses specified code and UTF-8 message.""" stream = MockStream() - response = YamuxResponseStream(_stream=stream) + response = 
StreamResponseAdapter(_stream=stream) await response.send_error(ResponseCode.INVALID_REQUEST, "Bad request") @@ -157,7 +157,7 @@ async def test_send_error_encodes_correctly(self) -> None: async def test_send_error_server_error(self) -> None: """SERVER_ERROR code encodes correctly.""" stream = MockStream() - response = YamuxResponseStream(_stream=stream) + response = StreamResponseAdapter(_stream=stream) await response.send_error(ResponseCode.SERVER_ERROR, "Internal error") @@ -170,7 +170,7 @@ async def test_send_error_server_error(self) -> None: async def test_send_error_resource_unavailable(self) -> None: """RESOURCE_UNAVAILABLE code encodes correctly.""" stream = MockStream() - response = YamuxResponseStream(_stream=stream) + response = StreamResponseAdapter(_stream=stream) await response.send_error(ResponseCode.RESOURCE_UNAVAILABLE, "Block not found") @@ -183,7 +183,7 @@ async def test_send_error_resource_unavailable(self) -> None: async def test_finish_closes_stream(self) -> None: """Finish closes the underlying stream.""" stream = MockStream() - response = YamuxResponseStream(_stream=stream) + response = StreamResponseAdapter(_stream=stream) assert not stream.closed await response.finish() @@ -664,68 +664,13 @@ async def lookup(root: Bytes32) -> SignedBlockWithAttestation | None: assert blocks[0].message.block.slot == Slot(10) -class TestResponseStreamProtocol: - """Tests verifying ResponseStream protocol compliance.""" - - def test_mock_response_stream_is_protocol_compliant(self) -> None: - """MockResponseStream implements ResponseStream protocol.""" - # This test verifies our mock is usable with the handler - mock = MockResponseStream() - - # Should have the required methods - assert hasattr(mock, "send_success") - assert hasattr(mock, "send_error") - assert hasattr(mock, "finish") - - # Methods should be callable - assert callable(mock.send_success) - assert callable(mock.send_error) - assert callable(mock.finish) - - def test_yamux_response_stream_is_protocol_compliant(self) -> None: - """YamuxResponseStream implements ResponseStream protocol.""" - stream = MockStream() - yamux = YamuxResponseStream(_stream=stream) - - assert hasattr(yamux, "send_success") - assert hasattr(yamux, "send_error") - assert hasattr(yamux, "finish") - - -class TestBlockLookupTypeAlias: - """Tests for BlockLookup type alias usage.""" - - async def test_async_function_matches_block_lookup_signature(self) -> None: - """Verify async function can be used as BlockLookup.""" - - async def my_lookup(root: Bytes32) -> SignedBlockWithAttestation | None: - return None - - # Should type-check as BlockLookup - lookup: BlockLookup = my_lookup - - result = await lookup(Bytes32(b"\x00" * 32)) - assert result is None - - async def test_block_lookup_returning_block(self) -> None: - """BlockLookup returning a block works correctly.""" - block = make_test_block(slot=42, seed=42) - - async def my_lookup(root: Bytes32) -> SignedBlockWithAttestation | None: - return block - - result = await my_lookup(Bytes32(b"\x00" * 32)) - assert result is not None - assert result.message.block.slot == Slot(42) - - -class TestYamuxResponseStreamMultipleResponses: - """Tests for YamuxResponseStream with multiple responses in sequence.""" +class TestStreamResponseAdapterMultipleResponses: + """Tests for StreamResponseAdapter with multiple responses in sequence.""" async def test_send_multiple_success_responses(self) -> None: """Multiple SUCCESS responses are written independently.""" stream = MockStream() - response = 
YamuxResponseStream(_stream=stream) + response = StreamResponseAdapter(_stream=stream) await response.send_success(b"\x01\x02") await response.send_success(b"\x03\x04") @@ -745,7 +690,7 @@ async def test_send_multiple_success_responses(self) -> None: async def test_send_success_then_error(self) -> None: """Success response followed by error response.""" stream = MockStream() - response = YamuxResponseStream(_stream=stream) + response = StreamResponseAdapter(_stream=stream) await response.send_success(b"\xaa\xbb") await response.send_error(ResponseCode.RESOURCE_UNAVAILABLE, "Done") @@ -765,7 +710,7 @@ async def test_send_success_then_error(self) -> None: async def test_send_empty_success_response(self) -> None: """Empty SUCCESS response payload is handled.""" stream = MockStream() - response = YamuxResponseStream(_stream=stream) + response = StreamResponseAdapter(_stream=stream) await response.send_success(b"") @@ -1196,56 +1141,58 @@ async def test_error_response_sent_despite_write_exception(self) -> None: assert stream.close_attempts >= 1 -class TestRequestTimeoutConstant: - """Tests for REQUEST_TIMEOUT_SECONDS constant.""" - - def test_timeout_is_positive(self) -> None: - """Request timeout is a positive number.""" - assert REQUEST_TIMEOUT_SECONDS > 0 +class TestStreamResponseAdapterErrorTruncation: + """Tests for error message truncation in StreamResponseAdapter.""" - def test_timeout_is_reasonable(self) -> None: - """Request timeout is within reasonable bounds.""" - # Should be at least a few seconds - assert REQUEST_TIMEOUT_SECONDS >= 1.0 - # Should not be excessively long - assert REQUEST_TIMEOUT_SECONDS <= 60.0 + async def test_send_error_truncates_long_messages(self) -> None: + """Error messages exceeding 256 bytes are truncated.""" + stream = MockStream() + response = StreamResponseAdapter(_stream=stream) + long_message = "X" * 500 + await response.send_error(ResponseCode.INVALID_REQUEST, long_message) -class TestMockStreamProtocolCompliance: - """Tests verifying mock streams match the Stream protocol.""" + encoded = stream.written[0] + code, decoded = ResponseCode.decode(encoded) + assert code == ResponseCode.INVALID_REQUEST + assert len(decoded) == MAX_ERROR_MESSAGE_SIZE - def test_mock_stream_has_protocol_id(self) -> None: - """MockStream has protocol_id property.""" + async def test_send_error_short_message_unchanged(self) -> None: + """Short error messages are not truncated.""" stream = MockStream() - assert hasattr(stream, "protocol_id") - assert isinstance(stream.protocol_id, str) + response = StreamResponseAdapter(_stream=stream) - def test_mock_stream_has_read_method(self) -> None: - """MockStream has read method.""" - stream = MockStream() - assert hasattr(stream, "read") - assert callable(stream.read) + short_message = "Bad request" + await response.send_error(ResponseCode.INVALID_REQUEST, short_message) - def test_mock_stream_has_write_method(self) -> None: - """MockStream has write method.""" - stream = MockStream() - assert hasattr(stream, "write") - assert callable(stream.write) + encoded = stream.written[0] + code, decoded = ResponseCode.decode(encoded) + assert code == ResponseCode.INVALID_REQUEST + assert decoded == b"Bad request" - def test_mock_stream_has_close_method(self) -> None: - """MockStream has close method.""" - stream = MockStream() - assert hasattr(stream, "close") - assert callable(stream.close) - def test_mock_stream_has_reset_method(self) -> None: - """MockStream has reset method.""" - stream = MockStream() - assert hasattr(stream, "reset") - 
assert callable(stream.reset) +class TestReadRequestBufferLimit: + """Tests for buffer size limits in _read_request.""" + + async def test_read_request_rejects_oversized_compressed_data(self) -> None: + """Unbounded compressed data stream is rejected.""" + handler = DefaultRequestHandler(our_status=make_test_status()) + server = ReqRespServer(handler=handler) + + # Send a small varint claiming length 10, then flood with garbage data + # exceeding the max compressed size limit + declared_length = 10 + varint_bytes = encode_varint(declared_length) + max_compressed = declared_length + declared_length // 6 + 1024 + + # Create a stream with varint + way more data than max_compressed + oversized_data = varint_bytes + b"\x00" * (max_compressed + 5000) + stream = MockStream(request_data=oversized_data) + + await server.handle_stream(stream, STATUS_PROTOCOL_V1) - async def test_mock_stream_reset_closes_stream(self) -> None: - """MockStream reset marks stream as closed.""" - stream = MockStream() - await stream.reset() assert stream.closed is True + assert len(stream.written) >= 1 + + code, _ = ResponseCode.decode(stream.written[0]) + assert code == ResponseCode.INVALID_REQUEST diff --git a/tests/lean_spec/subspecs/networking/test_reqresp.py b/tests/lean_spec/subspecs/networking/test_reqresp.py index d188687d..f041cef0 100644 --- a/tests/lean_spec/subspecs/networking/test_reqresp.py +++ b/tests/lean_spec/subspecs/networking/test_reqresp.py @@ -21,16 +21,12 @@ import pytest -from lean_spec.subspecs.networking import ResponseCode -from lean_spec.subspecs.networking.config import MAX_PAYLOAD_SIZE +from lean_spec.subspecs.networking.config import MAX_ERROR_MESSAGE_SIZE, MAX_PAYLOAD_SIZE from lean_spec.subspecs.networking.reqresp import ( - CONTEXT_BYTES_LENGTH, CodecError, - ForkDigestMismatchError, + ResponseCode, decode_request, encode_request, - prepend_context_bytes, - validate_context_bytes, ) from lean_spec.subspecs.networking.varint import ( VarintError, @@ -602,94 +598,17 @@ def test_incompressible_data(self) -> None: assert decoded == incompressible -class TestContextBytesValidation: - """Tests for context bytes (fork_digest) validation in responses. +class TestErrorMessageMaxSize: + """Tests for error message size limits per Ethereum P2P spec.""" - Some req/resp protocols prepend a 4-byte fork_digest to each response - chunk. This allows clients to verify they're receiving data for the - expected fork. 
- - Reference: Ethereum P2P Interface Spec - https://github.com/ethereum/consensus-specs/blob/master/specs/phase0/p2p-interface.md - """ - - def test_validate_context_bytes_success(self) -> None: - """Valid context bytes are validated and stripped.""" - fork_digest = b"\x12\x34\x56\x78" - payload = b"block data here" - data = fork_digest + payload - - result = validate_context_bytes(data, fork_digest) - assert result == payload - - def test_validate_context_bytes_mismatch(self) -> None: - """Mismatched context bytes raise ForkDigestMismatchError.""" - expected_fork = b"\x12\x34\x56\x78" - actual_fork = b"\xde\xad\xbe\xef" - payload = b"block data" - data = actual_fork + payload - - with pytest.raises(ForkDigestMismatchError) as exc_info: - validate_context_bytes(data, expected_fork) - - assert exc_info.value.expected == expected_fork - assert exc_info.value.actual == actual_fork - - def test_validate_context_bytes_too_short(self) -> None: - """Data shorter than context bytes raises CodecError.""" - fork_digest = b"\x12\x34\x56\x78" - too_short = b"\x12\x34" # Only 2 bytes - - with pytest.raises(CodecError, match="too short"): - validate_context_bytes(too_short, fork_digest) - - def test_validate_context_bytes_exactly_4_bytes(self) -> None: - """Data of exactly 4 bytes (context only, no payload) works.""" - fork_digest = b"\x12\x34\x56\x78" - data = fork_digest # No payload, just context bytes - - result = validate_context_bytes(data, fork_digest) - assert result == b"" - - def test_prepend_context_bytes(self) -> None: - """Context bytes are correctly prepended to payload.""" - fork_digest = b"\x12\x34\x56\x78" - payload = b"block data" - - result = prepend_context_bytes(payload, fork_digest) - assert result == fork_digest + payload - assert len(result) == len(fork_digest) + len(payload) - - def test_prepend_context_bytes_wrong_length(self) -> None: - """Prepending context bytes with wrong length raises ValueError.""" - invalid_fork = b"\x12\x34\x56" # Only 3 bytes - payload = b"block data" - - with pytest.raises(ValueError, match="4 bytes"): - prepend_context_bytes(payload, invalid_fork) - - def test_context_bytes_roundtrip(self) -> None: - """Prepend and validate context bytes roundtrip.""" - fork_digest = b"\xab\xcd\xef\x01" - original_payload = b"some response data" - - # Prepend context bytes (sender side) - with_context = prepend_context_bytes(original_payload, fork_digest) - - # Validate and strip context bytes (receiver side) - recovered_payload = validate_context_bytes(with_context, fork_digest) - - assert recovered_payload == original_payload - - def test_fork_digest_mismatch_error_message(self) -> None: - """ForkDigestMismatchError has informative message.""" - expected = b"\x12\x34\x56\x78" - actual = b"\xde\xad\xbe\xef" - - error = ForkDigestMismatchError(expected, actual) - assert "12345678" in str(error) - assert "deadbeef" in str(error) + def test_error_payload_within_limit(self) -> None: + """Error payload at exactly MAX_ERROR_MESSAGE_SIZE roundtrips.""" + message = b"A" * MAX_ERROR_MESSAGE_SIZE + encoded = ResponseCode.INVALID_REQUEST.encode(message) + code, decoded = ResponseCode.decode(encoded) + assert code == ResponseCode.INVALID_REQUEST + assert decoded == message - def test_context_bytes_length_constant(self) -> None: - """CONTEXT_BYTES_LENGTH constant is 4.""" - assert CONTEXT_BYTES_LENGTH == 4 + def test_max_error_message_size_is_256(self) -> None: + """MAX_ERROR_MESSAGE_SIZE matches Ethereum spec (ErrorMessage: List[byte, 256]).""" + assert MAX_ERROR_MESSAGE_SIZE 
== 256
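
Reviewer note: a minimal standalone sketch of the compressed-size bound that the new _read_request guard relies on. The helper name snappy_max_compressed is illustrative only and does not exist in the patch; it simply mirrors the arithmetic added in handler.py (declared_length + declared_length // 6 + 1024, where the 1024 bytes of slack are assumed to cover Snappy frame headers).

def snappy_max_compressed(declared_length: int) -> int:
    """Upper bound used when reading a request: raw Snappy output can grow the
    input by roughly n/6, and 1024 bytes of slack covers framing overhead."""
    return declared_length + declared_length // 6 + 1024


# A tiny Status request: the bound is dominated by the fixed slack,
# matching the values exercised in test_read_request_rejects_oversized_compressed_data.
assert snappy_max_compressed(10) == 1035

# A 1 MiB payload may legitimately arrive as up to ~1.17 MiB of compressed data.
print(snappy_max_compressed(1 << 20))  # 1224362

Any stream that keeps sending past this bound without yielding a valid decompression is cut off and answered with INVALID_REQUEST, which is the behavior the new test asserts.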