fix: could not decode ABIs when missing leading zeroes #2430

Merged 2 commits on Dec 23, 2024

71 changes: 45 additions & 26 deletions src/ape/utils/abi.py
@@ -3,9 +3,14 @@
from dataclasses import make_dataclass
from typing import Any, Optional, Union

from eth_abi import decode, grammar
from eth_abi import grammar
from eth_abi.abi import decode
from eth_abi.decoding import UnsignedIntegerDecoder
from eth_abi.encoding import UnsignedIntegerEncoder
from eth_abi.exceptions import DecodingError, InsufficientDataBytes
from eth_abi.registry import BaseEquals, registry
from eth_pydantic_types import HexBytes
from eth_pydantic_types.validators import validate_bytes_size
from eth_utils import decode_hex
from ethpm_types.abi import ABIType, ConstructorABI, EventABI, EventABIType, MethodABI

@@ -15,6 +20,37 @@
NATSPEC_KEY_PATTERN = re.compile(r"(@\w+)")


class _ApeUnsignedIntegerDecoder(UnsignedIntegerDecoder):
"""
This class exists because non-padded uint256 values
always cause issues, even with strict=False.
It can be deleted if https://github.com/ethereum/eth-abi/pull/240
merges.
"""

def read_data_from_stream(self, stream):
"""
Override to pad the value instead of raising an error.
"""
data_byte_size: int = self.data_byte_size # type: ignore
data = stream.read(data_byte_size)

if len(data) != data_byte_size:
# Pad the value (instead of raising InsufficientDataBytes).
data = validate_bytes_size(data, 32)

return data


registry.unregister("uint")
registry.register(
BaseEquals("uint"),
UnsignedIntegerEncoder,
_ApeUnsignedIntegerDecoder,
label="uint",
)
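
For context, a minimal sketch of the behavior this decoder works around, using only eth_abi calls already imported above; the 0x0141 value is borrowed from the new test below, and the manual left-padding mirrors what validate_bytes_size is assumed to do here:

from eth_abi import decode
from eth_abi.exceptions import InsufficientDataBytes

short_word = bytes.fromhex("0141")  # a uint256 topic value missing its leading zeroes

try:
    # Stock eth-abi insists on a full 32-byte word and raises here
    # (per the docstring above, strict=False does not help for this case).
    decode(["uint256"], short_word)
except InsufficientDataBytes:
    pass

# Left-padding the word back to 32 bytes recovers the intended integer.
assert decode(["uint256"], short_word.rjust(32, b"\x00"))[0] == 321  # 0x0141 == 321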


def is_array(abi_type: Union[str, ABIType]) -> bool:
"""
Returns ``True`` if the given type is probably an array.
@@ -418,7 +454,9 @@ def __init__(self, abi: EventABI):
def event_name(self):
return self.abi.name

def decode(self, topics: list[str], data: str, use_hex_on_fail: bool = False) -> dict:
def decode(
self, topics: list[str], data: Union[str, bytes], use_hex_on_fail: bool = False
) -> dict:
decoded = {}
for abi, topic_value in zip(self.topic_abi_types, topics[1:]):
# reference types as indexed arguments are written as a hash
@@ -427,42 +465,23 @@ def decode(self, topics: list[str], data: str, use_hex_on_fail: bool = False) ->
hex_value = decode_hex(topic_value)

try:
value = decode([abi_type], hex_value)[0]
value = decode([abi_type], hex_value, strict=False)[0]
except InsufficientDataBytes as err:
warning_message = f"Failed to decode log topic '{self.event_name}'."

# Try again with strict=False
try:
value = decode([abi_type], hex_value, strict=False)[0]
except Exception:
# Even with strict=False, we failed to decode.
# This should be a rare occasion, if it ever happens.
logger.warn_from_exception(err, warning_message)
if use_hex_on_fail:
if abi.name not in decoded:
# This allows logs to still be findable on the receipt.
decoded[abi.name] = hex_value
if use_hex_on_fail:
if abi.name not in decoded:
# This allows logs to still be findable on the receipt.
decoded[abi.name] = hex_value

else:
raise DecodingError(str(err)) from err

else:
# This happens when providers accidentally leave off trailing zeroes.
warning_message = (
f"{warning_message} "
"However, we are able to get a value using decode(strict=False)"
)
logger.warn_from_exception(err, warning_message)
decoded[abi.name] = self.decode_value(abi_type, value)

else:
# The data was formatted correctly and we were able to decode logs.
result = self.decode_value(abi_type, value)
decoded[abi.name] = result

data_abi_types = [abi.canonical_type for abi in self.data_abi_types]
hex_data = decode_hex(data) if isinstance(data, str) else data

try:
data_values = decode(data_abi_types, hex_data)
except InsufficientDataBytes as err:
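For contrast with the leading-zero case handled by the new decoder, a rough sketch of the trailing-zero case that the strict=False argument already tolerates on its own; the truncated string payload is illustrative rather than taken from the test fixture:

from eth_abi import decode

# A dynamic string encoding whose last word lost its trailing zero padding,
# the way some providers return log data.
truncated = bytes.fromhex(
    "0000000000000000000000000000000000000000000000000000000000000020"  # offset
    "0000000000000000000000000000000000000000000000000000000000000003"  # length
    "616263"  # "abc" without the trailing 29 padding bytes
)

# strict=True (the default) raises InsufficientDataBytes; strict=False tolerates it.
assert decode(["string"], truncated, strict=False)[0] == "abc"
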
34 changes: 32 additions & 2 deletions tests/functional/utils/test_abi.py
@@ -53,7 +53,7 @@ def topics():
return ["0xc52ec0ad7872dae440d886040390c13677df7bf3cca136d8d81e5e5e7dd62ff1"]


@pytest.fixture
@pytest.fixture(scope="module")
def log_data_missing_trailing_zeroes():
return HexBytes(
"0x000000000000000000000000000000000000000000000000000000000000001e"
@@ -65,7 +65,9 @@ def log_data_missing_trailing_zeroes():
)


def test_decoding_with_strict(collection, topics, log_data_missing_trailing_zeroes, ape_caplog):
def test_decode_data_missing_trailing_zeroes(
collection, topics, log_data_missing_trailing_zeroes, ape_caplog
):
"""
This test is for a time when Alchemy gave us log data that was missing trailing zeroes.
When using strict=False, it was able to decode properly. In this case, in Ape, we warn
@@ -84,6 +86,34 @@ def test_decoding_with_strict(collection, topics, log_data_missing_trailing_zero
assert actual == expected


def test_decode_topics_missing_leading_zeroes(vyper_contract_type):
# The second value here was the problem before: it has no leading zeroes,
# and eth-abi is very strict about that.
topics = [
"0xa84473122c11e32cd505595f246a28418b8ecd6cf819f4e3915363fad1b8f968",
"0x0141",
"0x9f3d45ac20ccf04b45028b8080bb191eab93e29f7898ed43acf480dd80bba94d",
]

# NOTE: data isn't really part of the test but still has to be included.
data = (
b"\x9c\xe2\xce\xf5\x9b\xf2\xdeu\x83f\xf8s\xdb\x7f&\xef\xab\x9bw\xf7\xcf"
b"\xe9\xc8I\xb6\xb5@\x04g\xa9)\x86\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00`\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07"
b"Dynamic\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
)
abi = vyper_contract_type.events["NumberChange"]
collection = LogInputABICollection(abi)

actual = collection.decode(topics, data)
assert actual["newNum"] == 321  # NOTE: There was a bug where this caused issues.
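
For reference, a rough sketch of the mechanism the assertion above relies on: importing ape.utils.abi runs the module-level registry.register call, so even a plain eth_abi.decode now pads the short topic word instead of raising (assuming validate_bytes_size left-pads to 32 bytes, which the expected value of 321 implies):

from eth_abi import decode
from eth_utils import decode_hex

import ape.utils.abi  # noqa: F401 - importing registers _ApeUnsignedIntegerDecoder for "uint"

# "0x0141" is only two bytes; the custom decoder pads it to a full 32-byte word.
assert decode(["uint256"], decode_hex("0x0141"))[0] == 321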


class TestStruct:
@pytest.fixture
def struct(self):