-
Notifications
You must be signed in to change notification settings - Fork 265
Geth support for inspect #112
base: main
Are you sure you want to change the base?
Changes from 1 commit
75ac0ea
2f1d826
c841313
54bd2e6
3fa8655
8504ac5
0895a0f
d1a1a53
f705bb9
b31f5d7
6b6dd45
935d0c9
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,8 +1,6 @@ | ||
import asyncio | ||
import logging | ||
from typing import List, Optional | ||
import json | ||
import aiohttp | ||
|
||
from sqlalchemy import orm | ||
from web3 import Web3 | ||
|
@@ -11,10 +9,17 @@ | |
from mev_inspect.schemas.blocks import Block | ||
from mev_inspect.schemas.receipts import Receipt | ||
from mev_inspect.schemas.traces import Trace, TraceType | ||
from mev_inspect.utils import hex_to_int | ||
from mev_inspect.utils import RPCType, hex_to_int | ||
|
||
|
||
logger = logging.getLogger(__name__) | ||
_calltype_mapping = { | ||
"CALL": "call", | ||
"DELEGATECALL": "delegateCall", | ||
"CREATE": "create", | ||
"SUICIDE": "suicide", | ||
"REWARD": "reward", | ||
} | ||
|
||
|
||
async def get_latest_block_number(base_provider) -> int: | ||
|
@@ -29,7 +34,7 @@ async def get_latest_block_number(base_provider) -> int: | |
async def create_from_block_number( | ||
base_provider, | ||
w3: Web3, | ||
geth: bool, | ||
type: RPCType, | ||
block_number: int, | ||
trace_db_session: Optional[orm.Session], | ||
) -> Block: | ||
|
@@ -39,55 +44,63 @@ async def create_from_block_number( | |
block = _find_block(trace_db_session, block_number) | ||
|
||
if block is None: | ||
block = await _fetch_block(w3, base_provider, geth, block_number) | ||
return block | ||
if type is RPCType.parity: | ||
block = await _fetch_block_parity(w3, base_provider, block_number) | ||
elif type is RPCType.geth: | ||
block = await _fetch_block_geth(w3, base_provider, block_number) | ||
else: | ||
logger.error(f"RPCType not known - {type}") | ||
raise ValueError | ||
return block | ||
|
||
|
||
async def _fetch_block( | ||
w3, base_provider, geth: bool, block_number: int, retries: int = 0 | ||
async def _fetch_block_parity( | ||
w3, base_provider, block_number: int, retries: int = 0 | ||
) -> Block: | ||
if not geth: | ||
block_json, receipts_json, traces_json, base_fee_per_gas = await asyncio.gather( | ||
w3.eth.get_block(block_number), | ||
base_provider.make_request("eth_getBlockReceipts", [block_number]), | ||
base_provider.make_request("trace_block", [block_number]), | ||
fetch_base_fee_per_gas(w3, block_number), | ||
block_json, receipts_json, traces_json, base_fee_per_gas = await asyncio.gather( | ||
w3.eth.get_block(block_number), | ||
base_provider.make_request("eth_getBlockReceipts", [block_number]), | ||
base_provider.make_request("trace_block", [block_number]), | ||
fetch_base_fee_per_gas(w3, block_number), | ||
) | ||
|
||
try: | ||
receipts: List[Receipt] = [ | ||
Receipt(**receipt) for receipt in receipts_json["result"] | ||
] | ||
traces = [Trace(**trace_json) for trace_json in traces_json["result"]] | ||
return Block( | ||
block_number=block_number, | ||
block_timestamp=block_json["timestamp"], | ||
miner=block_json["miner"], | ||
base_fee_per_gas=base_fee_per_gas, | ||
traces=traces, | ||
receipts=receipts, | ||
) | ||
except KeyError as e: | ||
logger.warning( | ||
f"Failed to create objects from block: {block_number}: {e}, retrying: {retries + 1} / 3" | ||
) | ||
if retries < 3: | ||
await asyncio.sleep(5) | ||
return await _fetch_block_parity(w3, base_provider, block_number, retries) | ||
else: | ||
raise | ||
|
||
try: | ||
receipts: List[Receipt] = [ | ||
Receipt(**receipt) for receipt in receipts_json["result"] | ||
] | ||
traces = [Trace(**trace_json) for trace_json in traces_json["result"]] | ||
return Block( | ||
block_number=block_number, | ||
block_timestamp=block_json["timestamp"], | ||
miner=block_json["miner"], | ||
base_fee_per_gas=base_fee_per_gas, | ||
traces=traces, | ||
receipts=receipts, | ||
) | ||
except KeyError as e: | ||
logger.warning( | ||
f"Failed to create objects from block: {block_number}: {e}, retrying: {retries + 1} / 3" | ||
) | ||
if retries < 3: | ||
await asyncio.sleep(5) | ||
return await _fetch_block( | ||
w3, base_provider, geth, block_number, retries | ||
) | ||
else: | ||
raise | ||
else: | ||
# print(block_number) | ||
block_json = await asyncio.gather(w3.eth.get_block(block_number)) | ||
|
||
async def _fetch_block_geth( | ||
w3, base_provider, block_number: int, retries: int = 0 | ||
) -> Block: | ||
block_json = await asyncio.gather(w3.eth.get_block(block_number)) | ||
|
||
try: | ||
# Separate calls to help with load during block tracing | ||
traces = await geth_get_tx_traces_parity_format(base_provider, block_json[0]) | ||
geth_tx_receipts = await geth_get_tx_receipts_async( | ||
base_provider.endpoint_uri, block_json[0]["transactions"] | ||
base_provider, block_json[0]["transactions"] | ||
) | ||
receipts = geth_receipts_translator(block_json[0], geth_tx_receipts) | ||
base_fee_per_gas = 0 | ||
base_fee_per_gas = 0 # Polygon specific, TODO for other chains | ||
|
||
return Block( | ||
block_number=block_number, | ||
|
@@ -97,6 +110,15 @@ async def _fetch_block( | |
traces=traces, | ||
receipts=receipts, | ||
) | ||
except KeyError as e: | ||
logger.warning( | ||
f"Failed to create objects from block: {block_number}: {e}, retrying: {retries + 1} / 3" | ||
) | ||
if retries < 3: | ||
await asyncio.sleep(5) | ||
return await _fetch_block_geth(w3, base_provider, block_number, retries) | ||
else: | ||
raise | ||
|
||
|
||
def _find_block( | ||
|
@@ -245,13 +267,6 @@ def unwrap_tx_trace_for_parity( | |
block_json, tx_pos_in_block, tx_trace, position=[] | ||
) -> List[Trace]: | ||
response_list = [] | ||
_calltype_mapping = { | ||
"CALL": "call", | ||
"DELEGATECALL": "delegateCall", | ||
"CREATE": "create", | ||
"SUICIDE": "suicide", | ||
"REWARD": "reward", | ||
} | ||
try: | ||
if tx_trace["type"] == "STATICCALL": | ||
return [] | ||
|
@@ -279,7 +294,8 @@ def unwrap_tx_trace_for_parity( | |
type=TraceType(_calltype_mapping[tx_trace["type"]]), | ||
) | ||
) | ||
except Exception: | ||
except Exception as e: | ||
logger.warn(f"error while unwraping tx trace for parity {e}") | ||
return [] | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. What causes this to get hit? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. from my experience, There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. If that's the case, do you want to move this to be the same as what you have for STATICCALL, where it checks for it explicitly? It's better on our side to have it just blow up with an exception for now. We'd rather know if we're missing cases. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. For 1, should there be a line like this?
For 2, is there anything that still makes it throw now that create2 is supported? I'm a little hesitant to blanket-accept all Exceptions — if there are known failures I'm OK with skipping those blocks for now, but I think we should at least limit the scope of exceptions to what we know. Otherwise, geth users could be unknowingly missing blocks. |
||
|
||
if "calls" in tx_trace.keys(): | ||
|
@@ -292,28 +308,20 @@ def unwrap_tx_trace_for_parity( | |
return response_list | ||
|
||
|
||
async def geth_get_tx_receipts_task(session, endpoint_uri, tx): | ||
data = { | ||
"jsonrpc": "2.0", | ||
"id": "0", | ||
"method": "eth_getTransactionReceipt", | ||
"params": [tx.hex()], | ||
} | ||
async with session.post(endpoint_uri, json=data) as response: | ||
if response.status != 200: | ||
response.raise_for_status() | ||
return await response.text() | ||
async def geth_get_tx_receipts_task(base_provider, tx): | ||
receipt = await base_provider.make_request("eth_getTransactionReceipt", [tx.hex()]) | ||
return receipt | ||
|
||
|
||
async def geth_get_tx_receipts_async(endpoint_uri, transactions): | ||
async def geth_get_tx_receipts_async(base_provider, transactions): | ||
geth_tx_receipts = [] | ||
async with aiohttp.ClientSession() as session: | ||
tasks = [ | ||
asyncio.create_task(geth_get_tx_receipts_task(session, endpoint_uri, tx)) | ||
for tx in transactions | ||
] | ||
geth_tx_receipts = await asyncio.gather(*tasks) | ||
return [json.loads(tx_receipts) for tx_receipts in geth_tx_receipts] | ||
tasks = [ | ||
asyncio.create_task(geth_get_tx_receipts_task(base_provider, tx)) | ||
for tx in transactions | ||
] | ||
geth_tx_receipts = await asyncio.gather(*tasks) | ||
# return [json.loads(tx_receipts) for tx_receipts in geth_tx_receipts] | ||
return geth_tx_receipts | ||
|
||
|
||
def geth_receipts_translator(block_json, geth_tx_receipts) -> List[Receipt]: | ||
|
@@ -331,24 +339,18 @@ def geth_receipts_translator(block_json, geth_tx_receipts) -> List[Receipt]: | |
|
||
|
||
def unwrap_tx_receipt_for_parity(block_json, tx_pos_in_block, tx_receipt) -> Receipt: | ||
try: | ||
if tx_pos_in_block != int(tx_receipt["transactionIndex"], 16): | ||
print( | ||
"Alert the position of transaction in block is mismatched ", | ||
tx_pos_in_block, | ||
tx_receipt["transactionIndex"], | ||
) | ||
return Receipt( | ||
block_number=block_json["number"], | ||
transaction_hash=tx_receipt["transactionHash"], | ||
transaction_index=tx_pos_in_block, | ||
gas_used=tx_receipt["gasUsed"], | ||
effective_gas_price=tx_receipt["effectiveGasPrice"], | ||
cumulative_gas_used=tx_receipt["cumulativeGasUsed"], | ||
to=tx_receipt["to"], | ||
if tx_pos_in_block != int(tx_receipt["transactionIndex"], 16): | ||
logger.info( | ||
"Alert the position of transaction in block is mismatched ", | ||
tx_pos_in_block, | ||
tx_receipt["transactionIndex"], | ||
) | ||
|
||
except Exception as e: | ||
print("error while decoding receipt", tx_receipt, e) | ||
|
||
return Receipt() | ||
return Receipt( | ||
block_number=block_json["number"], | ||
transaction_hash=tx_receipt["transactionHash"], | ||
transaction_index=tx_pos_in_block, | ||
gas_used=tx_receipt["gasUsed"], | ||
effective_gas_price=tx_receipt["effectiveGasPrice"], | ||
cumulative_gas_used=tx_receipt["cumulativeGasUsed"], | ||
to=tx_receipt["to"], | ||
) |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
These calls can be grouped with an `asyncio.gather` to run them in parallel.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
We have seen extra load during block tracing hamper the tracer on Polygon. Sometimes the tracer runs out of time (internal timeout) and errors out. It is better to keep these calls separate.