Skip to content

Commit

Permalink
Optimize Safe Events Indexer
Browse files Browse the repository at this point in the history
- Add some comments
- Don't store a list and then convert it to a set; build the set in a single step
  • Loading branch information
Uxio0 committed Nov 27, 2024
1 parent 63b15bf commit 8d9d66c
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 10 deletions.
20 changes: 11 additions & 9 deletions safe_transaction_service/history/indexers/safe_events_indexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -511,19 +511,21 @@ def _process_safe_creation_events(
Process creation events (ProxyCreation and SafeSetup).
:param safe_addresses_with_creation_events:
:return:
:return: Dictionary with a Safe address and a list of creation related events (ProxyCreation and SafeSetup)
"""
internal_txs = []
internal_decoded_txs = []
# Check if were indexed
# Check if contracts were already indexed
safe_creation_events_addresses = set(safe_addresses_with_creation_events.keys())
indexed_addresses = InternalTxDecoded.objects.filter(
internal_tx___from__in=safe_creation_events_addresses,
function_name="setup",
internal_tx__contract_address=None,
).values_list("internal_tx___from", flat=True)
# Ignoring the already indexed contracts
addresses_to_index = safe_creation_events_addresses - set(indexed_addresses)
indexed_addresses = set(
InternalTxDecoded.objects.filter(
internal_tx___from__in=safe_creation_events_addresses,
function_name="setup",
internal_tx__contract_address=None,
).values_list("internal_tx___from", flat=True)
)
# Ignoring already indexed contracts
addresses_to_index = safe_creation_events_addresses - indexed_addresses

for safe_address in addresses_to_index:
events = safe_addresses_with_creation_events[safe_address]
Expand Down
2 changes: 1 addition & 1 deletion safe_transaction_service/history/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ def create_from_block(
f"Marking block as not confirmed"
)

@lru_cache(maxsize=100_000)
@lru_cache(maxsize=1_000_000)
def get_timestamp_by_hash(self, block_hash: HexBytes) -> datetime.datetime:
try:
return self.values("timestamp").get(block_hash=block_hash)["timestamp"]
Expand Down

0 comments on commit 8d9d66c

Please sign in to comment.