Skip to content

Commit

Permalink
Fix topic extraction: logs can carry fewer than four topics, so replace unconditional indexing with bounds-checked `extract_topic`
Browse files Browse the repository at this point in the history
  • Loading branch information
DenisCarriere committed Jul 16, 2024
1 parent 4f05395 commit 8bfb51b
Show file tree
Hide file tree
Showing 3 changed files with 62 additions and 6 deletions.
2 changes: 1 addition & 1 deletion blocks/evm/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ info:

.PHONY: run
run:
substreams run -e eth.substreams.pinax.network:443 graph_out -s 20293026 -t 20293027
substreams run -e eth.substreams.pinax.network:443 graph_out -s 10320264 -t 10320265

.PHONY: gui
gui:
Expand Down
12 changes: 7 additions & 5 deletions blocks/evm/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use common::{block_time_to_date, bytes_to_hex};
use common::{block_time_to_date, bytes_to_hex, extract_topic};
use substreams::errors::Error;
use substreams::pb::substreams::Clock;
use substreams_entity_change::pb::entity::EntityChanges;
Expand All @@ -15,6 +15,7 @@ pub fn graph_out(clock: Clock, block: Block) -> Result<EntityChanges, Error> {
let block_hash = format!("0x{}", clock.id);
let block_date = block_time_to_date(block_time.as_str());

// blocks
tables
.create_row("blocks", &block_hash)
.set("time", &block_time)
Expand All @@ -41,6 +42,7 @@ pub fn graph_out(clock: Clock, block: Block) -> Result<EntityChanges, Error> {
.set_bigint("base_fee_per_gas", &header.base_fee_per_gas.unwrap_or_default().with_decimal(0).to_string())
.set("parent_beacon_root", bytes_to_hex(header.parent_beacon_root));

// logs
for log in block.logs() {
let log_index = log.index();
let transaction = log.receipt.transaction;
Expand All @@ -50,10 +52,10 @@ pub fn graph_out(clock: Clock, block: Block) -> Result<EntityChanges, Error> {
let tx_to = bytes_to_hex(transaction.to.to_vec());
let contract_address = bytes_to_hex(log.address().to_vec());
let topics = log.topics();
let topic0 = bytes_to_hex(topics[0].clone());
let topic1 = bytes_to_hex(topics[1].clone());
let topic2 = bytes_to_hex(topics[2].clone());
let topic3 = bytes_to_hex(topics[3].clone());
let topic0 = extract_topic(topics, 0);
let topic1 = extract_topic(topics, 1);
let topic2 = extract_topic(topics, 2);
let topic3 = extract_topic(topics, 3);
let data = bytes_to_hex(log.data().to_vec());

tables
Expand Down
54 changes: 54 additions & 0 deletions common/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,14 @@ pub fn bytes_to_hex(bytes: Vec<u8>) -> String {
format!{"0x{}", Hex::encode(bytes)}.to_string()
}

/// Returns the topic at `index` as a `0x`-prefixed hex string, or an
/// empty string when `index` is out of range.
///
/// EVM logs carry between zero and four topics, so out-of-range indices
/// are an expected condition and must not panic.
///
/// Accepts any slice of topics (`&Vec<Vec<u8>>` coerces here), which is
/// the idiomatic parameter form and keeps existing callers working.
pub fn extract_topic(topics: &[Vec<u8>], index: usize) -> String {
    topics
        .get(index)
        // bytes_to_hex takes ownership, so the topic bytes are cloned
        .map(|topic| bytes_to_hex(topic.clone()))
        // missing topic -> "" (String::default), matching prior behavior
        .unwrap_or_default()
}

#[cfg(test)]
mod tests {
use super::*;
Expand Down Expand Up @@ -74,4 +82,50 @@ mod tests {
let expected = "0xdeadbeef";
assert_eq!(bytes_to_hex(bytes), expected);
}

#[test]
fn test_extract_topic_valid_index() {
    // Every in-range index yields the hex encoding of that topic.
    let topics: Vec<Vec<u8>> = vec![
        vec![0x01, 0x02, 0x03],
        vec![0x0a, 0x0b, 0x0c],
        vec![0xff, 0xfe, 0xfd],
    ];
    let expected = ["0x010203", "0x0a0b0c", "0xfffefd"];
    for (index, want) in expected.iter().enumerate() {
        assert_eq!(extract_topic(&topics, index), *want);
    }
}

#[test]
fn test_extract_topic_invalid_index() {
    // Any index at or past the end must fall back to the empty string.
    let topics = vec![vec![0x01, 0x02, 0x03], vec![0x0a, 0x0b, 0x0c]];
    for out_of_range in [3usize, 100] {
        assert_eq!(extract_topic(&topics, out_of_range), "");
    }
}

#[test]
fn test_extract_topic_empty_vector() {
    // With no topics at all, every index maps to the empty string.
    let no_topics: Vec<Vec<u8>> = Vec::new();
    for index in [0usize, 1] {
        assert_eq!(extract_topic(&no_topics, index), "");
    }
}

#[test]
fn test_extract_topic_single_element() {
    // Exactly one topic: index 0 hits it, index 1 is already out of range.
    let only_topic = vec![vec![0x0d, 0x0e, 0x0f]];
    assert_eq!(extract_topic(&only_topic, 0), "0x0d0e0f");
    assert_eq!(extract_topic(&only_topic, 1), "");
}

#[test]
fn test_extract_topic_large_numbers() {
    // High byte values encode correctly, and topic lengths may differ.
    let topics = vec![vec![0xaa, 0xbb, 0xcc], vec![0xde, 0xad, 0xbe, 0xef]];
    assert_eq!(extract_topic(&topics, 0), "0xaabbcc");
    assert_eq!(extract_topic(&topics, 1), "0xdeadbeef");
}
}

0 comments on commit 8bfb51b

Please sign in to comment.