Allow longer chains when formatted
Jurshsmith committed Sep 24, 2023
Parent: 368599e · Commit: 66f9090
Showing 8 changed files with 37 additions and 84 deletions.
chaindexing-tests/src/db.rs (1 addition & 3 deletions)

@@ -30,9 +30,7 @@ pub fn database_url() -> String {
 fn get_db_name_and_raw_url(url: &str) -> (String, String) {
     let mut url_split = url.split('/').collect::<Vec<&str>>();

-    let db_name = url_split
-        .pop()
-        .expect("DATABASE NAME needs to be specified. See: sample.env");
+    let db_name = url_split.pop().expect("DATABASE NAME needs to be specified. See: sample.env");
     let db_raw_url = url_split.join("/");

     (db_name.to_string(), db_raw_url)
chaindexing-tests/src/tests/events_ingester.rs (3 additions & 9 deletions)

@@ -29,9 +29,7 @@ mod tests {
         Chaindexing::create_initial_contract_addresses(&mut conn, &contracts).await;

         let conn = Arc::new(Mutex::new(conn));
-        EventsIngester::ingest(conn.clone(), &contracts, 10, json_rpc)
-            .await
-            .unwrap();
+        EventsIngester::ingest(conn.clone(), &contracts, 10, json_rpc).await.unwrap();

         let mut conn = conn.lock().await;
         let ingested_events = PostgresRepo::get_all_events(&mut conn).await;
@@ -69,9 +67,7 @@ mod tests {
         ));

         let conn = Arc::new(Mutex::new(conn));
-        EventsIngester::ingest(conn, &contracts, 10, json_rpc)
-            .await
-            .unwrap();
+        EventsIngester::ingest(conn, &contracts, 10, json_rpc).await.unwrap();
         })
         .await;
     }
@@ -144,9 +140,7 @@ mod tests {
         Chaindexing::create_initial_contract_addresses(&mut conn, &contracts).await;

         let conn = Arc::new(Mutex::new(conn));
-        EventsIngester::ingest(conn.clone(), &contracts, 10, json_rpc)
-            .await
-            .unwrap();
+        EventsIngester::ingest(conn.clone(), &contracts, 10, json_rpc).await.unwrap();

         let mut conn = conn.lock().await;
         assert!(PostgresRepo::get_all_events(&mut conn).await.is_empty());
chaindexing/src/contracts.rs (16 additions & 33 deletions)

@@ -89,10 +89,7 @@ impl<State: ContractState> Contract<State> {
     }

     pub fn build_events(&self) -> Vec<ContractEvent> {
-        self.get_event_abis()
-            .iter()
-            .map(|abi| ContractEvent::new(abi))
-            .collect()
+        self.get_event_abis().iter().map(|abi| ContractEvent::new(abi)).collect()
     }
 }

@@ -105,12 +102,9 @@ impl Contracts {
         contracts.iter().fold(
             HashMap::new(),
             |mut event_handlers_by_event_abi, contract| {
-                contract
-                    .event_handlers
-                    .iter()
-                    .for_each(|(event_abi, event_handler)| {
-                        event_handlers_by_event_abi.insert(event_abi, event_handler.clone());
-                    });
+                contract.event_handlers.iter().for_each(|(event_abi, event_handler)| {
+                    event_handlers_by_event_abi.insert(event_abi, event_handler.clone());
+                });

                 event_handlers_by_event_abi
             },
@@ -120,13 +114,11 @@ impl Contracts {
     pub fn group_event_topics_by_names<State: ContractState>(
         contracts: &Vec<Contract<State>>,
     ) -> HashMap<String, Vec<ContractEventTopic>> {
-        contracts
-            .iter()
-            .fold(HashMap::new(), |mut topics_by_contract_name, contract| {
-                topics_by_contract_name.insert(contract.name.clone(), contract.get_event_topics());
+        contracts.iter().fold(HashMap::new(), |mut topics_by_contract_name, contract| {
+            topics_by_contract_name.insert(contract.name.clone(), contract.get_event_topics());

-                topics_by_contract_name
-            })
+            topics_by_contract_name
+        })
     }

     pub fn group_events_by_topics<State: ContractState>(
@@ -142,19 +134,14 @@ impl Contracts {
     pub fn group_by_addresses<'a, State: ContractState>(
         contracts: &'a Vec<Contract<State>>,
     ) -> HashMap<Address, &'a Contract<State>> {
-        contracts
-            .iter()
-            .fold(HashMap::new(), |mut contracts_by_addresses, contract| {
-                contract
-                    .addresses
-                    .iter()
-                    .for_each(|UnsavedContractAddress { address, .. }| {
-                        contracts_by_addresses
-                            .insert(Address::from_str(&*address.as_str()).unwrap(), contract);
-                    });
-
-                contracts_by_addresses
-            })
+        contracts.iter().fold(HashMap::new(), |mut contracts_by_addresses, contract| {
+            contract.addresses.iter().for_each(|UnsavedContractAddress { address, .. }| {
+                contracts_by_addresses
+                    .insert(Address::from_str(&*address.as_str()).unwrap(), contract);
+            });
+
+            contracts_by_addresses
+        })
     }
 }

@@ -209,10 +196,6 @@ impl ContractAddress {
         ContractAddressID(self.id)
     }
     pub fn address_to_string(address: &Address) -> String {
-        serde_json::to_value(address)
-            .unwrap()
-            .as_str()
-            .unwrap()
-            .to_string()
+        serde_json::to_value(address).unwrap().as_str().unwrap().to_string()
     }
 }
chaindexing/src/events.rs (4 additions & 6 deletions)

@@ -60,13 +60,11 @@ impl Event {
     }

     fn log_params_to_parameters(log_params: &Vec<LogParam>) -> HashMap<String, Token> {
-        log_params
-            .iter()
-            .fold(HashMap::new(), |mut parameters, log_param| {
-                parameters.insert(log_param.name.to_string(), log_param.value.clone());
+        log_params.iter().fold(HashMap::new(), |mut parameters, log_param| {
+            parameters.insert(log_param.name.to_string(), log_param.value.clone());

-                parameters
-            })
+            parameters
+        })
     }
 }

chaindexing/src/events_ingester.rs (8 additions & 21 deletions)

@@ -68,21 +68,11 @@ impl EventsIngester {
             interval.tick().await;

             try_join_all(
-                config
-                    .chains
-                    .clone()
-                    .into_iter()
-                    .map(|(_chain, json_rpc_url)| {
-                        let json_rpc =
-                            Arc::new(Provider::<Http>::try_from(json_rpc_url).unwrap());
-
-                        Self::ingest(
-                            conn.clone(),
-                            &contracts,
-                            config.blocks_per_batch,
-                            json_rpc,
-                        )
-                    }),
+                config.chains.clone().into_iter().map(|(_chain, json_rpc_url)| {
+                    let json_rpc = Arc::new(Provider::<Http>::try_from(json_rpc_url).unwrap());
+
+                    Self::ingest(conn.clone(), &contracts, config.blocks_per_batch, json_rpc)
+                }),
             )
             .await
             .unwrap();
@@ -148,9 +138,7 @@ impl EventsIngester {

         let conn = Arc::new(Mutex::new(conn));
         join_all(contract_addresses.iter().map(|contract_address| {
-            let filters = filters_by_contract_address_id
-                .get(&contract_address.id)
-                .unwrap();
+            let filters = filters_by_contract_address_id.get(&contract_address.id).unwrap();

             let conn = conn.clone();
             async move {
@@ -240,9 +228,8 @@ impl Filters {
         contract_addresses
             .iter()
             .map(|contract_address| {
-                let topics_by_contract_name = topics_by_contract_name
-                    .get(contract_address.contract_name.as_str())
-                    .unwrap();
+                let topics_by_contract_name =
+                    topics_by_contract_name.get(contract_address.contract_name.as_str()).unwrap();

                 Filter::new(
                     contract_address,
chaindexing/src/lib.rs (3 additions & 7 deletions)

@@ -1,7 +1,7 @@
 mod chains;
 mod config;
-mod contracts;
 mod contract_states;
+mod contracts;
 mod diesel;
 mod event_handlers;
 mod events;
@@ -54,12 +54,8 @@ impl Chaindexing {
         conn: &mut ChaindexingRepoConn<'a>,
         contracts: &Vec<Contract<State>>,
     ) {
-        let contract_addresses = contracts
-            .clone()
-            .into_iter()
-            .map(|c| c.addresses)
-            .flatten()
-            .collect();
+        let contract_addresses =
+            contracts.clone().into_iter().map(|c| c.addresses).flatten().collect();

         ChaindexingRepo::create_contract_addresses(conn, &contract_addresses).await;
     }
chaindexing/src/repos/postgres_repo.rs (1 addition & 5 deletions)

@@ -44,11 +44,7 @@ impl Repo for PostgresRepo {
     async fn get_pool(&self, max_size: u32) -> Pool {
         let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new(&self.url);

-        bb8::Pool::builder()
-            .max_size(max_size)
-            .build(manager)
-            .await
-            .unwrap()
+        bb8::Pool::builder().max_size(max_size).build(manager).await.unwrap()
     }

     async fn get_conn<'a>(pool: &'a Pool) -> Conn<'a> {
rustfmt.toml (1 addition & 0 deletions)

@@ -0,0 +1 @@
+chain_width = 80
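
For context, rustfmt's chain_width option sets the maximum width a method chain may occupy before rustfmt splits it across lines; its default is 60 (derived from max_width). Raising it to 80, as this one-line rustfmt.toml addition does, is what lets the chains in the files above collapse onto single lines. Below is a minimal sketch of the effect, reusing the chain from the chaindexing-tests/src/db.rs hunk; the wrapper function name is hypothetical and the comments are illustrative, not part of the commit:

// Illustration only: how chain_width changes rustfmt's output for the db.rs chain above.
fn database_name(url: &str) -> String {
    let mut url_split = url.split('/').collect::<Vec<&str>>();

    // With the default chain_width (60), rustfmt splits the chain across three lines:
    //     let db_name = url_split
    //         .pop()
    //         .expect("DATABASE NAME needs to be specified. See: sample.env");
    //
    // With chain_width = 80, the chain `.pop().expect(...)` is roughly 70 characters,
    // so it now fits the limit and rustfmt keeps the statement on one line:
    let db_name = url_split.pop().expect("DATABASE NAME needs to be specified. See: sample.env");

    db_name.to_string()
}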
