Commit 2a18411
chore(fmt): make build happy

armyhaylenko committed Jan 13, 2025
1 parent 38f2d98 commit 2a18411

Showing 20 changed files with 55 additions and 58 deletions.
4 changes: 1 addition & 3 deletions backfill_rpc/src/slots_collector.rs
@@ -54,9 +54,7 @@ fn fetch_related_signature(
     collected_key: &solana_program::pubkey::Pubkey,
     block_with_start_signature: Option<UiConfirmedBlock>,
 ) -> Option<String> {
-    let Some(txs) = block_with_start_signature.and_then(|block| block.transactions) else {
-        return None;
-    };
+    let txs = block_with_start_signature.and_then(|block| block.transactions)?;
     for tx in txs {
         if tx.meta.and_then(|meta| meta.err).is_some() {
             continue;
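
For readers unfamiliar with the idiom: the replacement relies on the `?` operator, which early-returns `None` from an `Option`-returning function, making the `let ... else { return None; }` pattern unnecessary. A minimal standalone sketch (hypothetical function, not from this repo):

fn first_even(values: Option<Vec<u32>>) -> Option<u32> {
    // `?` returns None early when the Option is None, replacing the
    // more verbose `let Some(v) = ... else { return None; };` pattern.
    let values = values?;
    values.into_iter().find(|v| v % 2 == 0)
}

fn main() {
    assert_eq!(first_even(Some(vec![1, 3, 4])), Some(4));
    assert_eq!(first_even(None), None);
}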
6 changes: 3 additions & 3 deletions metrics_utils/src/utils.rs
@@ -53,10 +53,10 @@ pub async fn start_metrics_server(
     Ok(())
 }
 
+type HttpResponseFuture = Box<dyn Future<Output = io::Result<Response<Body>>> + Send>;
+
 /// This function returns a HTTP handler (i.e. another function)
-pub fn make_handler(
-    registry: Arc<Registry>,
-) -> impl Fn(Request<Body>) -> Pin<Box<dyn Future<Output = io::Result<Response<Body>>> + Send>> {
+pub fn make_handler(registry: Arc<Registry>) -> impl Fn(Request<Body>) -> Pin<HttpResponseFuture> {
     // This closure accepts a request and responds with the OpenMetrics encoding of our metrics.
     move |_req: Request<Body>| {
         let reg = registry.clone();
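
The extracted alias follows clippy's `type_complexity` guidance: name the boxed future once instead of repeating the full trait object in every signature. A minimal sketch of the same pattern (hypothetical names, not from this repo):

use std::future::Future;
use std::pin::Pin;

// Name the boxed trait object once; reuse it in every signature.
type BoxedStringFuture = Box<dyn Future<Output = String> + Send>;

fn make_greeter(name: String) -> impl Fn() -> Pin<BoxedStringFuture> {
    move || {
        let name = name.clone();
        // Box::pin produces the Pin<Box<dyn Future>> that the alias describes.
        Box::pin(async move { format!("hello, {name}") })
    }
}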
1 change: 1 addition & 0 deletions nft_ingester/Cargo.toml
@@ -108,6 +108,7 @@ rpc_tests = []
 integration_tests = []
 batch_mint_tests = []
 profiling = []
+big_table_tests = []
 
 [dependencies.utils]
 version = "0.1.8"
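
The new feature parallels the existing `*_tests` flags: code gated on it only compiles when the feature is enabled. A hypothetical usage sketch (names invented for illustration):

// Compiles only under `cargo test --features big_table_tests`.
#[cfg(feature = "big_table_tests")]
mod big_table_tests {
    #[test]
    fn encodes_row_key() {
        // placeholder assertion; a real test would exercise Bigtable I/O
        assert_eq!(u64::to_be_bytes(1).len(), 8);
    }
}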
4 changes: 2 additions & 2 deletions nft_ingester/src/api/service.rs
@@ -271,7 +271,7 @@ impl BatchMintService {
                 .and_then(|ct| ct.to_str().ok())
                 .and_then(|ct| multer::parse_boundary(ct).ok());
 
-            return match boundary {
+            match boundary {
                 Some(boundary) => {
                     let mut multipart = Multipart::new(req.into_body(), boundary);
                     let file_name = format!("{}.json", Uuid::new_v4());
@@ -306,7 +306,7 @@ impl BatchMintService {
                     .status(StatusCode::BAD_REQUEST)
                     .body(Body::from("BAD REQUEST"))
                     .unwrap()),
-            };
+            }
         }
         _ => Ok(Response::builder()
             .status(StatusCode::NOT_FOUND)
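
This change (and the one in the next file) addresses clippy's `needless_return` lint: a trailing `match` is already the block's value, so the `return` and the closing semicolon can go. A minimal sketch of the same shape (hypothetical function):

fn classify(n: i32) -> &'static str {
    // The match is the function's tail expression; no `return`, no `;`.
    match n {
        0 => "zero",
        _ if n > 0 => "positive",
        _ => "negative",
    }
}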
4 changes: 2 additions & 2 deletions nft_ingester/src/api/synchronization_state_consistency.rs
@@ -104,14 +104,14 @@ impl ConsistencyChecker for SynchronizationStateConsistencyChecker {
             return false;
         }
 
-        return match call {
+        match call {
             Call::MethodCall(method_call) => {
                 INDEX_STORAGE_DEPENDS_METHODS.contains(&method_call.method.as_str())
             }
             Call::Notification(notification) => {
                 INDEX_STORAGE_DEPENDS_METHODS.contains(&notification.method.as_str())
             }
             _ => false,
-        };
+        }
     }
 }
2 changes: 1 addition & 1 deletion nft_ingester/src/backfiller.rs
@@ -182,7 +182,7 @@ where
             .db
             .raw_iterator_cf(&slot_db.db.cf_handle(RawBlock::NAME).unwrap());
         if let Some(start_slot) = start_slot {
-            it.seek(&RawBlock::encode_key(start_slot));
+            it.seek(RawBlock::encode_key(start_slot));
         } else {
            it.seek_to_first();
         }
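
The dropped `&` fixes a needless-borrow lint: the raw iterator's `seek` accepts any `AsRef<[u8]>`, so the owned key buffer can be passed directly. A small sketch with stand-in functions (`encode_key` and `seek` below are hypothetical, not the rocksdb API):

fn encode_key(slot: u64) -> Vec<u8> {
    slot.to_be_bytes().to_vec()
}

// Stand-in with the same `AsRef<[u8]>` parameter shape as `seek`.
fn seek(key: impl AsRef<[u8]>) -> usize {
    key.as_ref().len()
}

fn main() {
    assert_eq!(seek(&encode_key(7)), 8); // extra borrow; lints
    assert_eq!(seek(encode_key(7)), 8); // preferred
}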
2 changes: 1 addition & 1 deletion nft_ingester/src/plerkle.rs
@@ -35,7 +35,7 @@ pub struct TransactionInfo {
 
 pub struct PlerkleAccountInfo<'a>(pub plerkle_serialization::AccountInfo<'a>);
 
-impl<'a> TryFrom<PlerkleAccountInfo<'a>> for AccountInfo {
+impl TryFrom<PlerkleAccountInfo<'_>> for AccountInfo {
     type Error = PlerkleDeserializerError;
 
     fn try_from(value: PlerkleAccountInfo) -> Result<Self, Self::Error> {
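
Dropping the named lifetime follows clippy's `needless_lifetimes`: when `'a` appears only in the trait's type argument, the anonymous `'_` expresses the same impl without declaring it up front. A standalone sketch (hypothetical types):

struct Borrowed<'a>(&'a str);

// Before: impl<'a> From<Borrowed<'a>> for String { ... }
impl From<Borrowed<'_>> for String {
    fn from(value: Borrowed) -> Self {
        value.0.to_owned()
    }
}

fn main() {
    let s: String = Borrowed("abc").into();
    assert_eq!(s, "abc");
}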
4 changes: 2 additions & 2 deletions nft_ingester/src/scheduler.rs
@@ -53,7 +53,7 @@ impl Scheduler {
     /// It executes jobs one by one sequentially.
     pub async fn run(&mut self) {
         loop {
-            let mut sleep_to_next_run = u64::max_value();
+            let mut sleep_to_next_run = u64::MAX;
             let mut to_remove = Vec::new();
             for job in self.jobs.iter_mut() {
                 let mut sched = match self.storage.get_schedule(job.id()) {
@@ -95,7 +95,7 @@ impl Scheduler {
                 }
             }
             self.jobs.retain(|j| to_remove.contains(&j.id()));
-            if self.jobs.is_empty() || sleep_to_next_run == u64::max_value() {
+            if self.jobs.is_empty() || sleep_to_next_run == u64::MAX {
                 break;
             }
             tokio::time::sleep(Duration::from_secs(sleep_to_next_run)).await;
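
`u64::MAX` is the modern spelling of `u64::max_value()`; the associated constant has been the preferred form since Rust 1.43, and both evaluate to the same value:

fn main() {
    assert_eq!(u64::MAX, 18_446_744_073_709_551_615);
}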
10 changes: 9 additions & 1 deletion nft_ingester/tests/batch_mint_test.rs
@@ -6,6 +6,7 @@ use std::sync::Arc;
 use std::time::Duration;
 
 use anchor_lang::prelude::*;
+#[cfg(feature = "batch_mint_tests")]
 use async_trait::async_trait;
 use mockall::predicate;
 use mpl_bubblegum::types::{Creator, LeafSchema, MetadataArgs};
@@ -19,7 +20,9 @@ use entities::models::BufferedTransaction;
 use entities::models::{BatchMintToVerify, BatchMintWithState};
 use flatbuffers::FlatBufferBuilder;
 use interface::account_balance::MockAccountBalanceGetter;
-use interface::batch_mint::{BatchMintDownloader, MockBatchMintDownloader};
+#[cfg(feature = "batch_mint_tests")]
+use interface::batch_mint::BatchMintDownloader;
+use interface::batch_mint::MockBatchMintDownloader;
 use interface::error::UsecaseError;
 use metrics_utils::ApiMetricsConfig;
 use metrics_utils::BatchMintPersisterMetricsConfig;
@@ -117,8 +120,13 @@ fn test_generate_10_000_000_batch_mint() {
     serde_json::to_writer(file, &batch_mint).unwrap()
 }
 
+#[cfg(feature = "batch_mint_tests")]
 const BATCH_MINT_ASSETS_TO_SAVE: usize = 1_000;
+
+#[cfg(feature = "batch_mint_tests")]
 struct TestBatchMintCreator;
+
+#[cfg(feature = "batch_mint_tests")]
 #[async_trait]
 impl BatchMintDownloader for TestBatchMintCreator {
     async fn download_batch_mint(
24 changes: 8 additions & 16 deletions postgre-client/src/asset_filter_client.rs
@@ -450,14 +450,10 @@ impl AssetPubkeyFilteredFetcher for PgClient {
         let query = query_builder.build_query_as::<AssetRawResponse>();
         debug!("SEARCH QUERY: {}", &query.sql());
         let start_time = chrono::Utc::now();
-        let result = query
-            .fetch_all(&self.pool)
-            .await
-            .map_err(|e: sqlx::Error| {
-                self.metrics
-                    .observe_error(SQL_COMPONENT, SELECT_ACTION, "assets_v3");
-                e
-            })?;
+        let result = query.fetch_all(&self.pool).await.inspect_err(|_e| {
+            self.metrics
+                .observe_error(SQL_COMPONENT, SELECT_ACTION, "assets_v3");
+        })?;
         self.metrics
             .observe_request(SQL_COMPONENT, SELECT_ACTION, "assets_v3", start_time);
         let r = result
@@ -478,14 +474,10 @@ impl AssetPubkeyFilteredFetcher for PgClient {
         let mut query_builder = Self::build_grand_total_query(filter, options)?;
         let query = query_builder.build();
         let start_time = chrono::Utc::now();
-        let result = query
-            .fetch_one(&self.pool)
-            .await
-            .map_err(|e: sqlx::Error| {
-                self.metrics
-                    .observe_error(SQL_COMPONENT, COUNT_ACTION, "assets_v3");
-                e
-            })?;
+        let result = query.fetch_one(&self.pool).await.inspect_err(|_e| {
+            self.metrics
+                .observe_error(SQL_COMPONENT, COUNT_ACTION, "assets_v3");
+        })?;
         self.metrics
             .observe_request(SQL_COMPONENT, COUNT_ACTION, "assets_v3", start_time);
         let count: i64 = result.get(0);
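
This file and the following postgre-client files swap `map_err` for `inspect_err` (stabilized in Rust 1.76), which runs a side effect on the error by reference and passes the `Result` through unchanged, so the closure no longer has to hand the error back. A minimal sketch (hypothetical function, plain logging instead of metrics):

fn parse(input: &str) -> Result<u32, std::num::ParseIntError> {
    input
        .parse::<u32>()
        // observe the error for logging; the Result flows on unchanged
        .inspect_err(|e| eprintln!("parse failed: {e}"))
}

fn main() {
    assert!(parse("42").is_ok());
    assert!(parse("nope").is_err());
}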
6 changes: 2 additions & 4 deletions postgre-client/src/asset_index_client.rs
@@ -43,10 +43,9 @@ impl PgClient {
         query_builder.push(format!(" WHERE id = {}", asset_type as i32));
         let start_time = chrono::Utc::now();
         let query = query_builder.build_query_as::<(Option<Vec<u8>>,)>();
-        let result = query.fetch_one(executor).await.map_err(|e| {
+        let result = query.fetch_one(executor).await.inspect_err(|_e| {
             self.metrics
                 .observe_error(SQL_COMPONENT, SELECT_ACTION, table_name);
-            e
         })?;
         self.metrics
             .observe_request(SQL_COMPONENT, SELECT_ACTION, table_name, start_time);
@@ -902,10 +901,9 @@ impl PgClient {
         query_builder.push(");");
         let query = query_builder.build_query_as::<CreatorRawResponse>();
         let start_time = chrono::Utc::now();
-        let creators_result = query.fetch_all(transaction).await.map_err(|err| {
+        let creators_result = query.fetch_all(transaction).await.inspect_err(|_err| {
             self.metrics
                 .observe_error(SQL_COMPONENT, BATCH_SELECT_ACTION, table);
-            err
         })?;
         self.metrics
             .observe_request(SQL_COMPONENT, BATCH_SELECT_ACTION, table, start_time);
9 changes: 3 additions & 6 deletions postgre-client/src/integrity_verification_client.rs
@@ -33,10 +33,9 @@ impl PgClient {
         query_builder.push(" FROM random");
         let query = query_builder.build();
         let start_time = chrono::Utc::now();
-        let rows = query.fetch_all(&self.pool).await.map_err(|e| {
+        let rows = query.fetch_all(&self.pool).await.inspect_err(|_e| {
             self.metrics
                 .observe_error(SQL_COMPONENT, SELECT_ACTION, "integrity_asset_by_field");
-            e
         })?;
         self.metrics.observe_request(
             SQL_COMPONENT,
@@ -75,10 +74,9 @@ impl PgClient {
             .build()
             .fetch_all(&self.pool)
             .await
-            .map_err(|e| {
+            .inspect_err(|_e| {
                 self.metrics
                     .observe_error(SQL_COMPONENT, SELECT_ACTION, "integrity_asset");
-                e
             })?;
         self.metrics
             .observe_request(SQL_COMPONENT, SELECT_ACTION, "integrity_asset", start_time);
@@ -128,13 +126,12 @@ impl IntegrityVerificationKeysFetcher for PgClient {
         let rows = sqlx::query(query)
             .fetch_all(&self.pool)
             .await
-            .map_err(|e| {
+            .inspect_err(|_e| {
                 self.metrics.observe_error(
                     SQL_COMPONENT,
                     SELECT_ACTION,
                     "integrity_asset_creators",
                 );
-                e
             })?;
         self.metrics.observe_request(
             SQL_COMPONENT,
9 changes: 3 additions & 6 deletions postgre-client/src/lib.rs
@@ -166,10 +166,9 @@ impl PgClient {
 
     async fn start_transaction(&self) -> Result<Transaction<'_, Postgres>, IndexDbError> {
         let start_time = chrono::Utc::now();
-        let transaction = self.pool.begin().await.map_err(|e| {
+        let transaction = self.pool.begin().await.inspect_err(|_e| {
             self.metrics
                 .observe_error(SQL_COMPONENT, TRANSACTION_ACTION, "begin");
-            e
         })?;
         self.metrics
             .observe_request(SQL_COMPONENT, TRANSACTION_ACTION, "begin", start_time);
@@ -181,10 +180,9 @@ impl PgClient {
         transaction: Transaction<'_, Postgres>,
     ) -> Result<(), IndexDbError> {
         let start_time = chrono::Utc::now();
-        transaction.commit().await.map_err(|e| {
+        transaction.commit().await.inspect_err(|_e| {
             self.metrics
                 .observe_error(SQL_COMPONENT, TRANSACTION_ACTION, "commit");
-            e
         })?;
         self.metrics
             .observe_request(SQL_COMPONENT, TRANSACTION_ACTION, "commit", start_time);
@@ -195,10 +193,9 @@ impl PgClient {
         transaction: Transaction<'_, Postgres>,
     ) -> Result<(), IndexDbError> {
         let start_time = chrono::Utc::now();
-        transaction.rollback().await.map_err(|e| {
+        transaction.rollback().await.inspect_err(|_e| {
             self.metrics
                 .observe_error(SQL_COMPONENT, TRANSACTION_ACTION, "rollback");
-            e
         })?;
         self.metrics
             .observe_request(SQL_COMPONENT, TRANSACTION_ACTION, "rollback", start_time);
4 changes: 2 additions & 2 deletions postgre-client/src/tasks.rs
@@ -56,10 +56,10 @@ impl PgClient {
     pub async fn store_tasks(
         &self,
         tasks_buffer: Arc<Mutex<VecDeque<Task>>>,
-        tasks: &Vec<Task>,
+        tasks: &[Task],
         metrics: Arc<IngesterMetricsConfig>,
     ) {
-        let mut tasks_to_insert = tasks.clone();
+        let mut tasks_to_insert = tasks.to_owned();
 
         // scope crated to unlock mutex before insert_tasks func, which can be time consuming
         let tasks = {
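
Taking `&[Task]` instead of `&Vec<Task>` follows clippy's `ptr_arg` lint: a slice parameter accepts vectors, arrays, and sub-slices alike, and `to_owned()` on a slice still yields a `Vec` where a copy is needed. A short sketch (hypothetical function):

fn total(values: &[u64]) -> u64 {
    values.iter().sum()
}

fn main() {
    let owned: Vec<u64> = vec![1, 2, 3];
    assert_eq!(total(&owned), 6); // &Vec<u64> coerces to &[u64]
    assert_eq!(total(&[4, 5]), 9); // arrays work too
    let copy: Vec<u64> = owned[..].to_owned(); // slice -> owned Vec
    assert_eq!(copy, owned);
}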
4 changes: 2 additions & 2 deletions rocks-db/src/clients/batch_client.rs
@@ -113,7 +113,7 @@ impl AssetUpdateIndexStorage for Storage {
             // Skip keys that are in the skip_keys set
             if skip_keys
                 .as_ref()
-                .map_or(false, |sk| sk.contains(&decoded_key.pubkey))
+                .is_some_and(|sk| sk.contains(&decoded_key.pubkey))
             {
                 continue;
             }
@@ -179,7 +179,7 @@ impl AssetUpdateIndexStorage for Storage {
             // Skip keys that are in the skip_keys set
             if skip_keys
                 .as_ref()
-                .map_or(false, |sk| sk.contains(&decoded_key.pubkey))
+                .is_some_and(|sk| sk.contains(&decoded_key.pubkey))
             {
                 continue;
             }
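
`is_some_and` (stable since Rust 1.70) replaces the `map_or(false, ...)` pattern, which newer clippy flags as `unnecessary_map_or`. The two forms are equivalent:

fn main() {
    let skip_keys: Option<Vec<u32>> = Some(vec![1, 2, 3]);
    let old = skip_keys.as_ref().map_or(false, |sk| sk.contains(&2));
    let new = skip_keys.as_ref().is_some_and(|sk| sk.contains(&2));
    assert!(old && new);
}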
2 changes: 1 addition & 1 deletion rocks-db/src/column.rs
@@ -231,7 +231,7 @@ where
         backend
             .batched_multi_get_cf(
                 &backend.cf_handle(C::NAME).unwrap(),
-                &keys.into_iter().map(C::encode_key).collect::<Vec<_>>(),
+                keys.into_iter().map(C::encode_key).collect::<Vec<_>>(),
                 false,
             )
             .into_iter()
6 changes: 3 additions & 3 deletions rocks-db/src/columns/asset.rs
@@ -2632,7 +2632,7 @@ macro_rules! merge_updated_primitive {
     ($func_name:ident, $updated_type:ident, $updated_args:ident) => {
         fn $func_name<'a, T, F>(
             builder: &mut flatbuffers::FlatBufferBuilder<'a>,
-            iter: impl Iterator<Item = T> + DoubleEndedIterator,
+            iter: impl DoubleEndedIterator<Item = T>,
             extract_fn: F,
         ) -> Option<flatbuffers::WIPOffset<fb::$updated_type<'a>>>
         where
@@ -2669,7 +2669,7 @@ macro_rules! merge_updated_offset {
     ($func_name:ident, $updated_type:ident, $updated_args:ident, $value_create_fn:path) => {
         fn $func_name<'a, T, F>(
             builder: &mut flatbuffers::FlatBufferBuilder<'a>,
-            iter: impl Iterator<Item = T> + DoubleEndedIterator,
+            iter: impl DoubleEndedIterator<Item = T>,
             extract_fn: F,
         ) -> Option<flatbuffers::WIPOffset<fb::$updated_type<'a>>>
         where
@@ -2750,7 +2750,7 @@ merge_updated_offset!(
 
 fn merge_updated_creators<'a, T, F>(
     builder: &mut flatbuffers::FlatBufferBuilder<'a>,
-    iter: impl Iterator<Item = T> + DoubleEndedIterator,
+    iter: impl DoubleEndedIterator<Item = T>,
     extract_fn: F,
 ) -> Option<flatbuffers::WIPOffset<fb::UpdatedCreators<'a>>>
 where
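
The tightened bound uses the fact that `DoubleEndedIterator` is a subtrait of `Iterator`, so the extra `Iterator<Item = T> +` is redundant and the item type can be stated on the subtrait directly. A sketch of the same signature shape (hypothetical function):

fn last_even(iter: impl DoubleEndedIterator<Item = u32>) -> Option<u32> {
    // rev() is available precisely because the bound is double-ended
    iter.rev().find(|v| v % 2 == 0)
}

fn main() {
    assert_eq!(last_even(vec![1, 2, 4, 5].into_iter()), Some(4));
}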
6 changes: 4 additions & 2 deletions rocks-db/src/generated/mod.rs
@@ -1,12 +1,14 @@
 #[allow(
     clippy::missing_safety_doc,
     unused_imports,
-    clippy::extra_unused_lifetimes
+    clippy::extra_unused_lifetimes,
+    clippy::needless_lifetimes
 )]
 pub mod asset_generated;
 #[allow(
     clippy::missing_safety_doc,
     unused_imports,
-    clippy::extra_unused_lifetimes
+    clippy::extra_unused_lifetimes,
+    clippy::needless_lifetimes
 )]
 pub mod offchain_data_generated;
4 changes: 3 additions & 1 deletion rocks-db/src/migrator.rs
@@ -163,6 +163,8 @@ struct MigrationApplier<'a> {
     applied_migration_versions: HashSet<u64>,
 }
 
+type ColumnIteratorItem = (Box<[u8]>, Box<[u8]>);
+
 impl<'a> MigrationApplier<'a> {
     fn new(
         db_path: &'a str,
@@ -301,7 +303,7 @@ impl<'a> MigrationApplier<'a> {
 
     fn migration_column_iter<M: RocksMigration>(
         db: &Arc<DB>,
-    ) -> Result<impl Iterator<Item = (Box<[u8]>, Box<[u8]>)> + '_> {
+    ) -> Result<impl Iterator<Item = ColumnIteratorItem> + '_> {
         Ok(db
             .iterator_cf(
                 &db.cf_handle(<<M as RocksMigration>::OldDataType as TypedColumn>::NAME)
2 changes: 2 additions & 0 deletions usecase/src/proofs.rs
@@ -35,6 +35,7 @@ impl MaybeProofChecker {
 
 #[async_trait]
 impl ProofChecker for MaybeProofChecker {
+    #[allow(clippy::result_large_err)]
     async fn check_proof(
         &self,
         tree_id_pk: Pubkey,
@@ -67,6 +68,7 @@ impl ProofChecker for MaybeProofChecker {
     }
 }
 
+#[allow(clippy::result_large_err)]
 pub fn validate_proofs(
     mut tree_acc_info: Vec<u8>,
     initial_proofs: Vec<Pubkey>,
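
The new `#[allow]` silences `clippy::result_large_err`, which fires when the `Err` variant is much larger than `Ok`, since the enum occupies the size of its largest variant on every return path. A sketch with an artificially large error type (invented for illustration):

#[allow(clippy::result_large_err)]
fn parse_digit(input: &str) -> Result<u8, [u8; 512]> {
    input.parse::<u8>().map_err(|_| [0u8; 512])
}

fn main() {
    assert!(parse_digit("7").is_ok());
    assert!(parse_digit("x").is_err());
}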
