Commit
chore: fix test warnings
armyhaylenko committed Jan 10, 2025
1 parent c4214fe commit a5ba664
Showing 8 changed files with 74 additions and 84 deletions.
57 changes: 30 additions & 27 deletions nft_ingester/tests/api_tests.rs
@@ -2758,39 +2758,42 @@ mod tests {
let cnt = 100;
let cli = Cli::default();
let (env, generated_assets) = setup::TestEnvironment::create(&cli, cnt, 100).await;
let mut collection_dynamic_details = HashMap::<Pubkey, AssetCompleteDetails>::new();
generated_assets.collections.iter().for_each(|collection| {
env.rocks_env.storage.db.put_cf(
&env.rocks_env
.storage
.db
.cf_handle(AssetCompleteDetails::NAME)
.unwrap(),
collection.collection.value,
AssetCompleteDetails {
pubkey: collection.collection.value,
dynamic_details: Some(AssetDynamicDetails {
env.rocks_env
.storage
.db
.put_cf(
&env.rocks_env
.storage
.db
.cf_handle(AssetCompleteDetails::NAME)
.unwrap(),
collection.collection.value,
AssetCompleteDetails {
pubkey: collection.collection.value,
url: Updated::new(
100,
Some(UpdateVersion::Sequence(100)),
"http://example.com".to_string(),
),
onchain_data: Some(Updated::new(
100,
Some(UpdateVersion::Sequence(100)),
"{
dynamic_details: Some(AssetDynamicDetails {
pubkey: collection.collection.value,
url: Updated::new(
100,
Some(UpdateVersion::Sequence(100)),
"http://example.com".to_string(),
),
onchain_data: Some(Updated::new(
100,
Some(UpdateVersion::Sequence(100)),
"{
\"name\": \"WIF Drop\",
\"symbol\": \"6WIF\"\
}"
.to_string(),
)),
.to_string(),
)),
..Default::default()
}),
..Default::default()
}),
..Default::default()
}
.convert_to_fb_bytes(),
);
}
.convert_to_fb_bytes(),
)
.expect("insert asset complete details");
});
let o = env.rocks_env.storage.asset_offchain_data.put_async(
"http://example.com".to_string(),
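The api_tests.rs hunk is mostly a warning fix for an ignored `Result`: the old code called `put_cf` and dropped its return value, while the new code chains `.expect("insert asset complete details")` onto the call. A minimal, self-contained sketch of that pattern (the `FakeDb` type and its error type are hypothetical stand-ins, not the real rocks_db API):

```rust
// Hypothetical stand-in for a key-value store whose write returns a Result.
struct FakeDb;

impl FakeDb {
    fn put_cf(&self, _cf: &str, _key: [u8; 32], _value: Vec<u8>) -> Result<(), String> {
        Ok(())
    }
}

fn main() {
    let db = FakeDb;
    // Before: `db.put_cf(...);` dropped the Result and triggered the
    // "unused `Result` that must be used" warning.
    // After: the Result is handled explicitly, so a failed write panics the
    // test with a descriptive message instead of being silently ignored.
    db.put_cf("ASSET_COMPLETE_DETAILS", [0u8; 32], vec![1, 2, 3])
        .expect("insert asset complete details");
}
```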
2 changes: 0 additions & 2 deletions nft_ingester/tests/batch_mint_test.rs
@@ -57,12 +57,10 @@ use solana_sdk::transaction::Transaction;
use solana_transaction_status::TransactionStatusMeta;
use solana_transaction_status::{InnerInstruction, InnerInstructions};
use spl_account_compression::ConcurrentMerkleTree;
use std::collections::VecDeque;
use tempfile::TempDir;
use testcontainers::clients::Cli;
use tokio::io::AsyncWriteExt;
use tokio::sync::broadcast;
use tokio::sync::Mutex;
use usecase::proofs::MaybeProofChecker;
use uuid::Uuid;

30 changes: 12 additions & 18 deletions nft_ingester/tests/bubblegum_tests.rs
@@ -9,14 +9,12 @@ mod tests {
use nft_ingester::json_worker::JsonWorker;
use nft_ingester::raydium_price_fetcher::RaydiumTokenPriceFetcher;
use nft_ingester::{
backfiller::DirectBlockParser,
buffer::Buffer,
backfiller::DirectBlockParser, buffer::Buffer,
processors::transaction_based::bubblegum_updates_processor::BubblegumTxProcessor,
transaction_ingester::{self, BackfillTransactionIngester},
transaction_ingester,
};
use rocks_db::columns::offchain_data::OffChainData;
use rocks_db::migrator::MigrationState;
use rocks_db::SlotStorage;
use rocks_db::Storage;
use solana_program::pubkey::Pubkey;
use std::fs::File;
@@ -39,7 +37,7 @@ mod tests {
#[ignore = "FIXME: column families not opened error (probably outdated)"]
async fn test_bubblegum_proofs() {
// write slots we need to parse because backfiller dropped it during raw transactions saving
let slots_to_parse = &[
let _slots_to_parse = &[
242049108, 242049247, 242049255, 242050728, 242050746, 242143893, 242143906, 242239091,
242239108, 242248687, 242560746, 242847845, 242848373, 242853752, 242856151, 242943141,
242943774, 242947970, 242948187, 242949333, 242949940, 242951695, 242952638,
@@ -95,7 +93,7 @@
NATIVE_MINT_PUBKEY.to_string(),
);

let buffer = Arc::new(Buffer::new());
let _buffer = Arc::new(Buffer::new());

let bubblegum_updates_processor = Arc::new(BubblegumTxProcessor::new(
env.rocks_env.storage.clone(),
@@ -106,16 +104,14 @@
bubblegum_updates_processor.clone(),
));

let consumer = Arc::new(DirectBlockParser::new(
let _consumer = Arc::new(DirectBlockParser::new(
tx_ingester.clone(),
env.rocks_env.storage.clone(),
Arc::new(BackfillerMetricsConfig::new()),
));
let producer = rocks_storage.clone();
let _producer = rocks_storage.clone();

let (_shutdown_tx, shutdown_rx) = broadcast::channel::<()>(1);

let none: Option<Arc<Storage>> = None;
let (_shutdown_tx, _shutdown_rx) = broadcast::channel::<()>(1);

let file = File::open("./tests/artifacts/expected_proofs.json").unwrap();
let mut reader = io::BufReader::new(file);
@@ -153,7 +149,7 @@
#[ignore = "FIXME: column families not opened error (probably outdated)"]
async fn test_asset_compression_info() {
// write slots we need to parse because backfiller dropped it during raw transactions saving
let slots_to_parse = &[
let _slots_to_parse = &[
242049108, 242049247, 242049255, 242050728, 242050746, 242143893, 242143906, 242239091,
242239108, 242248687, 242560746, 242847845, 242848373, 242853752, 242856151, 242943141,
242943774, 242947970, 242948187, 242949333, 242949940, 242951695, 242952638,
@@ -209,7 +205,7 @@
NATIVE_MINT_PUBKEY.to_string(),
);

let buffer = Arc::new(Buffer::new());
let _buffer = Arc::new(Buffer::new());

let bubblegum_updates_processor = Arc::new(BubblegumTxProcessor::new(
env.rocks_env.storage.clone(),
@@ -220,16 +216,14 @@
bubblegum_updates_processor.clone(),
));

let consumer = Arc::new(DirectBlockParser::new(
let _consumer = Arc::new(DirectBlockParser::new(
tx_ingester.clone(),
env.rocks_env.storage.clone(),
Arc::new(BackfillerMetricsConfig::new()),
));
let producer = rocks_storage.clone();

let (_shutdown_tx, shutdown_rx) = broadcast::channel::<()>(1);
let _producer = rocks_storage.clone();

let none: Option<Arc<Storage>> = None;
let (_shutdown_tx, _shutdown_rx) = broadcast::channel::<()>(1);

let metadata = OffChainData {
url: Some("https://supersweetcollection.notarealurl/token.json".to_string()),
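The bubblegum_tests.rs changes (and most of decompress.rs below) apply a single pattern: bindings that are set up but never read — `slots_to_parse`, `buffer`, `consumer`, `producer`, `shutdown_rx` — get an underscore prefix, which silences `unused_variables` while keeping the setup code in place. A small sketch of the idea, with made-up slot values:

```rust
fn main() {
    // A binding that is created but never read trips `unused_variables`.
    // Prefixing it with `_` tells the compiler the value is intentionally
    // kept (e.g. for a currently ignored test step) without being read.
    let _slots_to_parse = &[242_049_108u64, 242_049_247, 242_049_255];

    let used_slot = 242_050_728u64;
    println!("processing slot {used_slot}");
}
```

Unlike a bare `_`, an `_name` binding still holds the value until the end of scope, so channels and guards kept this way are not dropped early.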
9 changes: 5 additions & 4 deletions nft_ingester/tests/clean_forks_test.rs
@@ -2,8 +2,7 @@ use bincode::deserialize;
use blockbuster::instruction::InstructionBundle;
use blockbuster::programs::bubblegum::BubblegumInstruction;
use entities::models::{RawBlock, SignatureWithSlot};
use metrics_utils::utils::start_metrics;
use metrics_utils::{MetricState, MetricsTrait};
use metrics_utils::MetricState;
use mpl_bubblegum::types::{BubblegumEventType, LeafSchema, Version};
use mpl_bubblegum::{InstructionName, LeafSchemaEvent};
use nft_ingester::cleaners::fork_cleaner::ForkCleaner;
@@ -15,10 +14,8 @@ use rocks_db::tree_seq::TreeSeqIdx;
use setup::rocks::RocksTestEnvironment;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signature::Signature;
use solana_transaction_status::UiConfirmedBlock;
use spl_account_compression::events::ChangeLogEventV1;
use spl_account_compression::state::PathNode;
use std::str::FromStr;
use tokio::sync::broadcast;

#[cfg(test)]
@@ -28,7 +25,11 @@ use tokio::sync::broadcast;
async fn test_clean_forks() {
use std::collections::{HashMap, HashSet};

use metrics_utils::utils::start_metrics;
use metrics_utils::MetricsTrait;
use rocks_db::{columns::cl_items::ClItemKey, columns::leaf_signatures::LeafSignature};
use solana_transaction_status::UiConfirmedBlock;
use std::str::FromStr;

let RocksTestEnvironment {
storage,
16 changes: 7 additions & 9 deletions nft_ingester/tests/decompress.rs
@@ -13,16 +13,14 @@ mod tests {
use nft_ingester::processors::account_based::mplx_updates_processor::MplxAccountsProcessor;
use nft_ingester::raydium_price_fetcher::RaydiumTokenPriceFetcher;
use nft_ingester::{
backfiller::DirectBlockParser,
buffer::Buffer,
backfiller::DirectBlockParser, buffer::Buffer,
processors::account_based::token_updates_processor::TokenAccountsProcessor,
processors::transaction_based::bubblegum_updates_processor::BubblegumTxProcessor,
transaction_ingester::{self, BackfillTransactionIngester},
transaction_ingester,
};
use rocks_db::batch_savers::BatchSaveStorage;
use rocks_db::columns::offchain_data::OffChainData;
use rocks_db::migrator::MigrationState;
use rocks_db::SlotStorage;
use rocks_db::Storage;
use solana_sdk::pubkey::Pubkey;
use std::fs::File;
@@ -46,10 +44,10 @@
async fn process_bubblegum_transactions(
mutexed_tasks: Arc<Mutex<JoinSet<core::result::Result<(), tokio::task::JoinError>>>>,
env_rocks: Arc<rocks_db::Storage>,
buffer: Arc<Buffer>,
_buffer: Arc<Buffer>,
) {
// write slots we need to parse because backfiller dropped it during raw transactions saving
let slots_to_parse = &[
let _slots_to_parse = &[
242049108, 242049247, 242049255, 242050728, 242050746, 242143893, 242143906, 242239091,
242239108, 242248687, 242560746, 242847845, 242848373, 242853752, 242856151, 242943141,
242943774, 242947970, 242948187, 242949333, 242949940, 242951695, 242952638,
@@ -85,14 +83,14 @@
bubblegum_updates_processor.clone(),
));

let consumer = Arc::new(DirectBlockParser::new(
let _consumer = Arc::new(DirectBlockParser::new(
tx_ingester.clone(),
rocks_storage.clone(),
Arc::new(BackfillerMetricsConfig::new()),
));
let producer = rocks_storage.clone();
let _producer = rocks_storage.clone();

let (_shutdown_tx, shutdown_rx) = broadcast::channel::<()>(1);
let (_shutdown_tx, _shutdown_rx) = broadcast::channel::<()>(1);
}

async fn process_accounts(
4 changes: 2 additions & 2 deletions nft_ingester/tests/gapfiller_tests.rs
@@ -4,7 +4,7 @@ use interface::asset_streaming_and_discovery::{
AsyncError, MockAssetDetailsConsumer, MockRawBlocksConsumer,
};
use metrics_utils::red::RequestErrorDurationMetrics;
use nft_ingester::gapfiller::{process_asset_details_stream, process_raw_blocks_stream};
use nft_ingester::gapfiller::process_asset_details_stream;
use rocks_db::generated::asset_generated::asset as fb;
use rocks_db::{
column::TypedColumn, columns::asset::AssetCompleteDetails, migrator::MigrationState,
@@ -118,7 +118,7 @@ async fn test_process_raw_blocks_stream() {
let mut mock = MockRawBlocksConsumer::new();
mock.expect_get_raw_blocks_consumable_stream_in_range()
.returning(move |_, _| Ok(Box::pin(stream::iter(vec![Ok(block.clone())]))));
let (_, rx) = tokio::sync::broadcast::channel::<()>(1);
let (_, _rx) = tokio::sync::broadcast::channel::<()>(1);

// TODO: this method currently does nothing. uncomment once fixed

24 changes: 12 additions & 12 deletions nft_ingester/tests/sequence_consistent_tests.rs
@@ -1,17 +1,7 @@
#[cfg(test)]
mod tests {
use backfill_rpc::rpc::BackfillRPC;
use metrics_utils::MetricState;
use nft_ingester::backfiller::DirectBlockParser;
use nft_ingester::processors::transaction_based::bubblegum_updates_processor::BubblegumTxProcessor;
use nft_ingester::sequence_consistent::collect_sequences_gaps;
use nft_ingester::transaction_ingester::BackfillTransactionIngester;
use rocks_db::tree_seq::TreeSeqIdx;
use setup::rocks::RocksTestEnvironment;
use std::str::FromStr;
use std::sync::Arc;
use tokio::sync::broadcast;
use usecase::bigtable::BigTableClient;
#[cfg(any(feature = "integration_tests", feature = "rpc_tests"))]
use {rocks_db::tree_seq::TreeSeqIdx, setup::rocks::RocksTestEnvironment, std::str::FromStr};

#[cfg(feature = "integration_tests")]
#[tracing_test::traced_test]
Expand Down Expand Up @@ -68,6 +58,16 @@ mod tests {
#[tracing_test::traced_test]
#[tokio::test]
async fn test_fill_gap() {
use std::sync::Arc;

use backfill_rpc::rpc::BackfillRPC;
use metrics_utils::MetricState;
use nft_ingester::backfiller::DirectBlockParser;
use nft_ingester::processors::transaction_based::bubblegum_updates_processor::BubblegumTxProcessor;
use nft_ingester::sequence_consistent::collect_sequences_gaps;
use nft_ingester::transaction_ingester::BackfillTransactionIngester;
use tokio::sync::broadcast;
use usecase::bigtable::BigTableClient;
// Tests the following gap is filled: Gap found for MRKt4uPZY5ytQzxvAYEkeGAd3A8ir12khRUNfZvNb5U tree. Sequences: [39739, 39742], slots: [305441204, 305441218]
// slot 305441204 also contains seq 39738, which will be in the result set as well

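sequence_consistent_tests.rs takes a different route for `unused_imports`: imports needed only by feature-gated tests are either placed behind the same `#[cfg(...)]` predicate or moved into the body of the gated test, so default builds stop warning about them. A sketch of both variants (the feature names mirror the diff and would have to be declared in Cargo.toml; the test bodies are made up):

```rust
// Imports shared by several gated tests can carry the same cfg predicate...
#[cfg(any(feature = "integration_tests", feature = "rpc_tests"))]
use std::str::FromStr;

#[cfg(feature = "integration_tests")]
#[test]
fn gap_boundaries_parse() {
    // ...or live inside the gated function, where they cannot leak a warning
    // into builds that compile this file without the feature.
    use std::sync::Arc;

    let gap_start = Arc::new(u64::from_str("39739").unwrap());
    assert_eq!(*gap_start, 39_739);
}

#[cfg(feature = "rpc_tests")]
#[test]
fn slot_parses() {
    assert_eq!(u64::from_str("305441204").unwrap(), 305_441_204);
}
```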
16 changes: 6 additions & 10 deletions rocks-db/src/columns/asset.rs
@@ -4037,8 +4037,6 @@ mod tests {

// Generate all permutations of the updates
let permutations = updates.iter().permutations(updates.len());
let mut expected_result: Option<Vec<u8>> = None;

let merge_result = merge_complete_details_fb_simple_raw(
&[],
Some(&original_data_bytes.as_slice()),
Expand All @@ -4051,7 +4049,7 @@ mod tests {
.into_iter(), //perm.into_iter().map(|d| *d),
)
.expect("expected merge to return some value");
expected_result = Some(merge_result);
let expected_result = merge_result;

for perm in permutations {
let merge_result = merge_complete_details_fb_simple_raw(
@@ -4101,13 +4099,11 @@
.value(),
false
);
if let Some(expected) = &expected_result {
assert_eq!(
&merge_result, expected,
"Merge result differs for one permutation {}",
perm_name,
);
}
assert_eq!(
merge_result, expected_result,
"Merge result differs for one permutation {}",
perm_name,
);
}
}
}
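In the asset.rs test, the old code declared `let mut expected_result: Option<Vec<u8>> = None;` and assigned it once before the permutation loop, leaving the initial `None` unread (an `unused_assignments` warning) and forcing an `if let` around the assertion. Binding the baseline directly removes both. A condensed sketch with a stand-in merge function (the real one, `merge_complete_details_fb_simple_raw`, operates on FlatBuffers bytes):

```rust
// `merge` is a hypothetical stand-in; sorting makes it order-independent,
// which is the property the permutation test checks.
fn merge(parts: &[Vec<u8>]) -> Vec<u8> {
    let mut out: Vec<u8> = parts.concat();
    out.sort_unstable();
    out
}

fn main() {
    let updates = vec![vec![1u8, 2], vec![3u8, 4]];

    // Before:
    //   let mut expected_result: Option<Vec<u8>> = None;
    //   ...
    //   expected_result = Some(merge(&updates));
    // The initial `None` was never read, so the compiler warned. Binding the
    // baseline directly avoids that and the `if let` unwrap around the assert.
    let expected_result = merge(&updates);

    // Every permutation of the updates must merge to the same bytes.
    let permutation = [vec![3u8, 4], vec![1u8, 2]];
    let merge_result = merge(&permutation);
    assert_eq!(merge_result, expected_result, "merge result differs");
}
```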
