From 272424c4edb29b3a4ff86262e2938b3809c8b12e Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Thu, 20 Feb 2025 12:44:00 +0200 Subject: [PATCH 1/5] initial commit --- core/lib/basic_types/src/prover_dal.rs | 7 ++--- .../bin/witness_generator/src/artifacts.rs | 3 +++ .../src/rounds/leaf_aggregation/artifacts.rs | 1 + ...0250220091717_add-chain_id-column.down.sql | 27 +++++++++++++++++++ .../20250220091717_add-chain_id-column.up.sql | 27 +++++++++++++++++++ .../crates/lib/prover_fri_types/src/keys.rs | 10 ++++--- 6 files changed, 69 insertions(+), 6 deletions(-) create mode 100644 prover/crates/lib/prover_dal/migrations/20250220091717_add-chain_id-column.down.sql create mode 100644 prover/crates/lib/prover_dal/migrations/20250220091717_add-chain_id-column.up.sql diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index d2af75fe2ff5..87adf51132b2 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,13 +5,12 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use serde::{Deserialize, Serialize}; use strum::{Display, EnumString}; -use crate::{ - basic_fri_types::AggregationRound, protocol_version::ProtocolVersionId, L1BatchNumber, -}; +use crate::{basic_fri_types::AggregationRound, protocol_version::ProtocolVersionId, L1BatchNumber, L2ChainId}; #[derive(Debug, Clone, Copy)] pub struct FriProverJobMetadata { pub id: u32, + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub circuit_id: u8, pub aggregation_round: AggregationRound, @@ -106,6 +105,7 @@ impl From for SocketAddress { #[derive(Debug, Clone)] pub struct LeafAggregationJobMetadata { pub id: u32, + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub circuit_id: u8, pub prover_job_ids_for_proofs: Vec, @@ -114,6 +114,7 @@ pub struct LeafAggregationJobMetadata { #[derive(Debug, Clone)] pub struct NodeAggregationJobMetadata { pub id: u32, + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub circuit_id: u8, pub depth: u16, diff --git a/prover/crates/bin/witness_generator/src/artifacts.rs b/prover/crates/bin/witness_generator/src/artifacts.rs index 0c6044692ddb..22f69e2af0cd 100644 --- a/prover/crates/bin/witness_generator/src/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/artifacts.rs @@ -3,6 +3,7 @@ use std::{sync::Arc, time::Instant}; use async_trait::async_trait; use zksync_object_store::ObjectStore; use zksync_prover_dal::{ConnectionPool, Prover}; +use zksync_types::L2ChainId; #[derive(Debug)] pub struct AggregationBlobUrls { @@ -23,6 +24,7 @@ pub trait ArtifactsManager { ) -> anyhow::Result; async fn save_to_bucket( + chain_id: L2ChainId, job_id: u32, artifacts: Self::OutputArtifacts, object_store: &dyn ObjectStore, @@ -33,6 +35,7 @@ pub trait ArtifactsManager { async fn save_to_database( connection_pool: &ConnectionPool, job_id: u32, + chain_id: L2ChainId, started_at: Instant, blob_urls: Self::BlobUrls, artifacts: Self::OutputArtifacts, diff --git a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs index 796fdb9a2fa1..e16dfe6a17cd 100644 --- a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs @@ -26,6 +26,7 @@ impl ArtifactsManager for LeafAggregation { object_store: &dyn ObjectStore, ) -> anyhow::Result { let key = ClosedFormInputKey { + 
chain_id: metadata.chain_id, block_number: metadata.block_number, circuit_id: metadata.circuit_id, }; diff --git a/prover/crates/lib/prover_dal/migrations/20250220091717_add-chain_id-column.down.sql b/prover/crates/lib/prover_dal/migrations/20250220091717_add-chain_id-column.down.sql new file mode 100644 index 000000000000..6c9bfb4830fd --- /dev/null +++ b/prover/crates/lib/prover_dal/migrations/20250220091717_add-chain_id-column.down.sql @@ -0,0 +1,27 @@ +ALTER TABLE witness_inputs_fri DROP CONSTRAINT IF EXISTS witness_inputs_fri_pkey; +ALTER TABLE recursion_tip_witness_jobs_fri DROP CONSTRAINT IF EXISTS recursion_tip_witness_jobs_fri_pkey; +ALTER TABLE scheduler_witness_jobs_fri DROP CONSTRAINT IF EXISTS scheduler_witness_jobs_fri_pkey; +ALTER TABLE proof_compression_jobs_fri DROP CONSTRAINT IF EXISTS proof_compression_jobs_fri_pkey; + +ALTER TABLE witness_inputs_fri ADD CONSTRAINT witness_inputs_fri_pkey PRIMARY KEY (l1_batch_number); +ALTER TABLE recursion_tip_witness_jobs_fri ADD CONSTRAINT recursion_tip_witness_jobs_fri_pkey PRIMARY KEY (l1_batch_number); +ALTER TABLE scheduler_witness_jobs_fri ADD CONSTRAINT scheduler_witness_jobs_fri_pkey PRIMARY KEY (l1_batch_number); +ALTER TABLE proof_compression_jobs_fri ADD CONSTRAINT proof_compression_jobs_fri_pkey PRIMARY KEY (l1_batch_number); + +DROP INDEX IF EXISTS leaf_aggregation_witness_jobs_fri_composite_index; +CREATE UNIQUE INDEX leaf_aggregation_witness_jobs_fri_composite_index ON leaf_aggregation_witness_jobs_fri (l1_batch_number, circuit_id); + +DROP INDEX IF EXISTS node_aggregation_witness_jobs_fri_composite_index; +CREATE UNIQUE INDEX node_aggregation_witness_jobs_fri_composite_index ON node_aggregation_witness_jobs_fri (l1_batch_number, circuit_id, depth); + +DROP INDEX IF EXISTS prover_jobs_fri_composite_index; +CREATE UNIQUE INDEX prover_jobs_fri_composite_index ON prover_jobs_fri (l1_batch_number, aggregation_round, circuit_id, depth, sequence_number); + +ALTER TABLE witness_inputs_fri DROP COLUMN chain_id; +ALTER TABLE leaf_aggregation_witness_jobs_fri DROP COLUMN chain_id; +ALTER TABLE node_aggregation_witness_jobs_fri DROP COLUMN chain_id; +ALTER TABLE recursion_tip_witness_jobs_fri DROP COLUMN chain_id; +ALTER TABLE scheduler_witness_jobs_fri DROP COLUMN chain_id; +ALTER TABLE proof_compression_jobs_fri DROP COLUMN chain_id; +ALTER TABLE prover_jobs_fri DROP COLUMN chain_id; +ALTER TABLE prover_jobs_fri_archive DROP COLUMN chain_id; diff --git a/prover/crates/lib/prover_dal/migrations/20250220091717_add-chain_id-column.up.sql b/prover/crates/lib/prover_dal/migrations/20250220091717_add-chain_id-column.up.sql new file mode 100644 index 000000000000..49f0560a02a3 --- /dev/null +++ b/prover/crates/lib/prover_dal/migrations/20250220091717_add-chain_id-column.up.sql @@ -0,0 +1,27 @@ +ALTER TABLE witness_inputs_fri ADD COLUMN chain_id INTEGER NOT NULL DEFAULT 0; +ALTER TABLE leaf_aggregation_witness_jobs_fri ADD COLUMN chain_id INTEGER NOT NULL DEFAULT 0; +ALTER TABLE node_aggregation_witness_jobs_fri ADD COLUMN chain_id INTEGER NOT NULL DEFAULT 0; +ALTER TABLE recursion_tip_witness_jobs_fri ADD COLUMN chain_id INTEGER NOT NULL DEFAULT 0; +ALTER TABLE scheduler_witness_jobs_fri ADD COLUMN chain_id INTEGER NOT NULL DEFAULT 0; +ALTER TABLE proof_compression_jobs_fri ADD COLUMN chain_id INTEGER NOT NULL DEFAULT 0; +ALTER TABLE prover_jobs_fri ADD COLUMN chain_id INTEGER NOT NULL DEFAULT 0; +ALTER TABLE prover_jobs_fri_archive ADD COLUMN chain_id INTEGER NOT NULL DEFAULT 0; + +ALTER TABLE witness_inputs_fri DROP CONSTRAINT IF 
EXISTS witness_inputs_fri_pkey; +ALTER TABLE recursion_tip_witness_jobs_fri DROP CONSTRAINT IF EXISTS recursion_tip_witness_jobs_fri_pkey; +ALTER TABLE scheduler_witness_jobs_fri DROP CONSTRAINT IF EXISTS scheduler_witness_jobs_fri_pkey; +ALTER TABLE proof_compression_jobs_fri DROP CONSTRAINT IF EXISTS proof_compression_jobs_fri_pkey; + +ALTER TABLE witness_inputs_fri ADD CONSTRAINT witness_inputs_fri_pkey PRIMARY KEY (l1_batch_number, chain_id); +ALTER TABLE recursion_tip_witness_jobs_fri ADD CONSTRAINT recursion_tip_witness_jobs_fri_pkey PRIMARY KEY (l1_batch_number, chain_id); +ALTER TABLE scheduler_witness_jobs_fri ADD CONSTRAINT scheduler_witness_jobs_fri_pkey PRIMARY KEY (l1_batch_number, chain_id); +ALTER TABLE proof_compression_jobs_fri ADD CONSTRAINT proof_compression_jobs_fri_pkey PRIMARY KEY (l1_batch_number, chain_id); + +DROP INDEX IF EXISTS leaf_aggregation_witness_jobs_fri_composite_index; +CREATE UNIQUE INDEX leaf_aggregation_witness_jobs_fri_composite_index ON leaf_aggregation_witness_jobs_fri (l1_batch_number, chain_id, circuit_id); + +DROP INDEX IF EXISTS node_aggregation_witness_jobs_fri_composite_index; +CREATE UNIQUE INDEX node_aggregation_witness_jobs_fri_composite_index ON node_aggregation_witness_jobs_fri (l1_batch_number, chain_id, circuit_id, depth); + +DROP INDEX IF EXISTS prover_jobs_fri_composite_index; +CREATE UNIQUE INDEX prover_jobs_fri_composite_index ON prover_jobs_fri (l1_batch_number, chain_id, aggregation_round, circuit_id, depth, sequence_number); diff --git a/prover/crates/lib/prover_fri_types/src/keys.rs b/prover/crates/lib/prover_fri_types/src/keys.rs index 26aa679b4a94..5368227247e7 100644 --- a/prover/crates/lib/prover_fri_types/src/keys.rs +++ b/prover/crates/lib/prover_fri_types/src/keys.rs @@ -1,12 +1,11 @@ //! Different key types for object store. -use zksync_types::{ - basic_fri_types::AggregationRound, prover_dal::FriProverJobMetadata, L1BatchNumber, -}; +use zksync_types::{basic_fri_types::AggregationRound, prover_dal::FriProverJobMetadata, L1BatchNumber, L2ChainId}; /// Storage key for a [AggregationWrapper`]. #[derive(Debug, Clone, Copy)] pub struct AggregationsKey { + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub circuit_id: u8, pub depth: u16, @@ -15,6 +14,7 @@ pub struct AggregationsKey { /// Storage key for a [ClosedFormInputWrapper`]. #[derive(Debug, Clone, Copy)] pub struct ClosedFormInputKey { + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub circuit_id: u8, } @@ -22,6 +22,7 @@ pub struct ClosedFormInputKey { /// Storage key for a [`CircuitWrapper`]. #[derive(Debug, Clone, Copy)] pub struct FriCircuitKey { + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub sequence_number: usize, pub circuit_id: u8, @@ -32,6 +33,7 @@ pub struct FriCircuitKey { impl From for FriCircuitKey { fn from(prover_job_metadata: FriProverJobMetadata) -> Self { FriCircuitKey { + chain_id: prover_job_metadata.chain_id, block_number: prover_job_metadata.block_number, sequence_number: prover_job_metadata.sequence_number, circuit_id: prover_job_metadata.circuit_id, @@ -44,6 +46,7 @@ impl From for FriCircuitKey { /// Storage key for a [`ZkSyncCircuit`]. #[derive(Debug, Clone, Copy)] pub struct CircuitKey<'a> { + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub sequence_number: usize, pub circuit_type: &'a str, @@ -53,6 +56,7 @@ pub struct CircuitKey<'a> { /// Storage key for a [`RamPermutationQueueWitness`]. 
#[derive(Debug, Clone, Copy)] pub struct RamPermutationQueueWitnessKey { + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub circuit_subsequence_number: usize, pub is_sorted: bool, From 7cd37175a88bb467fab377b89082e3cc5e9468c8 Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Thu, 20 Feb 2025 12:45:52 +0200 Subject: [PATCH 2/5] fmt --- core/lib/basic_types/src/prover_dal.rs | 5 ++++- prover/crates/lib/prover_fri_types/src/keys.rs | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 87adf51132b2..3a7253e9788d 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -5,7 +5,10 @@ use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use serde::{Deserialize, Serialize}; use strum::{Display, EnumString}; -use crate::{basic_fri_types::AggregationRound, protocol_version::ProtocolVersionId, L1BatchNumber, L2ChainId}; +use crate::{ + basic_fri_types::AggregationRound, protocol_version::ProtocolVersionId, L1BatchNumber, + L2ChainId, +}; #[derive(Debug, Clone, Copy)] pub struct FriProverJobMetadata { diff --git a/prover/crates/lib/prover_fri_types/src/keys.rs b/prover/crates/lib/prover_fri_types/src/keys.rs index 5368227247e7..0e81a6a158bc 100644 --- a/prover/crates/lib/prover_fri_types/src/keys.rs +++ b/prover/crates/lib/prover_fri_types/src/keys.rs @@ -1,6 +1,8 @@ //! Different key types for object store. -use zksync_types::{basic_fri_types::AggregationRound, prover_dal::FriProverJobMetadata, L1BatchNumber, L2ChainId}; +use zksync_types::{ + basic_fri_types::AggregationRound, prover_dal::FriProverJobMetadata, L1BatchNumber, L2ChainId, +}; /// Storage key for a [AggregationWrapper`]. 
#[derive(Debug, Clone, Copy)] From 6f8870eeaface421e1e3acc252dddc8b90e64584 Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Fri, 21 Feb 2025 15:16:41 +0200 Subject: [PATCH 3/5] update queries --- core/lib/basic_types/src/prover_dal.rs | 18 ++- .../bin/witness_generator/src/artifacts.rs | 2 +- .../src/rounds/basic_circuits/artifacts.rs | 24 ++-- .../src/rounds/basic_circuits/mod.rs | 24 ++-- .../src/rounds/basic_circuits/utils.rs | 3 +- .../src/rounds/leaf_aggregation/artifacts.rs | 30 +++-- .../src/rounds/leaf_aggregation/mod.rs | 12 +- .../bin/witness_generator/src/rounds/mod.rs | 3 +- .../src/rounds/node_aggregation/artifacts.rs | 2 +- .../src/rounds/recursion_tip/artifacts.rs | 2 +- .../src/rounds/scheduler/artifacts.rs | 2 +- .../crates/bin/witness_generator/src/utils.rs | 14 ++- ...e00ecf56aa352e618a523d81170c829340fa6.json | 6 + ...57b2e96ecdf74c849252da6d2ba8b573e07f.json} | 10 +- ...c43ffc5f519d35c90c60f89579f0689906df5.json | 18 --- ...3f8256315c8701c5378df6bc76afc5a60431.json} | 13 +- ...a1a753fc47d64ec3bd2f6606ad207405c1ae.json} | 10 +- ...6cd61c9209a36933cd530974972302a33635.json} | 19 ++- ...60bdee99924fce74bbdbcaeda017729549e4.json} | 13 +- ...caff4e47e3fff23812922e62e0aeee2a0a9cf.json | 20 ++++ ...4ed451c5e42603d8a8d4a74a059599f1d8be.json} | 17 ++- ...d15d2d1cde9053851ccd87fa5267d2b54294.json} | 7 +- ...02caedb6cdf6d68d7199b1bc55aadf30558d9.json | 18 +++ ...7626ec21660eb177f569dbb7ff85cd944f36.json} | 7 +- ...4754cfbb41efe371f601b192a5eca327b91a6.json | 59 +++++++++ ...f78ae73d2076852e08fb04cf6373d8fc2cda.json} | 16 ++- ...40aecb253284888866a6ad21fc81578ac7bf.json} | 7 +- ...7884856b76edc0ac5c7d6f059f9cfe05cb630.json | 32 +++++ ...4ede0843e79e52dd933021d4fb3a22619bf2.json} | 20 ++-- ...1e32bbf46ffd5cbb9b5c6ba3a26bedd15e719.json | 26 ---- ...df2d455ebe07ed55c88778c0196919af0764.json} | 5 +- ...26de1f70be7b3a22c8ce691fa5c9e68aec21.json} | 23 ++-- ...71ac47ba4a5b435bc811d9009013d25a34c4.json} | 17 ++- ...ed53bcf607a3bbbad01f1c8b9b2a02bf2c7a.json} | 20 ++-- ...eb98b9f3498e8da9ac4917201d921540eab1f.json | 32 +++++ ...8b87ead36f593488437c6f67da629ca81e4fa.json | 14 --- ...19f63c54585e7b753f9775ba3b3a11a02f4f.json} | 13 +- ...d180f2aefdb9f491e13de36716f24c5a9841c.json | 53 -------- ...d82aa860dab855c80ca4a2ffd5a8dfd93e34.json} | 13 +- ...7271bdd7d2fb05dd622b838733429561b49e1.json | 18 +++ ...e40978966fb2ad8b342907580dd17c0a52779.json | 17 --- ...1106029810403effe9f50c089745810983ac.json} | 7 +- ...a7c24415205aaba4c555118e4a4600024f3b.json} | 10 +- ...968a33e295abca7e0cab303f7111f2051535.json} | 7 +- ...aea08606f3ebd73aaf280ad014789fcc21d62.json | 19 --- ...f438ce6713b077fcc27befe3788c348cf47a8.json | 28 +++++ ...536b9704e8a76de95811cb23e3aa9f2512ade.json | 20 ---- ...326383ac71c031cf2d0a9752595761f5baa7f.json | 17 --- ...23076f287ae98c0df38a6b4092bfb73803566.json | 15 +++ ...96b2adf94a24834deef6e211f94453733b01.json} | 7 +- ...faabc7a6fa24f28bbeda0a3ff0f56c5ede88.json} | 7 +- ...c03167ecf857a3156376a96e7191f6a7317d.json} | 18 ++- ...5457269ca22767d7b44cbc213c2adf86c991c.json | 38 ++++++ ...d9f54db9bf83127458d632f0cad44b075f8bb.json | 19 +++ ...d78cf0e31ba31d9f05658a903edb99317297.json} | 13 +- ...2c0214469edff6fc74965f1ec1366f8b46b8e.json | 20 ---- ...43d01cff938d3aed44c9bb1d864bfdf69e39a.json | 22 ---- ...5e61abe5a1c38736dbfb8107d59c9c00d1ab8.json | 21 ++++ ...ebb2fcf32e99c140c8b929d98437272b8967.json} | 7 +- ...a3a4efb59242fe8bd9856bc5e374a6ea5713.json} | 7 +- ...55b04b640b65f90df56c67acb669552d1f9d7.json | 47 ++++++++ 
...83680276a82b0dd6fd4608ebf06b2c3c5628.json} | 17 ++- ...01426d60c56b526af5ed2e0c602f881c3bbf0.json | 22 ---- ...564b62decfefc5a15de6e9145e43b5a0c1edc.json | 23 ++++ ...89daacb88fe5aaf368c5f81a885821522b99c.json | 41 ------- ...ad31283a47b013262ab4b6e00e58e247de00d.json | 32 +++++ ...369393e0b1744099a92ffee5a02ec4549c81.json} | 16 ++- ...ff2fa2c04525595a8adf5acd4da5ea4d5776.json} | 13 +- ...45a40a0e69c1b1065e05d41d180616ffb6a8a.json | 40 +++++++ ...6b137d7f48a8278a5a5c5568a0ca22df97c0.json} | 7 +- ...065dd5ab428bf30708f6f5ebd83a85a45dfcc.json | 15 +++ ...77313681560f538c6524c54648d6308533744.json | 32 ----- ...7f57a667a685b7331f0355cadb204674d303.json} | 10 +- ...eebc897025512da823469c2feabde43d1c53.json} | 7 +- ...c746fab1aea15f23f8e5e4971ae67944de29.json} | 5 +- ...b9314a2ce0180c20126ad22ddaa20c7c32c2c.json | 34 ------ ...7d3114d651a561bc702cec11afb8515fa6c4.json} | 7 +- ...599bf65cbf2fd7c27a2dcad43e41a0f43cba0.json | 26 ---- ...09f39d096db7e00fd2be9bb1cd64a392d38a3.json | 6 + ...1623b2141aef23292ca75a998b3a306a0eff.json} | 16 ++- ...71e45a108407ee55900557c91691c5f95033c.json | 28 +++++ ...3aaee2b3708f40799a7aeab8e143b0b534c4f.json | 17 --- ...356c541d65e0c84a4d82f86bbf000c21c9e8.json} | 9 +- ...0e284b5d2c0c50f2e4f0f2fc692bd715e040.json} | 13 +- ...4be22aa5d51b753e0fb261ad8fe612f60981.json} | 20 ++-- ...55be95aea85fd8096a06665ba9785257296b.json} | 16 ++- ...93a4eb2ee0284aa89bca1ba958f470a2d6254.json | 14 --- ...45592b6b8b57a323ceea26f515a4fad02684.json} | 7 +- ...fb9c8674344921b98b8b26e4d85994b3d72af.json | 22 ---- ...a030401f899503efb07f139d09fc36a24e86f.json | 18 +++ ...13e9c1aebb64b2985233e00e0f318d1f4aede.json | 26 ++++ ...03bd3db402367bf6b5a0b6049e7b5839daf1.json} | 20 ++-- ...8b41eff6b683e4e9c70216076c2434218ade.json} | 7 +- ...d52a495f9015d135dde998a91578a20a793e.json} | 18 ++- ...912933a28b1222b272801a8f83254323af33.json} | 5 +- .../src/fri_proof_compressor_dal.rs | 84 +++++++++---- .../lib/prover_dal/src/fri_prover_dal.rs | 113 +++++++++++++----- .../src/fri_witness_generator_dal/basic.rs | 50 +++++--- .../src/fri_witness_generator_dal/leaf.rs | 48 +++++--- .../src/fri_witness_generator_dal/mod.rs | 35 ++++-- .../src/fri_witness_generator_dal/node.rs | 51 +++++--- .../recursion_tip.rs | 49 +++++--- .../fri_witness_generator_dal/scheduler.rs | 60 ++++++---- prover/crates/lib/prover_fri_types/src/lib.rs | 28 +++-- 104 files changed, 1333 insertions(+), 822 deletions(-) rename prover/crates/lib/prover_dal/.sqlx/{query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json => query-0ab4466aff0db31ef4f2d516b8cf57b2e96ecdf74c849252da6d2ba8b573e07f.json} (51%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-1080b95755b3047747a9fabc7c7c43ffc5f519d35c90c60f89579f0689906df5.json rename prover/crates/lib/prover_dal/.sqlx/{query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json => query-126df7c8433ac85a618726bd78f43f8256315c8701c5378df6bc76afc5a60431.json} (84%) rename prover/crates/lib/prover_dal/.sqlx/{query-77b893afe090161b6730843c1e1b2c74a6e4e1deca98005908d6660355e6b32d.json => query-13555e987d680e3d5efb91808974a1a753fc47d64ec3bd2f6606ad207405c1ae.json} (79%) rename prover/crates/lib/prover_dal/.sqlx/{query-a4ea8082b242abe65ebec11ec5799264e05a9c849b0f0122a54c87793c279786.json => query-13f2ca492e4da965a72d8db3ddd46cd61c9209a36933cd530974972302a33635.json} (64%) rename prover/crates/lib/prover_dal/.sqlx/{query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json => 
query-18c0e2ec79bb15ad51a3fda973f360bdee99924fce74bbdbcaeda017729549e4.json} (85%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-199b98ebc224abb97495f092168caff4e47e3fff23812922e62e0aeee2a0a9cf.json rename prover/crates/lib/prover_dal/.sqlx/{query-5dbc0878c04098fecec7f3188838eba7fd36afc4442bcd6ad5a537f982990d45.json => query-1b7b63bf909a50abbdea1f74e5534ed451c5e42603d8a8d4a74a059599f1d8be.json} (61%) rename prover/crates/lib/prover_dal/.sqlx/{query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json => query-1df8dc2e9f03c979552c6faa34c3d15d2d1cde9053851ccd87fa5267d2b54294.json} (67%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-1e651681a0286f81d6d1b1b7da002caedb6cdf6d68d7199b1bc55aadf30558d9.json rename prover/crates/lib/prover_dal/.sqlx/{query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json => query-205a2cf51706af934f5a85b4074a7626ec21660eb177f569dbb7ff85cd944f36.json} (64%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-21bdb15e0547dd290a3706b8bbc4754cfbb41efe371f601b192a5eca327b91a6.json rename prover/crates/lib/prover_dal/.sqlx/{query-68d3a8da3df92d9449b18e60f4adc0c370c90a2720c207bb8ccc664ef76191ec.json => query-225a5e09fed79546f7c3ad5e526ff78ae73d2076852e08fb04cf6373d8fc2cda.json} (78%) rename prover/crates/lib/prover_dal/.sqlx/{query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json => query-23993a95d51cc49ce02523cac10a40aecb253284888866a6ad21fc81578ac7bf.json} (63%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-274a6a2c184ab96a5f223e9d3327884856b76edc0ac5c7d6f059f9cfe05cb630.json rename prover/crates/lib/prover_dal/.sqlx/{query-ec6615c2780582eb77a10a2f5bbf875b993980e988524534ccc00fe37c0f729d.json => query-290056d4cc494a6b90bcf7c9ad4f4ede0843e79e52dd933021d4fb3a22619bf2.json} (78%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-2e4ffccfa0d458323e8be70d1621e32bbf46ffd5cbb9b5c6ba3a26bedd15e719.json rename prover/crates/lib/prover_dal/.sqlx/{query-614ebacf57b8db848b669e90a42b5ff6f230c9cf323a684a4498d720698f70a5.json => query-2eeb33037a3c17a8a4e002274a99df2d455ebe07ed55c88778c0196919af0764.json} (66%) rename prover/crates/lib/prover_dal/.sqlx/{query-b9aaf5fe4d0261f7b05c8601a96b3027b4c2ce405d3bcc9821440408a394d7f5.json => query-310e95f1848ad42df67aa62ca2e126de1f70be7b3a22c8ce691fa5c9e68aec21.json} (65%) rename prover/crates/lib/prover_dal/.sqlx/{query-984f8d4f24f3493e1e3bda1fe9270224164d1d136d339d736776770e91731a2d.json => query-32dbb82468b949f4c0edc8ae0b3f71ac47ba4a5b435bc811d9009013d25a34c4.json} (65%) rename prover/crates/lib/prover_dal/.sqlx/{query-af9b41e0bd8cd9f7871fd4877a0865b2a69e8ded0425d23e16a0ed00627d5a50.json => query-3be009f4e1feafc5638189d4a0aced53bcf607a3bbbad01f1c8b9b2a02bf2c7a.json} (79%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-3e99760dac3d2eeae0e02a84c94eb98b9f3498e8da9ac4917201d921540eab1f.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json rename prover/crates/lib/prover_dal/.sqlx/{query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json => query-3ee4f6b6fbf2d488298b065a92cc19f63c54585e7b753f9775ba3b3a11a02f4f.json} (84%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-40f8baee895cfd7c2c455d9fb4bd180f2aefdb9f491e13de36716f24c5a9841c.json rename prover/crates/lib/prover_dal/.sqlx/{query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json => 
query-42e254e5f9a10b097cb62c22dbe8d82aa860dab855c80ca4a2ffd5a8dfd93e34.json} (87%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-461ac149f80918da708217353c47271bdd7d2fb05dd622b838733429561b49e1.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-48b57a279bfff34d44d1f5a6501e40978966fb2ad8b342907580dd17c0a52779.json rename prover/crates/lib/prover_dal/.sqlx/{query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json => query-53556f2bc913affcd23a1a20b4081106029810403effe9f50c089745810983ac.json} (63%) rename prover/crates/lib/prover_dal/.sqlx/{query-b6aff8e1c6130e59dc35de817d9f37d4d343e8ba8de0a5e098caf7298ef49ec1.json => query-537e048773806bab2a8147195621a7c24415205aaba4c555118e4a4600024f3b.json} (83%) rename prover/crates/lib/prover_dal/.sqlx/{query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json => query-5487ba4c3230a4a55f335341c2be968a33e295abca7e0cab303f7111f2051535.json} (73%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-5743f03acecf1a0ab106ed0aec8aea08606f3ebd73aaf280ad014789fcc21d62.json create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-5ecaba6831e399a895ee0389b4df438ce6713b077fcc27befe3788c348cf47a8.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-5f18efe2fb3a16cdf3c23379f36536b9704e8a76de95811cb23e3aa9f2512ade.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-5fb1d2533749420889f9cf3b5ec326383ac71c031cf2d0a9752595761f5baa7f.json create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-6458bd8bbc33e3ea7026c3e465623076f287ae98c0df38a6b4092bfb73803566.json rename prover/crates/lib/prover_dal/.sqlx/{query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json => query-67fbf2ef2f642d16ad0b30bb72f796b2adf94a24834deef6e211f94453733b01.json} (71%) rename prover/crates/lib/prover_dal/.sqlx/{query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json => query-69995ea9aa62d8a40790544bb4ecfaabc7a6fa24f28bbeda0a3ff0f56c5ede88.json} (67%) rename prover/crates/lib/prover_dal/.sqlx/{query-1f4179bf130e570d1c2dd8349bfc63c68aacff76d4484ca91636b03c2d37feb4.json => query-6b0d05bfe7159e139d781aa91bdcc03167ecf857a3156376a96e7191f6a7317d.json} (78%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-6fa101609da545860501ac32a5b5457269ca22767d7b44cbc213c2adf86c991c.json create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-70910da49648c5b713f1b719c71d9f54db9bf83127458d632f0cad44b075f8bb.json rename prover/crates/lib/prover_dal/.sqlx/{query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json => query-70cf63542465ca962c87e0050dc7d78cf0e31ba31d9f05658a903edb99317297.json} (85%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-7238cfe04ba59967fe5589665ad2c0214469edff6fc74965f1ec1366f8b46b8e.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-749d18c0fdae16ce0ed4e3c23e543d01cff938d3aed44c9bb1d864bfdf69e39a.json create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-7b844346d8089335d22640a5fa25e61abe5a1c38736dbfb8107d59c9c00d1ab8.json rename prover/crates/lib/prover_dal/.sqlx/{query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json => query-7c2f89d77f85ecfffe90ffdb74d3ebb2fcf32e99c140c8b929d98437272b8967.json} (75%) rename prover/crates/lib/prover_dal/.sqlx/{query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json => query-821f308ebb20c978ce4cb210e8f8a3a4efb59242fe8bd9856bc5e374a6ea5713.json} (64%) create mode 100644 
prover/crates/lib/prover_dal/.sqlx/query-850b7c3a275800f3332e4c7bc4a55b04b640b65f90df56c67acb669552d1f9d7.json rename prover/crates/lib/prover_dal/.sqlx/{query-edd8ebb555f2e4f3ad95421e1770a6058790361f2ddb57e263582378d956d00d.json => query-8b4943d619bcac5cdbd5cfaa5eab83680276a82b0dd6fd4608ebf06b2c3c5628.json} (61%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-8c5aba6ce584c1671f2d65fb47701426d60c56b526af5ed2e0c602f881c3bbf0.json create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-8ca1700b100ab1bcac58f3b0118564b62decfefc5a15de6e9145e43b5a0c1edc.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-9a0b335e830ac2bc6039e7be656ad31283a47b013262ab4b6e00e58e247de00d.json rename prover/crates/lib/prover_dal/.sqlx/{query-aca775bc700169e31fee6eb7c4869b6fb812f6d43a91fb8ac68c9fedb17d1a50.json => query-9d9d6edbbef1c4eadc7a6abcf2a7369393e0b1744099a92ffee5a02ec4549c81.json} (81%) rename prover/crates/lib/prover_dal/.sqlx/{query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json => query-9da8ce97366d62ae7381a2b4eb98ff2fa2c04525595a8adf5acd4da5ea4d5776.json} (86%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a.json rename prover/crates/lib/prover_dal/.sqlx/{query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json => query-a388d34c454c4b7645c6e5bf07026b137d7f48a8278a5a5c5568a0ca22df97c0.json} (62%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-a5ee1c40af6ce4fc9962757b771065dd5ab428bf30708f6f5ebd83a85a45dfcc.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-a6eb7a1f1aa2f6f5d90fbe3b8c777313681560f538c6524c54648d6308533744.json rename prover/crates/lib/prover_dal/.sqlx/{query-4f7e7de116b1f1b93cb2300c7ec4abe94d783d252c497d848ba2da3b798add19.json => query-a98cdf6d25e6deb679cad15807c87f57a667a685b7331f0355cadb204674d303.json} (66%) rename prover/crates/lib/prover_dal/.sqlx/{query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json => query-ac1c23dc72b6a6490ecb03a0da87eebc897025512da823469c2feabde43d1c53.json} (73%) rename prover/crates/lib/prover_dal/.sqlx/{query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json => query-accb1c0e9f765d4d63eb84dc1058c746fab1aea15f23f8e5e4971ae67944de29.json} (77%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-c6d02dc9cb9908a57c79729c759b9314a2ce0180c20126ad22ddaa20c7c32c2c.json rename prover/crates/lib/prover_dal/.sqlx/{query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json => query-c73801e578ad07a7c848f9f05d6c7d3114d651a561bc702cec11afb8515fa6c4.json} (67%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-cb453f0677b92539747e175e796599bf65cbf2fd7c27a2dcad43e41a0f43cba0.json rename prover/crates/lib/prover_dal/.sqlx/{query-33f99b7ae36cea4676163f99a39980377c082766efd98569904632504db05095.json => query-d42583c5e2921bf43dbbdbf0a93d1623b2141aef23292ca75a998b3a306a0eff.json} (78%) create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c.json delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-d72c7beede026491202626a897e3aaee2b3708f40799a7aeab8e143b0b534c4f.json rename prover/crates/lib/prover_dal/.sqlx/{query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json => 
query-d88fde0e42305ff648c718c742fb356c541d65e0c84a4d82f86bbf000c21c9e8.json} (50%) rename prover/crates/lib/prover_dal/.sqlx/{query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json => query-dbb28977b65a0496fdcebd5e0ad00e284b5d2c0c50f2e4f0f2fc692bd715e040.json} (90%) rename prover/crates/lib/prover_dal/.sqlx/{query-5dd5a6ad9adb97a2c2fe8cec66682b12e9e81d7cc188369025b0d209b526b327.json => query-dfabe5ecdd9217abb7e75e400c3f4be22aa5d51b753e0fb261ad8fe612f60981.json} (83%) rename prover/crates/lib/prover_dal/.sqlx/{query-779b75eef7433715bc5dea7f8e7bdc4424ac6384c5ad7ef6c08911529f05419a.json => query-e2a0b31993ba4aed24abbbd7de1e55be95aea85fd8096a06665ba9785257296b.json} (78%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json rename prover/crates/lib/prover_dal/.sqlx/{query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json => query-e62f61185e6353ace2050287a45645592b6b8b57a323ceea26f515a4fad02684.json} (63%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-e65d9d8389b60f48468561984f0fb9c8674344921b98b8b26e4d85994b3d72af.json create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-e91577193c3da5c4c60ca3f972fa030401f899503efb07f139d09fc36a24e86f.json create mode 100644 prover/crates/lib/prover_dal/.sqlx/query-ea5d124ba45e81c0aa1d4d86ebf13e9c1aebb64b2985233e00e0f318d1f4aede.json rename prover/crates/lib/prover_dal/.sqlx/{query-7a41908eac57403ddc6785ff0a646830fcb6b1fdcfcbbd9a1b19d1a4b1e7a978.json => query-eca875e2159b5852d14fbbda302503bd3db402367bf6b5a0b6049e7b5839daf1.json} (79%) rename prover/crates/lib/prover_dal/.sqlx/{query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json => query-ecfba455b3af61411965022c89ca8b41eff6b683e4e9c70216076c2434218ade.json} (62%) rename prover/crates/lib/prover_dal/.sqlx/{query-bf25e4d0f807f618a822c83f3ff42d00d76a304106cbda611864cc64cdcd339a.json => query-ee643991981290bfc43ead208194d52a495f9015d135dde998a91578a20a793e.json} (78%) rename prover/crates/lib/prover_dal/.sqlx/{query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json => query-f76966131ebc989eb255e444fea7912933a28b1222b272801a8f83254323af33.json} (62%) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 3a7253e9788d..5d1738fbf9da 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -13,8 +13,9 @@ use crate::{ #[derive(Debug, Clone, Copy)] pub struct FriProverJobMetadata { pub id: u32, - pub chain_id: L2ChainId, pub block_number: L1BatchNumber, + + pub chain_id: L2ChainId, pub circuit_id: u8, pub aggregation_round: AggregationRound, pub sequence_number: usize, @@ -76,6 +77,7 @@ impl JobCountStatistics { #[derive(Debug)] pub struct StuckJobs { pub id: u64, + pub chain_id: L2ChainId, pub status: String, pub attempts: u64, pub circuit_id: Option, @@ -108,8 +110,8 @@ impl From for SocketAddress { #[derive(Debug, Clone)] pub struct LeafAggregationJobMetadata { pub id: u32, - pub chain_id: L2ChainId, pub block_number: L1BatchNumber, + pub chain_id: L2ChainId, pub circuit_id: u8, pub prover_job_ids_for_proofs: Vec, } @@ -117,8 +119,8 @@ pub struct LeafAggregationJobMetadata { #[derive(Debug, Clone)] pub struct NodeAggregationJobMetadata { pub id: u32, - pub chain_id: L2ChainId, pub block_number: L1BatchNumber, + pub chain_id: L2ChainId, pub circuit_id: u8, pub depth: u16, pub prover_job_ids_for_proofs: Vec, @@ -216,6 +218,7 @@ pub enum WitnessJobStatus { 
#[derive(Debug)] pub struct WitnessJobInfo { pub block_number: L1BatchNumber, + pub chain_id: L2ChainId, pub created_at: DateTime, pub updated_at: DateTime, pub status: WitnessJobStatus, @@ -226,6 +229,7 @@ pub struct WitnessJobInfo { pub struct ProverJobInfo { pub id: u32, pub block_number: L1BatchNumber, + pub chain_id: L2ChainId, pub circuit_type: String, pub position: JobPosition, pub input_length: u64, @@ -273,6 +277,7 @@ impl FromStr for GpuProverInstanceStatus { pub struct ProverJobFriInfo { pub id: u32, pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub circuit_id: u32, pub circuit_blob_url: String, pub aggregation_round: AggregationRound, @@ -299,6 +304,7 @@ pub trait Stallable { #[derive(Debug, Clone)] pub struct BasicWitnessGeneratorJobInfo { pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub witness_inputs_blob_url: Option, pub attempts: u32, pub status: WitnessJobStatus, @@ -325,6 +331,7 @@ impl Stallable for BasicWitnessGeneratorJobInfo { pub struct LeafWitnessGeneratorJobInfo { pub id: u32, pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub circuit_id: u32, pub closed_form_inputs_blob_url: Option, pub attempts: u32, @@ -353,6 +360,7 @@ impl Stallable for LeafWitnessGeneratorJobInfo { pub struct NodeWitnessGeneratorJobInfo { pub id: u32, pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub circuit_id: u32, pub depth: u32, pub status: WitnessJobStatus, @@ -381,6 +389,7 @@ impl Stallable for NodeWitnessGeneratorJobInfo { #[derive(Debug, Clone)] pub struct RecursionTipWitnessGeneratorJobInfo { pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub status: WitnessJobStatus, pub attempts: u32, pub processing_started_at: Option, @@ -406,6 +415,7 @@ impl Stallable for RecursionTipWitnessGeneratorJobInfo { #[derive(Debug, Clone)] pub struct SchedulerWitnessGeneratorJobInfo { pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub scheduler_partial_input_blob_url: String, pub status: WitnessJobStatus, pub processing_started_at: Option, @@ -447,6 +457,7 @@ pub enum ProofCompressionJobStatus { #[derive(Debug, Clone)] pub struct ProofCompressionJobInfo { pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub attempts: u32, pub status: ProofCompressionJobStatus, pub fri_proof_blob_url: Option, @@ -464,6 +475,7 @@ pub struct ProofCompressionJobInfo { #[derive(Debug, Clone)] pub struct ProofGenerationTime { pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub time_taken: NaiveTime, pub created_at: NaiveDateTime, } diff --git a/prover/crates/bin/witness_generator/src/artifacts.rs b/prover/crates/bin/witness_generator/src/artifacts.rs index 22f69e2af0cd..68af362a15e0 100644 --- a/prover/crates/bin/witness_generator/src/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/artifacts.rs @@ -24,8 +24,8 @@ pub trait ArtifactsManager { ) -> anyhow::Result; async fn save_to_bucket( - chain_id: L2ChainId, job_id: u32, + chain_id: L2ChainId, artifacts: Self::OutputArtifacts, object_store: &dyn ObjectStore, shall_save_to_public_bucket: bool, diff --git a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/artifacts.rs index cac45511a798..30cc68fd269e 100644 --- a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/artifacts.rs @@ -1,11 +1,12 @@ use std::{sync::Arc, time::Instant}; +use std::iter::chain; use 
async_trait::async_trait; use zksync_object_store::ObjectStore; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::AuxOutputWitnessWrapper; use zksync_prover_fri_utils::get_recursive_layer_circuit_id_for_base_layer; -use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber, L2ChainId}; use crate::{ artifacts::ArtifactsManager, @@ -18,7 +19,7 @@ use crate::{ #[async_trait] impl ArtifactsManager for BasicCircuits { - type InputMetadata = L1BatchNumber; + type InputMetadata = (L2ChainId, L1BatchNumber); type InputArtifacts = BasicWitnessGeneratorJob; type OutputArtifacts = BasicCircuitArtifacts; type BlobUrls = String; @@ -27,9 +28,10 @@ impl ArtifactsManager for BasicCircuits { metadata: &Self::InputMetadata, object_store: &dyn ObjectStore, ) -> anyhow::Result { - let l1_batch_number = *metadata; - let data = object_store.get(l1_batch_number).await.unwrap(); + let (chain_id, l1_batch_number) = *metadata; + let data = object_store.get((chain_id, l1_batch_number)).await.unwrap(); Ok(BasicWitnessGeneratorJob { + chain_id, block_number: l1_batch_number, data, }) @@ -37,6 +39,7 @@ impl ArtifactsManager for BasicCircuits { async fn save_to_bucket( job_id: u32, + chain_id: L2ChainId, artifacts: Self::OutputArtifacts, object_store: &dyn ObjectStore, shall_save_to_public_bucket: bool, @@ -47,18 +50,18 @@ impl ArtifactsManager for BasicCircuits { if shall_save_to_public_bucket { public_blob_store.as_deref() .expect("public_object_store shall not be empty while running with shall_save_to_public_bucket config") - .put(L1BatchNumber(job_id), &aux_output_witness_wrapper) + .put((chain_id, L1BatchNumber(job_id)), &aux_output_witness_wrapper) .await .unwrap(); } object_store - .put(L1BatchNumber(job_id), &aux_output_witness_wrapper) + .put((chain_id, L1BatchNumber(job_id)), &aux_output_witness_wrapper) .await .unwrap(); let wrapper = SchedulerPartialInputWrapper(artifacts.scheduler_witness); object_store - .put(L1BatchNumber(job_id), &wrapper) + .put((chain_id, L1BatchNumber(job_id)), &wrapper) .await .unwrap() } @@ -67,6 +70,7 @@ impl ArtifactsManager for BasicCircuits { async fn save_to_database( connection_pool: &ConnectionPool, job_id: u32, + chain_id: L2ChainId, started_at: Instant, blob_urls: String, artifacts: Self::OutputArtifacts, @@ -81,12 +85,13 @@ impl ArtifactsManager for BasicCircuits { .expect("failed to get database transaction"); let protocol_version_id = transaction .fri_basic_witness_generator_dal() - .protocol_version_for_l1_batch(L1BatchNumber(job_id)) + .protocol_version_for_l1_batch_and_chain(L1BatchNumber(job_id), chain_id) .await; transaction .fri_prover_jobs_dal() .insert_prover_jobs( L1BatchNumber(job_id), + chain_id, artifacts.circuit_urls, AggregationRound::BasicCircuits, 0, @@ -97,6 +102,7 @@ impl ArtifactsManager for BasicCircuits { create_aggregation_jobs( &mut transaction, L1BatchNumber(job_id), + chain_id, &artifacts.queue_urls, &blob_urls, get_recursive_layer_circuit_id_for_base_layer, @@ -107,7 +113,7 @@ impl ArtifactsManager for BasicCircuits { transaction .fri_basic_witness_generator_dal() - .mark_witness_job_as_successful(L1BatchNumber(job_id), started_at.elapsed()) + .mark_witness_job_as_successful(L1BatchNumber(job_id), chain_id, started_at.elapsed()) .await; transaction .commit() diff --git a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs index 
439ffde543e4..b4b87821d688 100644 --- a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs @@ -13,9 +13,7 @@ use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::get_current_pod_name; use zksync_prover_interface::inputs::WitnessInputData; use zksync_prover_keystore::keystore::Keystore; -use zksync_types::{ - basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber, -}; +use zksync_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber, L2ChainId}; use crate::{ artifacts::ArtifactsManager, @@ -40,6 +38,7 @@ pub struct BasicCircuitArtifacts { #[derive(Clone)] pub struct BasicWitnessGeneratorJob { + pub(super) chain_id: L2ChainId, pub(super) block_number: L1BatchNumber, pub(super) data: WitnessInputData, } @@ -60,7 +59,7 @@ pub struct BasicCircuits; #[async_trait] impl JobManager for BasicCircuits { type Job = BasicWitnessGeneratorJob; - type Metadata = L1BatchNumber; + type Metadata = (L2ChainId, L1BatchNumber); const ROUND: AggregationRound = AggregationRound::BasicCircuits; const SERVICE_NAME: &'static str = "fri_basic_circuit_witness_generator"; @@ -72,13 +71,15 @@ impl JobManager for BasicCircuits { started_at: Instant, ) -> anyhow::Result { let BasicWitnessGeneratorJob { + chain_id, block_number, data: job, } = job; tracing::info!( - "Starting witness generation of type {:?} for block {}", + "Starting witness generation of type {:?} for chain {} block {}", AggregationRound::BasicCircuits, + chain_id.as_u64(), block_number.0 ); @@ -87,7 +88,8 @@ impl JobManager for BasicCircuits { WITNESS_GENERATOR_METRICS.witness_generation_time[&AggregationRound::BasicCircuits.into()] .observe(started_at.elapsed()); tracing::info!( - "Witness generation for block {} is complete in {:?}", + "Witness generation for chain {} block {} is complete in {:?}", + chain_id.as_u64(), block_number.0, started_at.elapsed() ); @@ -101,11 +103,11 @@ impl JobManager for BasicCircuits { } async fn prepare_job( - metadata: L1BatchNumber, + metadata: (L2ChainId, L1BatchNumber), object_store: &dyn ObjectStore, _keystore: Keystore, ) -> anyhow::Result { - tracing::info!("Processing FRI basic witness-gen for block {}", metadata.0); + tracing::info!("Processing FRI basic witness-gen for chain {} block {}", metadata.0.as_u64(), metadata.1); let started_at = Instant::now(); let job = Self::get_artifacts(&metadata, object_store).await?; @@ -118,9 +120,9 @@ impl JobManager for BasicCircuits { async fn get_metadata( connection_pool: ConnectionPool, protocol_version: ProtocolSemanticVersion, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let pod_name = get_current_pod_name(); - if let Some(l1_batch_number) = connection_pool + if let Some((chain_id, l1_batch_number)) = connection_pool .connection() .await .unwrap() @@ -128,7 +130,7 @@ impl JobManager for BasicCircuits { .get_next_basic_circuit_witness_job(protocol_version, &pod_name) .await { - Ok(Some((l1_batch_number.0, l1_batch_number))) + Ok(Some((chain_id, l1_batch_number.0, l1_batch_number))) } else { Ok(None) } diff --git a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/utils.rs b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/utils.rs index 690bb8820981..a3718a86ac91 100644 --- a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/utils.rs +++ b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/utils.rs @@ -25,7 
+25,7 @@ use zksync_prover_dal::{Connection, Prover, ProverDal}; use zksync_prover_fri_types::keys::ClosedFormInputKey; use zksync_prover_interface::inputs::WitnessInputData; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber}; +use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber, L2ChainId}; use crate::{ precalculated_merkle_paths_provider::PrecalculatedMerklePathsProvider, @@ -267,6 +267,7 @@ async fn save_recursion_queue( pub(crate) async fn create_aggregation_jobs( connection: &mut Connection<'_, Prover>, block_number: L1BatchNumber, + chain_id: L2ChainId, closed_form_inputs_and_urls: &Vec<(u8, String, usize)>, scheduler_partial_input_blob_url: &str, base_layer_to_recursive_layer_circuit_id: fn(u8) -> u8, diff --git a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs index e16dfe6a17cd..3412224d86fd 100644 --- a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs @@ -5,7 +5,7 @@ use zksync_object_store::ObjectStore; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::keys::{AggregationsKey, ClosedFormInputKey}; use zksync_prover_fri_utils::get_recursive_layer_circuit_id_for_base_layer; -use zksync_types::{basic_fri_types::AggregationRound, prover_dal::LeafAggregationJobMetadata}; +use zksync_types::{basic_fri_types::AggregationRound, L2ChainId, prover_dal::LeafAggregationJobMetadata}; use crate::{ artifacts::{AggregationBlobUrls, ArtifactsManager}, @@ -41,10 +41,11 @@ impl ArtifactsManager for LeafAggregation { #[tracing::instrument( skip_all, - fields(l1_batch = %artifacts.block_number, circuit_id = %artifacts.circuit_id) + fields(chain = %artifacts.chain_id.as_u64(), l1_batch = %artifacts.block_number, circuit_id = %artifacts.circuit_id) )] async fn save_to_bucket( _job_id: u32, + _chain_id: L2ChainId, artifacts: Self::OutputArtifacts, object_store: &dyn ObjectStore, _shall_save_to_public_bucket: bool, @@ -52,6 +53,7 @@ impl ArtifactsManager for LeafAggregation { ) -> AggregationBlobUrls { let started_at = Instant::now(); let key = AggregationsKey { + chain_id: artifacts.chain_id, block_number: artifacts.block_number, circuit_id: get_recursive_layer_circuit_id_for_base_layer(artifacts.circuit_id), depth: 0, @@ -72,18 +74,20 @@ impl ArtifactsManager for LeafAggregation { #[tracing::instrument( skip_all, - fields(l1_batch = %job_id) + fields(chain = %artifacts.chain_id.as_u64(), l1_batch = %job_id) )] async fn save_to_database( connection_pool: &ConnectionPool, job_id: u32, + _chain_id: L2ChainId, started_at: Instant, blob_urls: AggregationBlobUrls, artifacts: Self::OutputArtifacts, ) -> anyhow::Result<()> { tracing::info!( - "Updating database for job_id {}, block {} with circuit id {}", + "Updating database for job_id {}, chain {}, block {} with circuit id {}", job_id, + artifacts.chain_id.as_u64(), artifacts.block_number.0, artifacts.circuit_id, ); @@ -93,12 +97,13 @@ impl ArtifactsManager for LeafAggregation { let number_of_dependent_jobs = blob_urls.circuit_ids_and_urls.len(); let protocol_version_id = transaction .fri_basic_witness_generator_dal() - .protocol_version_for_l1_batch(artifacts.block_number) + .protocol_version_for_l1_batch_and_chain(artifacts.block_number, artifacts.chain_id) .await; tracing::info!( - "Inserting {} prover 
jobs for job_id {}, block {} with circuit id {}", + "Inserting {} prover jobs for job_id {}, chain {}, block {} with circuit id {}", blob_urls.circuit_ids_and_urls.len(), job_id, + artifacts.chain_id.as_u64(), artifacts.block_number.0, artifacts.circuit_id, ); @@ -106,6 +111,7 @@ impl ArtifactsManager for LeafAggregation { .fri_prover_jobs_dal() .insert_prover_jobs( artifacts.block_number, + artifacts.chain_id, blob_urls.circuit_ids_and_urls, AggregationRound::LeafAggregation, 0, @@ -113,8 +119,9 @@ impl ArtifactsManager for LeafAggregation { ) .await; tracing::info!( - "Updating node aggregation jobs url for job_id {}, block {} with circuit id {}", + "Updating node aggregation jobs url for job_id {}, chain {}, block {} with circuit id {}", job_id, + artifacts.chain_id.as_u64(), artifacts.block_number.0, artifacts.circuit_id, ); @@ -122,6 +129,7 @@ impl ArtifactsManager for LeafAggregation { .fri_node_witness_generator_dal() .update_node_aggregation_jobs_url( artifacts.block_number, + artifacts.chain_id, get_recursive_layer_circuit_id_for_base_layer(artifacts.circuit_id), number_of_dependent_jobs, 0, @@ -129,19 +137,21 @@ impl ArtifactsManager for LeafAggregation { ) .await; tracing::info!( - "Marking leaf aggregation job as successful for job id {}, block {} with circuit id {}", + "Marking leaf aggregation job as successful for job id {}, chain {}, block {} with circuit id {}", job_id, + artifacts.chain_id.as_u64(), artifacts.block_number.0, artifacts.circuit_id, ); transaction .fri_leaf_witness_generator_dal() - .mark_leaf_aggregation_as_successful(job_id, started_at.elapsed()) + .mark_leaf_aggregation_as_successful(job_id, artifacts.chain_id, started_at.elapsed()) .await; tracing::info!( - "Committing transaction for job_id {}, block {} with circuit id {}", + "Committing transaction for job_id {}, chain {}, block {} with circuit id {}", job_id, + artifacts.chain_id.as_u64() as i32, artifacts.block_number.0, artifacts.circuit_id, ); diff --git a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs index e7988bbd0af3..9c2f7b50d195 100644 --- a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs @@ -24,10 +24,7 @@ use zksync_prover_fri_types::{ get_current_pod_name, FriProofWrapper, }; use zksync_prover_keystore::keystore::Keystore; -use zksync_types::{ - basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, - prover_dal::LeafAggregationJobMetadata, L1BatchNumber, -}; +use zksync_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::LeafAggregationJobMetadata, L1BatchNumber, L2ChainId}; use crate::{ artifacts::ArtifactsManager, @@ -43,6 +40,7 @@ mod artifacts; pub struct LeafAggregationWitnessGeneratorJob { pub(crate) circuit_id: u8, + pub(crate) chain_id: L2ChainId, pub(crate) block_number: L1BatchNumber, pub(crate) closed_form_inputs: ClosedFormInputWrapper, pub(crate) proofs_ids: Vec, @@ -53,6 +51,7 @@ pub struct LeafAggregationWitnessGeneratorJob { #[derive(Clone)] pub struct LeafAggregationArtifacts { circuit_id: u8, + pub chain_id: L2ChainId, block_number: L1BatchNumber, pub aggregations: Vec<(u64, RecursionQueueSimulator)>, pub circuit_ids_and_urls: Vec<(u8, String)>, @@ -223,6 +222,7 @@ impl JobManager for LeafAggregation { Ok(LeafAggregationWitnessGeneratorJob { circuit_id: metadata.circuit_id, + chain_id: metadata.chain_id, 
block_number: metadata.block_number, closed_form_inputs: closed_form_input, proofs_ids: metadata.prover_job_ids_for_proofs, @@ -234,7 +234,7 @@ impl JobManager for LeafAggregation { async fn get_metadata( connection_pool: ConnectionPool, protocol_version: ProtocolSemanticVersion, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let pod_name = get_current_pod_name(); let Some(metadata) = connection_pool .connection() @@ -245,6 +245,6 @@ impl JobManager for LeafAggregation { else { return Ok(None); }; - Ok(Some((metadata.id, metadata))) + Ok(Some((metadata.chain_id, metadata.id, metadata))) } } diff --git a/prover/crates/bin/witness_generator/src/rounds/mod.rs b/prover/crates/bin/witness_generator/src/rounds/mod.rs index 6da6f5bb393d..502c0ce04fd2 100644 --- a/prover/crates/bin/witness_generator/src/rounds/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/mod.rs @@ -24,6 +24,7 @@ pub use node_aggregation::NodeAggregation; pub use recursion_tip::RecursionTip; pub use scheduler::Scheduler; use zksync_types::basic_fri_types::AggregationRound; +use zksync_types::L2ChainId; use crate::metrics::WITNESS_GENERATOR_METRICS; @@ -51,7 +52,7 @@ pub trait JobManager: ArtifactsManager { async fn get_metadata( connection_pool: ConnectionPool, protocol_version: ProtocolSemanticVersion, - ) -> anyhow::Result>; + ) -> anyhow::Result>; } #[derive(Debug)] diff --git a/prover/crates/bin/witness_generator/src/rounds/node_aggregation/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/node_aggregation/artifacts.rs index 9a774b656740..ff7eb5f53f12 100644 --- a/prover/crates/bin/witness_generator/src/rounds/node_aggregation/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/node_aggregation/artifacts.rs @@ -90,7 +90,7 @@ impl ArtifactsManager for NodeAggregation { let dependent_jobs = blob_urls.circuit_ids_and_urls.len(); let protocol_version_id = transaction .fri_basic_witness_generator_dal() - .protocol_version_for_l1_batch(artifacts.block_number) + .protocol_version_for_l1_batch_and_chain(artifacts.block_number) .await; match artifacts.next_aggregations.len() > 1 { true => { diff --git a/prover/crates/bin/witness_generator/src/rounds/recursion_tip/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/recursion_tip/artifacts.rs index be838507dc10..8f85e4c9c649 100644 --- a/prover/crates/bin/witness_generator/src/rounds/recursion_tip/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/recursion_tip/artifacts.rs @@ -109,7 +109,7 @@ impl ArtifactsManager for RecursionTip { let mut transaction = prover_connection.start_transaction().await?; let protocol_version_id = transaction .fri_basic_witness_generator_dal() - .protocol_version_for_l1_batch(L1BatchNumber(job_id)) + .protocol_version_for_l1_batch_and_chain(L1BatchNumber(job_id)) .await; transaction .fri_prover_jobs_dal() diff --git a/prover/crates/bin/witness_generator/src/rounds/scheduler/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/scheduler/artifacts.rs index a8c6fc1741a0..1745adb26f50 100644 --- a/prover/crates/bin/witness_generator/src/rounds/scheduler/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/scheduler/artifacts.rs @@ -63,7 +63,7 @@ impl ArtifactsManager for Scheduler { let mut transaction = prover_connection.start_transaction().await?; let protocol_version_id = transaction .fri_basic_witness_generator_dal() - .protocol_version_for_l1_batch(L1BatchNumber(job_id)) + .protocol_version_for_l1_batch_and_chain(L1BatchNumber(job_id)) .await; transaction 
.fri_prover_jobs_dal() diff --git a/prover/crates/bin/witness_generator/src/utils.rs b/prover/crates/bin/witness_generator/src/utils.rs index ea631f19cd85..73e402fd55de 100644 --- a/prover/crates/bin/witness_generator/src/utils.rs +++ b/prover/crates/bin/witness_generator/src/utils.rs @@ -24,7 +24,7 @@ use zksync_prover_fri_types::{ keys::{AggregationsKey, ClosedFormInputKey, FriCircuitKey}, CircuitWrapper, FriProofWrapper, }; -use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber, ProtocolVersionId, U256}; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber, L2ChainId, ProtocolVersionId, U256}; // Creates a temporary file with the serialized KZG setup usable by `zkevm_test_harness` functions. pub(crate) static KZG_TRUSTED_SETUP_FILE: Lazy = Lazy::new(|| { @@ -62,10 +62,11 @@ impl StoredObject for ClosedFormInputWrapper { fn encode_key(key: Self::Key<'_>) -> String { let ClosedFormInputKey { + chain_id, block_number, circuit_id, } = key; - format!("closed_form_inputs_{block_number}_{circuit_id}.bin") + format!("closed_form_inputs_{}_{block_number}_{circuit_id}.bin", chain_id.as_u64()) } serialize_using_bincode!(); @@ -80,11 +81,12 @@ impl StoredObject for AggregationWrapper { fn encode_key(key: Self::Key<'_>) -> String { let AggregationsKey { + chain_id, block_number, circuit_id, depth, } = key; - format!("aggregations_{block_number}_{circuit_id}_{depth}.bin") + format!("aggregations_{}_{block_number}_{circuit_id}_{depth}.bin", chain_id.as_u64()) } serialize_using_bincode!(); @@ -101,10 +103,10 @@ pub struct SchedulerPartialInputWrapper( impl StoredObject for SchedulerPartialInputWrapper { const BUCKET: Bucket = Bucket::SchedulerWitnessJobsFri; - type Key<'a> = L1BatchNumber; + type Key<'a> = (L2ChainId, L1BatchNumber); fn encode_key(key: Self::Key<'_>) -> String { - format!("scheduler_witness_{key}.bin") + format!("scheduler_witness_{}_{}.bin", key.0.as_u64(), key.1) } serialize_using_bincode!(); @@ -115,6 +117,7 @@ impl StoredObject for SchedulerPartialInputWrapper { fields(l1_batch = %block_number, circuit_id = %circuit.numeric_circuit_type()) )] pub async fn save_circuit( + chain_id: L2ChainId, block_number: L1BatchNumber, circuit: ZkSyncBaseLayerCircuit, sequence_number: usize, @@ -122,6 +125,7 @@ pub async fn save_circuit( ) -> (u8, String) { let circuit_id = circuit.numeric_circuit_type(); let circuit_key = FriCircuitKey { + chain_id, block_number, sequence_number, circuit_id, diff --git a/prover/crates/lib/prover_dal/.sqlx/query-07c259d907ac144cfcfb11e0f5de00ecf56aa352e618a523d81170c829340fa6.json b/prover/crates/lib/prover_dal/.sqlx/query-07c259d907ac144cfcfb11e0f5de00ecf56aa352e618a523d81170c829340fa6.json index 0497cd69846c..4592125d48c9 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-07c259d907ac144cfcfb11e0f5de00ecf56aa352e618a523d81170c829340fa6.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-07c259d907ac144cfcfb11e0f5de00ecf56aa352e618a523d81170c829340fa6.json @@ -82,6 +82,11 @@ "ordinal": 15, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 16, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { @@ -107,6 +112,7 @@ true, true, false, + false, false ] }, diff --git a/prover/crates/lib/prover_dal/.sqlx/query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json b/prover/crates/lib/prover_dal/.sqlx/query-0ab4466aff0db31ef4f2d516b8cf57b2e96ecdf74c849252da6d2ba8b573e07f.json similarity index 51% rename from 
prover/crates/lib/prover_dal/.sqlx/query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json rename to prover/crates/lib/prover_dal/.sqlx/query-0ab4466aff0db31ef4f2d516b8cf57b2e96ecdf74c849252da6d2ba8b573e07f.json index 6f83fd55064d..ae9a8347fe17 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-0ab4466aff0db31ef4f2d516b8cf57b2e96ecdf74c849252da6d2ba8b573e07f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n MIN(l1_batch_number) AS \"l1_batch_number!\",\n circuit_id,\n aggregation_round\n FROM\n prover_jobs_fri\n WHERE\n status IN ('queued', 'in_gpu_proof', 'in_progress', 'failed')\n GROUP BY\n circuit_id,\n aggregation_round\n ", + "query": "\n SELECT\n MIN(l1_batch_number) AS \"l1_batch_number!\",\n circuit_id,\n aggregation_round,\n chain_id\n FROM\n prover_jobs_fri\n WHERE\n status IN ('queued', 'in_gpu_proof', 'in_progress', 'failed')\n GROUP BY\n circuit_id,\n chain_id,\n aggregation_round\n ", "describe": { "columns": [ { @@ -17,6 +17,11 @@ "ordinal": 2, "name": "aggregation_round", "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { @@ -25,8 +30,9 @@ "nullable": [ null, false, + false, false ] }, - "hash": "ce5779092feb8a3d3e2c5e395783e67f08f2ead5f55bfb6594e50346bf9cf2ef" + "hash": "0ab4466aff0db31ef4f2d516b8cf57b2e96ecdf74c849252da6d2ba8b573e07f" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-1080b95755b3047747a9fabc7c7c43ffc5f519d35c90c60f89579f0689906df5.json b/prover/crates/lib/prover_dal/.sqlx/query-1080b95755b3047747a9fabc7c7c43ffc5f519d35c90c60f89579f0689906df5.json deleted file mode 100644 index 4b1cb2fe98a9..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-1080b95755b3047747a9fabc7c7c43ffc5f519d35c90c60f89579f0689906df5.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n proof_compression_jobs_fri (\n l1_batch_number,\n fri_proof_blob_url,\n status,\n created_at,\n updated_at,\n protocol_version,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, NOW(), NOW(), $4, $5)\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text", - "Text", - "Int4", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "1080b95755b3047747a9fabc7c7c43ffc5f519d35c90c60f89579f0689906df5" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json b/prover/crates/lib/prover_dal/.sqlx/query-126df7c8433ac85a618726bd78f43f8256315c8701c5378df6bc76afc5a60431.json similarity index 84% rename from prover/crates/lib/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json rename to prover/crates/lib/prover_dal/.sqlx/query-126df7c8433ac85a618726bd78f43f8256315c8701c5378df6bc76afc5a60431.json index 663feac60f78..2bf8eeee4d92 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-126df7c8433ac85a618726bd78f43f8256315c8701c5378df6bc76afc5a60431.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n scheduler_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n SELECT\n *\n FROM\n scheduler_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -67,11 +67,17 
@@ "ordinal": 12, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -87,8 +93,9 @@ true, true, false, + false, false ] }, - "hash": "285d0ff850fa5c9af36564fcb14dd8547a1ad20492ec37c3c0be5639e5d49952" + "hash": "126df7c8433ac85a618726bd78f43f8256315c8701c5378df6bc76afc5a60431" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-77b893afe090161b6730843c1e1b2c74a6e4e1deca98005908d6660355e6b32d.json b/prover/crates/lib/prover_dal/.sqlx/query-13555e987d680e3d5efb91808974a1a753fc47d64ec3bd2f6606ad207405c1ae.json similarity index 79% rename from prover/crates/lib/prover_dal/.sqlx/query-77b893afe090161b6730843c1e1b2c74a6e4e1deca98005908d6660355e6b32d.json rename to prover/crates/lib/prover_dal/.sqlx/query-13555e987d680e3d5efb91808974a1a753fc47d64ec3bd2f6606ad207405c1ae.json index 0d1cb41af4fe..c080c8a05281 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-77b893afe090161b6730843c1e1b2c74a6e4e1deca98005908d6660355e6b32d.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-13555e987d680e3d5efb91808974a1a753fc47d64ec3bd2f6606ad207405c1ae.json @@ -1,10 +1,15 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $2\n WHERE\n l1_batch_number = (\n SELECT\n l1_batch_number\n FROM\n witness_inputs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $3\n ORDER BY\n priority DESC,\n created_at ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n witness_inputs_fri.l1_batch_number\n ", + "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $2\n WHERE\n l1_batch_number = (\n SELECT\n l1_batch_number\n FROM\n witness_inputs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $3\n ORDER BY\n priority DESC,\n created_at ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n witness_inputs_fri.chain_id,\n witness_inputs_fri.l1_batch_number\n ", "describe": { "columns": [ { "ordinal": 0, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 1, "name": "l1_batch_number", "type_info": "Int8" } @@ -17,8 +22,9 @@ ] }, "nullable": [ + false, false ] }, - "hash": "77b893afe090161b6730843c1e1b2c74a6e4e1deca98005908d6660355e6b32d" + "hash": "13555e987d680e3d5efb91808974a1a753fc47d64ec3bd2f6606ad207405c1ae" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-a4ea8082b242abe65ebec11ec5799264e05a9c849b0f0122a54c87793c279786.json b/prover/crates/lib/prover_dal/.sqlx/query-13f2ca492e4da965a72d8db3ddd46cd61c9209a36933cd530974972302a33635.json similarity index 64% rename from prover/crates/lib/prover_dal/.sqlx/query-a4ea8082b242abe65ebec11ec5799264e05a9c849b0f0122a54c87793c279786.json rename to prover/crates/lib/prover_dal/.sqlx/query-13f2ca492e4da965a72d8db3ddd46cd61c9209a36933cd530974972302a33635.json index 783a4ef9e1b8..051de1395e61 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-a4ea8082b242abe65ebec11ec5799264e05a9c849b0f0122a54c87793c279786.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-13f2ca492e4da965a72d8db3ddd46cd61c9209a36933cd530974972302a33635.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'queued',\n error = 'Manually requeued',\n attempts = 
2,\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n id,\n status,\n attempts,\n circuit_id,\n error,\n picked_by\n ", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'queued',\n error = 'Manually requeued',\n attempts = 2,\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n AND attempts >= $3\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n id,\n chain_id,\n status,\n attempts,\n circuit_id,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,26 +10,31 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "circuit_id", "type_info": "Int2" }, { - "ordinal": 4, + "ordinal": 5, "name": "error", "type_info": "Text" }, { - "ordinal": 5, + "ordinal": 6, "name": "picked_by", "type_info": "Text" } @@ -37,6 +42,7 @@ "parameters": { "Left": [ "Int8", + "Int4", "Int2" ] }, @@ -45,9 +51,10 @@ false, false, false, + false, true, true ] }, - "hash": "a4ea8082b242abe65ebec11ec5799264e05a9c849b0f0122a54c87793c279786" + "hash": "13f2ca492e4da965a72d8db3ddd46cd61c9209a36933cd530974972302a33635" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json b/prover/crates/lib/prover_dal/.sqlx/query-18c0e2ec79bb15ad51a3fda973f360bdee99924fce74bbdbcaeda017729549e4.json similarity index 85% rename from prover/crates/lib/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json rename to prover/crates/lib/prover_dal/.sqlx/query-18c0e2ec79bb15ad51a3fda973f360bdee99924fce74bbdbcaeda017729549e4.json index b69dc95b6f6a..3e78081fc8a1 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-18c0e2ec79bb15ad51a3fda973f360bdee99924fce74bbdbcaeda017729549e4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n proof_compression_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n SELECT\n *\n FROM\n proof_compression_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -72,11 +72,17 @@ "ordinal": 13, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 14, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -93,8 +99,9 @@ true, true, false, + false, false ] }, - "hash": "2ab2f83b273c5aa88c1eefc8f70a8ea23052f714cd74c1d28ae1203ce8f0eaa9" + "hash": "18c0e2ec79bb15ad51a3fda973f360bdee99924fce74bbdbcaeda017729549e4" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-199b98ebc224abb97495f092168caff4e47e3fff23812922e62e0aeee2a0a9cf.json b/prover/crates/lib/prover_dal/.sqlx/query-199b98ebc224abb97495f092168caff4e47e3fff23812922e62e0aeee2a0a9cf.json new file mode 100644 index 000000000000..b1eb6c71a00d --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-199b98ebc224abb97495f092168caff4e47e3fff23812922e62e0aeee2a0a9cf.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n leaf_aggregation_witness_jobs_fri (\n l1_batch_number,\n chain_id,\n circuit_id,\n 
closed_form_inputs_blob_url,\n number_of_basic_circuits,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, 'waiting_for_proofs', NOW(), NOW(), $7)\n ON CONFLICT (l1_batch_number, chain_id, circuit_id) DO\n UPDATE\n SET\n updated_at = NOW()\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Int2", + "Text", + "Int4", + "Int4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "199b98ebc224abb97495f092168caff4e47e3fff23812922e62e0aeee2a0a9cf" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-5dbc0878c04098fecec7f3188838eba7fd36afc4442bcd6ad5a537f982990d45.json b/prover/crates/lib/prover_dal/.sqlx/query-1b7b63bf909a50abbdea1f74e5534ed451c5e42603d8a8d4a74a059599f1d8be.json similarity index 61% rename from prover/crates/lib/prover_dal/.sqlx/query-5dbc0878c04098fecec7f3188838eba7fd36afc4442bcd6ad5a537f982990d45.json rename to prover/crates/lib/prover_dal/.sqlx/query-1b7b63bf909a50abbdea1f74e5534ed451c5e42603d8a8d4a74a059599f1d8be.json index 6458e1df0873..0ff37e47ed8e 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-5dbc0878c04098fecec7f3188838eba7fd36afc4442bcd6ad5a537f982990d45.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-1b7b63bf909a50abbdea1f74e5534ed451c5e42603d8a8d4a74a059599f1d8be.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n AND attempts >= $3\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n chain_id,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,21 +10,26 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "error", "type_info": "Text" }, { - "ordinal": 4, + "ordinal": 5, "name": "picked_by", "type_info": "Text" } @@ -32,6 +37,7 @@ "parameters": { "Left": [ "Int8", + "Int4", "Int2" ] }, @@ -39,9 +45,10 @@ false, false, false, + false, true, true ] }, - "hash": "5dbc0878c04098fecec7f3188838eba7fd36afc4442bcd6ad5a537f982990d45" + "hash": "1b7b63bf909a50abbdea1f74e5534ed451c5e42603d8a8d4a74a059599f1d8be" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json b/prover/crates/lib/prover_dal/.sqlx/query-1df8dc2e9f03c979552c6faa34c3d15d2d1cde9053851ccd87fa5267d2b54294.json similarity index 67% rename from prover/crates/lib/prover_dal/.sqlx/query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json rename to prover/crates/lib/prover_dal/.sqlx/query-1df8dc2e9f03c979552c6faa34c3d15d2d1cde9053851ccd87fa5267d2b54294.json index 422036ebb115..79bf84e6fdf6 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b.json +++ 
b/prover/crates/lib/prover_dal/.sqlx/query-1df8dc2e9f03c979552c6faa34c3d15d2d1cde9053851ccd87fa5267d2b54294.json @@ -1,15 +1,16 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'failed',\n error = $1,\n updated_at = NOW()\n WHERE\n id = $2\n AND status != 'successful'\n ", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'failed',\n error = $1,\n updated_at = NOW()\n WHERE\n id = $2\n AND chain_id = $3\n AND status != 'successful'\n ", "describe": { "columns": [], "parameters": { "Left": [ "Text", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "e8066db420e075306235f728d57567878f347bdaf36294e9b24ee9c0aa1e861b" + "hash": "1df8dc2e9f03c979552c6faa34c3d15d2d1cde9053851ccd87fa5267d2b54294" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-1e651681a0286f81d6d1b1b7da002caedb6cdf6d68d7199b1bc55aadf30558d9.json b/prover/crates/lib/prover_dal/.sqlx/query-1e651681a0286f81d6d1b1b7da002caedb6cdf6d68d7199b1bc55aadf30558d9.json new file mode 100644 index 000000000000..67c7e6ff7f1a --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-1e651681a0286f81d6d1b1b7da002caedb6cdf6d68d7199b1bc55aadf30558d9.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n recursion_tip_witness_jobs_fri (\n l1_batch_number,\n chain_id,\n status,\n number_of_final_node_jobs,\n protocol_version,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, 'waiting_for_proofs', $3, $4, NOW(), NOW(), $5)\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET\n updated_at = NOW()\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Int4", + "Int4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "1e651681a0286f81d6d1b1b7da002caedb6cdf6d68d7199b1bc55aadf30558d9" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json b/prover/crates/lib/prover_dal/.sqlx/query-205a2cf51706af934f5a85b4074a7626ec21660eb177f569dbb7ff85cd944f36.json similarity index 64% rename from prover/crates/lib/prover_dal/.sqlx/query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json rename to prover/crates/lib/prover_dal/.sqlx/query-205a2cf51706af934f5a85b4074a7626ec21660eb177f569dbb7ff85cd944f36.json index 724c01ea6c53..e547dbe92438 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-205a2cf51706af934f5a85b4074a7626ec21660eb177f569dbb7ff85cd944f36.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n attempts\n FROM\n proof_compression_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n SELECT\n attempts\n FROM\n proof_compression_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -11,12 +11,13 @@ ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ false ] }, - "hash": "b367ecb1ebee86ec598c4079591f8c12deeca6b8843fe3869cc2b02b30da5de6" + "hash": "205a2cf51706af934f5a85b4074a7626ec21660eb177f569dbb7ff85cd944f36" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-21bdb15e0547dd290a3706b8bbc4754cfbb41efe371f601b192a5eca327b91a6.json b/prover/crates/lib/prover_dal/.sqlx/query-21bdb15e0547dd290a3706b8bbc4754cfbb41efe371f601b192a5eca327b91a6.json new file mode 100644 index 000000000000..09a277483567 --- /dev/null +++ 
b/prover/crates/lib/prover_dal/.sqlx/query-21bdb15e0547dd290a3706b8bbc4754cfbb41efe371f601b192a5eca327b91a6.json @@ -0,0 +1,59 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (id, chain_id) IN (\n SELECT\n id,\n chain_id\n FROM\n prover_jobs_fri\n WHERE\n (\n status IN ('in_progress', 'in_gpu_proof')\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n id,\n chain_id,\n status,\n attempts,\n circuit_id,\n error,\n picked_by\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "attempts", + "type_info": "Int2" + }, + { + "ordinal": 4, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "error", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "picked_by", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Interval", + "Int2" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + true, + true + ] + }, + "hash": "21bdb15e0547dd290a3706b8bbc4754cfbb41efe371f601b192a5eca327b91a6" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-68d3a8da3df92d9449b18e60f4adc0c370c90a2720c207bb8ccc664ef76191ec.json b/prover/crates/lib/prover_dal/.sqlx/query-225a5e09fed79546f7c3ad5e526ff78ae73d2076852e08fb04cf6373d8fc2cda.json similarity index 78% rename from prover/crates/lib/prover_dal/.sqlx/query-68d3a8da3df92d9449b18e60f4adc0c370c90a2720c207bb8ccc664ef76191ec.json rename to prover/crates/lib/prover_dal/.sqlx/query-225a5e09fed79546f7c3ad5e526ff78ae73d2076852e08fb04cf6373d8fc2cda.json index d57f0bb7efb9..14d924f2452e 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-68d3a8da3df92d9449b18e60f4adc0c370c90a2720c207bb8ccc664ef76191ec.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-225a5e09fed79546f7c3ad5e526ff78ae73d2076852e08fb04cf6373d8fc2cda.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n l1_batch_number,\n chain_id,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,21 +10,26 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "error", "type_info": "Text" }, { - "ordinal": 4, + "ordinal": 5, "name": "picked_by", "type_info": "Text" } @@ -39,9 +44,10 @@ false, false, false, + false, true, true ] }, - "hash": 
"68d3a8da3df92d9449b18e60f4adc0c370c90a2720c207bb8ccc664ef76191ec" + "hash": "225a5e09fed79546f7c3ad5e526ff78ae73d2076852e08fb04cf6373d8fc2cda" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json b/prover/crates/lib/prover_dal/.sqlx/query-23993a95d51cc49ce02523cac10a40aecb253284888866a6ad21fc81578ac7bf.json similarity index 63% rename from prover/crates/lib/prover_dal/.sqlx/query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json rename to prover/crates/lib/prover_dal/.sqlx/query-23993a95d51cc49ce02523cac10a40aecb253284888866a6ad21fc81578ac7bf.json index 967f02586d7b..d06245ff01d5 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-23993a95d51cc49ce02523cac10a40aecb253284888866a6ad21fc81578ac7bf.json @@ -1,14 +1,15 @@ { "db_name": "PostgreSQL", - "query": "\n DELETE FROM proof_compression_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n DELETE FROM proof_compression_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "2df88abaae97b6f916b104375bd7249ec09c0daf4368021788207370213a6d94" + "hash": "23993a95d51cc49ce02523cac10a40aecb253284888866a6ad21fc81578ac7bf" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-274a6a2c184ab96a5f223e9d3327884856b76edc0ac5c7d6f059f9cfe05cb630.json b/prover/crates/lib/prover_dal/.sqlx/query-274a6a2c184ab96a5f223e9d3327884856b76edc0ac5c7d6f059f9cfe05cb630.json new file mode 100644 index 000000000000..30705014357d --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-274a6a2c184ab96a5f223e9d3327884856b76edc0ac5c7d6f059f9cfe05cb630.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = $1,\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n (l1_batch_number, chain_id) = (\n SELECT\n l1_batch_number,\n chain_id\n FROM\n proof_compression_jobs_fri\n WHERE\n status = $2\n AND protocol_version = $4\n AND protocol_version_patch = $5\n ORDER BY\n priority DESC,\n created_at ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n proof_compression_jobs_fri.l1_batch_number,\n proof_compression_jobs_fri.chain_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Int4", + "Int4" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "274a6a2c184ab96a5f223e9d3327884856b76edc0ac5c7d6f059f9cfe05cb630" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-ec6615c2780582eb77a10a2f5bbf875b993980e988524534ccc00fe37c0f729d.json b/prover/crates/lib/prover_dal/.sqlx/query-290056d4cc494a6b90bcf7c9ad4f4ede0843e79e52dd933021d4fb3a22619bf2.json similarity index 78% rename from prover/crates/lib/prover_dal/.sqlx/query-ec6615c2780582eb77a10a2f5bbf875b993980e988524534ccc00fe37c0f729d.json rename to prover/crates/lib/prover_dal/.sqlx/query-290056d4cc494a6b90bcf7c9ad4f4ede0843e79e52dd933021d4fb3a22619bf2.json index db2d78afaa70..1c9353f31ed1 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-ec6615c2780582eb77a10a2f5bbf875b993980e988524534ccc00fe37c0f729d.json +++ 
b/prover/crates/lib/prover_dal/.sqlx/query-290056d4cc494a6b90bcf7c9ad4f4ede0843e79e52dd933021d4fb3a22619bf2.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n AND aggregation_round != $4\n ORDER BY\n priority DESC,\n created_at ASC,\n aggregation_round ASC,\n circuit_id ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n AND aggregation_round != $4\n ORDER BY\n priority DESC,\n created_at ASC,\n aggregation_round ASC,\n circuit_id ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", "describe": { "columns": [ { @@ -10,31 +10,36 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "l1_batch_number", "type_info": "Int8" }, { - "ordinal": 2, + "ordinal": 3, "name": "circuit_id", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "aggregation_round", "type_info": "Int2" }, { - "ordinal": 4, + "ordinal": 5, "name": "sequence_number", "type_info": "Int4" }, { - "ordinal": 5, + "ordinal": 6, "name": "depth", "type_info": "Int4" }, { - "ordinal": 6, + "ordinal": 7, "name": "is_node_final_proof", "type_info": "Bool" } @@ -54,8 +59,9 @@ false, false, false, + false, false ] }, - "hash": "ec6615c2780582eb77a10a2f5bbf875b993980e988524534ccc00fe37c0f729d" + "hash": "290056d4cc494a6b90bcf7c9ad4f4ede0843e79e52dd933021d4fb3a22619bf2" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-2e4ffccfa0d458323e8be70d1621e32bbf46ffd5cbb9b5c6ba3a26bedd15e719.json b/prover/crates/lib/prover_dal/.sqlx/query-2e4ffccfa0d458323e8be70d1621e32bbf46ffd5cbb9b5c6ba3a26bedd15e719.json deleted file mode 100644 index 8b41cf5a4f01..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-2e4ffccfa0d458323e8be70d1621e32bbf46ffd5cbb9b5c6ba3a26bedd15e719.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = $1,\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n l1_batch_number = (\n SELECT\n l1_batch_number\n FROM\n proof_compression_jobs_fri\n WHERE\n status = $2\n AND protocol_version = $4\n AND protocol_version_patch = $5\n ORDER BY\n priority DESC,\n created_at ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n proof_compression_jobs_fri.l1_batch_number\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Text", - 
"Text", - "Text", - "Int4", - "Int4" - ] - }, - "nullable": [ - false - ] - }, - "hash": "2e4ffccfa0d458323e8be70d1621e32bbf46ffd5cbb9b5c6ba3a26bedd15e719" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-614ebacf57b8db848b669e90a42b5ff6f230c9cf323a684a4498d720698f70a5.json b/prover/crates/lib/prover_dal/.sqlx/query-2eeb33037a3c17a8a4e002274a99df2d455ebe07ed55c88778c0196919af0764.json similarity index 66% rename from prover/crates/lib/prover_dal/.sqlx/query-614ebacf57b8db848b669e90a42b5ff6f230c9cf323a684a4498d720698f70a5.json rename to prover/crates/lib/prover_dal/.sqlx/query-2eeb33037a3c17a8a4e002274a99df2d455ebe07ed55c88778c0196919af0764.json index 696334b225b6..75ec0c284faa 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-614ebacf57b8db848b669e90a42b5ff6f230c9cf323a684a4498d720698f70a5.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-2eeb33037a3c17a8a4e002274a99df2d455ebe07ed55c88778c0196919af0764.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = 'queued',\n error = 'Manually requeued',\n attempts = 2,\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = 'queued',\n error = 'Manually requeued',\n attempts = 2,\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n AND attempts >= $3\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -27,6 +27,7 @@ "parameters": { "Left": [ "Int8", + "Int4", "Int2" ] }, @@ -37,5 +38,5 @@ true ] }, - "hash": "614ebacf57b8db848b669e90a42b5ff6f230c9cf323a684a4498d720698f70a5" + "hash": "2eeb33037a3c17a8a4e002274a99df2d455ebe07ed55c88778c0196919af0764" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-b9aaf5fe4d0261f7b05c8601a96b3027b4c2ce405d3bcc9821440408a394d7f5.json b/prover/crates/lib/prover_dal/.sqlx/query-310e95f1848ad42df67aa62ca2e126de1f70be7b3a22c8ce691fa5c9e68aec21.json similarity index 65% rename from prover/crates/lib/prover_dal/.sqlx/query-b9aaf5fe4d0261f7b05c8601a96b3027b4c2ce405d3bcc9821440408a394d7f5.json rename to prover/crates/lib/prover_dal/.sqlx/query-310e95f1848ad42df67aa62ca2e126de1f70be7b3a22c8ce691fa5c9e68aec21.json index a7b33f51cb1e..15e0af727e60 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-b9aaf5fe4d0261f7b05c8601a96b3027b4c2ce405d3bcc9821440408a394d7f5.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-310e95f1848ad42df67aa62ca2e126de1f70be7b3a22c8ce691fa5c9e68aec21.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1,\n proof_blob_url = $2\n WHERE\n id = $3\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1,\n proof_blob_url = $2\n WHERE\n id = $3\n AND chain_id = $4\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n 
prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", "describe": { "columns": [ { @@ -10,31 +10,36 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "l1_batch_number", "type_info": "Int8" }, { - "ordinal": 2, + "ordinal": 3, "name": "circuit_id", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "aggregation_round", "type_info": "Int2" }, { - "ordinal": 4, + "ordinal": 5, "name": "sequence_number", "type_info": "Int4" }, { - "ordinal": 5, + "ordinal": 6, "name": "depth", "type_info": "Int4" }, { - "ordinal": 6, + "ordinal": 7, "name": "is_node_final_proof", "type_info": "Bool" } @@ -43,7 +48,8 @@ "Left": [ "Time", "Text", - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -53,8 +59,9 @@ false, false, false, + false, false ] }, - "hash": "b9aaf5fe4d0261f7b05c8601a96b3027b4c2ce405d3bcc9821440408a394d7f5" + "hash": "310e95f1848ad42df67aa62ca2e126de1f70be7b3a22c8ce691fa5c9e68aec21" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-984f8d4f24f3493e1e3bda1fe9270224164d1d136d339d736776770e91731a2d.json b/prover/crates/lib/prover_dal/.sqlx/query-32dbb82468b949f4c0edc8ae0b3f71ac47ba4a5b435bc811d9009013d25a34c4.json similarity index 65% rename from prover/crates/lib/prover_dal/.sqlx/query-984f8d4f24f3493e1e3bda1fe9270224164d1d136d339d736776770e91731a2d.json rename to prover/crates/lib/prover_dal/.sqlx/query-32dbb82468b949f4c0edc8ae0b3f71ac47ba4a5b435bc811d9009013d25a34c4.json index 842c83f1822b..da04487ab8ff 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-984f8d4f24f3493e1e3bda1fe9270224164d1d136d339d736776770e91731a2d.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-32dbb82468b949f4c0edc8ae0b3f71ac47ba4a5b435bc811d9009013d25a34c4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n AND attempts >= $3\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n chain_id,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,21 +10,26 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "error", "type_info": "Text" }, { - "ordinal": 4, + "ordinal": 5, "name": "picked_by", "type_info": "Text" } @@ -32,6 +37,7 @@ "parameters": { "Left": [ "Int8", + "Int4", "Int2" ] }, @@ -39,9 +45,10 @@ false, false, false, + false, true, true ] }, - "hash": "984f8d4f24f3493e1e3bda1fe9270224164d1d136d339d736776770e91731a2d" + "hash": "32dbb82468b949f4c0edc8ae0b3f71ac47ba4a5b435bc811d9009013d25a34c4" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-af9b41e0bd8cd9f7871fd4877a0865b2a69e8ded0425d23e16a0ed00627d5a50.json b/prover/crates/lib/prover_dal/.sqlx/query-3be009f4e1feafc5638189d4a0aced53bcf607a3bbbad01f1c8b9b2a02bf2c7a.json similarity index 79% 
rename from prover/crates/lib/prover_dal/.sqlx/query-af9b41e0bd8cd9f7871fd4877a0865b2a69e8ded0425d23e16a0ed00627d5a50.json rename to prover/crates/lib/prover_dal/.sqlx/query-3be009f4e1feafc5638189d4a0aced53bcf607a3bbbad01f1c8b9b2a02bf2c7a.json index 351e3499e9e2..ad9a6cea5b47 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-af9b41e0bd8cd9f7871fd4877a0865b2a69e8ded0425d23e16a0ed00627d5a50.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-3be009f4e1feafc5638189d4a0aced53bcf607a3bbbad01f1c8b9b2a02bf2c7a.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n ORDER BY\n priority DESC,\n created_at ASC,\n aggregation_round DESC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n ORDER BY\n priority DESC,\n created_at ASC,\n aggregation_round DESC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", "describe": { "columns": [ { @@ -10,31 +10,36 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "l1_batch_number", "type_info": "Int8" }, { - "ordinal": 2, + "ordinal": 3, "name": "circuit_id", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "aggregation_round", "type_info": "Int2" }, { - "ordinal": 4, + "ordinal": 5, "name": "sequence_number", "type_info": "Int4" }, { - "ordinal": 5, + "ordinal": 6, "name": "depth", "type_info": "Int4" }, { - "ordinal": 6, + "ordinal": 7, "name": "is_node_final_proof", "type_info": "Bool" } @@ -53,8 +58,9 @@ false, false, false, + false, false ] }, - "hash": "af9b41e0bd8cd9f7871fd4877a0865b2a69e8ded0425d23e16a0ed00627d5a50" + "hash": "3be009f4e1feafc5638189d4a0aced53bcf607a3bbbad01f1c8b9b2a02bf2c7a" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-3e99760dac3d2eeae0e02a84c94eb98b9f3498e8da9ac4917201d921540eab1f.json b/prover/crates/lib/prover_dal/.sqlx/query-3e99760dac3d2eeae0e02a84c94eb98b9f3498e8da9ac4917201d921540eab1f.json new file mode 100644 index 000000000000..a262d4c03776 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-3e99760dac3d2eeae0e02a84c94eb98b9f3498e8da9ac4917201d921540eab1f.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE leaf_aggregation_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n (l1_batch_number, chain_id, circuit_id) IN (\n SELECT\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.circuit_id\n FROM\n prover_jobs_fri\n JOIN leaf_aggregation_witness_jobs_fri lawj\n ON\n prover_jobs_fri.l1_batch_number 
= lawj.l1_batch_number\n AND prover_jobs_fri.chain_id = lawj.chain_id\n AND prover_jobs_fri.circuit_id = lawj.circuit_id\n WHERE\n lawj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = 0\n GROUP BY\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.circuit_id,\n lawj.number_of_basic_circuits\n HAVING\n COUNT(*) = lawj.number_of_basic_circuits\n )\n RETURNING\n l1_batch_number,\n chain_id,\n circuit_id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "3e99760dac3d2eeae0e02a84c94eb98b9f3498e8da9ac4917201d921540eab1f" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json b/prover/crates/lib/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json deleted file mode 100644 index 5815e65636cb..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n l1_batch_number = $1\n AND status != 'successful'\n AND status != 'in_progress'\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "3ec365c5c81f4678a905ae5bbd48b87ead36f593488437c6f67da629ca81e4fa" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json b/prover/crates/lib/prover_dal/.sqlx/query-3ee4f6b6fbf2d488298b065a92cc19f63c54585e7b753f9775ba3b3a11a02f4f.json similarity index 84% rename from prover/crates/lib/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json rename to prover/crates/lib/prover_dal/.sqlx/query-3ee4f6b6fbf2d488298b065a92cc19f63c54585e7b753f9775ba3b3a11a02f4f.json index 2c87e6fae445..ab3860c78e23 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-3ee4f6b6fbf2d488298b065a92cc19f63c54585e7b753f9775ba3b3a11a02f4f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n recursion_tip_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n SELECT\n *\n FROM\n recursion_tip_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -67,11 +67,17 @@ "ordinal": 12, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -87,8 +93,9 @@ true, true, false, + false, false ] }, - "hash": "85a69b433c08847876bf6e7af9bc39ae8a6e053a0e03afd3fb5e02ee17157067" + "hash": "3ee4f6b6fbf2d488298b065a92cc19f63c54585e7b753f9775ba3b3a11a02f4f" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-40f8baee895cfd7c2c455d9fb4bd180f2aefdb9f491e13de36716f24c5a9841c.json b/prover/crates/lib/prover_dal/.sqlx/query-40f8baee895cfd7c2c455d9fb4bd180f2aefdb9f491e13de36716f24c5a9841c.json deleted file mode 100644 index 066c8d951710..000000000000 
--- a/prover/crates/lib/prover_dal/.sqlx/query-40f8baee895cfd7c2c455d9fb4bd180f2aefdb9f491e13de36716f24c5a9841c.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n id IN (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n (\n status IN ('in_progress', 'in_gpu_proof')\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n id,\n status,\n attempts,\n circuit_id,\n error,\n picked_by\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "status", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "attempts", - "type_info": "Int2" - }, - { - "ordinal": 3, - "name": "circuit_id", - "type_info": "Int2" - }, - { - "ordinal": 4, - "name": "error", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "picked_by", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Interval", - "Int2" - ] - }, - "nullable": [ - false, - false, - false, - false, - true, - true - ] - }, - "hash": "40f8baee895cfd7c2c455d9fb4bd180f2aefdb9f491e13de36716f24c5a9841c" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json b/prover/crates/lib/prover_dal/.sqlx/query-42e254e5f9a10b097cb62c22dbe8d82aa860dab855c80ca4a2ffd5a8dfd93e34.json similarity index 87% rename from prover/crates/lib/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json rename to prover/crates/lib/prover_dal/.sqlx/query-42e254e5f9a10b097cb62c22dbe8d82aa860dab855c80ca4a2ffd5a8dfd93e34.json index bacbbdc9bb8d..011d46367ecf 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-42e254e5f9a10b097cb62c22dbe8d82aa860dab855c80ca4a2ffd5a8dfd93e34.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n node_aggregation_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n SELECT\n *\n FROM\n node_aggregation_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -87,11 +87,17 @@ "ordinal": 16, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 17, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -111,8 +117,9 @@ true, true, false, + false, false ] }, - "hash": "94a75b05ecbab75d6ebf39cca029bfb838c787fc58d7536f9e9976e5e515431a" + "hash": "42e254e5f9a10b097cb62c22dbe8d82aa860dab855c80ca4a2ffd5a8dfd93e34" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-461ac149f80918da708217353c47271bdd7d2fb05dd622b838733429561b49e1.json b/prover/crates/lib/prover_dal/.sqlx/query-461ac149f80918da708217353c47271bdd7d2fb05dd622b838733429561b49e1.json new file mode 100644 index 000000000000..dc9cc7e59252 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-461ac149f80918da708217353c47271bdd7d2fb05dd622b838733429561b49e1.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n scheduler_witness_jobs_fri (\n l1_batch_number,\n chain_id,\n scheduler_partial_input_blob_url,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, 
$4, 'waiting_for_proofs', NOW(), NOW(), $5)\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET\n updated_at = NOW()\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Text", + "Int4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "461ac149f80918da708217353c47271bdd7d2fb05dd622b838733429561b49e1" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-48b57a279bfff34d44d1f5a6501e40978966fb2ad8b342907580dd17c0a52779.json b/prover/crates/lib/prover_dal/.sqlx/query-48b57a279bfff34d44d1f5a6501e40978966fb2ad8b342907580dd17c0a52779.json deleted file mode 100644 index 6e6a6b4ac1af..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-48b57a279bfff34d44d1f5a6501e40978966fb2ad8b342907580dd17c0a52779.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n witness_inputs_fri (\n l1_batch_number,\n witness_inputs_blob_url,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, 'queued', NOW(), NOW(), $4)\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text", - "Int4", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "48b57a279bfff34d44d1f5a6501e40978966fb2ad8b342907580dd17c0a52779" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json b/prover/crates/lib/prover_dal/.sqlx/query-53556f2bc913affcd23a1a20b4081106029810403effe9f50c089745810983ac.json similarity index 63% rename from prover/crates/lib/prover_dal/.sqlx/query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json rename to prover/crates/lib/prover_dal/.sqlx/query-53556f2bc913affcd23a1a20b4081106029810403effe9f50c089745810983ac.json index 2c7d7f1da5f0..151fdb288476 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-53556f2bc913affcd23a1a20b4081106029810403effe9f50c089745810983ac.json @@ -1,15 +1,16 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n l1_batch_number = $2\n ", + "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n l1_batch_number = $2\n AND chain_id = $3\n ", "describe": { "columns": [], "parameters": { "Left": [ "Time", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "edc61e1285bf6d3837acc67af4f15aaade450980719933089824eb8c494d64a4" + "hash": "53556f2bc913affcd23a1a20b4081106029810403effe9f50c089745810983ac" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-b6aff8e1c6130e59dc35de817d9f37d4d343e8ba8de0a5e098caf7298ef49ec1.json b/prover/crates/lib/prover_dal/.sqlx/query-537e048773806bab2a8147195621a7c24415205aaba4c555118e4a4600024f3b.json similarity index 83% rename from prover/crates/lib/prover_dal/.sqlx/query-b6aff8e1c6130e59dc35de817d9f37d4d343e8ba8de0a5e098caf7298ef49ec1.json rename to prover/crates/lib/prover_dal/.sqlx/query-537e048773806bab2a8147195621a7c24415205aaba4c555118e4a4600024f3b.json index 1a359b770118..80f168972619 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-b6aff8e1c6130e59dc35de817d9f37d4d343e8ba8de0a5e098caf7298ef49ec1.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-537e048773806bab2a8147195621a7c24415205aaba4c555118e4a4600024f3b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n 
UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n l1_batch_number = (\n SELECT\n l1_batch_number\n FROM\n recursion_tip_witness_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n ORDER BY\n priority DESC,\n created_at ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n recursion_tip_witness_jobs_fri.l1_batch_number,\n recursion_tip_witness_jobs_fri.number_of_final_node_jobs\n ", + "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n l1_batch_number = (\n SELECT\n l1_batch_number\n FROM\n recursion_tip_witness_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n ORDER BY\n priority DESC,\n created_at ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n recursion_tip_witness_jobs_fri.l1_batch_number,\n recursion_tip_witness_jobs_fri.chain_id,\n recursion_tip_witness_jobs_fri.number_of_final_node_jobs\n ", "describe": { "columns": [ { @@ -10,6 +10,11 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "number_of_final_node_jobs", "type_info": "Int4" } @@ -22,9 +27,10 @@ ] }, "nullable": [ + false, false, false ] }, - "hash": "b6aff8e1c6130e59dc35de817d9f37d4d343e8ba8de0a5e098caf7298ef49ec1" + "hash": "537e048773806bab2a8147195621a7c24415205aaba4c555118e4a4600024f3b" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json b/prover/crates/lib/prover_dal/.sqlx/query-5487ba4c3230a4a55f335341c2be968a33e295abca7e0cab303f7111f2051535.json similarity index 73% rename from prover/crates/lib/prover_dal/.sqlx/query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json rename to prover/crates/lib/prover_dal/.sqlx/query-5487ba4c3230a4a55f335341c2be968a33e295abca7e0cab303f7111f2051535.json index 9a6b55602d01..fea5fc1a2470 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-5487ba4c3230a4a55f335341c2be968a33e295abca7e0cab303f7111f2051535.json @@ -1,15 +1,16 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n l1_batch_number = $2\n ", + "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n l1_batch_number = $2\n AND chain_id = $3\n ", "describe": { "columns": [], "parameters": { "Left": [ "Time", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "8bcad2be3dd29e36ea731417b68023678f31a1b7f5ee33b643dd551c40e88329" + "hash": "5487ba4c3230a4a55f335341c2be968a33e295abca7e0cab303f7111f2051535" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-5743f03acecf1a0ab106ed0aec8aea08606f3ebd73aaf280ad014789fcc21d62.json b/prover/crates/lib/prover_dal/.sqlx/query-5743f03acecf1a0ab106ed0aec8aea08606f3ebd73aaf280ad014789fcc21d62.json deleted file mode 100644 index 5c57c2bb9cf9..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-5743f03acecf1a0ab106ed0aec8aea08606f3ebd73aaf280ad014789fcc21d62.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n 
leaf_aggregation_witness_jobs_fri (\n l1_batch_number,\n circuit_id,\n closed_form_inputs_blob_url,\n number_of_basic_circuits,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, $5, 'waiting_for_proofs', NOW(), NOW(), $6)\n ON CONFLICT (l1_batch_number, circuit_id) DO\n UPDATE\n SET\n updated_at = NOW()\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int2", - "Text", - "Int4", - "Int4", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "5743f03acecf1a0ab106ed0aec8aea08606f3ebd73aaf280ad014789fcc21d62" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-5ecaba6831e399a895ee0389b4df438ce6713b077fcc27befe3788c348cf47a8.json b/prover/crates/lib/prover_dal/.sqlx/query-5ecaba6831e399a895ee0389b4df438ce6713b077fcc27befe3788c348cf47a8.json new file mode 100644 index 000000000000..7b10e2b4e848 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-5ecaba6831e399a895ee0389b4df438ce6713b077fcc27befe3788c348cf47a8.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n (l1_batch_number, chain_id) IN (\n SELECT\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id\n FROM\n prover_jobs_fri\n JOIN\n recursion_tip_witness_jobs_fri rtwj\n ON prover_jobs_fri.l1_batch_number = rtwj.l1_batch_number\n WHERE\n rtwj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = $1\n AND prover_jobs_fri.is_node_final_proof = TRUE\n GROUP BY\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id,\n rtwj.number_of_final_node_jobs\n HAVING\n COUNT(*) = rtwj.number_of_final_node_jobs\n )\n RETURNING\n l1_batch_number,\n chain_id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int2" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "5ecaba6831e399a895ee0389b4df438ce6713b077fcc27befe3788c348cf47a8" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-5f18efe2fb3a16cdf3c23379f36536b9704e8a76de95811cb23e3aa9f2512ade.json b/prover/crates/lib/prover_dal/.sqlx/query-5f18efe2fb3a16cdf3c23379f36536b9704e8a76de95811cb23e3aa9f2512ade.json deleted file mode 100644 index a9c675855baf..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-5f18efe2fb3a16cdf3c23379f36536b9704e8a76de95811cb23e3aa9f2512ade.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n l1_batch_number\n FROM\n proof_compression_jobs_fri\n WHERE\n status <> 'successful'\n AND status <> 'sent_to_server'\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "5f18efe2fb3a16cdf3c23379f36536b9704e8a76de95811cb23e3aa9f2512ade" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-5fb1d2533749420889f9cf3b5ec326383ac71c031cf2d0a9752595761f5baa7f.json b/prover/crates/lib/prover_dal/.sqlx/query-5fb1d2533749420889f9cf3b5ec326383ac71c031cf2d0a9752595761f5baa7f.json deleted file mode 100644 index 6fb992769e4c..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-5fb1d2533749420889f9cf3b5ec326383ac71c031cf2d0a9752595761f5baa7f.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": 
"\n INSERT INTO\n scheduler_witness_jobs_fri (\n l1_batch_number,\n scheduler_partial_input_blob_url,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, 'waiting_for_proofs', NOW(), NOW(), $4)\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET\n updated_at = NOW()\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text", - "Int4", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "5fb1d2533749420889f9cf3b5ec326383ac71c031cf2d0a9752595761f5baa7f" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-6458bd8bbc33e3ea7026c3e465623076f287ae98c0df38a6b4092bfb73803566.json b/prover/crates/lib/prover_dal/.sqlx/query-6458bd8bbc33e3ea7026c3e465623076f287ae98c0df38a6b4092bfb73803566.json new file mode 100644 index 000000000000..8ac94a7e86d6 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-6458bd8bbc33e3ea7026c3e465623076f287ae98c0df38a6b4092bfb73803566.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n AND status != 'successful'\n AND status != 'in_progress'\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "6458bd8bbc33e3ea7026c3e465623076f287ae98c0df38a6b4092bfb73803566" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json b/prover/crates/lib/prover_dal/.sqlx/query-67fbf2ef2f642d16ad0b30bb72f796b2adf94a24834deef6e211f94453733b01.json similarity index 71% rename from prover/crates/lib/prover_dal/.sqlx/query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json rename to prover/crates/lib/prover_dal/.sqlx/query-67fbf2ef2f642d16ad0b30bb72f796b2adf94a24834deef6e211f94453733b01.json index 95ae04bed503..b3ca3fd76a75 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-67fbf2ef2f642d16ad0b30bb72f796b2adf94a24834deef6e211f94453733b01.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = $1,\n updated_at = NOW(),\n time_taken = $2,\n l1_proof_blob_url = $3\n WHERE\n l1_batch_number = $4\n ", + "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = $1,\n updated_at = NOW(),\n time_taken = $2,\n l1_proof_blob_url = $3\n WHERE\n l1_batch_number = $4\n AND chain_id = $5\n ", "describe": { "columns": [], "parameters": { @@ -8,10 +8,11 @@ "Text", "Time", "Text", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "c706a49ff54f6b424e24d061fe7ac429aac3c030f7e226a1264243d8cdae038d" + "hash": "67fbf2ef2f642d16ad0b30bb72f796b2adf94a24834deef6e211f94453733b01" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json b/prover/crates/lib/prover_dal/.sqlx/query-69995ea9aa62d8a40790544bb4ecfaabc7a6fa24f28bbeda0a3ff0f56c5ede88.json similarity index 67% rename from prover/crates/lib/prover_dal/.sqlx/query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json rename to prover/crates/lib/prover_dal/.sqlx/query-69995ea9aa62d8a40790544bb4ecfaabc7a6fa24f28bbeda0a3ff0f56c5ede88.json index 9ad3099d7765..d59661ede525 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f.json +++ 
b/prover/crates/lib/prover_dal/.sqlx/query-69995ea9aa62d8a40790544bb4ecfaabc7a6fa24f28bbeda0a3ff0f56c5ede88.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n attempts\n FROM\n prover_jobs_fri\n WHERE\n id = $1\n ", + "query": "\n SELECT\n attempts\n FROM\n prover_jobs_fri\n WHERE\n id = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -11,12 +11,13 @@ ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ false ] }, - "hash": "df00e33809768120e395d8f740770a4e629b2a1cde641e74e4e55bb100df809f" + "hash": "69995ea9aa62d8a40790544bb4ecfaabc7a6fa24f28bbeda0a3ff0f56c5ede88" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-1f4179bf130e570d1c2dd8349bfc63c68aacff76d4484ca91636b03c2d37feb4.json b/prover/crates/lib/prover_dal/.sqlx/query-6b0d05bfe7159e139d781aa91bdcc03167ecf857a3156376a96e7191f6a7317d.json similarity index 78% rename from prover/crates/lib/prover_dal/.sqlx/query-1f4179bf130e570d1c2dd8349bfc63c68aacff76d4484ca91636b03c2d37feb4.json rename to prover/crates/lib/prover_dal/.sqlx/query-6b0d05bfe7159e139d781aa91bdcc03167ecf857a3156376a96e7191f6a7317d.json index 8c617a26a261..3e8e7aaa485a 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-1f4179bf130e570d1c2dd8349bfc63c68aacff76d4484ca91636b03c2d37feb4.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-6b0d05bfe7159e139d781aa91bdcc03167ecf857a3156376a96e7191f6a7317d.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE node_aggregation_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n id,\n status,\n attempts,\n circuit_id,\n error,\n picked_by\n ", + "query": "\n UPDATE node_aggregation_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n id,\n chain_id,\n status,\n attempts,\n circuit_id,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,26 +10,31 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "circuit_id", "type_info": "Int2" }, { - "ordinal": 4, + "ordinal": 5, "name": "error", "type_info": "Text" }, { - "ordinal": 5, + "ordinal": 6, "name": "picked_by", "type_info": "Text" } @@ -45,9 +50,10 @@ false, false, false, + false, true, true ] }, - "hash": "1f4179bf130e570d1c2dd8349bfc63c68aacff76d4484ca91636b03c2d37feb4" + "hash": "6b0d05bfe7159e139d781aa91bdcc03167ecf857a3156376a96e7191f6a7317d" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-6fa101609da545860501ac32a5b5457269ca22767d7b44cbc213c2adf86c991c.json b/prover/crates/lib/prover_dal/.sqlx/query-6fa101609da545860501ac32a5b5457269ca22767d7b44cbc213c2adf86c991c.json new file mode 100644 index 000000000000..9af9cb2a7be0 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-6fa101609da545860501ac32a5b5457269ca22767d7b44cbc213c2adf86c991c.json @@ -0,0 +1,38 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE node_aggregation_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n 
(l1_batch_number, chain_id, circuit_id, depth) IN (\n SELECT\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.depth\n FROM\n prover_jobs_fri\n JOIN node_aggregation_witness_jobs_fri nawj\n ON\n prover_jobs_fri.l1_batch_number = nawj.l1_batch_number\n AND prover_jobs_fri.chain_id = nawj.chain_id\n AND prover_jobs_fri.circuit_id = nawj.circuit_id\n AND prover_jobs_fri.depth = nawj.depth\n WHERE\n nawj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = 1\n AND prover_jobs_fri.depth = 0\n GROUP BY\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.depth,\n nawj.number_of_dependent_jobs\n HAVING\n COUNT(*) = nawj.number_of_dependent_jobs\n )\n RETURNING\n l1_batch_number,\n chain_id,\n circuit_id,\n depth;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "depth", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + false + ] + }, + "hash": "6fa101609da545860501ac32a5b5457269ca22767d7b44cbc213c2adf86c991c" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-70910da49648c5b713f1b719c71d9f54db9bf83127458d632f0cad44b075f8bb.json b/prover/crates/lib/prover_dal/.sqlx/query-70910da49648c5b713f1b719c71d9f54db9bf83127458d632f0cad44b075f8bb.json new file mode 100644 index 000000000000..74213c577e6a --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-70910da49648c5b713f1b719c71d9f54db9bf83127458d632f0cad44b075f8bb.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n proof_compression_jobs_fri (\n l1_batch_number,\n chain_id,\n fri_proof_blob_url,\n status,\n created_at,\n updated_at,\n protocol_version,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, NOW(), NOW(), $5, $6)\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Text", + "Text", + "Int4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "70910da49648c5b713f1b719c71d9f54db9bf83127458d632f0cad44b075f8bb" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json b/prover/crates/lib/prover_dal/.sqlx/query-70cf63542465ca962c87e0050dc7d78cf0e31ba31d9f05658a903edb99317297.json similarity index 85% rename from prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json rename to prover/crates/lib/prover_dal/.sqlx/query-70cf63542465ca962c87e0050dc7d78cf0e31ba31d9f05658a903edb99317297.json index 832f0ce269c1..2b556f9bb055 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-70cf63542465ca962c87e0050dc7d78cf0e31ba31d9f05658a903edb99317297.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n witness_inputs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n SELECT\n *\n FROM\n witness_inputs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -67,11 +67,17 @@ "ordinal": 12, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 13, + "name": 
"chain_id", + "type_info": "Int4" } ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -87,8 +93,9 @@ true, false, true, + false, false ] }, - "hash": "e0a6cc885e437aa7ded9def71f3e118cabc67b6e507efefb7b69e102f1b43c58" + "hash": "70cf63542465ca962c87e0050dc7d78cf0e31ba31d9f05658a903edb99317297" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-7238cfe04ba59967fe5589665ad2c0214469edff6fc74965f1ec1366f8b46b8e.json b/prover/crates/lib/prover_dal/.sqlx/query-7238cfe04ba59967fe5589665ad2c0214469edff6fc74965f1ec1366f8b46b8e.json deleted file mode 100644 index 227ff7dbe263..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-7238cfe04ba59967fe5589665ad2c0214469edff6fc74965f1ec1366f8b46b8e.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n node_aggregation_witness_jobs_fri (\n l1_batch_number,\n circuit_id,\n depth,\n aggregations_url,\n number_of_dependent_jobs,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, 'waiting_for_proofs', NOW(), NOW(), $7)\n ON CONFLICT (l1_batch_number, circuit_id, depth) DO\n UPDATE\n SET\n updated_at = NOW()\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int2", - "Int4", - "Text", - "Int4", - "Int4", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "7238cfe04ba59967fe5589665ad2c0214469edff6fc74965f1ec1366f8b46b8e" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-749d18c0fdae16ce0ed4e3c23e543d01cff938d3aed44c9bb1d864bfdf69e39a.json b/prover/crates/lib/prover_dal/.sqlx/query-749d18c0fdae16ce0ed4e3c23e543d01cff938d3aed44c9bb1d864bfdf69e39a.json deleted file mode 100644 index d01c5481fa6d..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-749d18c0fdae16ce0ed4e3c23e543d01cff938d3aed44c9bb1d864bfdf69e39a.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n prover_jobs_fri (\n l1_batch_number,\n circuit_id,\n circuit_blob_url,\n aggregation_round,\n sequence_number,\n depth,\n is_node_final_proof,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, 'queued', NOW(), NOW(), $9)\n ON CONFLICT (\n l1_batch_number, aggregation_round, circuit_id, depth, sequence_number\n ) DO\n UPDATE\n SET\n updated_at = NOW()\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int2", - "Text", - "Int2", - "Int4", - "Int4", - "Bool", - "Int4", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "749d18c0fdae16ce0ed4e3c23e543d01cff938d3aed44c9bb1d864bfdf69e39a" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-7b844346d8089335d22640a5fa25e61abe5a1c38736dbfb8107d59c9c00d1ab8.json b/prover/crates/lib/prover_dal/.sqlx/query-7b844346d8089335d22640a5fa25e61abe5a1c38736dbfb8107d59c9c00d1ab8.json new file mode 100644 index 000000000000..2ad1bf6e33af --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-7b844346d8089335d22640a5fa25e61abe5a1c38736dbfb8107d59c9c00d1ab8.json @@ -0,0 +1,21 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n node_aggregation_witness_jobs_fri (\n l1_batch_number,\n chain_id,\n circuit_id,\n depth,\n aggregations_url,\n number_of_dependent_jobs,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, 'waiting_for_proofs', NOW(), NOW(), $8)\n ON CONFLICT (l1_batch_number, chain_id, circuit_id, depth) DO\n UPDATE\n SET\n updated_at = 
NOW()\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Int2", + "Int4", + "Text", + "Int4", + "Int4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "7b844346d8089335d22640a5fa25e61abe5a1c38736dbfb8107d59c9c00d1ab8" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json b/prover/crates/lib/prover_dal/.sqlx/query-7c2f89d77f85ecfffe90ffdb74d3ebb2fcf32e99c140c8b929d98437272b8967.json similarity index 75% rename from prover/crates/lib/prover_dal/.sqlx/query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json rename to prover/crates/lib/prover_dal/.sqlx/query-7c2f89d77f85ecfffe90ffdb74d3ebb2fcf32e99c140c8b929d98437272b8967.json index fdbe0f989850..8c1904647230 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-7c2f89d77f85ecfffe90ffdb74d3ebb2fcf32e99c140c8b929d98437272b8967.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n protocol_version,\n protocol_version_patch\n FROM\n witness_inputs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n SELECT\n protocol_version,\n protocol_version_patch\n FROM\n witness_inputs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -16,7 +16,8 @@ ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -24,5 +25,5 @@ false ] }, - "hash": "3902f6a8e09cd5ad560d23fe0269fd5b3d210a117bb0027d58c6cb4debd63f33" + "hash": "7c2f89d77f85ecfffe90ffdb74d3ebb2fcf32e99c140c8b929d98437272b8967" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json b/prover/crates/lib/prover_dal/.sqlx/query-821f308ebb20c978ce4cb210e8f8a3a4efb59242fe8bd9856bc5e374a6ea5713.json similarity index 64% rename from prover/crates/lib/prover_dal/.sqlx/query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json rename to prover/crates/lib/prover_dal/.sqlx/query-821f308ebb20c978ce4cb210e8f8a3a4efb59242fe8bd9856bc5e374a6ea5713.json index b076553ff34e..53b6e054c108 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-821f308ebb20c978ce4cb210e8f8a3a4efb59242fe8bd9856bc5e374a6ea5713.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n circuit_id,\n id\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND is_node_final_proof = TRUE\n AND status = 'successful'\n ORDER BY\n circuit_id ASC\n ", + "query": "\n SELECT\n circuit_id,\n id\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n AND is_node_final_proof = TRUE\n AND status = 'successful'\n ORDER BY\n circuit_id ASC\n ", "describe": { "columns": [ { @@ -16,7 +16,8 @@ ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -24,5 +25,5 @@ false ] }, - "hash": "02f2010c60dfa5b93d3f2ee7594579b23540815afa1c6a8d4c36bba951861fe7" + "hash": "821f308ebb20c978ce4cb210e8f8a3a4efb59242fe8bd9856bc5e374a6ea5713" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-850b7c3a275800f3332e4c7bc4a55b04b640b65f90df56c67acb669552d1f9d7.json b/prover/crates/lib/prover_dal/.sqlx/query-850b7c3a275800f3332e4c7bc4a55b04b640b65f90df56c67acb669552d1f9d7.json new file mode 100644 index 000000000000..3fad77b76cd3 --- /dev/null +++ 
b/prover/crates/lib/prover_dal/.sqlx/query-850b7c3a275800f3332e4c7bc4a55b04b640b65f90df56c67acb669552d1f9d7.json @@ -0,0 +1,47 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number,\n chain_id,\n status,\n protocol_version,\n protocol_version_patch\n FROM\n proof_compression_jobs_fri\n WHERE\n (l1_batch_number, chain_id) = (\n SELECT\n MIN(l1_batch_number),\n chain_id\n FROM\n proof_compression_jobs_fri\n WHERE\n status = $1\n OR status = $2\n GROUP BY chain_id\n )\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "protocol_version_patch", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text", + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + false + ] + }, + "hash": "850b7c3a275800f3332e4c7bc4a55b04b640b65f90df56c67acb669552d1f9d7" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-edd8ebb555f2e4f3ad95421e1770a6058790361f2ddb57e263582378d956d00d.json b/prover/crates/lib/prover_dal/.sqlx/query-8b4943d619bcac5cdbd5cfaa5eab83680276a82b0dd6fd4608ebf06b2c3c5628.json similarity index 61% rename from prover/crates/lib/prover_dal/.sqlx/query-edd8ebb555f2e4f3ad95421e1770a6058790361f2ddb57e263582378d956d00d.json rename to prover/crates/lib/prover_dal/.sqlx/query-8b4943d619bcac5cdbd5cfaa5eab83680276a82b0dd6fd4608ebf06b2c3c5628.json index 4e5574f4640d..e19ad5c3cab0 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-edd8ebb555f2e4f3ad95421e1770a6058790361f2ddb57e263582378d956d00d.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-8b4943d619bcac5cdbd5cfaa5eab83680276a82b0dd6fd4608ebf06b2c3c5628.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND attempts >= $2\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n AND attempts >= $3\n AND (\n status = 'in_progress'\n OR status = 'failed'\n )\n RETURNING\n l1_batch_number,\n chain_id,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,21 +10,26 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "error", "type_info": "Text" }, { - "ordinal": 4, + "ordinal": 5, "name": "picked_by", "type_info": "Text" } @@ -32,6 +37,7 @@ "parameters": { "Left": [ "Int8", + "Int4", "Int2" ] }, @@ -39,9 +45,10 @@ false, false, false, + false, true, true ] }, - "hash": "edd8ebb555f2e4f3ad95421e1770a6058790361f2ddb57e263582378d956d00d" + "hash": "8b4943d619bcac5cdbd5cfaa5eab83680276a82b0dd6fd4608ebf06b2c3c5628" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-8c5aba6ce584c1671f2d65fb47701426d60c56b526af5ed2e0c602f881c3bbf0.json 
b/prover/crates/lib/prover_dal/.sqlx/query-8c5aba6ce584c1671f2d65fb47701426d60c56b526af5ed2e0c602f881c3bbf0.json deleted file mode 100644 index fc0594f64436..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-8c5aba6ce584c1671f2d65fb47701426d60c56b526af5ed2e0c602f881c3bbf0.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n l1_batch_number IN (\n SELECT\n prover_jobs_fri.l1_batch_number\n FROM\n prover_jobs_fri\n JOIN\n scheduler_witness_jobs_fri swj\n ON prover_jobs_fri.l1_batch_number = swj.l1_batch_number\n WHERE\n swj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = $1\n )\n RETURNING\n l1_batch_number;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int2" - ] - }, - "nullable": [ - false - ] - }, - "hash": "8c5aba6ce584c1671f2d65fb47701426d60c56b526af5ed2e0c602f881c3bbf0" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-8ca1700b100ab1bcac58f3b0118564b62decfefc5a15de6e9145e43b5a0c1edc.json b/prover/crates/lib/prover_dal/.sqlx/query-8ca1700b100ab1bcac58f3b0118564b62decfefc5a15de6e9145e43b5a0c1edc.json new file mode 100644 index 000000000000..0b6aff47cca3 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-8ca1700b100ab1bcac58f3b0118564b62decfefc5a15de6e9145e43b5a0c1edc.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n prover_jobs_fri (\n l1_batch_number,\n chain_id,\n circuit_id,\n circuit_blob_url,\n aggregation_round,\n sequence_number,\n depth,\n is_node_final_proof,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, 'queued', NOW(), NOW(), $10)\n ON CONFLICT (\n l1_batch_number, aggregation_round, circuit_id, depth, sequence_number\n ) DO\n UPDATE\n SET\n updated_at = NOW()\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Int2", + "Text", + "Int2", + "Int4", + "Int4", + "Bool", + "Int4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "8ca1700b100ab1bcac58f3b0118564b62decfefc5a15de6e9145e43b5a0c1edc" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json b/prover/crates/lib/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json deleted file mode 100644 index 76a2a54a6a1d..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n l1_batch_number,\n status,\n protocol_version,\n protocol_version_patch\n FROM\n proof_compression_jobs_fri\n WHERE\n l1_batch_number = (\n SELECT\n MIN(l1_batch_number)\n FROM\n proof_compression_jobs_fri\n WHERE\n status = $1\n OR status = $2\n )\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "status", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "protocol_version", - "type_info": "Int4" - }, - { - "ordinal": 3, - "name": "protocol_version_patch", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Text", - "Text" - ] - }, - "nullable": [ - false, - false, - true, - false - ] - }, - "hash": 
"8ffb62f6a17c68af701e790044989daacb88fe5aaf368c5f81a885821522b99c" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-9a0b335e830ac2bc6039e7be656ad31283a47b013262ab4b6e00e58e247de00d.json b/prover/crates/lib/prover_dal/.sqlx/query-9a0b335e830ac2bc6039e7be656ad31283a47b013262ab4b6e00e58e247de00d.json new file mode 100644 index 000000000000..67f5ffd0ddbd --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-9a0b335e830ac2bc6039e7be656ad31283a47b013262ab4b6e00e58e247de00d.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n chain_id\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND circuit_id = $2\n AND aggregation_round = $3\n AND depth = $4\n AND chain_id = $5\n AND status = 'successful'\n ORDER BY\n sequence_number ASC;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int2", + "Int2", + "Int4", + "Int4" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "9a0b335e830ac2bc6039e7be656ad31283a47b013262ab4b6e00e58e247de00d" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-aca775bc700169e31fee6eb7c4869b6fb812f6d43a91fb8ac68c9fedb17d1a50.json b/prover/crates/lib/prover_dal/.sqlx/query-9d9d6edbbef1c4eadc7a6abcf2a7369393e0b1744099a92ffee5a02ec4549c81.json similarity index 81% rename from prover/crates/lib/prover_dal/.sqlx/query-aca775bc700169e31fee6eb7c4869b6fb812f6d43a91fb8ac68c9fedb17d1a50.json rename to prover/crates/lib/prover_dal/.sqlx/query-9d9d6edbbef1c4eadc7a6abcf2a7369393e0b1744099a92ffee5a02ec4549c81.json index 955745df781c..b6e555b0e037 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-aca775bc700169e31fee6eb7c4869b6fb812f6d43a91fb8ac68c9fedb17d1a50.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-9d9d6edbbef1c4eadc7a6abcf2a7369393e0b1744099a92ffee5a02ec4549c81.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n l1_batch_number,\n chain_id,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,21 +10,26 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "error", "type_info": "Text" }, { - "ordinal": 4, + "ordinal": 5, "name": "picked_by", "type_info": "Text" } @@ -39,9 +44,10 @@ false, false, false, + false, true, true ] }, - "hash": "aca775bc700169e31fee6eb7c4869b6fb812f6d43a91fb8ac68c9fedb17d1a50" + "hash": "9d9d6edbbef1c4eadc7a6abcf2a7369393e0b1744099a92ffee5a02ec4549c81" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json 
b/prover/crates/lib/prover_dal/.sqlx/query-9da8ce97366d62ae7381a2b4eb98ff2fa2c04525595a8adf5acd4da5ea4d5776.json similarity index 86% rename from prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json rename to prover/crates/lib/prover_dal/.sqlx/query-9da8ce97366d62ae7381a2b4eb98ff2fa2c04525595a8adf5acd4da5ea4d5776.json index 5cffd240b43c..dff1b5bded71 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-9da8ce97366d62ae7381a2b4eb98ff2fa2c04525595a8adf5acd4da5ea4d5776.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n leaf_aggregation_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n ", + "query": "\n SELECT\n *\n FROM\n leaf_aggregation_witness_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", "describe": { "columns": [ { @@ -82,11 +82,17 @@ "ordinal": 15, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 16, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { "Left": [ - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -105,8 +111,9 @@ true, true, false, + false, false ] }, - "hash": "21621153e545859d71188e2421f5d2832571464e74b5fed92cf54617573c84ec" + "hash": "9da8ce97366d62ae7381a2b4eb98ff2fa2c04525595a8adf5acd4da5ea4d5776" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a.json b/prover/crates/lib/prover_dal/.sqlx/query-a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a.json new file mode 100644 index 000000000000..3141b1edeb7f --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n comp.l1_batch_number,\n comp.chain_id,\n (comp.updated_at - wit.created_at) AS time_taken,\n wit.created_at\n FROM\n proof_compression_jobs_fri AS comp\n JOIN witness_inputs_fri AS wit ON comp.l1_batch_number = wit.l1_batch_number AND comp.chain_id = wit.chain_id\n WHERE\n wit.created_at > $1\n ORDER BY\n time_taken DESC;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "time_taken", + "type_info": "Interval" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [ + "Timestamp" + ] + }, + "nullable": [ + false, + false, + null, + false + ] + }, + "hash": "a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json b/prover/crates/lib/prover_dal/.sqlx/query-a388d34c454c4b7645c6e5bf07026b137d7f48a8278a5a5c5568a0ca22df97c0.json similarity index 62% rename from prover/crates/lib/prover_dal/.sqlx/query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json rename to prover/crates/lib/prover_dal/.sqlx/query-a388d34c454c4b7645c6e5bf07026b137d7f48a8278a5a5c5568a0ca22df97c0.json index d9f7527dfa00..7e01b32d8c1f 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-a388d34c454c4b7645c6e5bf07026b137d7f48a8278a5a5c5568a0ca22df97c0.json @@ -1,15 +1,16 @@ { "db_name": 
"PostgreSQL", - "query": "\n UPDATE node_aggregation_witness_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n id = $2\n ", + "query": "\n UPDATE node_aggregation_witness_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n id = $2\n AND chain_id = $3\n ", "describe": { "columns": [], "parameters": { "Left": [ "Time", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "db3e74f0e83ffbf84a6d61e560f2060fbea775dc185f639139fbfd23e4d5f3c6" + "hash": "a388d34c454c4b7645c6e5bf07026b137d7f48a8278a5a5c5568a0ca22df97c0" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-a5ee1c40af6ce4fc9962757b771065dd5ab428bf30708f6f5ebd83a85a45dfcc.json b/prover/crates/lib/prover_dal/.sqlx/query-a5ee1c40af6ce4fc9962757b771065dd5ab428bf30708f6f5ebd83a85a45dfcc.json new file mode 100644 index 000000000000..d59300eabf4e --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-a5ee1c40af6ce4fc9962757b771065dd5ab428bf30708f6f5ebd83a85a45dfcc.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "a5ee1c40af6ce4fc9962757b771065dd5ab428bf30708f6f5ebd83a85a45dfcc" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-a6eb7a1f1aa2f6f5d90fbe3b8c777313681560f538c6524c54648d6308533744.json b/prover/crates/lib/prover_dal/.sqlx/query-a6eb7a1f1aa2f6f5d90fbe3b8c777313681560f538c6524c54648d6308533744.json deleted file mode 100644 index 319b00b4c61d..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-a6eb7a1f1aa2f6f5d90fbe3b8c777313681560f538c6524c54648d6308533744.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE node_aggregation_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n (l1_batch_number, circuit_id, depth) IN (\n SELECT\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.depth\n FROM\n prover_jobs_fri\n JOIN node_aggregation_witness_jobs_fri nawj\n ON\n prover_jobs_fri.l1_batch_number = nawj.l1_batch_number\n AND prover_jobs_fri.circuit_id = nawj.circuit_id\n AND prover_jobs_fri.depth = nawj.depth\n WHERE\n nawj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = 1\n AND prover_jobs_fri.depth = 0\n GROUP BY\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.depth,\n nawj.number_of_dependent_jobs\n HAVING\n COUNT(*) = nawj.number_of_dependent_jobs\n )\n RETURNING\n l1_batch_number,\n circuit_id,\n depth;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "circuit_id", - "type_info": "Int2" - }, - { - "ordinal": 2, - "name": "depth", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - false - ] - }, - "hash": "a6eb7a1f1aa2f6f5d90fbe3b8c777313681560f538c6524c54648d6308533744" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-4f7e7de116b1f1b93cb2300c7ec4abe94d783d252c497d848ba2da3b798add19.json b/prover/crates/lib/prover_dal/.sqlx/query-a98cdf6d25e6deb679cad15807c87f57a667a685b7331f0355cadb204674d303.json similarity index 66% rename from prover/crates/lib/prover_dal/.sqlx/query-4f7e7de116b1f1b93cb2300c7ec4abe94d783d252c497d848ba2da3b798add19.json rename to 
prover/crates/lib/prover_dal/.sqlx/query-a98cdf6d25e6deb679cad15807c87f57a667a685b7331f0355cadb204674d303.json index 04b28420cbf0..2e93b3ab2a59 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-4f7e7de116b1f1b93cb2300c7ec4abe94d783d252c497d848ba2da3b798add19.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-a98cdf6d25e6deb679cad15807c87f57a667a685b7331f0355cadb204674d303.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $2\n WHERE\n l1_batch_number = (\n SELECT\n l1_batch_number\n FROM\n scheduler_witness_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $3\n ORDER BY\n priority DESC,\n created_at ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n scheduler_witness_jobs_fri.*\n ", + "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $2\n WHERE\n (l1_batch_number, chain_id) = (\n SELECT\n l1_batch_number,\n chain_id\n FROM\n scheduler_witness_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $3\n ORDER BY\n priority DESC,\n created_at ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n scheduler_witness_jobs_fri.*\n ", "describe": { "columns": [ { @@ -67,6 +67,11 @@ "ordinal": 12, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { @@ -89,8 +94,9 @@ true, true, false, + false, false ] }, - "hash": "4f7e7de116b1f1b93cb2300c7ec4abe94d783d252c497d848ba2da3b798add19" + "hash": "a98cdf6d25e6deb679cad15807c87f57a667a685b7331f0355cadb204674d303" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json b/prover/crates/lib/prover_dal/.sqlx/query-ac1c23dc72b6a6490ecb03a0da87eebc897025512da823469c2feabde43d1c53.json similarity index 73% rename from prover/crates/lib/prover_dal/.sqlx/query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json rename to prover/crates/lib/prover_dal/.sqlx/query-ac1c23dc72b6a6490ecb03a0da87eebc897025512da823469c2feabde43d1c53.json index 49a533897ce3..f80e881bc928 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-ac1c23dc72b6a6490ecb03a0da87eebc897025512da823469c2feabde43d1c53.json @@ -1,15 +1,16 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n l1_batch_number = $2\n ", + "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n l1_batch_number = $2\n AND chain_id = $3\n ", "describe": { "columns": [], "parameters": { "Left": [ "Time", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "00b88ec7fcf40bb18e0018b7c76f6e1df560ab1e8935564355236e90b6147d2f" + "hash": "ac1c23dc72b6a6490ecb03a0da87eebc897025512da823469c2feabde43d1c53" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json b/prover/crates/lib/prover_dal/.sqlx/query-accb1c0e9f765d4d63eb84dc1058c746fab1aea15f23f8e5e4971ae67944de29.json similarity index 77% rename from 
prover/crates/lib/prover_dal/.sqlx/query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json rename to prover/crates/lib/prover_dal/.sqlx/query-accb1c0e9f765d4d63eb84dc1058c746fab1aea15f23f8e5e4971ae67944de29.json index 73a8c33695b1..58d96cd495a9 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-accb1c0e9f765d4d63eb84dc1058c746fab1aea15f23f8e5e4971ae67944de29.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE node_aggregation_witness_jobs_fri\n SET\n aggregations_url = $1,\n number_of_dependent_jobs = $5,\n updated_at = NOW()\n WHERE\n l1_batch_number = $2\n AND circuit_id = $3\n AND depth = $4\n ", + "query": "\n UPDATE node_aggregation_witness_jobs_fri\n SET\n aggregations_url = $1,\n number_of_dependent_jobs = $5,\n updated_at = NOW()\n WHERE\n l1_batch_number = $2\n AND circuit_id = $3\n AND depth = $4\n AND chain_id = $6\n ", "describe": { "columns": [], "parameters": { @@ -9,10 +9,11 @@ "Int8", "Int2", "Int4", + "Int4", "Int4" ] }, "nullable": [] }, - "hash": "7a2145e2234a7896031bbc1ce82715e903f3b399886c2c73e838bd924fed6776" + "hash": "accb1c0e9f765d4d63eb84dc1058c746fab1aea15f23f8e5e4971ae67944de29" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-c6d02dc9cb9908a57c79729c759b9314a2ce0180c20126ad22ddaa20c7c32c2c.json b/prover/crates/lib/prover_dal/.sqlx/query-c6d02dc9cb9908a57c79729c759b9314a2ce0180c20126ad22ddaa20c7c32c2c.json deleted file mode 100644 index 8462680ad82d..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-c6d02dc9cb9908a57c79729c759b9314a2ce0180c20126ad22ddaa20c7c32c2c.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n comp.l1_batch_number,\n (comp.updated_at - wit.created_at) AS time_taken,\n wit.created_at\n FROM\n proof_compression_jobs_fri AS comp\n JOIN witness_inputs_fri AS wit ON comp.l1_batch_number = wit.l1_batch_number\n WHERE\n wit.created_at > $1\n ORDER BY\n time_taken DESC;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "time_taken", - "type_info": "Interval" - }, - { - "ordinal": 2, - "name": "created_at", - "type_info": "Timestamp" - } - ], - "parameters": { - "Left": [ - "Timestamp" - ] - }, - "nullable": [ - false, - null, - false - ] - }, - "hash": "c6d02dc9cb9908a57c79729c759b9314a2ce0180c20126ad22ddaa20c7c32c2c" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json b/prover/crates/lib/prover_dal/.sqlx/query-c73801e578ad07a7c848f9f05d6c7d3114d651a561bc702cec11afb8515fa6c4.json similarity index 67% rename from prover/crates/lib/prover_dal/.sqlx/query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json rename to prover/crates/lib/prover_dal/.sqlx/query-c73801e578ad07a7c848f9f05d6c7d3114d651a561bc702cec11afb8515fa6c4.json index a85b4895b451..b7acc231c221 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-c73801e578ad07a7c848f9f05d6c7d3114d651a561bc702cec11afb8515fa6c4.json @@ -1,15 +1,16 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = $1,\n updated_at = NOW()\n WHERE\n l1_batch_number = $2\n ", + "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = $1,\n updated_at = NOW()\n WHERE\n 
l1_batch_number = $2\n AND chain_id = $3\n ", "describe": { "columns": [], "parameters": { "Left": [ "Text", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "534822a226068cde83ad8c30b569a8f447824a5ab466bb6eea1710e8aeaa2c56" + "hash": "c73801e578ad07a7c848f9f05d6c7d3114d651a561bc702cec11afb8515fa6c4" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-cb453f0677b92539747e175e796599bf65cbf2fd7c27a2dcad43e41a0f43cba0.json b/prover/crates/lib/prover_dal/.sqlx/query-cb453f0677b92539747e175e796599bf65cbf2fd7c27a2dcad43e41a0f43cba0.json deleted file mode 100644 index 5c5a9e288b2c..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-cb453f0677b92539747e175e796599bf65cbf2fd7c27a2dcad43e41a0f43cba0.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE leaf_aggregation_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n (l1_batch_number, circuit_id) IN (\n SELECT\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id\n FROM\n prover_jobs_fri\n JOIN leaf_aggregation_witness_jobs_fri lawj\n ON\n prover_jobs_fri.l1_batch_number = lawj.l1_batch_number\n AND prover_jobs_fri.circuit_id = lawj.circuit_id\n WHERE\n lawj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = 0\n GROUP BY\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n lawj.number_of_basic_circuits\n HAVING\n COUNT(*) = lawj.number_of_basic_circuits\n )\n RETURNING\n l1_batch_number,\n circuit_id;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "circuit_id", - "type_info": "Int2" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false - ] - }, - "hash": "cb453f0677b92539747e175e796599bf65cbf2fd7c27a2dcad43e41a0f43cba0" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-d376b159d30c1ce73c50f78b79c09f39d096db7e00fd2be9bb1cd64a392d38a3.json b/prover/crates/lib/prover_dal/.sqlx/query-d376b159d30c1ce73c50f78b79c09f39d096db7e00fd2be9bb1cd64a392d38a3.json index 90df498e072b..8ae98ea6b2f9 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-d376b159d30c1ce73c50f78b79c09f39d096db7e00fd2be9bb1cd64a392d38a3.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-d376b159d30c1ce73c50f78b79c09f39d096db7e00fd2be9bb1cd64a392d38a3.json @@ -87,6 +87,11 @@ "ordinal": 16, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 17, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { @@ -113,6 +118,7 @@ true, true, false, + false, false ] }, diff --git a/prover/crates/lib/prover_dal/.sqlx/query-33f99b7ae36cea4676163f99a39980377c082766efd98569904632504db05095.json b/prover/crates/lib/prover_dal/.sqlx/query-d42583c5e2921bf43dbbdbf0a93d1623b2141aef23292ca75a998b3a306a0eff.json similarity index 78% rename from prover/crates/lib/prover_dal/.sqlx/query-33f99b7ae36cea4676163f99a39980377c082766efd98569904632504db05095.json rename to prover/crates/lib/prover_dal/.sqlx/query-d42583c5e2921bf43dbbdbf0a93d1623b2141aef23292ca75a998b3a306a0eff.json index 4f7ca83a9570..32f5c6d5c700 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-33f99b7ae36cea4676163f99a39980377c082766efd98569904632504db05095.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-d42583c5e2921bf43dbbdbf0a93d1623b2141aef23292ca75a998b3a306a0eff.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n 
priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n l1_batch_number,\n chain_id,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,21 +10,26 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "error", "type_info": "Text" }, { - "ordinal": 4, + "ordinal": 5, "name": "picked_by", "type_info": "Text" } @@ -39,9 +44,10 @@ false, false, false, + false, true, true ] }, - "hash": "33f99b7ae36cea4676163f99a39980377c082766efd98569904632504db05095" + "hash": "d42583c5e2921bf43dbbdbf0a93d1623b2141aef23292ca75a998b3a306a0eff" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c.json b/prover/crates/lib/prover_dal/.sqlx/query-d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c.json new file mode 100644 index 000000000000..abb7a6a618e3 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n (l1_batch_number, chain_id) IN (\n SELECT\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id\n FROM\n prover_jobs_fri\n JOIN\n scheduler_witness_jobs_fri swj\n ON prover_jobs_fri.l1_batch_number = swj.l1_batch_number\n AND prover_jobs_fri.chain_id = swj.chain_id\n WHERE\n swj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = $1\n )\n RETURNING\n l1_batch_number,\n chain_id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int2" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-d72c7beede026491202626a897e3aaee2b3708f40799a7aeab8e143b0b534c4f.json b/prover/crates/lib/prover_dal/.sqlx/query-d72c7beede026491202626a897e3aaee2b3708f40799a7aeab8e143b0b534c4f.json deleted file mode 100644 index 519642458920..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-d72c7beede026491202626a897e3aaee2b3708f40799a7aeab8e143b0b534c4f.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n recursion_tip_witness_jobs_fri (\n l1_batch_number,\n status,\n number_of_final_node_jobs,\n protocol_version,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, 'waiting_for_proofs', $2, $3, NOW(), NOW(), $4)\n ON CONFLICT (l1_batch_number) DO\n UPDATE\n SET\n updated_at = NOW()\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - 
"Int4", - "Int4", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "d72c7beede026491202626a897e3aaee2b3708f40799a7aeab8e143b0b534c4f" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json b/prover/crates/lib/prover_dal/.sqlx/query-d88fde0e42305ff648c718c742fb356c541d65e0c84a4d82f86bbf000c21c9e8.json similarity index 50% rename from prover/crates/lib/prover_dal/.sqlx/query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json rename to prover/crates/lib/prover_dal/.sqlx/query-d88fde0e42305ff648c718c742fb356c541d65e0c84a4d82f86bbf000c21c9e8.json index db810604cd88..da8c50676c13 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-d88fde0e42305ff648c718c742fb356c541d65e0c84a4d82f86bbf000c21c9e8.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND circuit_id = $2\n AND aggregation_round = $3\n AND depth = $4\n AND status = 'successful'\n ORDER BY\n sequence_number ASC;\n ", + "query": "\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND chain_id = $2\n AND status = 'successful'\n AND aggregation_round = $3\n ", "describe": { "columns": [ { @@ -12,14 +12,13 @@ "parameters": { "Left": [ "Int8", - "Int2", - "Int2", - "Int4" + "Int4", + "Int2" ] }, "nullable": [ false ] }, - "hash": "2b626262c8003817ee02978f77452554ccfb5b83f00efdc12bed0f60ef439785" + "hash": "d88fde0e42305ff648c718c742fb356c541d65e0c84a4d82f86bbf000c21c9e8" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json b/prover/crates/lib/prover_dal/.sqlx/query-dbb28977b65a0496fdcebd5e0ad00e284b5d2c0c50f2e4f0f2fc692bd715e040.json similarity index 90% rename from prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json rename to prover/crates/lib/prover_dal/.sqlx/query-dbb28977b65a0496fdcebd5e0ad00e284b5d2c0c50f2e4f0f2fc692bd715e040.json index 18d3f692bd45..61ea4bad0862 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-dbb28977b65a0496fdcebd5e0ad00e284b5d2c0c50f2e4f0f2fc692bd715e040.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND aggregation_round = $2\n ", + "query": "\n SELECT\n *\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND aggregation_round = $2\n AND chain_id = $3\n ", "describe": { "columns": [ { @@ -102,12 +102,18 @@ "ordinal": 19, "name": "priority", "type_info": "Int4" + }, + { + "ordinal": 20, + "name": "chain_id", + "type_info": "Int4" } ], "parameters": { "Left": [ "Int8", - "Int2" + "Int2", + "Int4" ] }, "nullable": [ @@ -130,8 +136,9 @@ true, true, false, + false, false ] }, - "hash": "c2c140d136df5303d7b3a66ccd0d34a5baece02812f8c950fc84d37eeebd33a4" + "hash": "dbb28977b65a0496fdcebd5e0ad00e284b5d2c0c50f2e4f0f2fc692bd715e040" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-5dd5a6ad9adb97a2c2fe8cec66682b12e9e81d7cc188369025b0d209b526b327.json b/prover/crates/lib/prover_dal/.sqlx/query-dfabe5ecdd9217abb7e75e400c3f4be22aa5d51b753e0fb261ad8fe612f60981.json similarity index 83% rename from 
prover/crates/lib/prover_dal/.sqlx/query-5dd5a6ad9adb97a2c2fe8cec66682b12e9e81d7cc188369025b0d209b526b327.json rename to prover/crates/lib/prover_dal/.sqlx/query-dfabe5ecdd9217abb7e75e400c3f4be22aa5d51b753e0fb261ad8fe612f60981.json index 49b17ea5b04a..5c919e1f5da1 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-5dd5a6ad9adb97a2c2fe8cec66682b12e9e81d7cc188369025b0d209b526b327.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-dfabe5ecdd9217abb7e75e400c3f4be22aa5d51b753e0fb261ad8fe612f60981.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n processing_started_at = NOW(),\n updated_at = NOW(),\n picked_by = $5\n WHERE\n id = (\n SELECT\n pj.id\n FROM\n (\n SELECT\n *\n FROM\n UNNEST($1::SMALLINT [], $2::SMALLINT [])\n ) AS tuple (circuit_id, round)\n JOIN LATERAL (\n SELECT\n *\n FROM\n prover_jobs_fri AS pj\n WHERE\n pj.status = 'queued'\n AND pj.protocol_version = $3\n AND pj.protocol_version_patch = $4\n AND pj.circuit_id = tuple.circuit_id\n AND pj.aggregation_round = tuple.round\n ORDER BY\n pj.priority DESC,\n pj.created_at ASC\n LIMIT\n 1\n ) AS pj ON TRUE\n ORDER BY\n pj.priority DESC,\n pj.created_at ASC,\n pj.aggregation_round DESC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n processing_started_at = NOW(),\n updated_at = NOW(),\n picked_by = $5\n WHERE\n id = (\n SELECT\n pj.id\n FROM\n (\n SELECT\n *\n FROM\n UNNEST($1::SMALLINT [], $2::SMALLINT [])\n ) AS tuple (circuit_id, round)\n JOIN LATERAL (\n SELECT\n *\n FROM\n prover_jobs_fri AS pj\n WHERE\n pj.status = 'queued'\n AND pj.protocol_version = $3\n AND pj.protocol_version_patch = $4\n AND pj.circuit_id = tuple.circuit_id\n AND pj.aggregation_round = tuple.round\n ORDER BY\n pj.priority DESC,\n pj.created_at ASC\n LIMIT\n 1\n ) AS pj ON TRUE\n ORDER BY\n pj.priority DESC,\n pj.created_at ASC,\n pj.aggregation_round DESC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", "describe": { "columns": [ { @@ -10,31 +10,36 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "l1_batch_number", "type_info": "Int8" }, { - "ordinal": 2, + "ordinal": 3, "name": "circuit_id", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "aggregation_round", "type_info": "Int2" }, { - "ordinal": 4, + "ordinal": 5, "name": "sequence_number", "type_info": "Int4" }, { - "ordinal": 5, + "ordinal": 6, "name": "depth", "type_info": "Int4" }, { - "ordinal": 6, + "ordinal": 7, "name": "is_node_final_proof", "type_info": "Bool" } @@ -55,8 +60,9 @@ false, false, false, + false, false ] }, - "hash": "5dd5a6ad9adb97a2c2fe8cec66682b12e9e81d7cc188369025b0d209b526b327" + "hash": "dfabe5ecdd9217abb7e75e400c3f4be22aa5d51b753e0fb261ad8fe612f60981" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-779b75eef7433715bc5dea7f8e7bdc4424ac6384c5ad7ef6c08911529f05419a.json 
b/prover/crates/lib/prover_dal/.sqlx/query-e2a0b31993ba4aed24abbbd7de1e55be95aea85fd8096a06665ba9785257296b.json similarity index 78% rename from prover/crates/lib/prover_dal/.sqlx/query-779b75eef7433715bc5dea7f8e7bdc4424ac6384c5ad7ef6c08911529f05419a.json rename to prover/crates/lib/prover_dal/.sqlx/query-e2a0b31993ba4aed24abbbd7de1e55be95aea85fd8096a06665ba9785257296b.json index fa27d0291af1..5dcb8a2101b6 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-779b75eef7433715bc5dea7f8e7bdc4424ac6384c5ad7ef6c08911529f05419a.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-e2a0b31993ba4aed24abbbd7de1e55be95aea85fd8096a06665ba9785257296b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n l1_batch_number,\n status,\n attempts,\n error,\n picked_by\n ", + "query": "\n UPDATE witness_inputs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n priority = priority + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n l1_batch_number,\n chain_id,\n status,\n attempts,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,21 +10,26 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "error", "type_info": "Text" }, { - "ordinal": 4, + "ordinal": 5, "name": "picked_by", "type_info": "Text" } @@ -39,9 +44,10 @@ false, false, false, + false, true, true ] }, - "hash": "779b75eef7433715bc5dea7f8e7bdc4424ac6384c5ad7ef6c08911529f05419a" + "hash": "e2a0b31993ba4aed24abbbd7de1e55be95aea85fd8096a06665ba9785257296b" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json b/prover/crates/lib/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json deleted file mode 100644 index 6594b6ee76c9..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM prover_jobs_fri\n WHERE\n l1_batch_number = $1;\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "e495b78add1c942d89d806e228093a4eb2ee0284aa89bca1ba958f470a2d6254" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json b/prover/crates/lib/prover_dal/.sqlx/query-e62f61185e6353ace2050287a45645592b6b8b57a323ceea26f515a4fad02684.json similarity index 63% rename from prover/crates/lib/prover_dal/.sqlx/query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json rename to prover/crates/lib/prover_dal/.sqlx/query-e62f61185e6353ace2050287a45645592b6b8b57a323ceea26f515a4fad02684.json index d0c5d31aa3ec..89a018abf25a 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d.json +++ 
b/prover/crates/lib/prover_dal/.sqlx/query-e62f61185e6353ace2050287a45645592b6b8b57a323ceea26f515a4fad02684.json @@ -1,15 +1,16 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE witness_inputs_fri\n SET\n status = $1,\n updated_at = NOW()\n WHERE\n l1_batch_number = $2\n AND status != 'successful'\n ", + "query": "\n UPDATE witness_inputs_fri\n SET\n status = $1,\n updated_at = NOW()\n WHERE\n l1_batch_number = $2\n AND chain_id = $3\n AND status != 'successful'\n ", "describe": { "columns": [], "parameters": { "Left": [ "Text", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "b25c66b9705b3f2fb8a3492f1bd20222e177262292241bd8cb89dbb9c1e74c2d" + "hash": "e62f61185e6353ace2050287a45645592b6b8b57a323ceea26f515a4fad02684" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-e65d9d8389b60f48468561984f0fb9c8674344921b98b8b26e4d85994b3d72af.json b/prover/crates/lib/prover_dal/.sqlx/query-e65d9d8389b60f48468561984f0fb9c8674344921b98b8b26e4d85994b3d72af.json deleted file mode 100644 index 65a473ddeee5..000000000000 --- a/prover/crates/lib/prover_dal/.sqlx/query-e65d9d8389b60f48468561984f0fb9c8674344921b98b8b26e4d85994b3d72af.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE recursion_tip_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n l1_batch_number IN (\n SELECT\n prover_jobs_fri.l1_batch_number\n FROM\n prover_jobs_fri\n JOIN\n recursion_tip_witness_jobs_fri rtwj\n ON prover_jobs_fri.l1_batch_number = rtwj.l1_batch_number\n WHERE\n rtwj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = $1\n AND prover_jobs_fri.is_node_final_proof = TRUE\n GROUP BY\n prover_jobs_fri.l1_batch_number,\n rtwj.number_of_final_node_jobs\n HAVING\n COUNT(*) = rtwj.number_of_final_node_jobs\n )\n RETURNING\n l1_batch_number;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int2" - ] - }, - "nullable": [ - false - ] - }, - "hash": "e65d9d8389b60f48468561984f0fb9c8674344921b98b8b26e4d85994b3d72af" -} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-e91577193c3da5c4c60ca3f972fa030401f899503efb07f139d09fc36a24e86f.json b/prover/crates/lib/prover_dal/.sqlx/query-e91577193c3da5c4c60ca3f972fa030401f899503efb07f139d09fc36a24e86f.json new file mode 100644 index 000000000000..666838f5ab26 --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-e91577193c3da5c4c60ca3f972fa030401f899503efb07f139d09fc36a24e86f.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n witness_inputs_fri (\n l1_batch_number,\n chain_id,\n witness_inputs_blob_url,\n protocol_version,\n status,\n created_at,\n updated_at,\n protocol_version_patch\n )\n VALUES\n ($1, $2, $3, $4, 'queued', NOW(), NOW(), $5)\n ON CONFLICT (l1_batch_number, chain_id) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Text", + "Int4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "e91577193c3da5c4c60ca3f972fa030401f899503efb07f139d09fc36a24e86f" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-ea5d124ba45e81c0aa1d4d86ebf13e9c1aebb64b2985233e00e0f318d1f4aede.json b/prover/crates/lib/prover_dal/.sqlx/query-ea5d124ba45e81c0aa1d4d86ebf13e9c1aebb64b2985233e00e0f318d1f4aede.json new file mode 100644 index 000000000000..7ff87ecaa1ac --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-ea5d124ba45e81c0aa1d4d86ebf13e9c1aebb64b2985233e00e0f318d1f4aede.json @@ 
-0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number,\n chain_id\n FROM\n proof_compression_jobs_fri\n WHERE\n status <> 'successful'\n AND status <> 'sent_to_server'\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false + ] + }, + "hash": "ea5d124ba45e81c0aa1d4d86ebf13e9c1aebb64b2985233e00e0f318d1f4aede" +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-7a41908eac57403ddc6785ff0a646830fcb6b1fdcfcbbd9a1b19d1a4b1e7a978.json b/prover/crates/lib/prover_dal/.sqlx/query-eca875e2159b5852d14fbbda302503bd3db402367bf6b5a0b6049e7b5839daf1.json similarity index 79% rename from prover/crates/lib/prover_dal/.sqlx/query-7a41908eac57403ddc6785ff0a646830fcb6b1fdcfcbbd9a1b19d1a4b1e7a978.json rename to prover/crates/lib/prover_dal/.sqlx/query-eca875e2159b5852d14fbbda302503bd3db402367bf6b5a0b6049e7b5839daf1.json index 7862c5b4e92a..2ed42f5bde08 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-7a41908eac57403ddc6785ff0a646830fcb6b1fdcfcbbd9a1b19d1a4b1e7a978.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-eca875e2159b5852d14fbbda302503bd3db402367bf6b5a0b6049e7b5839daf1.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n AND aggregation_round = $4\n ORDER BY\n priority DESC,\n created_at ASC,\n circuit_id ASC,\n id ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n AND aggregation_round = $4\n ORDER BY\n priority DESC,\n created_at ASC,\n circuit_id ASC,\n id ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.chain_id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", "describe": { "columns": [ { @@ -10,31 +10,36 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "l1_batch_number", "type_info": "Int8" }, { - "ordinal": 2, + "ordinal": 3, "name": "circuit_id", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "aggregation_round", "type_info": "Int2" }, { - "ordinal": 4, + "ordinal": 5, "name": "sequence_number", "type_info": "Int4" }, { - "ordinal": 5, + "ordinal": 6, "name": "depth", "type_info": "Int4" }, { - "ordinal": 6, + "ordinal": 7, "name": "is_node_final_proof", "type_info": "Bool" } @@ -54,8 +59,9 @@ false, false, false, + false, false ] }, - "hash": 
"7a41908eac57403ddc6785ff0a646830fcb6b1fdcfcbbd9a1b19d1a4b1e7a978" + "hash": "eca875e2159b5852d14fbbda302503bd3db402367bf6b5a0b6049e7b5839daf1" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json b/prover/crates/lib/prover_dal/.sqlx/query-ecfba455b3af61411965022c89ca8b41eff6b683e4e9c70216076c2434218ade.json similarity index 62% rename from prover/crates/lib/prover_dal/.sqlx/query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json rename to prover/crates/lib/prover_dal/.sqlx/query-ecfba455b3af61411965022c89ca8b41eff6b683e4e9c70216076c2434218ade.json index 5ebb1951966d..bc3ee00a6996 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-ecfba455b3af61411965022c89ca8b41eff6b683e4e9c70216076c2434218ade.json @@ -1,15 +1,16 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE leaf_aggregation_witness_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n id = $2\n ", + "query": "\n UPDATE leaf_aggregation_witness_jobs_fri\n SET\n status = 'successful',\n updated_at = NOW(),\n time_taken = $1\n WHERE\n id = $2\n AND chain_id = $3\n ", "describe": { "columns": [], "parameters": { "Left": [ "Time", - "Int8" + "Int8", + "Int4" ] }, "nullable": [] }, - "hash": "46c4696fff5a4b8cc5cb46b05645da82065836fe17687ffad04126a6a8b2b27c" + "hash": "ecfba455b3af61411965022c89ca8b41eff6b683e4e9c70216076c2434218ade" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-bf25e4d0f807f618a822c83f3ff42d00d76a304106cbda611864cc64cdcd339a.json b/prover/crates/lib/prover_dal/.sqlx/query-ee643991981290bfc43ead208194d52a495f9015d135dde998a91578a20a793e.json similarity index 78% rename from prover/crates/lib/prover_dal/.sqlx/query-bf25e4d0f807f618a822c83f3ff42d00d76a304106cbda611864cc64cdcd339a.json rename to prover/crates/lib/prover_dal/.sqlx/query-ee643991981290bfc43ead208194d52a495f9015d135dde998a91578a20a793e.json index cb82591b2a70..a9a0cb02a709 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-bf25e4d0f807f618a822c83f3ff42d00d76a304106cbda611864cc64cdcd339a.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-ee643991981290bfc43ead208194d52a495f9015d135dde998a91578a20a793e.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE leaf_aggregation_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n attempts = attempts + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n id,\n status,\n attempts,\n circuit_id,\n error,\n picked_by\n ", + "query": "\n UPDATE leaf_aggregation_witness_jobs_fri\n SET\n status = 'queued',\n updated_at = NOW(),\n processing_started_at = NOW(),\n attempts = attempts + 1\n WHERE\n (\n status = 'in_progress'\n AND processing_started_at <= NOW() - $1::INTERVAL\n AND attempts < $2\n )\n OR (\n status = 'failed'\n AND attempts < $2\n )\n RETURNING\n id,\n chain_id,\n status,\n attempts,\n circuit_id,\n error,\n picked_by\n ", "describe": { "columns": [ { @@ -10,26 +10,31 @@ }, { "ordinal": 1, + "name": "chain_id", + "type_info": "Int4" + }, + { + "ordinal": 2, "name": "status", "type_info": "Text" }, { - "ordinal": 2, + "ordinal": 3, "name": "attempts", "type_info": "Int2" }, { - "ordinal": 3, + "ordinal": 4, "name": "circuit_id", "type_info": "Int2" }, { - "ordinal": 
4, + "ordinal": 5, "name": "error", "type_info": "Text" }, { - "ordinal": 5, + "ordinal": 6, "name": "picked_by", "type_info": "Text" } @@ -45,9 +50,10 @@ false, false, false, + false, true, true ] }, - "hash": "bf25e4d0f807f618a822c83f3ff42d00d76a304106cbda611864cc64cdcd339a" + "hash": "ee643991981290bfc43ead208194d52a495f9015d135dde998a91578a20a793e" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json b/prover/crates/lib/prover_dal/.sqlx/query-f76966131ebc989eb255e444fea7912933a28b1222b272801a8f83254323af33.json similarity index 62% rename from prover/crates/lib/prover_dal/.sqlx/query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json rename to prover/crates/lib/prover_dal/.sqlx/query-f76966131ebc989eb255e444fea7912933a28b1222b272801a8f83254323af33.json index 90eface5350b..318a8b1e1d12 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-f76966131ebc989eb255e444fea7912933a28b1222b272801a8f83254323af33.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = $1,\n error = $2,\n updated_at = NOW()\n WHERE\n l1_batch_number = $3\n AND status != $4\n AND status != $5\n ", + "query": "\n UPDATE proof_compression_jobs_fri\n SET\n status = $1,\n error = $2,\n updated_at = NOW()\n WHERE\n l1_batch_number = $3\n AND chain_id = $4\n AND status != $5\n AND status != $6\n ", "describe": { "columns": [], "parameters": { @@ -8,11 +8,12 @@ "Text", "Text", "Int8", + "Int4", "Text", "Text" ] }, "nullable": [] }, - "hash": "93b9706aa8eb840d574d7c156cc866e8f67a380302762c272bfb27307682d62e" + "hash": "f76966131ebc989eb255e444fea7912933a28b1222b272801a8f83254323af33" } diff --git a/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs b/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs index 547e2ad3df4d..ec51e5e35fe2 100644 --- a/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs @@ -1,13 +1,9 @@ #![doc = include_str!("../doc/FriProofCompressorDal.md")] use std::{collections::HashMap, str::FromStr, time::Duration}; -use zksync_basic_types::{ - protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, - prover_dal::{ - JobCountStatistics, ProofCompressionJobInfo, ProofCompressionJobStatus, StuckJobs, - }, - L1BatchNumber, -}; +use zksync_basic_types::{protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{ + JobCountStatistics, ProofCompressionJobInfo, ProofCompressionJobStatus, StuckJobs, +}, L1BatchNumber, L2ChainId}; use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; use crate::{duration_to_naive_time, pg_interval_from_duration, Prover}; @@ -21,6 +17,7 @@ impl FriProofCompressorDal<'_, '_> { pub async fn insert_proof_compression_job( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, fri_proof_blob_url: &str, protocol_version: ProtocolSemanticVersion, ) { @@ -29,6 +26,7 @@ impl FriProofCompressorDal<'_, '_> { INSERT INTO proof_compression_jobs_fri ( l1_batch_number, + chain_id, fri_proof_blob_url, status, created_at, @@ -37,10 +35,11 @@ impl FriProofCompressorDal<'_, '_> { protocol_version_patch ) VALUES - ($1, $2, $3, NOW(), NOW(), $4, $5) + ($1, $2, $3, $4, NOW(), NOW(), $5, $6) ON CONFLICT (l1_batch_number) DO NOTHING "#, 
i64::from(block_number.0), + chain_id.as_u64() as i32, fri_proof_blob_url, ProofCompressionJobStatus::Queued.to_string(), protocol_version.minor as i32, @@ -55,7 +54,7 @@ impl FriProofCompressorDal<'_, '_> { &mut self, picked_by: &str, protocol_version: ProtocolSemanticVersion, - ) -> Option { + ) -> Option<(L2ChainId, L1BatchNumber)> { sqlx::query!( r#" UPDATE proof_compression_jobs_fri @@ -66,9 +65,10 @@ impl FriProofCompressorDal<'_, '_> { processing_started_at = NOW(), picked_by = $3 WHERE - l1_batch_number = ( + (l1_batch_number, chain_id) = ( SELECT - l1_batch_number + l1_batch_number, + chain_id FROM proof_compression_jobs_fri WHERE @@ -84,7 +84,8 @@ impl FriProofCompressorDal<'_, '_> { SKIP LOCKED ) RETURNING - proof_compression_jobs_fri.l1_batch_number + proof_compression_jobs_fri.l1_batch_number, + proof_compression_jobs_fri.chain_id "#, ProofCompressionJobStatus::InProgress.to_string(), ProofCompressionJobStatus::Queued.to_string(), @@ -95,12 +96,13 @@ impl FriProofCompressorDal<'_, '_> { .fetch_optional(self.storage.conn()) .await .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)) + .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32))) } pub async fn get_proof_compression_job_attempts( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> sqlx::Result> { let attempts = sqlx::query!( r#" @@ -110,8 +112,10 @@ impl FriProofCompressorDal<'_, '_> { proof_compression_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(l1_batch_number.0) + i64::from(l1_batch_number.0), + chain_id.as_u64() as i32 ) .fetch_optional(self.storage.conn()) .await? @@ -123,6 +127,7 @@ impl FriProofCompressorDal<'_, '_> { pub async fn mark_proof_compression_job_successful( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, time_taken: Duration, l1_proof_blob_url: &str, ) { @@ -136,11 +141,13 @@ impl FriProofCompressorDal<'_, '_> { l1_proof_blob_url = $3 WHERE l1_batch_number = $4 + AND chain_id = $5 "#, ProofCompressionJobStatus::Successful.to_string(), duration_to_naive_time(time_taken), l1_proof_blob_url, - i64::from(block_number.0) + i64::from(block_number.0), + chain_id.as_u64() as i32 ) .execute(self.storage.conn()) .await @@ -151,6 +158,7 @@ impl FriProofCompressorDal<'_, '_> { &mut self, error: &str, block_number: L1BatchNumber, + chain_id: L2ChainId, ) { sqlx::query!( r#" @@ -161,12 +169,14 @@ impl FriProofCompressorDal<'_, '_> { updated_at = NOW() WHERE l1_batch_number = $3 - AND status != $4 + AND chain_id = $4 AND status != $5 + AND status != $6 "#, ProofCompressionJobStatus::Failed.to_string(), error, i64::from(block_number.0), + chain_id.as_u64() as i32, ProofCompressionJobStatus::Successful.to_string(), ProofCompressionJobStatus::SentToServer.to_string(), ) @@ -178,6 +188,7 @@ impl FriProofCompressorDal<'_, '_> { pub async fn get_least_proven_block_not_sent_to_server( &mut self, ) -> Option<( + L2ChainId, L1BatchNumber, ProtocolSemanticVersion, ProofCompressionJobStatus, @@ -186,20 +197,23 @@ impl FriProofCompressorDal<'_, '_> { r#" SELECT l1_batch_number, + chain_id, status, protocol_version, protocol_version_patch FROM proof_compression_jobs_fri WHERE - l1_batch_number = ( + (l1_batch_number, chain_id) = ( SELECT - MIN(l1_batch_number) + MIN(l1_batch_number), + chain_id FROM proof_compression_jobs_fri WHERE status = $1 OR status = $2 + GROUP BY chain_id ) "#, ProofCompressionJobStatus::Successful.to_string(), @@ -210,6 +224,7 @@ impl FriProofCompressorDal<'_, '_> { .ok()?; match row 
{ Some(row) => Some(( + L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32), ProtocolSemanticVersion::new( ProtocolVersionId::try_from(row.protocol_version.unwrap() as u16).unwrap(), @@ -224,6 +239,7 @@ impl FriProofCompressorDal<'_, '_> { pub async fn mark_proof_sent_to_server( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, ) -> DalResult<()> { sqlx::query!( r#" @@ -233,9 +249,11 @@ impl FriProofCompressorDal<'_, '_> { updated_at = NOW() WHERE l1_batch_number = $2 + AND chain_id = $3 "#, ProofCompressionJobStatus::SentToServer.to_string(), - i64::from(block_number.0) + i64::from(block_number.0), + chain_id.as_u64() as i32 ) .instrument("mark_proof_sent_to_server") .execute(self.storage) @@ -284,11 +302,12 @@ impl FriProofCompressorDal<'_, '_> { .collect() } - pub async fn get_oldest_not_compressed_batch(&mut self) -> Option { - let result: Option = sqlx::query!( + pub async fn get_oldest_not_compressed_batch(&mut self) -> Option<(L2ChainId, L1BatchNumber)> { + let result: Option<(L2ChainId, L1BatchNumber)> = sqlx::query!( r#" SELECT - l1_batch_number + l1_batch_number, + chain_id FROM proof_compression_jobs_fri WHERE @@ -303,7 +322,7 @@ impl FriProofCompressorDal<'_, '_> { .fetch_optional(self.storage.conn()) .await .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32))); result } @@ -335,6 +354,7 @@ impl FriProofCompressorDal<'_, '_> { ) RETURNING l1_batch_number, + chain_id, status, attempts, error, @@ -349,6 +369,7 @@ impl FriProofCompressorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.l1_batch_number as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: None, @@ -362,6 +383,7 @@ impl FriProofCompressorDal<'_, '_> { pub async fn get_proof_compression_job_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, ) -> Option { sqlx::query!( r#" @@ -371,14 +393,17 @@ impl FriProofCompressorDal<'_, '_> { proof_compression_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(block_number.0) + i64::from(block_number.0), + chain_id.as_u64() as i32, ) .fetch_optional(self.storage.conn()) .await .unwrap() .map(|row| ProofCompressionJobInfo { l1_batch_number: block_number, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), attempts: row.attempts as u32, status: ProofCompressionJobStatus::from_str(&row.status).unwrap(), fri_proof_blob_url: row.fri_proof_blob_url, @@ -395,14 +420,17 @@ impl FriProofCompressorDal<'_, '_> { pub async fn delete_batch_data( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, ) -> sqlx::Result { sqlx::query!( r#" DELETE FROM proof_compression_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(block_number.0) + i64::from(block_number.0), + chain_id.as_u64() as i32, ) .execute(self.storage.conn()) .await @@ -421,6 +449,7 @@ impl FriProofCompressorDal<'_, '_> { pub async fn requeue_stuck_jobs_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, max_attempts: u32, ) -> Vec { { @@ -436,7 +465,8 @@ impl FriProofCompressorDal<'_, '_> { priority = priority + 1 WHERE l1_batch_number = $1 - AND attempts >= $2 + AND chain_id = $2 + AND attempts >= $3 AND ( status = 'in_progress' OR status = 'failed' @@ -448,6 +478,7 @@ impl FriProofCompressorDal<'_, '_> { picked_by "#, i64::from(block_number.0), + chain_id.as_u64() as i32, max_attempts 
as i32, ) .fetch_all(self.storage.conn()) @@ -456,6 +487,7 @@ impl FriProofCompressorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: block_number.0 as u64, + chain_id, status: row.status, attempts: row.attempts as u64, circuit_id: None, diff --git a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs index c2bb72357db7..8efe2ce30eb5 100644 --- a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs @@ -7,17 +7,12 @@ use std::{ }; use sqlx::QueryBuilder; -use zksync_basic_types::{ - basic_fri_types::{ - AggregationRound, CircuitIdRoundTuple, CircuitProverStatsEntry, - ProtocolVersionedCircuitProverStats, - }, - protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, - prover_dal::{ - FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, - }, - L1BatchNumber, -}; +use zksync_basic_types::{basic_fri_types::{ + AggregationRound, CircuitIdRoundTuple, CircuitProverStatsEntry, + ProtocolVersionedCircuitProverStats, +}, protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{ + FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, +}, L1BatchNumber, L2ChainId}; use zksync_db_connection::{ connection::Connection, instrument::InstrumentExt, metrics::MethodLatency, }; @@ -39,6 +34,7 @@ impl FriProverDal<'_, '_> { pub async fn insert_prover_jobs( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, circuit_ids_and_urls: Vec<(u8, String)>, aggregation_round: AggregationRound, depth: u16, @@ -58,6 +54,7 @@ impl FriProverDal<'_, '_> { r#" INSERT INTO prover_jobs_fri ( l1_batch_number, + chain_id, circuit_id, circuit_blob_url, aggregation_round, @@ -77,6 +74,7 @@ impl FriProverDal<'_, '_> { chunk.iter().enumerate(), |mut row, (i, (circuit_id, circuit_blob_url))| { row.push_bind(l1_batch_number.0 as i64) + .push_bind(chain_id.as_u64() as i32) .push_bind(*circuit_id as i16) .push_bind(circuit_blob_url) .push_bind(aggregation_round as i64) @@ -94,7 +92,7 @@ impl FriProverDal<'_, '_> { // Add the ON CONFLICT clause query_builder.push( r#" - ON CONFLICT (l1_batch_number, aggregation_round, circuit_id, depth, sequence_number) + ON CONFLICT (l1_batch_number, chain_id, aggregation_round, circuit_id, depth, sequence_number) DO UPDATE SET updated_at = NOW() "#, @@ -156,6 +154,7 @@ impl FriProverDal<'_, '_> { ) RETURNING prover_jobs_fri.id, + prover_jobs_fri.chain_id, prover_jobs_fri.l1_batch_number, prover_jobs_fri.circuit_id, prover_jobs_fri.aggregation_round, @@ -174,6 +173,7 @@ impl FriProverDal<'_, '_> { .map(|row| FriProverJobMetadata { id: row.id as u32, block_number: L1BatchNumber(row.l1_batch_number as u32), + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), circuit_id: row.circuit_id as u8, aggregation_round: AggregationRound::try_from(i32::from(row.aggregation_round)) .unwrap(), @@ -234,6 +234,7 @@ impl FriProverDal<'_, '_> { ) RETURNING prover_jobs_fri.id, + prover_jobs_fri.chain_id, prover_jobs_fri.l1_batch_number, prover_jobs_fri.circuit_id, prover_jobs_fri.aggregation_round, @@ -252,6 +253,7 @@ impl FriProverDal<'_, '_> { .map(|row| FriProverJobMetadata { id: row.id as u32, block_number: L1BatchNumber(row.l1_batch_number as u32), + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), circuit_id: row.circuit_id as u8, aggregation_round: AggregationRound::try_from(i32::from(row.aggregation_round)) .unwrap(), @@ -297,6 +299,7 @@ impl 
FriProverDal<'_, '_> { ) RETURNING prover_jobs_fri.id, + prover_jobs_fri.chain_id, prover_jobs_fri.l1_batch_number, prover_jobs_fri.circuit_id, prover_jobs_fri.aggregation_round, @@ -314,6 +317,7 @@ impl FriProverDal<'_, '_> { .map(|row| FriProverJobMetadata { id: row.id as u32, block_number: L1BatchNumber(row.l1_batch_number as u32), + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), circuit_id: row.circuit_id as u8, aggregation_round: AggregationRound::try_from(i32::from(row.aggregation_round)) .unwrap(), @@ -385,6 +389,7 @@ impl FriProverDal<'_, '_> { ) RETURNING prover_jobs_fri.id, + prover_jobs_fri.chain_id, prover_jobs_fri.l1_batch_number, prover_jobs_fri.circuit_id, prover_jobs_fri.aggregation_round, @@ -404,6 +409,7 @@ impl FriProverDal<'_, '_> { .map(|row| FriProverJobMetadata { id: row.id as u32, block_number: L1BatchNumber(row.l1_batch_number as u32), + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), circuit_id: row.circuit_id as u8, aggregation_round: AggregationRound::try_from(i32::from(row.aggregation_round)) .unwrap(), @@ -414,7 +420,7 @@ impl FriProverDal<'_, '_> { }) } - pub async fn save_proof_error(&mut self, id: u32, error: String) { + pub async fn save_proof_error(&mut self, id: u32, chain_id: L2ChainId, error: String) { { sqlx::query!( r#" @@ -425,10 +431,12 @@ impl FriProverDal<'_, '_> { updated_at = NOW() WHERE id = $2 + AND chain_id = $3 AND status != 'successful' "#, error, - i64::from(id) + i64::from(id), + chain_id.as_u64() as i32 ) .execute(self.storage.conn()) .await @@ -436,7 +444,7 @@ impl FriProverDal<'_, '_> { } } - pub async fn get_prover_job_attempts(&mut self, id: u32) -> sqlx::Result> { + pub async fn get_prover_job_attempts(&mut self, id: u32, chain_id: L2ChainId) -> sqlx::Result> { let attempts = sqlx::query!( r#" SELECT @@ -445,8 +453,10 @@ impl FriProverDal<'_, '_> { prover_jobs_fri WHERE id = $1 + AND chain_id = $2 "#, - i64::from(id) + i64::from(id), + chain_id.as_u64() as i32 ) .fetch_optional(self.storage.conn()) .await? 
@@ -458,6 +468,7 @@ impl FriProverDal<'_, '_> { pub async fn save_proof( &mut self, id: u32, + chain_id: L2ChainId, time_taken: Duration, blob_url: &str, ) -> FriProverJobMetadata { @@ -471,8 +482,10 @@ impl FriProverDal<'_, '_> { proof_blob_url = $2 WHERE id = $3 + AND chain_id = $4 RETURNING prover_jobs_fri.id, + prover_jobs_fri.chain_id, prover_jobs_fri.l1_batch_number, prover_jobs_fri.circuit_id, prover_jobs_fri.aggregation_round, @@ -482,7 +495,8 @@ impl FriProverDal<'_, '_> { "#, duration_to_naive_time(time_taken), blob_url, - i64::from(id) + i64::from(id), + chain_id.as_u64() as i32 ) .instrument("save_fri_proof") .report_latency() @@ -493,6 +507,7 @@ impl FriProverDal<'_, '_> { .map(|row| FriProverJobMetadata { id: row.id as u32, block_number: L1BatchNumber(row.l1_batch_number as u32), + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), circuit_id: row.circuit_id as u8, aggregation_round: AggregationRound::try_from(i32::from(row.aggregation_round)) .unwrap(), @@ -520,9 +535,10 @@ impl FriProverDal<'_, '_> { processing_started_at = NOW(), priority = priority + 1 WHERE - id IN ( + (id, chain_id) IN ( SELECT - id + id, + chain_id FROM prover_jobs_fri WHERE @@ -540,6 +556,7 @@ impl FriProverDal<'_, '_> { ) RETURNING id, + chain_id, status, attempts, circuit_id, @@ -555,6 +572,7 @@ impl FriProverDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.id as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: Some(row.circuit_id as u32), @@ -569,6 +587,7 @@ impl FriProverDal<'_, '_> { pub async fn insert_prover_job( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, circuit_id: u8, depth: u16, sequence_number: usize, @@ -582,6 +601,7 @@ impl FriProverDal<'_, '_> { INSERT INTO prover_jobs_fri ( l1_batch_number, + chain_id, circuit_id, circuit_blob_url, aggregation_round, @@ -595,7 +615,7 @@ impl FriProverDal<'_, '_> { protocol_version_patch ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, 'queued', NOW(), NOW(), $9) + ($1, $2, $3, $4, $5, $6, $7, $8, $9, 'queued', NOW(), NOW(), $10) ON CONFLICT ( l1_batch_number, aggregation_round, circuit_id, depth, sequence_number ) DO @@ -604,6 +624,7 @@ impl FriProverDal<'_, '_> { updated_at = NOW() "#, i64::from(l1_batch_number.0), + chain_id.as_u64() as i32, i16::from(circuit_id), circuit_blob_url, aggregation_round as i64, @@ -710,20 +731,22 @@ impl FriProverDal<'_, '_> { } } - pub async fn min_unproved_l1_batch_number(&mut self) -> HashMap<(u8, u8), L1BatchNumber> { + pub async fn min_unproved_l1_batch_number(&mut self) -> HashMap<(u8, u8), (L2ChainId, L1BatchNumber)> { { sqlx::query!( r#" SELECT MIN(l1_batch_number) AS "l1_batch_number!", circuit_id, - aggregation_round + aggregation_round, + chain_id FROM prover_jobs_fri WHERE status IN ('queued', 'in_gpu_proof', 'in_progress', 'failed') GROUP BY circuit_id, + chain_id, aggregation_round "# ) @@ -734,7 +757,7 @@ impl FriProverDal<'_, '_> { .map(|row| { ( (row.circuit_id as u8, row.aggregation_round as u8), - L1BatchNumber(row.l1_batch_number as u32), + (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32)), ) }) .collect() @@ -787,6 +810,7 @@ impl FriProverDal<'_, '_> { pub async fn get_recursion_tip_proof_job_id( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> Option { sqlx::query!( r#" @@ -796,10 +820,12 @@ impl FriProverDal<'_, '_> { prover_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 AND status = 'successful' - AND aggregation_round = 
$2 + AND aggregation_round = $3 "#, l1_batch_number.0 as i64, + chain_id.as_u64() as i32, AggregationRound::RecursionTip as i16, ) .fetch_optional(self.storage.conn()) @@ -839,6 +865,7 @@ impl FriProverDal<'_, '_> { pub async fn get_final_node_proof_job_ids_for( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> Vec<(u8, u32)> { sqlx::query!( r#" @@ -849,12 +876,14 @@ impl FriProverDal<'_, '_> { prover_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 AND is_node_final_proof = TRUE AND status = 'successful' ORDER BY circuit_id ASC "#, - l1_batch_number.0 as i64 + l1_batch_number.0 as i64, + chain_id.as_u64() as i32 ) .fetch_all(self.storage.conn()) .await @@ -867,6 +896,7 @@ impl FriProverDal<'_, '_> { pub async fn get_prover_jobs_stats_for_batch( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, aggregation_round: AggregationRound, ) -> Vec { sqlx::query!( @@ -878,8 +908,10 @@ impl FriProverDal<'_, '_> { WHERE l1_batch_number = $1 AND aggregation_round = $2 + AND chain_id = $3 "#, i64::from(l1_batch_number.0), + chain_id.as_u64() as i32, aggregation_round as i16 ) .fetch_all(self.storage.conn()) @@ -889,6 +921,7 @@ impl FriProverDal<'_, '_> { .map(|row| ProverJobFriInfo { id: row.id as u32, l1_batch_number, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), circuit_id: row.circuit_id as u32, circuit_blob_url: row.circuit_blob_url.clone(), aggregation_round, @@ -914,14 +947,17 @@ impl FriProverDal<'_, '_> { pub async fn delete_prover_jobs_fri_batch_data( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> sqlx::Result { sqlx::query!( r#" DELETE FROM prover_jobs_fri WHERE - l1_batch_number = $1; + l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(l1_batch_number.0) + i64::from(l1_batch_number.0), + chain_id.as_u64() as i32 ) .execute(self.storage.conn()) .await @@ -930,8 +966,9 @@ impl FriProverDal<'_, '_> { pub async fn delete_batch_data( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> sqlx::Result { - self.delete_prover_jobs_fri_batch_data(l1_batch_number) + self.delete_prover_jobs_fri_batch_data(l1_batch_number, chain_id) .await } @@ -952,6 +989,7 @@ impl FriProverDal<'_, '_> { pub async fn requeue_stuck_jobs_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, max_attempts: u32, ) -> Vec { { @@ -967,13 +1005,15 @@ impl FriProverDal<'_, '_> { priority = priority + 1 WHERE l1_batch_number = $1 - AND attempts >= $2 + AND chain_id = $2 + AND attempts >= $3 AND ( status = 'in_progress' OR status = 'failed' ) RETURNING id, + chain_id, status, attempts, circuit_id, @@ -981,6 +1021,7 @@ impl FriProverDal<'_, '_> { picked_by "#, i64::from(block_number.0), + chain_id.as_u64() as i32, max_attempts as i32, ) .fetch_all(self.storage.conn()) @@ -989,6 +1030,7 @@ impl FriProverDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.id as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: Some(row.circuit_id as u32), @@ -1002,14 +1044,16 @@ impl FriProverDal<'_, '_> { pub async fn prover_job_ids_for( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, circuit_id: u8, round: AggregationRound, depth: u16, - ) -> Vec { + ) -> Vec<(L2ChainId, u32)> { sqlx::query!( r#" SELECT - id + id, + chain_id FROM prover_jobs_fri WHERE @@ -1017,6 +1061,7 @@ impl FriProverDal<'_, '_> { AND circuit_id = $2 AND aggregation_round = $3 AND depth = $4 + AND chain_id = $5 AND status = 'successful' ORDER BY sequence_number 
ASC; @@ -1024,13 +1069,14 @@ impl FriProverDal<'_, '_> { i64::from(block_number.0), i16::from(circuit_id), round as i16, - i32::from(depth) + i32::from(depth), + chain_id.as_u64() as i32 ) .fetch_all(self.storage.conn()) .await .unwrap() .into_iter() - .map(|row| row.id as u32) + .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), row.id as u32)) .collect::<_>() } @@ -1084,6 +1130,7 @@ mod tests { .fri_prover_jobs_dal() .insert_prover_jobs( L1BatchNumber(1), + L2ChainId::new(1).unwrap(), mock_circuit_ids_and_urls(10000), AggregationRound::Scheduler, 1, diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/basic.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/basic.rs index 831ad4f8b513..e16e2fbee695 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/basic.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/basic.rs @@ -1,10 +1,6 @@ use std::time::Duration; -use zksync_basic_types::{ - protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, - prover_dal::{BasicWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, - L1BatchNumber, -}; +use zksync_basic_types::{protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{BasicWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, L1BatchNumber, L2ChainId}; use zksync_db_connection::{ connection::Connection, error::DalResult, @@ -23,6 +19,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { pub async fn save_witness_inputs( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, witness_inputs_blob_url: &str, protocol_version: ProtocolSemanticVersion, ) -> DalResult<()> { @@ -31,6 +28,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { INSERT INTO witness_inputs_fri ( l1_batch_number, + chain_id, witness_inputs_blob_url, protocol_version, status, @@ -39,10 +37,11 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { protocol_version_patch ) VALUES - ($1, $2, $3, 'queued', NOW(), NOW(), $4) - ON CONFLICT (l1_batch_number) DO NOTHING + ($1, $2, $3, $4, 'queued', NOW(), NOW(), $5) + ON CONFLICT (l1_batch_number, chain_id) DO NOTHING "#, i64::from(block_number.0), + chain_id.as_u64() as i32, witness_inputs_blob_url, protocol_version.minor as i32, protocol_version.patch.0 as i32, @@ -59,7 +58,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { &mut self, protocol_version: ProtocolSemanticVersion, picked_by: &str, - ) -> Option { + ) -> Option<(L2ChainId, L1BatchNumber)> { sqlx::query!( r#" UPDATE witness_inputs_fri @@ -88,6 +87,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { SKIP LOCKED ) RETURNING + witness_inputs_fri.chain_id, witness_inputs_fri.l1_batch_number "#, protocol_version.minor as i32, @@ -97,13 +97,14 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { .fetch_optional(self.storage.conn()) .await .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)) + .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32))) } pub async fn set_status_for_basic_witness_job( &mut self, status: FriWitnessJobStatus, block_number: L1BatchNumber, + chain_id: L2ChainId, ) { sqlx::query!( r#" @@ -113,10 +114,12 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { updated_at = NOW() WHERE l1_batch_number = $2 + AND chain_id = $3 AND status != 'successful' "#, status.to_string(), - i64::from(block_number.0) + i64::from(block_number.0), + chain_id.as_u64() as i32 ) .execute(self.storage.conn()) .await @@ -126,6 +129,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { pub async fn mark_witness_job_as_successful( 
&mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, time_taken: Duration, ) { sqlx::query!( @@ -137,9 +141,11 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { time_taken = $1 WHERE l1_batch_number = $2 + AND chain_id = $3 "#, duration_to_naive_time(time_taken), - i64::from(block_number.0) + i64::from(block_number.0), + chain_id.as_u64() as i32 ) .execute(self.storage.conn()) .await @@ -172,6 +178,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { ) RETURNING l1_batch_number, + chain_id, status, attempts, error, @@ -186,6 +193,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.l1_batch_number as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: None, @@ -195,9 +203,10 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { .collect() } - pub async fn protocol_version_for_l1_batch( + pub async fn protocol_version_for_l1_batch_and_chain( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId ) -> ProtocolSemanticVersion { let result = sqlx::query!( r#" @@ -208,8 +217,10 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { witness_inputs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(l1_batch_number.0) + i64::from(l1_batch_number.0), + chain_id.as_u64() as i32 ) .fetch_one(self.storage.conn()) .await @@ -224,6 +235,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { pub async fn get_basic_witness_generator_job_for_batch( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> Option { sqlx::query!( r#" @@ -233,14 +245,17 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { witness_inputs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(l1_batch_number.0) + i64::from(l1_batch_number.0), + chain_id.as_u64() as i32 ) .fetch_optional(self.storage.conn()) .await .unwrap() .map(|row| BasicWitnessGeneratorJobInfo { l1_batch_number, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), witness_inputs_blob_url: row.witness_inputs_blob_url, attempts: row.attempts as u32, status: row.status.parse::().unwrap(), @@ -257,6 +272,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { pub async fn requeue_stuck_witness_inputs_jobs_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, max_attempts: u32, ) -> Vec { sqlx::query!( @@ -269,19 +285,22 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { priority = priority + 1 WHERE l1_batch_number = $1 - AND attempts >= $2 + AND chain_id = $2 + AND attempts >= $3 AND ( status = 'in_progress' OR status = 'failed' ) RETURNING l1_batch_number, + chain_id, status, attempts, error, picked_by "#, i64::from(block_number.0), + chain_id.as_u64() as i32, max_attempts as i64 ) .fetch_all(self.storage.conn()) @@ -290,6 +309,7 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.l1_batch_number as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: None, diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/leaf.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/leaf.rs index 8f3e9cb372c1..ed2838aa8dee 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/leaf.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/leaf.rs @@ -1,13 +1,9 @@ use std::{str::FromStr, time::Duration}; +use std::io::Chain; -use zksync_basic_types::{ - basic_fri_types::AggregationRound, - protocol_version::ProtocolSemanticVersion, - prover_dal::{ - 
LeafAggregationJobMetadata, LeafWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, - }, - L1BatchNumber, -}; +use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::{ + LeafAggregationJobMetadata, LeafWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, +}, L1BatchNumber, L2ChainId}; use zksync_db_connection::{ connection::Connection, utils::{duration_to_naive_time, pg_interval_from_duration}, @@ -21,7 +17,7 @@ pub struct FriLeafWitnessGeneratorDal<'a, 'c> { } impl FriLeafWitnessGeneratorDal<'_, '_> { - pub async fn mark_leaf_aggregation_as_successful(&mut self, id: u32, time_taken: Duration) { + pub async fn mark_leaf_aggregation_as_successful(&mut self, id: u32, chain_id: L2ChainId, time_taken: Duration) { sqlx::query!( r#" UPDATE leaf_aggregation_witness_jobs_fri @@ -31,9 +27,11 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { time_taken = $1 WHERE id = $2 + AND chain_id = $3 "#, duration_to_naive_time(time_taken), - i64::from(id) + i64::from(id), + chain_id.as_u64() as i32 ) .execute(self.storage.conn()) .await @@ -84,11 +82,13 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { .unwrap()?; let block_number = L1BatchNumber(row.l1_batch_number as u32); + let chain_id = L2ChainId::new(row.chain_id as u64).unwrap(); let proof_job_ids = self .storage .fri_prover_jobs_dal() .prover_job_ids_for( block_number, + chain_id, row.circuit_id as u8, AggregationRound::BasicCircuits, 0, @@ -96,28 +96,31 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { .await; Some(LeafAggregationJobMetadata { id: row.id as u32, + chain_id, block_number, circuit_id: row.circuit_id as u8, - prover_job_ids_for_proofs: proof_job_ids, + prover_job_ids_for_proofs: proof_job_ids.into_iter().map(|ids| ids.1 as u32).collect(), }) } - pub async fn move_leaf_aggregation_jobs_from_waiting_to_queued(&mut self) -> Vec<(i64, u8)> { + pub async fn move_leaf_aggregation_jobs_from_waiting_to_queued(&mut self) -> Vec<(i64, L2ChainId, u8)> { sqlx::query!( r#" UPDATE leaf_aggregation_witness_jobs_fri SET status = 'queued' WHERE - (l1_batch_number, circuit_id) IN ( + (l1_batch_number, chain_id, circuit_id) IN ( SELECT prover_jobs_fri.l1_batch_number, + prover_jobs_fri.chain_id, prover_jobs_fri.circuit_id FROM prover_jobs_fri JOIN leaf_aggregation_witness_jobs_fri lawj ON prover_jobs_fri.l1_batch_number = lawj.l1_batch_number + AND prover_jobs_fri.chain_id = lawj.chain_id AND prover_jobs_fri.circuit_id = lawj.circuit_id WHERE lawj.status = 'waiting_for_proofs' @@ -125,6 +128,7 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { AND prover_jobs_fri.aggregation_round = 0 GROUP BY prover_jobs_fri.l1_batch_number, + prover_jobs_fri.chain_id, prover_jobs_fri.circuit_id, lawj.number_of_basic_circuits HAVING @@ -132,6 +136,7 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { ) RETURNING l1_batch_number, + chain_id, circuit_id; "#, ) @@ -139,7 +144,7 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| (row.l1_batch_number, row.circuit_id as u8)) + .map(|row| (row.l1_batch_number, L2ChainId::new(row.chain_id as u64).unwrap(), row.circuit_id as u8)) .collect() } @@ -169,6 +174,7 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { ) RETURNING id, + chain_id, status, attempts, circuit_id, @@ -184,6 +190,7 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.id as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: Some(row.circuit_id as u32), @@ -196,6 +203,7 @@ impl 
FriLeafWitnessGeneratorDal<'_, '_> { pub async fn get_leaf_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> Vec { sqlx::query!( r#" @@ -205,8 +213,10 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { leaf_aggregation_witness_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(l1_batch_number.0) + i64::from(l1_batch_number.0), + chain_id.as_u64() as i32, ) .fetch_all(self.storage.conn()) .await @@ -215,6 +225,7 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { .map(|row| LeafWitnessGeneratorJobInfo { id: row.id as u32, l1_batch_number, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), circuit_id: row.circuit_id as u32, closed_form_inputs_blob_url: row.closed_form_inputs_blob_url.clone(), attempts: row.attempts as u32, @@ -234,6 +245,7 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { pub async fn insert_leaf_aggregation_jobs( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, protocol_version: ProtocolSemanticVersion, circuit_id: u8, closed_form_inputs_url: String, @@ -244,6 +256,7 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { INSERT INTO leaf_aggregation_witness_jobs_fri ( l1_batch_number, + chain_id, circuit_id, closed_form_inputs_blob_url, number_of_basic_circuits, @@ -254,13 +267,14 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { protocol_version_patch ) VALUES - ($1, $2, $3, $4, $5, 'waiting_for_proofs', NOW(), NOW(), $6) - ON CONFLICT (l1_batch_number, circuit_id) DO + ($1, $2, $3, $4, $5, $6, 'waiting_for_proofs', NOW(), NOW(), $7) + ON CONFLICT (l1_batch_number, chain_id, circuit_id) DO UPDATE SET updated_at = NOW() "#, i64::from(block_number.0), + chain_id.as_u64() as i32, i16::from(circuit_id), closed_form_inputs_url, number_of_basic_circuits as i32, diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs index f4130f8ee529..c323a0ee8b32 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs @@ -7,14 +7,10 @@ pub mod recursion_tip; pub mod scheduler; use std::collections::HashMap; +use std::io::Chain; use sqlx::{types::chrono::NaiveDateTime, Row}; -use zksync_basic_types::{ - basic_fri_types::AggregationRound, - protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, - prover_dal::{JobCountStatistics, ProofGenerationTime, StuckJobs}, - L1BatchNumber, -}; +use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{JobCountStatistics, ProofGenerationTime, StuckJobs}, L1BatchNumber, L2ChainId}; use zksync_db_connection::{connection::Connection, utils::naive_time_from_pg_interval}; use crate::Prover; @@ -83,6 +79,7 @@ impl FriWitnessGeneratorDal<'_, '_> { &mut self, error: &str, job_id: u32, + chain_id: L2ChainId, aggregation_round: AggregationRound, ) { let table = match aggregation_round { @@ -110,8 +107,10 @@ impl FriWitnessGeneratorDal<'_, '_> { updated_at = NOW() WHERE {job_id_column} = {job_id} + AND chain_id = {} AND status != 'successful' "#, + chain_id.as_u64() ); sqlx::query(&query) @@ -175,6 +174,7 @@ impl FriWitnessGeneratorDal<'_, '_> { pub async fn delete_witness_generator_data_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, aggregation_round: AggregationRound, ) -> sqlx::Result { sqlx::query( @@ -184,9 +184,11 @@ impl FriWitnessGeneratorDal<'_, '_> { {table} WHERE 
l1_batch_number = {l1_batch_number} + AND chain_id = {chain_id} "#, table = Self::input_table_name_for(aggregation_round), l1_batch_number = i64::from(block_number.0), + chain_id = chain_id.as_u64(), ) .as_str(), ) @@ -197,22 +199,26 @@ impl FriWitnessGeneratorDal<'_, '_> { pub async fn delete_batch_data( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, ) -> sqlx::Result { - self.delete_witness_generator_data_for_batch(block_number, AggregationRound::BasicCircuits) + self.delete_witness_generator_data_for_batch(block_number, chain_id, AggregationRound::BasicCircuits) .await?; self.delete_witness_generator_data_for_batch( block_number, + chain_id, AggregationRound::LeafAggregation, ) .await?; self.delete_witness_generator_data_for_batch( block_number, + chain_id, AggregationRound::NodeAggregation, ) .await?; + // TODO: THIS LOOKS SUS self.delete_witness_generator_data(AggregationRound::RecursionTip) .await?; - self.delete_witness_generator_data_for_batch(block_number, AggregationRound::Scheduler) + self.delete_witness_generator_data_for_batch(block_number, chain_id, AggregationRound::Scheduler) .await } @@ -250,11 +256,13 @@ impl FriWitnessGeneratorDal<'_, '_> { pub async fn requeue_stuck_leaf_aggregation_jobs_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, max_attempts: u32, ) -> Vec { self.requeue_stuck_jobs_for_batch_in_aggregation_round( AggregationRound::LeafAggregation, block_number, + chain_id, max_attempts, ) .await @@ -263,11 +271,13 @@ impl FriWitnessGeneratorDal<'_, '_> { pub async fn requeue_stuck_node_aggregation_jobs_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, max_attempts: u32, ) -> Vec { self.requeue_stuck_jobs_for_batch_in_aggregation_round( AggregationRound::NodeAggregation, block_number, + chain_id, max_attempts, ) .await @@ -277,6 +287,7 @@ impl FriWitnessGeneratorDal<'_, '_> { &mut self, aggregation_round: AggregationRound, block_number: L1BatchNumber, + chain_id: L2ChainId, max_attempts: u32, ) -> Vec { let table_name = Self::input_table_name_for(aggregation_round); @@ -290,10 +301,12 @@ impl FriWitnessGeneratorDal<'_, '_> { processing_started_at = NOW() WHERE l1_batch_number = {} + AND chain_id = {} AND attempts >= {} AND (status = 'in_progress' OR status = 'failed') RETURNING {}, + chain_id, status, attempts, circuit_id, @@ -302,6 +315,7 @@ impl FriWitnessGeneratorDal<'_, '_> { "#, table_name, i64::from(block_number.0), + chain_id.as_u64(), max_attempts, job_id_table_name ); @@ -312,6 +326,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.get::(job_id_table_name) as u64, + chain_id: L2ChainId::new(row.get::("chain_id") as u64).unwrap(), status: row.get("status"), attempts: row.get::("attempts") as u64, circuit_id: Some(row.get::("circuit_id") as u32), @@ -338,11 +353,12 @@ impl FriWitnessGeneratorDal<'_, '_> { r#" SELECT comp.l1_batch_number, + comp.chain_id, (comp.updated_at - wit.created_at) AS time_taken, wit.created_at FROM proof_compression_jobs_fri AS comp - JOIN witness_inputs_fri AS wit ON comp.l1_batch_number = wit.l1_batch_number + JOIN witness_inputs_fri AS wit ON comp.l1_batch_number = wit.l1_batch_number AND comp.chain_id = wit.chain_id WHERE wit.created_at > $1 ORDER BY @@ -355,6 +371,7 @@ impl FriWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| ProofGenerationTime { l1_batch_number: L1BatchNumber(row.l1_batch_number as u32), + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), time_taken: naive_time_from_pg_interval( 
row.time_taken.expect("time_taken must be present"), ), diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/node.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/node.rs index 311b9476b253..85cb2834eae8 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/node.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/node.rs @@ -1,13 +1,8 @@ use std::{str::FromStr, time::Duration}; -use zksync_basic_types::{ - basic_fri_types::AggregationRound, - protocol_version::ProtocolSemanticVersion, - prover_dal::{ - NodeAggregationJobMetadata, NodeWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, - }, - L1BatchNumber, -}; +use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::{ + NodeAggregationJobMetadata, NodeWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, +}, L1BatchNumber, L2ChainId}; use zksync_db_connection::{ connection::Connection, utils::{duration_to_naive_time, pg_interval_from_duration}, @@ -24,6 +19,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { pub async fn update_node_aggregation_jobs_url( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, circuit_id: u8, number_of_dependent_jobs: usize, depth: u16, @@ -40,12 +36,14 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { l1_batch_number = $2 AND circuit_id = $3 AND depth = $4 + AND chain_id = $6 "#, url, i64::from(block_number.0), i16::from(circuit_id), i32::from(depth), number_of_dependent_jobs as i32, + chain_id.as_u64() as i32, ) .execute(self.storage.conn()) .await @@ -104,21 +102,23 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { }; let block_number = L1BatchNumber(row.l1_batch_number as u32); + let chain_id = L2ChainId::new(row.chain_id as u64).unwrap(); let prover_job_ids = self .storage .fri_prover_jobs_dal() - .prover_job_ids_for(block_number, row.circuit_id as u8, round, depth) + .prover_job_ids_for(block_number, chain_id, row.circuit_id as u8, round, depth) .await; Some(NodeAggregationJobMetadata { id: row.id as u32, + chain_id, block_number, circuit_id: row.circuit_id as u8, depth, - prover_job_ids_for_proofs: prover_job_ids, + prover_job_ids_for_proofs: prover_job_ids.into_iter().map(|ids| ids.1).collect(), }) } - pub async fn mark_node_aggregation_as_successful(&mut self, id: u32, time_taken: Duration) { + pub async fn mark_node_aggregation_as_successful(&mut self, id: u32, chain_id: L2ChainId, time_taken: Duration) { sqlx::query!( r#" UPDATE node_aggregation_witness_jobs_fri @@ -128,9 +128,11 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { time_taken = $1 WHERE id = $2 + AND chain_id = $3 "#, duration_to_naive_time(time_taken), - i64::from(id) + i64::from(id), + chain_id.as_u64() as i32, ) .execute(self.storage.conn()) .await @@ -140,6 +142,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { pub async fn insert_node_aggregation_jobs( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, circuit_id: u8, number_of_dependent_jobs: Option, depth: u16, @@ -151,6 +154,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { INSERT INTO node_aggregation_witness_jobs_fri ( l1_batch_number, + chain_id, circuit_id, depth, aggregations_url, @@ -162,13 +166,14 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { protocol_version_patch ) VALUES - ($1, $2, $3, $4, $5, $6, 'waiting_for_proofs', NOW(), NOW(), $7) - ON CONFLICT (l1_batch_number, circuit_id, depth) DO + ($1, $2, $3, $4, $5, $6, $7, 'waiting_for_proofs', NOW(), NOW(), $8) + ON CONFLICT (l1_batch_number, chain_id, circuit_id, depth) DO UPDATE SET updated_at = 
NOW() "#, i64::from(block_number.0), + chain_id.as_u64() as i32, i16::from(circuit_id), i32::from(depth), aggregations_url, @@ -181,16 +186,17 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { .unwrap(); } - pub async fn move_depth_zero_node_aggregation_jobs(&mut self) -> Vec<(i64, u8, u16)> { + pub async fn move_depth_zero_node_aggregation_jobs(&mut self) -> Vec<(i64, u64, u8, u16)> { sqlx::query!( r#" UPDATE node_aggregation_witness_jobs_fri SET status = 'queued' WHERE - (l1_batch_number, circuit_id, depth) IN ( + (l1_batch_number, chain_id, circuit_id, depth) IN ( SELECT prover_jobs_fri.l1_batch_number, + prover_jobs_fri.chain_id, prover_jobs_fri.circuit_id, prover_jobs_fri.depth FROM @@ -198,6 +204,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { JOIN node_aggregation_witness_jobs_fri nawj ON prover_jobs_fri.l1_batch_number = nawj.l1_batch_number + AND prover_jobs_fri.chain_id = nawj.chain_id AND prover_jobs_fri.circuit_id = nawj.circuit_id AND prover_jobs_fri.depth = nawj.depth WHERE @@ -207,6 +214,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { AND prover_jobs_fri.depth = 0 GROUP BY prover_jobs_fri.l1_batch_number, + prover_jobs_fri.chain_id, prover_jobs_fri.circuit_id, prover_jobs_fri.depth, nawj.number_of_dependent_jobs @@ -215,6 +223,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { ) RETURNING l1_batch_number, + chain_id, circuit_id, depth; "#, @@ -223,7 +232,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| (row.l1_batch_number, row.circuit_id as u8, row.depth as u16)) + .map(|row| (row.l1_batch_number, row.chain_id as u64, row.circuit_id as u8, row.depth as u16)) .collect() } @@ -298,6 +307,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { ) RETURNING id, + chain_id, status, attempts, circuit_id, @@ -313,6 +323,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.id as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: Some(row.circuit_id as u32), @@ -325,6 +336,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { pub async fn get_node_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> Vec { sqlx::query!( r#" @@ -334,8 +346,10 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { node_aggregation_witness_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(l1_batch_number.0) + i64::from(l1_batch_number.0), + chain_id.as_u64() as i32, ) .fetch_all(self.storage.conn()) .await @@ -344,6 +358,7 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { .map(|row| NodeWitnessGeneratorJobInfo { id: row.id as u32, l1_batch_number, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), circuit_id: row.circuit_id as u32, depth: row.depth as u32, status: WitnessJobStatus::from_str(&row.status).unwrap(), diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/recursion_tip.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/recursion_tip.rs index 6823b93a7e72..3b14dd537676 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/recursion_tip.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/recursion_tip.rs @@ -1,11 +1,6 @@ use std::{str::FromStr, time::Duration}; -use zksync_basic_types::{ - basic_fri_types::AggregationRound, - protocol_version::ProtocolSemanticVersion, - prover_dal::{RecursionTipWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, - L1BatchNumber, -}; +use zksync_basic_types::{basic_fri_types::AggregationRound, 
protocol_version::ProtocolSemanticVersion, prover_dal::{RecursionTipWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, L1BatchNumber, L2ChainId}; use zksync_db_connection::{ connection::Connection, utils::{duration_to_naive_time, pg_interval_from_duration}, @@ -19,16 +14,17 @@ pub struct FriRecursionTipWitnessGeneratorDal<'a, 'c> { } impl FriRecursionTipWitnessGeneratorDal<'_, '_> { - pub async fn move_recursion_tip_jobs_from_waiting_to_queued(&mut self) -> Vec { + pub async fn move_recursion_tip_jobs_from_waiting_to_queued(&mut self) -> Vec<(u64, u64)> { sqlx::query!( r#" UPDATE recursion_tip_witness_jobs_fri SET status = 'queued' WHERE - l1_batch_number IN ( + (l1_batch_number, chain_id) IN ( SELECT - prover_jobs_fri.l1_batch_number + prover_jobs_fri.l1_batch_number, + prover_jobs_fri.chain_id FROM prover_jobs_fri JOIN @@ -41,12 +37,14 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { AND prover_jobs_fri.is_node_final_proof = TRUE GROUP BY prover_jobs_fri.l1_batch_number, + prover_jobs_fri.chain_id, rtwj.number_of_final_node_jobs HAVING COUNT(*) = rtwj.number_of_final_node_jobs ) RETURNING - l1_batch_number; + l1_batch_number, + chain_id; "#, AggregationRound::NodeAggregation as i64, ) @@ -54,7 +52,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| (row.l1_batch_number as u64)) + .map(|row| (row.chain_id as u64, row.l1_batch_number as u64)) .collect() } @@ -84,6 +82,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { ) RETURNING l1_batch_number, + chain_id, status, attempts, error, @@ -98,6 +97,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.l1_batch_number as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: None, @@ -111,7 +111,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { &mut self, protocol_version: ProtocolSemanticVersion, picked_by: &str, - ) -> Option<(L1BatchNumber, i32)> { + ) -> Option<(L2ChainId, L1BatchNumber, i32)> { sqlx::query!( r#" UPDATE recursion_tip_witness_jobs_fri @@ -141,6 +141,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { ) RETURNING recursion_tip_witness_jobs_fri.l1_batch_number, + recursion_tip_witness_jobs_fri.chain_id, recursion_tip_witness_jobs_fri.number_of_final_node_jobs "#, protocol_version.minor as i32, @@ -152,6 +153,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { .unwrap() .map(|row| { ( + L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32), row.number_of_final_node_jobs, ) @@ -161,6 +163,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { pub async fn mark_recursion_tip_job_as_successful( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, time_taken: Duration, ) { sqlx::query!( @@ -172,9 +175,11 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { time_taken = $1 WHERE l1_batch_number = $2 + AND chain_id = $3 "#, duration_to_naive_time(time_taken), - l1_batch_number.0 as i64 + l1_batch_number.0 as i64, + chain_id.as_u64() as i32 ) .execute(self.storage.conn()) .await @@ -184,6 +189,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { pub async fn get_recursion_tip_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> Option { sqlx::query!( r#" @@ -193,14 +199,17 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { recursion_tip_witness_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(l1_batch_number.0) + 
i64::from(l1_batch_number.0), + chain_id.as_u64() as i32 ) .fetch_optional(self.storage.conn()) .await .unwrap() .map(|row| RecursionTipWitnessGeneratorJobInfo { l1_batch_number, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: WitnessJobStatus::from_str(&row.status).unwrap(), attempts: row.attempts as u32, processing_started_at: row.processing_started_at, @@ -217,6 +226,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { pub async fn requeue_stuck_recursion_tip_jobs_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, max_attempts: u32, ) -> Vec { sqlx::query!( @@ -229,19 +239,22 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { priority = priority + 1 WHERE l1_batch_number = $1 - AND attempts >= $2 + AND chain_id = $2 + AND attempts >= $3 AND ( status = 'in_progress' OR status = 'failed' ) RETURNING l1_batch_number, + chain_id, status, attempts, error, picked_by "#, i64::from(block_number.0), + chain_id.as_u64() as i32, max_attempts as i64 ) .fetch_all(self.storage.conn()) @@ -250,6 +263,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.l1_batch_number as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: None, @@ -262,6 +276,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { pub async fn insert_recursion_tip_aggregation_jobs( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, closed_form_inputs_and_urls: &[(u8, String, usize)], protocol_version: ProtocolSemanticVersion, ) { @@ -270,6 +285,7 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { INSERT INTO recursion_tip_witness_jobs_fri ( l1_batch_number, + chain_id, status, number_of_final_node_jobs, protocol_version, @@ -278,13 +294,14 @@ impl FriRecursionTipWitnessGeneratorDal<'_, '_> { protocol_version_patch ) VALUES - ($1, 'waiting_for_proofs', $2, $3, NOW(), NOW(), $4) + ($1, $2, 'waiting_for_proofs', $3, $4, NOW(), NOW(), $5) ON CONFLICT (l1_batch_number) DO UPDATE SET updated_at = NOW() "#, block_number.0 as i64, + chain_id.as_u64() as i32, closed_form_inputs_and_urls.len() as i32, protocol_version.minor as i32, protocol_version.patch.0 as i32, diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/scheduler.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/scheduler.rs index 0c0382a08f3e..1c74aa398eda 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/scheduler.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/scheduler.rs @@ -1,11 +1,6 @@ use std::{str::FromStr, time::Duration}; -use zksync_basic_types::{ - basic_fri_types::AggregationRound, - protocol_version::ProtocolSemanticVersion, - prover_dal::{SchedulerWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, - L1BatchNumber, -}; +use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::{SchedulerWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, L1BatchNumber, L2ChainId}; use zksync_db_connection::{ connection::Connection, utils::{duration_to_naive_time, pg_interval_from_duration}, @@ -19,28 +14,31 @@ pub struct FriSchedulerWitnessGeneratorDal<'a, 'c> { } impl FriSchedulerWitnessGeneratorDal<'_, '_> { - pub async fn move_scheduler_jobs_from_waiting_to_queued(&mut self) -> Vec { + pub async fn move_scheduler_jobs_from_waiting_to_queued(&mut self) -> Vec<(u64, u64)> { sqlx::query!( r#" UPDATE scheduler_witness_jobs_fri SET status = 'queued' WHERE - l1_batch_number 
IN ( + (l1_batch_number, chain_id) IN ( SELECT - prover_jobs_fri.l1_batch_number + prover_jobs_fri.l1_batch_number, + prover_jobs_fri.chain_id FROM prover_jobs_fri JOIN scheduler_witness_jobs_fri swj ON prover_jobs_fri.l1_batch_number = swj.l1_batch_number + AND prover_jobs_fri.chain_id = swj.chain_id WHERE swj.status = 'waiting_for_proofs' AND prover_jobs_fri.status = 'successful' AND prover_jobs_fri.aggregation_round = $1 ) RETURNING - l1_batch_number; + l1_batch_number, + chain_id; "#, AggregationRound::RecursionTip as i64, ) @@ -48,11 +46,11 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| (row.l1_batch_number as u64)) + .map(|row| (row.chain_id as u64, row.l1_batch_number as u64)) .collect() } - pub async fn mark_scheduler_jobs_as_queued(&mut self, l1_batch_number: i64) { + pub async fn mark_scheduler_jobs_as_queued(&mut self, l1_batch_number: i64, chain_id: L2ChainId) { sqlx::query!( r#" UPDATE scheduler_witness_jobs_fri @@ -60,10 +58,12 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { status = 'queued' WHERE l1_batch_number = $1 + AND chain_id = $2 AND status != 'successful' AND status != 'in_progress' "#, - l1_batch_number + l1_batch_number, + chain_id.as_u64() as i32, ) .execute(self.storage.conn()) .await @@ -96,6 +96,7 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { ) RETURNING l1_batch_number, + chain_id, status, attempts, error, @@ -110,6 +111,7 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.l1_batch_number as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: None, @@ -123,7 +125,7 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { &mut self, protocol_version: ProtocolSemanticVersion, picked_by: &str, - ) -> Option { + ) -> Option<(L2ChainId, L1BatchNumber)> { sqlx::query!( r#" UPDATE scheduler_witness_jobs_fri @@ -134,9 +136,10 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { processing_started_at = NOW(), picked_by = $2 WHERE - l1_batch_number = ( + (l1_batch_number, chain_id) = ( SELECT - l1_batch_number + l1_batch_number, + chain_id FROM scheduler_witness_jobs_fri WHERE @@ -161,12 +164,13 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { .fetch_optional(self.storage.conn()) .await .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)) + .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32))) } pub async fn mark_scheduler_job_as_successful( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, time_taken: Duration, ) { sqlx::query!( @@ -178,9 +182,11 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { time_taken = $1 WHERE l1_batch_number = $2 + AND chain_id = $3 "#, duration_to_naive_time(time_taken), - i64::from(block_number.0) + i64::from(block_number.0), + chain_id.as_u64() as i32, ) .execute(self.storage.conn()) .await @@ -190,6 +196,7 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { pub async fn get_scheduler_witness_generator_jobs_for_batch( &mut self, l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, ) -> Option { sqlx::query!( r#" @@ -199,14 +206,17 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { scheduler_witness_jobs_fri WHERE l1_batch_number = $1 + AND chain_id = $2 "#, - i64::from(l1_batch_number.0) + i64::from(l1_batch_number.0), + chain_id.as_u64() as i32, ) .fetch_optional(self.storage.conn()) .await .unwrap() .map(|row| SchedulerWitnessGeneratorJobInfo { l1_batch_number, + chain_id: L2ChainId::new(row.chain_id as 
u64).unwrap(), scheduler_partial_input_blob_url: row.scheduler_partial_input_blob_url.clone(), status: WitnessJobStatus::from_str(&row.status).unwrap(), processing_started_at: row.processing_started_at, @@ -223,6 +233,7 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { pub async fn requeue_stuck_scheduler_jobs_for_batch( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, max_attempts: u32, ) -> Vec { sqlx::query!( @@ -235,19 +246,22 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { priority = priority + 1 WHERE l1_batch_number = $1 - AND attempts >= $2 + AND chain_id = $2 + AND attempts >= $3 AND ( status = 'in_progress' OR status = 'failed' ) RETURNING l1_batch_number, + chain_id, status, attempts, error, picked_by "#, i64::from(block_number.0), + chain_id.as_u64() as i32, max_attempts as i64 ) .fetch_all(self.storage.conn()) @@ -256,6 +270,7 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { .into_iter() .map(|row| StuckJobs { id: row.l1_batch_number as u64, + chain_id: L2ChainId::new(row.chain_id as u64).unwrap(), status: row.status, attempts: row.attempts as u64, circuit_id: None, @@ -268,6 +283,7 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { pub async fn insert_scheduler_aggregation_jobs( &mut self, block_number: L1BatchNumber, + chain_id: L2ChainId, scheduler_partial_input_blob_url: &str, protocol_version: ProtocolSemanticVersion, ) { @@ -276,6 +292,7 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { INSERT INTO scheduler_witness_jobs_fri ( l1_batch_number, + chain_id, scheduler_partial_input_blob_url, protocol_version, status, @@ -284,13 +301,14 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { protocol_version_patch ) VALUES - ($1, $2, $3, 'waiting_for_proofs', NOW(), NOW(), $4) + ($1, $2, $3, $4, 'waiting_for_proofs', NOW(), NOW(), $5) ON CONFLICT (l1_batch_number) DO UPDATE SET updated_at = NOW() "#, i64::from(block_number.0), + chain_id.as_u64() as i32, scheduler_partial_input_blob_url, protocol_version.minor as i32, protocol_version.patch.0 as i32, diff --git a/prover/crates/lib/prover_fri_types/src/lib.rs b/prover/crates/lib/prover_fri_types/src/lib.rs index 5b7a54a31052..db53ada7a86d 100644 --- a/prover/crates/lib/prover_fri_types/src/lib.rs +++ b/prover/crates/lib/prover_fri_types/src/lib.rs @@ -16,11 +16,7 @@ use circuit_definitions::{ }; use keys::RamPermutationQueueWitnessKey; use zksync_object_store::{serialize_using_bincode, Bucket, StoredObject}; -use zksync_types::{ - basic_fri_types::AggregationRound, - protocol_version::{ProtocolSemanticVersion, VersionPatch}, - L1BatchNumber, ProtocolVersionId, -}; +use zksync_types::{basic_fri_types::AggregationRound, protocol_version::{ProtocolSemanticVersion, VersionPatch}, L1BatchNumber, ProtocolVersionId, L2ChainId}; use crate::keys::FriCircuitKey; @@ -56,13 +52,14 @@ impl StoredObject for CircuitWrapper { fn encode_key(key: Self::Key<'_>) -> String { let FriCircuitKey { + chain_id, block_number, sequence_number, circuit_id, aggregation_round, depth, } = key; - format!("{block_number}_{sequence_number}_{circuit_id}_{aggregation_round:?}_{depth}.bin") + format!("{}_{block_number}_{sequence_number}_{circuit_id}_{aggregation_round:?}_{depth}.bin", chain_id.as_u64()) } serialize_using_bincode!(); @@ -76,10 +73,10 @@ pub enum FriProofWrapper { impl StoredObject for FriProofWrapper { const BUCKET: Bucket = Bucket::ProofsFri; - type Key<'a> = u32; + type Key<'a> = (L2ChainId, u32); fn encode_key(key: Self::Key<'_>) -> String { - format!("proof_{key}.bin") + format!("proof_{}_{}.bin", key.0.as_u64(), key.1) } 
serialize_using_bincode!(); @@ -126,13 +123,15 @@ impl WitnessVectorArtifactsTemp { /// Data structure containing the proof generated by the circuit prover. #[derive(Debug)] pub struct ProverArtifacts { + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub proof_wrapper: FriProofWrapper, } impl ProverArtifacts { - pub fn new(block_number: L1BatchNumber, proof_wrapper: FriProofWrapper) -> Self { + pub fn new(chain_id: L2ChainId, block_number: L1BatchNumber, proof_wrapper: FriProofWrapper) -> Self { Self { + chain_id, block_number, proof_wrapper, } @@ -141,6 +140,7 @@ impl ProverArtifacts { #[derive(Clone, serde::Serialize, serde::Deserialize)] pub struct ProverJob { + pub chain_id: L2ChainId, pub block_number: L1BatchNumber, pub job_id: u32, pub circuit_wrapper: CircuitWrapper, @@ -149,12 +149,14 @@ pub struct ProverJob { impl ProverJob { pub fn new( + chain_id: L2ChainId, block_number: L1BatchNumber, job_id: u32, circuit_wrapper: CircuitWrapper, setup_data_key: ProverServiceDataKey, ) -> Self { Self { + chain_id, block_number, job_id, circuit_wrapper, @@ -332,10 +334,10 @@ pub struct AuxOutputWitnessWrapper(pub BlockAuxilaryOutputWitness = L1BatchNumber; + type Key<'a> = (L2ChainId, L1BatchNumber); fn encode_key(key: Self::Key<'_>) -> String { - format!("aux_output_witness_{key}.bin") + format!("aux_output_witness_{}_{}.bin", key.0.as_u64(), key.1) } serialize_using_bincode!(); @@ -356,12 +358,14 @@ impl StoredObject for RamPermutationQueueWitness { fn encode_key(key: Self::Key<'_>) -> String { let RamPermutationQueueWitnessKey { + chain_id, block_number, circuit_subsequence_number, is_sorted, } = key; format!( - "queue_witness_{block_number}_{circuit_subsequence_number}_{}.bin", + "queue_witness_{}_{block_number}_{circuit_subsequence_number}_{}.bin", + chain_id.as_u64(), is_sorted as u64 ) } From 7c5181eb53e3b374a198c26130151e68f21dce3a Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Fri, 21 Feb 2025 15:17:56 +0200 Subject: [PATCH 4/5] fmt --- .../src/rounds/basic_circuits/artifacts.rs | 8 +++-- .../src/rounds/basic_circuits/mod.rs | 11 +++++-- .../src/rounds/leaf_aggregation/artifacts.rs | 4 ++- .../src/rounds/leaf_aggregation/mod.rs | 5 ++- .../bin/witness_generator/src/rounds/mod.rs | 3 +- .../crates/bin/witness_generator/src/utils.rs | 14 ++++++-- .../src/fri_proof_compressor_dal.rs | 24 +++++++++++--- .../lib/prover_dal/src/fri_prover_dal.rs | 32 +++++++++++++----- .../src/fri_witness_generator_dal/basic.rs | 15 +++++++-- .../src/fri_witness_generator_dal/leaf.rs | 33 ++++++++++++++----- .../src/fri_witness_generator_dal/mod.rs | 32 +++++++++++++----- .../src/fri_witness_generator_dal/node.rs | 27 ++++++++++++--- .../recursion_tip.rs | 7 +++- .../fri_witness_generator_dal/scheduler.rs | 25 +++++++++++--- prover/crates/lib/prover_fri_types/src/lib.rs | 17 ++++++++-- 15 files changed, 198 insertions(+), 59 deletions(-) diff --git a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/artifacts.rs index 30cc68fd269e..136f08a86b5a 100644 --- a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/artifacts.rs @@ -1,5 +1,4 @@ -use std::{sync::Arc, time::Instant}; -use std::iter::chain; +use std::{iter::chain, sync::Arc, time::Instant}; use async_trait::async_trait; use zksync_object_store::ObjectStore; @@ -56,7 +55,10 @@ impl ArtifactsManager for BasicCircuits { } 
object_store - .put((chain_id, L1BatchNumber(job_id)), &aux_output_witness_wrapper) + .put( + (chain_id, L1BatchNumber(job_id)), + &aux_output_witness_wrapper, + ) .await .unwrap(); let wrapper = SchedulerPartialInputWrapper(artifacts.scheduler_witness); diff --git a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs index b4b87821d688..5c2715ae313e 100644 --- a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs @@ -13,7 +13,10 @@ use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::get_current_pod_name; use zksync_prover_interface::inputs::WitnessInputData; use zksync_prover_keystore::keystore::Keystore; -use zksync_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber, L2ChainId}; +use zksync_types::{ + basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber, + L2ChainId, +}; use crate::{ artifacts::ArtifactsManager, @@ -107,7 +110,11 @@ impl JobManager for BasicCircuits { object_store: &dyn ObjectStore, _keystore: Keystore, ) -> anyhow::Result { - tracing::info!("Processing FRI basic witness-gen for chain {} block {}", metadata.0.as_u64(), metadata.1); + tracing::info!( + "Processing FRI basic witness-gen for chain {} block {}", + metadata.0.as_u64(), + metadata.1 + ); let started_at = Instant::now(); let job = Self::get_artifacts(&metadata, object_store).await?; diff --git a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs index 3412224d86fd..fd8bab7e68a7 100644 --- a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/artifacts.rs @@ -5,7 +5,9 @@ use zksync_object_store::ObjectStore; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::keys::{AggregationsKey, ClosedFormInputKey}; use zksync_prover_fri_utils::get_recursive_layer_circuit_id_for_base_layer; -use zksync_types::{basic_fri_types::AggregationRound, L2ChainId, prover_dal::LeafAggregationJobMetadata}; +use zksync_types::{ + basic_fri_types::AggregationRound, prover_dal::LeafAggregationJobMetadata, L2ChainId, +}; use crate::{ artifacts::{AggregationBlobUrls, ArtifactsManager}, diff --git a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs index 9c2f7b50d195..f2f7bc817451 100644 --- a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs @@ -24,7 +24,10 @@ use zksync_prover_fri_types::{ get_current_pod_name, FriProofWrapper, }; use zksync_prover_keystore::keystore::Keystore; -use zksync_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::LeafAggregationJobMetadata, L1BatchNumber, L2ChainId}; +use zksync_types::{ + basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, + prover_dal::LeafAggregationJobMetadata, L1BatchNumber, L2ChainId, +}; use crate::{ artifacts::ArtifactsManager, diff --git a/prover/crates/bin/witness_generator/src/rounds/mod.rs b/prover/crates/bin/witness_generator/src/rounds/mod.rs index 502c0ce04fd2..b15d1318df4a 100644 --- 
a/prover/crates/bin/witness_generator/src/rounds/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/mod.rs @@ -23,8 +23,7 @@ pub use leaf_aggregation::LeafAggregation; pub use node_aggregation::NodeAggregation; pub use recursion_tip::RecursionTip; pub use scheduler::Scheduler; -use zksync_types::basic_fri_types::AggregationRound; -use zksync_types::L2ChainId; +use zksync_types::{basic_fri_types::AggregationRound, L2ChainId}; use crate::metrics::WITNESS_GENERATOR_METRICS; diff --git a/prover/crates/bin/witness_generator/src/utils.rs b/prover/crates/bin/witness_generator/src/utils.rs index 73e402fd55de..e6a0bba54b83 100644 --- a/prover/crates/bin/witness_generator/src/utils.rs +++ b/prover/crates/bin/witness_generator/src/utils.rs @@ -24,7 +24,9 @@ use zksync_prover_fri_types::{ keys::{AggregationsKey, ClosedFormInputKey, FriCircuitKey}, CircuitWrapper, FriProofWrapper, }; -use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber, L2ChainId, ProtocolVersionId, U256}; +use zksync_types::{ + basic_fri_types::AggregationRound, L1BatchNumber, L2ChainId, ProtocolVersionId, U256, +}; // Creates a temporary file with the serialized KZG setup usable by `zkevm_test_harness` functions. pub(crate) static KZG_TRUSTED_SETUP_FILE: Lazy = Lazy::new(|| { @@ -66,7 +68,10 @@ impl StoredObject for ClosedFormInputWrapper { block_number, circuit_id, } = key; - format!("closed_form_inputs_{}_{block_number}_{circuit_id}.bin", chain_id.as_u64()) + format!( + "closed_form_inputs_{}_{block_number}_{circuit_id}.bin", + chain_id.as_u64() + ) } serialize_using_bincode!(); @@ -86,7 +91,10 @@ impl StoredObject for AggregationWrapper { circuit_id, depth, } = key; - format!("aggregations_{}_{block_number}_{circuit_id}_{depth}.bin", chain_id.as_u64()) + format!( + "aggregations_{}_{block_number}_{circuit_id}_{depth}.bin", + chain_id.as_u64() + ) } serialize_using_bincode!(); diff --git a/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs b/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs index ec51e5e35fe2..19ac1e07baef 100644 --- a/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_proof_compressor_dal.rs @@ -1,9 +1,13 @@ #![doc = include_str!("../doc/FriProofCompressorDal.md")] use std::{collections::HashMap, str::FromStr, time::Duration}; -use zksync_basic_types::{protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{ - JobCountStatistics, ProofCompressionJobInfo, ProofCompressionJobStatus, StuckJobs, -}, L1BatchNumber, L2ChainId}; +use zksync_basic_types::{ + protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, + prover_dal::{ + JobCountStatistics, ProofCompressionJobInfo, ProofCompressionJobStatus, StuckJobs, + }, + L1BatchNumber, L2ChainId, +}; use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; use crate::{duration_to_naive_time, pg_interval_from_duration, Prover}; @@ -96,7 +100,12 @@ impl FriProofCompressorDal<'_, '_> { .fetch_optional(self.storage.conn()) .await .unwrap() - .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32))) + .map(|row| { + ( + L2ChainId::new(row.chain_id as u64).unwrap(), + L1BatchNumber(row.l1_batch_number as u32), + ) + }) } pub async fn get_proof_compression_job_attempts( @@ -322,7 +331,12 @@ impl FriProofCompressorDal<'_, '_> { .fetch_optional(self.storage.conn()) .await .unwrap() - .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), 
L1BatchNumber(row.l1_batch_number as u32))); + .map(|row| { + ( + L2ChainId::new(row.chain_id as u64).unwrap(), + L1BatchNumber(row.l1_batch_number as u32), + ) + }); result } diff --git a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs index 8efe2ce30eb5..5faed9f54fa7 100644 --- a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs @@ -7,12 +7,17 @@ use std::{ }; use sqlx::QueryBuilder; -use zksync_basic_types::{basic_fri_types::{ - AggregationRound, CircuitIdRoundTuple, CircuitProverStatsEntry, - ProtocolVersionedCircuitProverStats, -}, protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{ - FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, -}, L1BatchNumber, L2ChainId}; +use zksync_basic_types::{ + basic_fri_types::{ + AggregationRound, CircuitIdRoundTuple, CircuitProverStatsEntry, + ProtocolVersionedCircuitProverStats, + }, + protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, + prover_dal::{ + FriProverJobMetadata, JobCountStatistics, ProverJobFriInfo, ProverJobStatus, StuckJobs, + }, + L1BatchNumber, L2ChainId, +}; use zksync_db_connection::{ connection::Connection, instrument::InstrumentExt, metrics::MethodLatency, }; @@ -444,7 +449,11 @@ impl FriProverDal<'_, '_> { } } - pub async fn get_prover_job_attempts(&mut self, id: u32, chain_id: L2ChainId) -> sqlx::Result> { + pub async fn get_prover_job_attempts( + &mut self, + id: u32, + chain_id: L2ChainId, + ) -> sqlx::Result> { let attempts = sqlx::query!( r#" SELECT @@ -731,7 +740,9 @@ impl FriProverDal<'_, '_> { } } - pub async fn min_unproved_l1_batch_number(&mut self) -> HashMap<(u8, u8), (L2ChainId, L1BatchNumber)> { + pub async fn min_unproved_l1_batch_number( + &mut self, + ) -> HashMap<(u8, u8), (L2ChainId, L1BatchNumber)> { { sqlx::query!( r#" @@ -757,7 +768,10 @@ impl FriProverDal<'_, '_> { .map(|row| { ( (row.circuit_id as u8, row.aggregation_round as u8), - (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32)), + ( + L2ChainId::new(row.chain_id as u64).unwrap(), + L1BatchNumber(row.l1_batch_number as u32), + ), ) }) .collect() diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/basic.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/basic.rs index e16e2fbee695..86998c5c0a6a 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/basic.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/basic.rs @@ -1,6 +1,10 @@ use std::time::Duration; -use zksync_basic_types::{protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{BasicWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, L1BatchNumber, L2ChainId}; +use zksync_basic_types::{ + protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, + prover_dal::{BasicWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, + L1BatchNumber, L2ChainId, +}; use zksync_db_connection::{ connection::Connection, error::DalResult, @@ -97,7 +101,12 @@ impl FriBasicWitnessGeneratorDal<'_, '_> { .fetch_optional(self.storage.conn()) .await .unwrap() - .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32))) + .map(|row| { + ( + L2ChainId::new(row.chain_id as u64).unwrap(), + L1BatchNumber(row.l1_batch_number as u32), + ) + }) } pub async fn set_status_for_basic_witness_job( @@ -206,7 +215,7 
@@ impl FriBasicWitnessGeneratorDal<'_, '_> { pub async fn protocol_version_for_l1_batch_and_chain( &mut self, l1_batch_number: L1BatchNumber, - chain_id: L2ChainId + chain_id: L2ChainId, ) -> ProtocolSemanticVersion { let result = sqlx::query!( r#" diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/leaf.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/leaf.rs index ed2838aa8dee..4648efe45a61 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/leaf.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/leaf.rs @@ -1,9 +1,13 @@ -use std::{str::FromStr, time::Duration}; -use std::io::Chain; +use std::{io::Chain, str::FromStr, time::Duration}; -use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::{ - LeafAggregationJobMetadata, LeafWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, -}, L1BatchNumber, L2ChainId}; +use zksync_basic_types::{ + basic_fri_types::AggregationRound, + protocol_version::ProtocolSemanticVersion, + prover_dal::{ + LeafAggregationJobMetadata, LeafWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, + }, + L1BatchNumber, L2ChainId, +}; use zksync_db_connection::{ connection::Connection, utils::{duration_to_naive_time, pg_interval_from_duration}, @@ -17,7 +21,12 @@ pub struct FriLeafWitnessGeneratorDal<'a, 'c> { } impl FriLeafWitnessGeneratorDal<'_, '_> { - pub async fn mark_leaf_aggregation_as_successful(&mut self, id: u32, chain_id: L2ChainId, time_taken: Duration) { + pub async fn mark_leaf_aggregation_as_successful( + &mut self, + id: u32, + chain_id: L2ChainId, + time_taken: Duration, + ) { sqlx::query!( r#" UPDATE leaf_aggregation_witness_jobs_fri @@ -103,7 +112,9 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { }) } - pub async fn move_leaf_aggregation_jobs_from_waiting_to_queued(&mut self) -> Vec<(i64, L2ChainId, u8)> { + pub async fn move_leaf_aggregation_jobs_from_waiting_to_queued( + &mut self, + ) -> Vec<(i64, L2ChainId, u8)> { sqlx::query!( r#" UPDATE leaf_aggregation_witness_jobs_fri @@ -144,7 +155,13 @@ impl FriLeafWitnessGeneratorDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| (row.l1_batch_number, L2ChainId::new(row.chain_id as u64).unwrap(), row.circuit_id as u8)) + .map(|row| { + ( + row.l1_batch_number, + L2ChainId::new(row.chain_id as u64).unwrap(), + row.circuit_id as u8, + ) + }) .collect() } diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs index c323a0ee8b32..befe27055c20 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs @@ -6,11 +6,15 @@ pub mod node; pub mod recursion_tip; pub mod scheduler; -use std::collections::HashMap; -use std::io::Chain; +use std::{collections::HashMap, io::Chain}; use sqlx::{types::chrono::NaiveDateTime, Row}; -use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, prover_dal::{JobCountStatistics, ProofGenerationTime, StuckJobs}, L1BatchNumber, L2ChainId}; +use zksync_basic_types::{ + basic_fri_types::AggregationRound, + protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, + prover_dal::{JobCountStatistics, ProofGenerationTime, StuckJobs}, + L1BatchNumber, L2ChainId, +}; use zksync_db_connection::{connection::Connection, utils::naive_time_from_pg_interval}; use crate::Prover; @@ -201,8 
+205,12 @@ impl FriWitnessGeneratorDal<'_, '_> { block_number: L1BatchNumber, chain_id: L2ChainId, ) -> sqlx::Result { - self.delete_witness_generator_data_for_batch(block_number, chain_id, AggregationRound::BasicCircuits) - .await?; + self.delete_witness_generator_data_for_batch( + block_number, + chain_id, + AggregationRound::BasicCircuits, + ) + .await?; self.delete_witness_generator_data_for_batch( block_number, chain_id, @@ -218,8 +226,12 @@ impl FriWitnessGeneratorDal<'_, '_> { // TODO: THIS LOOKS SUS self.delete_witness_generator_data(AggregationRound::RecursionTip) .await?; - self.delete_witness_generator_data_for_batch(block_number, chain_id, AggregationRound::Scheduler) - .await + self.delete_witness_generator_data_for_batch( + block_number, + chain_id, + AggregationRound::Scheduler, + ) + .await } pub async fn delete_witness_generator_data( @@ -358,7 +370,11 @@ impl FriWitnessGeneratorDal<'_, '_> { wit.created_at FROM proof_compression_jobs_fri AS comp - JOIN witness_inputs_fri AS wit ON comp.l1_batch_number = wit.l1_batch_number AND comp.chain_id = wit.chain_id + JOIN + witness_inputs_fri AS wit + ON + comp.l1_batch_number = wit.l1_batch_number + AND comp.chain_id = wit.chain_id WHERE wit.created_at > $1 ORDER BY diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/node.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/node.rs index 85cb2834eae8..3c2d710f5728 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/node.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/node.rs @@ -1,8 +1,13 @@ use std::{str::FromStr, time::Duration}; -use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::{ - NodeAggregationJobMetadata, NodeWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, -}, L1BatchNumber, L2ChainId}; +use zksync_basic_types::{ + basic_fri_types::AggregationRound, + protocol_version::ProtocolSemanticVersion, + prover_dal::{ + NodeAggregationJobMetadata, NodeWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus, + }, + L1BatchNumber, L2ChainId, +}; use zksync_db_connection::{ connection::Connection, utils::{duration_to_naive_time, pg_interval_from_duration}, @@ -118,7 +123,12 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { }) } - pub async fn mark_node_aggregation_as_successful(&mut self, id: u32, chain_id: L2ChainId, time_taken: Duration) { + pub async fn mark_node_aggregation_as_successful( + &mut self, + id: u32, + chain_id: L2ChainId, + time_taken: Duration, + ) { sqlx::query!( r#" UPDATE node_aggregation_witness_jobs_fri @@ -232,7 +242,14 @@ impl FriNodeWitnessGeneratorDal<'_, '_> { .await .unwrap() .into_iter() - .map(|row| (row.l1_batch_number, row.chain_id as u64, row.circuit_id as u8, row.depth as u16)) + .map(|row| { + ( + row.l1_batch_number, + row.chain_id as u64, + row.circuit_id as u8, + row.depth as u16, + ) + }) .collect() } diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/recursion_tip.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/recursion_tip.rs index 3b14dd537676..ffd782b464aa 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/recursion_tip.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/recursion_tip.rs @@ -1,6 +1,11 @@ use std::{str::FromStr, time::Duration}; -use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::{RecursionTipWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, 
L1BatchNumber, L2ChainId}; +use zksync_basic_types::{ + basic_fri_types::AggregationRound, + protocol_version::ProtocolSemanticVersion, + prover_dal::{RecursionTipWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, + L1BatchNumber, L2ChainId, +}; use zksync_db_connection::{ connection::Connection, utils::{duration_to_naive_time, pg_interval_from_duration}, diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/scheduler.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/scheduler.rs index 1c74aa398eda..131966047b09 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/scheduler.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/scheduler.rs @@ -1,6 +1,11 @@ use std::{str::FromStr, time::Duration}; -use zksync_basic_types::{basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::{SchedulerWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, L1BatchNumber, L2ChainId}; +use zksync_basic_types::{ + basic_fri_types::AggregationRound, + protocol_version::ProtocolSemanticVersion, + prover_dal::{SchedulerWitnessGeneratorJobInfo, StuckJobs, WitnessJobStatus}, + L1BatchNumber, L2ChainId, +}; use zksync_db_connection::{ connection::Connection, utils::{duration_to_naive_time, pg_interval_from_duration}, @@ -29,8 +34,9 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { prover_jobs_fri JOIN scheduler_witness_jobs_fri swj - ON prover_jobs_fri.l1_batch_number = swj.l1_batch_number - AND prover_jobs_fri.chain_id = swj.chain_id + ON + prover_jobs_fri.l1_batch_number = swj.l1_batch_number + AND prover_jobs_fri.chain_id = swj.chain_id WHERE swj.status = 'waiting_for_proofs' AND prover_jobs_fri.status = 'successful' @@ -50,7 +56,11 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { .collect() } - pub async fn mark_scheduler_jobs_as_queued(&mut self, l1_batch_number: i64, chain_id: L2ChainId) { + pub async fn mark_scheduler_jobs_as_queued( + &mut self, + l1_batch_number: i64, + chain_id: L2ChainId, + ) { sqlx::query!( r#" UPDATE scheduler_witness_jobs_fri @@ -164,7 +174,12 @@ impl FriSchedulerWitnessGeneratorDal<'_, '_> { .fetch_optional(self.storage.conn()) .await .unwrap() - .map(|row| (L2ChainId::new(row.chain_id as u64).unwrap(), L1BatchNumber(row.l1_batch_number as u32))) + .map(|row| { + ( + L2ChainId::new(row.chain_id as u64).unwrap(), + L1BatchNumber(row.l1_batch_number as u32), + ) + }) } pub async fn mark_scheduler_job_as_successful( diff --git a/prover/crates/lib/prover_fri_types/src/lib.rs b/prover/crates/lib/prover_fri_types/src/lib.rs index db53ada7a86d..1a859ae5c47e 100644 --- a/prover/crates/lib/prover_fri_types/src/lib.rs +++ b/prover/crates/lib/prover_fri_types/src/lib.rs @@ -16,7 +16,11 @@ use circuit_definitions::{ }; use keys::RamPermutationQueueWitnessKey; use zksync_object_store::{serialize_using_bincode, Bucket, StoredObject}; -use zksync_types::{basic_fri_types::AggregationRound, protocol_version::{ProtocolSemanticVersion, VersionPatch}, L1BatchNumber, ProtocolVersionId, L2ChainId}; +use zksync_types::{ + basic_fri_types::AggregationRound, + protocol_version::{ProtocolSemanticVersion, VersionPatch}, + L1BatchNumber, L2ChainId, ProtocolVersionId, +}; use crate::keys::FriCircuitKey; @@ -59,7 +63,10 @@ impl StoredObject for CircuitWrapper { aggregation_round, depth, } = key; - format!("{}_{block_number}_{sequence_number}_{circuit_id}_{aggregation_round:?}_{depth}.bin", chain_id.as_u64()) + format!( + "{}_{block_number}_{sequence_number}_{circuit_id}_{aggregation_round:?}_{depth}.bin", + 
chain_id.as_u64() + ) } serialize_using_bincode!(); @@ -129,7 +136,11 @@ pub struct ProverArtifacts { } impl ProverArtifacts { - pub fn new(chain_id: L2ChainId, block_number: L1BatchNumber, proof_wrapper: FriProofWrapper) -> Self { + pub fn new( + chain_id: L2ChainId, + block_number: L1BatchNumber, + proof_wrapper: FriProofWrapper, + ) -> Self { Self { chain_id, block_number, From 3c3e303d3ae6485f58f13d48692e1d0b85008d80 Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Fri, 21 Feb 2025 17:44:32 +0200 Subject: [PATCH 5/5] apply changes to components --- core/lib/basic_types/src/prover_dal.rs | 1 - .../proof_fri_compressor/src/compressor.rs | 32 ++++++++++++------- .../src/rounds/basic_circuits/mod.rs | 16 ++++++++-- .../src/rounds/basic_circuits/utils.rs | 10 +++++- .../src/rounds/leaf_aggregation/mod.rs | 1 + .../bin/witness_generator/src/rounds/mod.rs | 26 ++++++++++----- .../src/rounds/node_aggregation/artifacts.rs | 15 +++++++-- .../src/rounds/node_aggregation/mod.rs | 9 ++++-- .../src/rounds/recursion_tip/artifacts.rs | 24 +++++++++----- .../src/rounds/recursion_tip/mod.rs | 8 +++-- .../src/rounds/scheduler/artifacts.rs | 12 ++++--- .../src/rounds/scheduler/mod.rs | 9 ++++-- .../crates/bin/witness_generator/src/utils.rs | 2 +- .../gpu_circuit_prover_executor.rs | 8 +++-- .../gpu_circuit_prover_job_picker.rs | 3 +- .../gpu_circuit_prover_job_saver.rs | 20 ++++++++---- .../witness_vector_generator_job_picker.rs | 7 ++-- .../witness_vector_generator_job_saver.rs | 14 +++++--- ...63bdc06c27ed81d6097cf71011dc50ef7fe9.json} | 4 +-- ...c9a64904026506914abae2946e5d353d6a604.json | 23 ------------- ...dce3ecd8e742a5eeff25a66e691a77d133d1.json} | 4 +-- .../lib/prover_dal/src/fri_prover_dal.rs | 5 ++- .../src/fri_witness_generator_dal/mod.rs | 3 ++ 23 files changed, 164 insertions(+), 92 deletions(-) rename prover/crates/lib/prover_dal/.sqlx/{query-d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c.json => query-28ab79139a5705b1b03d6349a0c763bdc06c27ed81d6097cf71011dc50ef7fe9.json} (57%) delete mode 100644 prover/crates/lib/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json rename prover/crates/lib/prover_dal/.sqlx/{query-a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a.json => query-b92af341238d1741463315bbe2dddce3ecd8e742a5eeff25a66e691a77d133d1.json} (67%) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index 5d1738fbf9da..4addcb7bc366 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -14,7 +14,6 @@ use crate::{ pub struct FriProverJobMetadata { pub id: u32, pub block_number: L1BatchNumber, - pub chain_id: L2ChainId, pub circuit_id: u8, pub aggregation_round: AggregationRound, diff --git a/prover/crates/bin/proof_fri_compressor/src/compressor.rs b/prover/crates/bin/proof_fri_compressor/src/compressor.rs index 581e1fed8a48..55c7039c6d87 100644 --- a/prover/crates/bin/proof_fri_compressor/src/compressor.rs +++ b/prover/crates/bin/proof_fri_compressor/src/compressor.rs @@ -19,7 +19,7 @@ use zksync_prover_interface::outputs::{ }; use zksync_prover_keystore::keystore::Keystore; use zksync_queued_job_processor::JobProcessor; -use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber}; +use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber, L2ChainId}; use crate::metrics::METRICS; @@ -69,7 +69,7 @@ impl ProofCompressor { #[async_trait] impl JobProcessor for 
ProofCompressor { type Job = ZkSyncRecursionLayerProof; - type JobId = L1BatchNumber; + type JobId = (L2ChainId, L1BatchNumber); type JobArtifacts = SnarkWrapperProof; @@ -78,7 +78,7 @@ impl JobProcessor for ProofCompressor { async fn get_next_job(&self) -> anyhow::Result> { let mut conn = self.pool.connection().await.unwrap(); let pod_name = get_current_pod_name(); - let Some(l1_batch_number) = conn + let Some((chain_id, l1_batch_number)) = conn .fri_proof_compressor_dal() .get_next_proof_compression_job(&pod_name, self.protocol_version) .await @@ -87,7 +87,7 @@ impl JobProcessor for ProofCompressor { }; let Some(fri_proof_id) = conn .fri_prover_jobs_dal() - .get_scheduler_proof_job_id(l1_batch_number) + .get_scheduler_proof_job_id(l1_batch_number, chain_id) .await else { anyhow::bail!("Scheduler proof is missing from database for batch {l1_batch_number}"); @@ -99,7 +99,7 @@ impl JobProcessor for ProofCompressor { let observer = METRICS.blob_fetch_time.start(); let fri_proof: FriProofWrapper = self.blob_store.get(fri_proof_id) - .await.with_context(|| format!("Failed to get fri proof from blob store for {l1_batch_number} with id {fri_proof_id}"))?; + .await.with_context(|| format!("Failed to get fri proof from blob store for batch {l1_batch_number}, chain {} with id {fri_proof_id}", chain_id.as_u64()))?; observer.observe(); @@ -107,7 +107,7 @@ impl JobProcessor for ProofCompressor { FriProofWrapper::Base(_) => anyhow::bail!("Must be a scheduler proof not base layer"), FriProofWrapper::Recursive(proof) => proof, }; - Ok(Some((l1_batch_number, scheduler_proof))) + Ok(Some(((chain_id, l1_batch_number), scheduler_proof))) } async fn save_failure(&self, job_id: Self::JobId, _started_at: Instant, error: String) { @@ -116,13 +116,13 @@ impl JobProcessor for ProofCompressor { .await .unwrap() .fri_proof_compressor_dal() - .mark_proof_compression_job_failed(&error, job_id) + .mark_proof_compression_job_failed(&error, job_id.1, job_id.0) .await; } async fn process_job( &self, - _job_id: &L1BatchNumber, + _job_id: &(L2ChainId, L1BatchNumber), job: ZkSyncRecursionLayerProof, _started_at: Instant, ) -> JoinHandle> { @@ -150,7 +150,9 @@ impl JobProcessor for ProofCompressor { ) -> anyhow::Result<()> { METRICS.compression_time.observe(started_at.elapsed()); tracing::info!( - "Finished fri proof compression for job: {job_id} took: {:?}", + "Finished fri proof compression for job with id {}, chain {} took: {:?}", + job_id.1, + job_id.0.as_u64(), started_at.elapsed() ); @@ -192,7 +194,12 @@ impl JobProcessor for ProofCompressor { .await .unwrap() .fri_proof_compressor_dal() - .mark_proof_compression_job_successful(job_id, started_at.elapsed(), &blob_url) + .mark_proof_compression_job_successful( + job_id.1, + job_id.0, + started_at.elapsed(), + &blob_url, + ) .await; Ok(()) } @@ -201,15 +208,16 @@ impl JobProcessor for ProofCompressor { self.max_attempts } - async fn get_job_attempts(&self, job_id: &L1BatchNumber) -> anyhow::Result { + async fn get_job_attempts(&self, job_id: &(L2ChainId, L1BatchNumber)) -> anyhow::Result { let mut prover_storage = self .pool .connection() .await .context("failed to acquire DB connection for ProofCompressor")?; + let (chain_id, l1_batch_number) = *job_id; prover_storage .fri_proof_compressor_dal() - .get_proof_compression_job_attempts(*job_id) + .get_proof_compression_job_attempts(l1_batch_number, chain_id) .await .map(|attempts| attempts.unwrap_or(0)) .context("failed to get job attempts for ProofCompressor") diff --git 
a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs index 5c2715ae313e..171ca455d7a0 100644 --- a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/mod.rs @@ -86,8 +86,14 @@ impl JobManager for BasicCircuits { block_number.0 ); - let (circuit_urls, queue_urls, scheduler_witness, aux_output_witness) = - generate_witness(block_number, object_store, job, max_circuits_in_flight).await; + let (circuit_urls, queue_urls, scheduler_witness, aux_output_witness) = generate_witness( + block_number, + chain_id, + object_store, + job, + max_circuits_in_flight, + ) + .await; WITNESS_GENERATOR_METRICS.witness_generation_time[&AggregationRound::BasicCircuits.into()] .observe(started_at.elapsed()); tracing::info!( @@ -137,7 +143,11 @@ impl JobManager for BasicCircuits { .get_next_basic_circuit_witness_job(protocol_version, &pod_name) .await { - Ok(Some((chain_id, l1_batch_number.0, l1_batch_number))) + Ok(Some(( + chain_id, + l1_batch_number.0, + (chain_id, l1_batch_number), + ))) } else { Ok(None) } diff --git a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/utils.rs b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/utils.rs index a3718a86ac91..ed4cd91d1ee9 100644 --- a/prover/crates/bin/witness_generator/src/rounds/basic_circuits/utils.rs +++ b/prover/crates/bin/witness_generator/src/rounds/basic_circuits/utils.rs @@ -40,6 +40,7 @@ use crate::{ #[tracing::instrument(skip_all, fields(l1_batch = %block_number))] pub(super) async fn generate_witness( block_number: L1BatchNumber, + chain_id: L2ChainId, object_store: Arc, input: WitnessInputData, max_circuits_in_flight: usize, @@ -168,7 +169,7 @@ pub(super) async fn generate_witness( save_circuit_handles.push(tokio::task::spawn(async move { let (circuit_id, circuit_url) = - save_circuit(block_number, circuit, sequence, object_store).await; + save_circuit(block_number, chain_id, circuit, sequence, object_store).await; drop(permit); (circuit_id, circuit_url) })); @@ -192,6 +193,7 @@ pub(super) async fn generate_witness( let object_store = object_store.clone(); save_queue_handles.push(tokio::task::spawn(save_recursion_queue( block_number, + chain_id, circuit_id, queue, inputs, @@ -245,12 +247,14 @@ pub(super) async fn generate_witness( #[tracing::instrument(skip_all, fields(l1_batch = %block_number, circuit_id = %circuit_id))] async fn save_recursion_queue( block_number: L1BatchNumber, + chain_id: L2ChainId, circuit_id: u8, recursion_queue_simulator: RecursionQueueSimulator, closed_form_inputs: Vec>, object_store: Arc, ) -> (u8, String, usize) { let key = ClosedFormInputKey { + chain_id, block_number, circuit_id, }; @@ -280,6 +284,7 @@ pub(crate) async fn create_aggregation_jobs( .fri_leaf_witness_generator_dal() .insert_leaf_aggregation_jobs( block_number, + chain_id, protocol_version, *circuit_id, closed_form_inputs_url.clone(), @@ -291,6 +296,7 @@ pub(crate) async fn create_aggregation_jobs( .fri_node_witness_generator_dal() .insert_node_aggregation_jobs( block_number, + chain_id, base_layer_to_recursive_layer_circuit_id(*circuit_id), None, 0, @@ -304,6 +310,7 @@ pub(crate) async fn create_aggregation_jobs( .fri_recursion_tip_witness_generator_dal() .insert_recursion_tip_aggregation_jobs( block_number, + chain_id, closed_form_inputs_and_urls, protocol_version, ) @@ -313,6 +320,7 @@ pub(crate) async fn create_aggregation_jobs( 
.fri_scheduler_witness_generator_dal() .insert_scheduler_aggregation_jobs( block_number, + chain_id, scheduler_partial_input_blob_url, protocol_version, ) diff --git a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs index f2f7bc817451..6bab14cc1f26 100644 --- a/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/leaf_aggregation/mod.rs @@ -184,6 +184,7 @@ impl JobManager for LeafAggregation { Ok(LeafAggregationArtifacts { circuit_id, + chain_id, block_number: job.block_number, aggregations, circuit_ids_and_urls, diff --git a/prover/crates/bin/witness_generator/src/rounds/mod.rs b/prover/crates/bin/witness_generator/src/rounds/mod.rs index b15d1318df4a..23730064ce5c 100644 --- a/prover/crates/bin/witness_generator/src/rounds/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/mod.rs @@ -95,20 +95,25 @@ where R: JobManager + ArtifactsManager + Send + Sync, { type Job = R::Job; - type JobId = u32; + type JobId = (L2ChainId, u32); type JobArtifacts = R::OutputArtifacts; const SERVICE_NAME: &'static str = R::SERVICE_NAME; async fn get_next_job(&self) -> anyhow::Result> { - if let Some((id, metadata)) = + if let Some((chain_id, job_id, metadata)) = R::get_metadata(self.connection_pool.clone(), self.protocol_version) .await .context("get_metadata()")? { - tracing::info!("Processing {:?} job {:?}", R::ROUND, id); + tracing::info!( + "Processing {:?} job {:?} for chain {}", + R::ROUND, + job_id, + chain_id.as_u64() + ); Ok(Some(( - id, + (chain_id, job_id), R::prepare_job(metadata, &*self.object_store, self.keystore.clone()) .await .context("prepare_job()")?, @@ -124,7 +129,7 @@ where .await .unwrap() .fri_witness_generator_dal() - .mark_witness_job_failed(&error, job_id, R::ROUND) + .mark_witness_job_failed(&error, job_id.1, job_id.0, R::ROUND) .await; } @@ -153,7 +158,8 @@ where let blob_save_started_at = Instant::now(); let blob_urls = R::save_to_bucket( - job_id, + job_id.1, + job_id.0, artifacts.clone(), &*self.object_store, self.config.shall_save_to_public_bucket, @@ -167,7 +173,8 @@ where tracing::info!("Saved {:?} artifacts for job {:?}", R::ROUND, job_id); R::save_to_database( &self.connection_pool, - job_id, + job_id.1, + job_id.0, started_at, blob_urls, artifacts, @@ -188,9 +195,12 @@ where "failed to acquire DB connection for {:?}", R::ROUND ))?; + + let (chain_id, l1_batch_number) = *job_id; + prover_storage .fri_witness_generator_dal() - .get_witness_job_attempts(*job_id, R::ROUND) + .get_witness_job_attempts(l1_batch_number, chain_id, R::ROUND) .await .map(|attempts| attempts.unwrap_or(0)) .context(format!("failed to get job attempts for {:?}", R::ROUND)) diff --git a/prover/crates/bin/witness_generator/src/rounds/node_aggregation/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/node_aggregation/artifacts.rs index ff7eb5f53f12..1f88cc468c05 100644 --- a/prover/crates/bin/witness_generator/src/rounds/node_aggregation/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/node_aggregation/artifacts.rs @@ -4,7 +4,9 @@ use async_trait::async_trait; use zksync_object_store::ObjectStore; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::keys::AggregationsKey; -use zksync_types::{basic_fri_types::AggregationRound, prover_dal::NodeAggregationJobMetadata}; +use zksync_types::{ + basic_fri_types::AggregationRound, prover_dal::NodeAggregationJobMetadata, 
L2ChainId, +}; use crate::{ artifacts::{AggregationBlobUrls, ArtifactsManager}, @@ -29,6 +31,7 @@ impl ArtifactsManager for NodeAggregation { object_store: &dyn ObjectStore, ) -> anyhow::Result { let key = AggregationsKey { + chain_id: metadata.chain_id, block_number: metadata.block_number, circuit_id: metadata.circuit_id, depth: metadata.depth, @@ -49,6 +52,7 @@ impl ArtifactsManager for NodeAggregation { )] async fn save_to_bucket( _job_id: u32, + _chain_id: L2ChainId, artifacts: Self::OutputArtifacts, object_store: &dyn ObjectStore, _shall_save_to_public_bucket: bool, @@ -56,6 +60,7 @@ impl ArtifactsManager for NodeAggregation { ) -> AggregationBlobUrls { let started_at = Instant::now(); let key = AggregationsKey { + chain_id: artifacts.chain_id, block_number: artifacts.block_number, circuit_id: artifacts.circuit_id, depth: artifacts.depth, @@ -81,6 +86,7 @@ impl ArtifactsManager for NodeAggregation { async fn save_to_database( connection_pool: &ConnectionPool, job_id: u32, + chain_id: L2ChainId, started_at: Instant, blob_urls: AggregationBlobUrls, artifacts: Self::OutputArtifacts, @@ -90,7 +96,7 @@ impl ArtifactsManager for NodeAggregation { let dependent_jobs = blob_urls.circuit_ids_and_urls.len(); let protocol_version_id = transaction .fri_basic_witness_generator_dal() - .protocol_version_for_l1_batch_and_chain(artifacts.block_number) + .protocol_version_for_l1_batch_and_chain(artifacts.block_number, artifacts.chain_id) .await; match artifacts.next_aggregations.len() > 1 { true => { @@ -98,6 +104,7 @@ impl ArtifactsManager for NodeAggregation { .fri_prover_jobs_dal() .insert_prover_jobs( artifacts.block_number, + artifacts.chain_id, blob_urls.circuit_ids_and_urls, AggregationRound::NodeAggregation, artifacts.depth, @@ -108,6 +115,7 @@ impl ArtifactsManager for NodeAggregation { .fri_node_witness_generator_dal() .insert_node_aggregation_jobs( artifacts.block_number, + artifacts.chain_id, artifacts.circuit_id, Some(dependent_jobs as i32), artifacts.depth, @@ -122,6 +130,7 @@ impl ArtifactsManager for NodeAggregation { .fri_prover_jobs_dal() .insert_prover_job( artifacts.block_number, + artifacts.chain_id, artifacts.circuit_id, artifacts.depth, 0, @@ -136,7 +145,7 @@ impl ArtifactsManager for NodeAggregation { transaction .fri_node_witness_generator_dal() - .mark_node_aggregation_as_successful(job_id, started_at.elapsed()) + .mark_node_aggregation_as_successful(job_id, chain_id, started_at.elapsed()) .await; transaction.commit().await?; diff --git a/prover/crates/bin/witness_generator/src/rounds/node_aggregation/mod.rs b/prover/crates/bin/witness_generator/src/rounds/node_aggregation/mod.rs index d53457957634..fd7e87555cf4 100644 --- a/prover/crates/bin/witness_generator/src/rounds/node_aggregation/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/node_aggregation/mod.rs @@ -23,7 +23,7 @@ use zksync_prover_fri_types::{ use zksync_prover_keystore::{keystore::Keystore, utils::get_leaf_vk_params}; use zksync_types::{ basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, - prover_dal::NodeAggregationJobMetadata, L1BatchNumber, + prover_dal::NodeAggregationJobMetadata, L1BatchNumber, L2ChainId, }; use crate::{ @@ -38,6 +38,7 @@ mod artifacts; pub struct NodeAggregationArtifacts { circuit_id: u8, block_number: L1BatchNumber, + chain_id: L2ChainId, depth: u16, pub next_aggregations: Vec<(u64, RecursionQueueSimulator)>, pub recursive_circuit_ids_and_urls: Vec<(u8, String)>, @@ -47,6 +48,7 @@ pub struct NodeAggregationArtifacts { pub struct 
NodeAggregationWitnessGeneratorJob { circuit_id: u8, block_number: L1BatchNumber, + chain_id: L2ChainId, depth: u16, aggregations: Vec<(u64, RecursionQueueSimulator)>, proofs_ids: Vec, @@ -77,9 +79,10 @@ impl JobManager for NodeAggregation { ) -> anyhow::Result { let node_vk_commitment = compute_node_vk_commitment(job.node_vk.clone()); tracing::info!( - "Starting witness generation of type {:?} for block {} circuit id {} depth {}", + "Starting witness generation of type {:?} for block {} chain {} circuit id {} depth {}", AggregationRound::NodeAggregation, job.block_number.0, + job.chain_id.as_u64(), job.circuit_id, job.depth ); @@ -191,6 +194,7 @@ impl JobManager for NodeAggregation { Ok(NodeAggregationArtifacts { circuit_id: job.circuit_id, block_number: job.block_number, + chain_id: job.chain_id, depth: job.depth + 1, next_aggregations, recursive_circuit_ids_and_urls, @@ -228,6 +232,7 @@ impl JobManager for NodeAggregation { Ok(NodeAggregationWitnessGeneratorJob { circuit_id: metadata.circuit_id, block_number: metadata.block_number, + chain_id: metadata.chain_id, depth: metadata.depth, aggregations: artifacts.0, proofs_ids: metadata.prover_job_ids_for_proofs, diff --git a/prover/crates/bin/witness_generator/src/rounds/recursion_tip/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/recursion_tip/artifacts.rs index 8f85e4c9c649..f0d342eb6617 100644 --- a/prover/crates/bin/witness_generator/src/rounds/recursion_tip/artifacts.rs +++ b/prover/crates/bin/witness_generator/src/rounds/recursion_tip/artifacts.rs @@ -9,7 +9,7 @@ use zkevm_test_harness::empty_node_proof; use zksync_object_store::ObjectStore; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::{keys::FriCircuitKey, CircuitWrapper, FriProofWrapper}; -use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber, L2ChainId}; use crate::{ artifacts::ArtifactsManager, @@ -18,7 +18,7 @@ use crate::{ #[async_trait] impl ArtifactsManager for RecursionTip { - type InputMetadata = Vec<(u8, u32)>; + type InputMetadata = Vec<(u8, L1BatchNumber, L2ChainId)>; type InputArtifacts = Vec; type OutputArtifacts = RecursionTipArtifacts; type BlobUrls = String; @@ -28,16 +28,16 @@ impl ArtifactsManager for RecursionTip { /// In this scenario, we still need to pass a proof, but it won't be taken into account during proving. /// For this scenario, we use an empty_proof, but any proof would suffice. 
async fn get_artifacts( - metadata: &Vec<(u8, u32)>, + metadata: &Vec<(u8, L1BatchNumber, L2ChainId)>, object_store: &dyn ObjectStore, ) -> anyhow::Result> { - let job_mapping: HashMap = metadata + let job_mapping: HashMap = metadata .clone() .into_iter() - .map(|(leaf_circuit_id, job_id)| { + .map(|(leaf_circuit_id, job_id, chain_id)| { ( ZkSyncRecursionLayerStorageType::from_leaf_u8_to_basic_u8(leaf_circuit_id), - job_id, + (chain_id, job_id.0), ) }) .collect(); @@ -76,12 +76,14 @@ impl ArtifactsManager for RecursionTip { async fn save_to_bucket( job_id: u32, + chain_id: L2ChainId, artifacts: Self::OutputArtifacts, object_store: &dyn ObjectStore, _shall_save_to_public_bucket: bool, _public_blob_store: Option>, ) -> String { let key = FriCircuitKey { + chain_id, block_number: L1BatchNumber(job_id), circuit_id: 255, sequence_number: 0, @@ -101,6 +103,7 @@ impl ArtifactsManager for RecursionTip { async fn save_to_database( connection_pool: &ConnectionPool, job_id: u32, + chain_id: L2ChainId, started_at: Instant, blob_urls: String, _artifacts: Self::OutputArtifacts, @@ -109,12 +112,13 @@ impl ArtifactsManager for RecursionTip { let mut transaction = prover_connection.start_transaction().await?; let protocol_version_id = transaction .fri_basic_witness_generator_dal() - .protocol_version_for_l1_batch_and_chain(L1BatchNumber(job_id)) + .protocol_version_for_l1_batch_and_chain(L1BatchNumber(job_id), chain_id) .await; transaction .fri_prover_jobs_dal() .insert_prover_job( L1BatchNumber(job_id), + chain_id, ZkSyncRecursionLayerStorageType::RecursionTipCircuit as u8, 0, 0, @@ -127,7 +131,11 @@ impl ArtifactsManager for RecursionTip { transaction .fri_recursion_tip_witness_generator_dal() - .mark_recursion_tip_job_as_successful(L1BatchNumber(job_id), started_at.elapsed()) + .mark_recursion_tip_job_as_successful( + L1BatchNumber(job_id), + chain_id, + started_at.elapsed(), + ) .await; transaction.commit().await?; diff --git a/prover/crates/bin/witness_generator/src/rounds/recursion_tip/mod.rs b/prover/crates/bin/witness_generator/src/rounds/recursion_tip/mod.rs index 9c5b3b671841..da929774b9f4 100644 --- a/prover/crates/bin/witness_generator/src/rounds/recursion_tip/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/recursion_tip/mod.rs @@ -41,6 +41,7 @@ use zksync_prover_fri_types::{get_current_pod_name, keys::ClosedFormInputKey}; use zksync_prover_keystore::{keystore::Keystore, utils::get_leaf_vk_params}; use zksync_types::{ basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber, + L2ChainId, }; use crate::{ @@ -68,6 +69,7 @@ pub struct RecursionTipArtifacts { pub struct RecursionTipJobMetadata { pub l1_batch_number: L1BatchNumber, + pub chain_id: L2ChainId, pub final_node_proof_job_ids: Vec<(u8, u32)>, } @@ -151,6 +153,7 @@ impl JobManager for RecursionTip { let mut recursion_queues = vec![]; for circuit_id in BaseLayerCircuitType::as_iter_u8() { let key = ClosedFormInputKey { + chain_id: metadata.chain_id, block_number: metadata.l1_batch_number, circuit_id, }; @@ -221,7 +224,7 @@ impl JobManager for RecursionTip { protocol_version: ProtocolSemanticVersion, ) -> anyhow::Result> { let pod_name = get_current_pod_name(); - let Some((l1_batch_number, number_of_final_node_jobs)) = connection_pool + let Some((chain_id, l1_batch_number, number_of_final_node_jobs)) = connection_pool .connection() .await? .fri_recursion_tip_witness_generator_dal() @@ -235,7 +238,7 @@ impl JobManager for RecursionTip { .connection() .await? 
             .fri_prover_jobs_dal()
-            .get_final_node_proof_job_ids_for(l1_batch_number)
+            .get_final_node_proof_job_ids_for(l1_batch_number, chain_id)
             .await;
         assert_eq!(
@@ -248,6 +251,7 @@ impl JobManager for RecursionTip {
         Ok(Some((
             l1_batch_number.0,
             RecursionTipJobMetadata {
+                chain_id,
                 l1_batch_number,
                 final_node_proof_job_ids,
             },
diff --git a/prover/crates/bin/witness_generator/src/rounds/scheduler/artifacts.rs b/prover/crates/bin/witness_generator/src/rounds/scheduler/artifacts.rs
index 1745adb26f50..fb1370f94d48 100644
--- a/prover/crates/bin/witness_generator/src/rounds/scheduler/artifacts.rs
+++ b/prover/crates/bin/witness_generator/src/rounds/scheduler/artifacts.rs
@@ -5,7 +5,7 @@ use circuit_definitions::circuit_definitions::recursion_layer::ZkSyncRecursionLa
 use zksync_object_store::ObjectStore;
 use zksync_prover_dal::{ConnectionPool, Prover, ProverDal};
 use zksync_prover_fri_types::{keys::FriCircuitKey, CircuitWrapper, FriProofWrapper};
-use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber};
+use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber, L2ChainId};
 use crate::{
     artifacts::ArtifactsManager,
@@ -14,7 +14,7 @@ use crate::{
 #[async_trait]
 impl ArtifactsManager for Scheduler {
-    type InputMetadata = u32;
+    type InputMetadata = (L2ChainId, u32);
     type InputArtifacts = FriProofWrapper;
     type OutputArtifacts = SchedulerArtifacts;
     type BlobUrls = String;
@@ -30,12 +30,14 @@ impl ArtifactsManager for Scheduler {
     async fn save_to_bucket(
         job_id: u32,
+        chain_id: L2ChainId,
         artifacts: Self::OutputArtifacts,
         object_store: &dyn ObjectStore,
         _shall_save_to_public_bucket: bool,
         _public_blob_store: Option<Arc<dyn ObjectStore>>,
     ) -> String {
         let key = FriCircuitKey {
+            chain_id,
             block_number: L1BatchNumber(job_id),
             circuit_id: 1,
             sequence_number: 0,
@@ -55,6 +57,7 @@ impl ArtifactsManager for Scheduler {
     async fn save_to_database(
         connection_pool: &ConnectionPool<Prover>,
         job_id: u32,
+        chain_id: L2ChainId,
         started_at: Instant,
         blob_urls: String,
         _artifacts: Self::OutputArtifacts,
@@ -63,12 +66,13 @@ impl ArtifactsManager for Scheduler {
         let mut transaction = prover_connection.start_transaction().await?;
         let protocol_version_id = transaction
             .fri_basic_witness_generator_dal()
-            .protocol_version_for_l1_batch_and_chain(L1BatchNumber(job_id))
+            .protocol_version_for_l1_batch_and_chain(L1BatchNumber(job_id), chain_id)
             .await;
         transaction
             .fri_prover_jobs_dal()
             .insert_prover_job(
                 L1BatchNumber(job_id),
+                chain_id,
                 ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8,
                 0,
                 0,
@@ -81,7 +85,7 @@ impl ArtifactsManager for Scheduler {
         transaction
             .fri_scheduler_witness_generator_dal()
-            .mark_scheduler_job_as_successful(L1BatchNumber(job_id), started_at.elapsed())
+            .mark_scheduler_job_as_successful(L1BatchNumber(job_id), chain_id, started_at.elapsed())
             .await;
         transaction.commit().await?;
diff --git a/prover/crates/bin/witness_generator/src/rounds/scheduler/mod.rs b/prover/crates/bin/witness_generator/src/rounds/scheduler/mod.rs
index f1358cbfc4f7..8df3208daab5 100644
--- a/prover/crates/bin/witness_generator/src/rounds/scheduler/mod.rs
+++ b/prover/crates/bin/witness_generator/src/rounds/scheduler/mod.rs
@@ -25,6 +25,7 @@ use zksync_prover_fri_types::{
 use zksync_prover_keystore::{keystore::Keystore, utils::get_leaf_vk_params};
 use zksync_types::{
     basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber,
+    L2ChainId,
 };
 use crate::{
@@ -42,6 +43,7 @@ pub struct SchedulerArtifacts {
 #[derive(Clone)]
 pub struct SchedulerWitnessGeneratorJob {
     block_number: L1BatchNumber,
+    chain_id: L2ChainId,
     scheduler_witness: SchedulerCircuitInstanceWitness<
         GoldilocksField,
         CircuitGoldilocksPoseidon2Sponge,
@@ -55,6 +57,7 @@ pub struct SchedulerWitnessGeneratorJob {
 pub struct SchedulerWitnessJobMetadata {
     pub l1_batch_number: L1BatchNumber,
+    pub chain_id: L2ChainId,
     pub recursion_tip_job_id: u32,
 }
@@ -163,6 +166,7 @@ impl JobManager for Scheduler {
         Ok(SchedulerWitnessGeneratorJob {
             block_number: metadata.l1_batch_number,
+            chain_id: metadata.chain_id,
             scheduler_witness,
             node_vk,
             leaf_layer_parameters,
@@ -175,7 +179,7 @@ impl JobManager for Scheduler {
         protocol_version: ProtocolSemanticVersion,
     ) -> anyhow::Result> {
         let pod_name = get_current_pod_name();
-        let Some(l1_batch_number) = connection_pool
+        let Some((chain_id, l1_batch_number)) = connection_pool
             .connection()
             .await?
             .fri_scheduler_witness_generator_dal()
@@ -188,7 +192,7 @@ impl JobManager for Scheduler {
             .connection()
             .await?
             .fri_prover_jobs_dal()
-            .get_recursion_tip_proof_job_id(l1_batch_number)
+            .get_recursion_tip_proof_job_id(l1_batch_number, chain_id)
             .await
             .context(format!(
                 "could not find recursion tip proof for l1 batch {}",
@@ -198,6 +202,7 @@ impl JobManager for Scheduler {
         Ok(Some((
             l1_batch_number.0,
             SchedulerWitnessJobMetadata {
+                chain_id,
                 l1_batch_number,
                 recursion_tip_job_id,
             },
diff --git a/prover/crates/bin/witness_generator/src/utils.rs b/prover/crates/bin/witness_generator/src/utils.rs
index e6a0bba54b83..e491961a46db 100644
--- a/prover/crates/bin/witness_generator/src/utils.rs
+++ b/prover/crates/bin/witness_generator/src/utils.rs
@@ -125,8 +125,8 @@ impl StoredObject for SchedulerPartialInputWrapper {
     fields(l1_batch = %block_number, circuit_id = %circuit.numeric_circuit_type())
 )]
 pub async fn save_circuit(
-    chain_id: L2ChainId,
     block_number: L1BatchNumber,
+    chain_id: L2ChainId,
     circuit: ZkSyncBaseLayerCircuit,
     sequence_number: usize,
     object_store: Arc<dyn ObjectStore>,
diff --git a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_executor.rs b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_executor.rs
index 043232a5003c..3069d67ea93e 100644
--- a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_executor.rs
+++ b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_executor.rs
@@ -33,7 +33,7 @@ impl Executor for GpuCircuitProverExecutor {
     #[tracing::instrument(
         name = "gpu_circuit_prover_executor",
         skip_all,
-        fields(l1_batch = % metadata.block_number)
+        fields(l1_batch = % metadata.block_number, chain = %metadata.chain_id.as_u64())
     )]
     fn execute(
         &self,
@@ -42,9 +42,10 @@ impl Executor for GpuCircuitProverExecutor {
     ) -> anyhow::Result {
         let start_time = Instant::now();
         tracing::info!(
-            "Started executing gpu circuit prover job {}, on batch {}, for circuit {}, at round {}",
+            "Started executing gpu circuit prover job {}, on batch {}, chain {}, for circuit {}, at round {}",
             metadata.id,
             metadata.block_number,
+            metadata.chain_id.as_u64(),
             metadata.circuit_id,
             metadata.aggregation_round
         );
@@ -58,9 +59,10 @@ impl Executor for GpuCircuitProverExecutor {
             .prove(witness_vector, setup_data)
             .context("failed to gpu prove circuit")?;
         tracing::info!(
-            "Finished executing gpu circuit prover job {}, on batch {}, for circuit {}, at round {} after {:?}",
+            "Finished executing gpu circuit prover job {}, on batch {}, chain {}, for circuit {}, at round {} after {:?}",
             metadata.id,
             metadata.block_number,
+            metadata.chain_id.as_u64(),
             metadata.circuit_id,
             metadata.aggregation_round,
             start_time.elapsed()
diff --git a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_picker.rs b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_picker.rs
index 12c62a6913af..47a824cb5bb2 100644
--- a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_picker.rs
+++ b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_picker.rs
@@ -77,9 +77,10 @@ impl JobPicker for GpuCircuitProverJobPicker {
             setup_data,
         };
         tracing::info!(
-            "Finished picking gpu circuit prover job {}, on batch {}, for circuit {}, at round {} in {:?}",
+            "Finished picking gpu circuit prover job {}, on batch {}, chain {}, for circuit {}, at round {} in {:?}",
             metadata.id,
             metadata.block_number,
+            metadata.chain_id.as_u64(),
             metadata.circuit_id,
             metadata.aggregation_round,
             start_time.elapsed()
diff --git a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_saver.rs b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_saver.rs
index 0ba28a0d9f5a..6b72616a3e13 100644
--- a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_saver.rs
+++ b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_saver.rs
@@ -40,7 +40,7 @@ impl JobSaver for GpuCircuitProverJobSaver {
     #[tracing::instrument(
         name = "gpu_circuit_prover_job_saver",
         skip_all,
-        fields(l1_batch = % data.1.block_number)
+        fields(l1_batch = % data.1.block_number, chain = %data.1.chain_id.as_u64())
     )]
     async fn save_job_result(
         &self,
@@ -49,9 +49,10 @@ impl JobSaver for GpuCircuitProverJobSaver {
         let start_time = Instant::now();
         let (result, metadata) = data;
         tracing::info!(
-            "Started saving gpu circuit prover job {}, on batch {}, for circuit {}, at round {}",
+            "Started saving gpu circuit prover job {}, on batch {}, chain {}, for circuit {}, at round {}",
             metadata.id,
             metadata.block_number,
+            metadata.chain_id.as_u64(),
             metadata.circuit_id,
             metadata.aggregation_round
         );
@@ -68,7 +69,7 @@ impl JobSaver for GpuCircuitProverJobSaver {
                 let blob_url = self
                     .object_store
-                    .put(metadata.id, &proof_wrapper)
+                    .put((metadata.chain_id, metadata.id), &proof_wrapper)
                     .await
                     .context("failed to upload to object store")?;
@@ -78,13 +79,19 @@ impl JobSaver for GpuCircuitProverJobSaver {
                     .context("failed to start db transaction")?;
                 transaction
                     .fri_prover_jobs_dal()
-                    .save_proof(metadata.id, metadata.pick_time.elapsed(), &blob_url)
+                    .save_proof(
+                        metadata.id,
+                        metadata.chain_id,
+                        metadata.pick_time.elapsed(),
+                        &blob_url,
+                    )
                     .await;
                 if is_scheduler_proof {
                     transaction
                         .fri_proof_compressor_dal()
                         .insert_proof_compression_job(
                             metadata.block_number,
+                            metadata.chain_id,
                             &blob_url,
                             self.protocol_version,
                         )
@@ -103,14 +110,15 @@ impl JobSaver for GpuCircuitProverJobSaver {
                     .await
                     .context("failed to get db connection")?
                     .fri_prover_jobs_dal()
-                    .save_proof_error(metadata.id, error_message)
+                    .save_proof_error(metadata.id, metadata.chain_id, error_message)
                     .await;
             }
         };
         tracing::info!(
-            "Finished saving gpu circuit prover job {}, on batch {}, for circuit {}, at round {} after {:?}",
+            "Finished saving gpu circuit prover job {}, on batch {}, chain {}, for circuit {}, at round {} after {:?}",
             metadata.id,
             metadata.block_number,
+            metadata.chain_id.as_u64(),
             metadata.circuit_id,
             metadata.aggregation_round,
             start_time.elapsed()
diff --git a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_picker.rs b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_picker.rs
index b83baa8bbd35..73d48439dd73 100644
--- a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_picker.rs
+++ b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_picker.rs
@@ -16,7 +16,7 @@ use zksync_prover_fri_types::{
     CircuitAuxData, CircuitWrapper, ProverServiceDataKey, RamPermutationQueueWitness,
 };
 use zksync_prover_job_processor::JobPicker;
-use zksync_types::{prover_dal::FriProverJobMetadata, L1BatchNumber};
+use zksync_types::{prover_dal::FriProverJobMetadata, L1BatchNumber, L2ChainId};
 use crate::{
     metrics::WITNESS_VECTOR_GENERATOR_METRICS,
@@ -60,9 +60,11 @@ impl WitnessVectorGeneratorJobPicker {
         circuit: ZkSyncBaseLayerCircuit,
         aux_data: CircuitAuxData,
         l1_batch_number: L1BatchNumber,
+        chain_id: L2ChainId,
     ) -> anyhow::Result {
         if let ZkSyncBaseLayerCircuit::RAMPermutation(circuit_instance) = circuit {
             let sorted_witness_key = RamPermutationQueueWitnessKey {
+                chain_id,
                 block_number: l1_batch_number,
                 circuit_subsequence_number: aux_data.circuit_subsequence_number as usize,
                 is_sorted: true,
@@ -74,6 +76,7 @@ impl WitnessVectorGeneratorJobPicker {
                 .context("failed to load sorted witness key")?;
             let unsorted_witness_key = RamPermutationQueueWitnessKey {
+                chain_id,
                 block_number: l1_batch_number,
                 circuit_subsequence_number: aux_data.circuit_subsequence_number as usize,
                 is_sorted: false,
@@ -131,7 +134,7 @@ impl JobPicker for WitnessVectorGeneratorJobPic
             CircuitWrapper::Base(circuit) => Circuit::Base(circuit),
             CircuitWrapper::Recursive(circuit) => Circuit::Recursive(circuit),
             CircuitWrapper::BasePartial((circuit, aux_data)) => self
-                .fill_witness(circuit, aux_data, metadata.block_number)
+                .fill_witness(circuit, aux_data, metadata.block_number, metadata.chain_id)
                 .await
                 .context("failed to fill witness")?,
         };
diff --git a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_saver.rs b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_saver.rs
index 86e04472b299..6005725ea407 100644
--- a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_saver.rs
+++ b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_saver.rs
@@ -58,9 +58,10 @@ impl JobSaver for WitnessVectorGeneratorJobSaver {
         match result {
             Ok(payload) => {
                 tracing::info!(
-                    "Started transferring witness vector generator job {}, on batch {}, for circuit {}, at round {}",
+                    "Started transferring witness vector generator job {}, on batch {}, chain {}, for circuit {}, at round {}",
                     metadata.id,
                     metadata.block_number,
+                    metadata.chain_id.as_u64(),
                     metadata.circuit_id,
                     metadata.aggregation_round
                 );
@@ -69,9 +70,10 @@ impl JobSaver for WitnessVectorGeneratorJobSaver {
                     return Ok(());
                 }
                 tracing::info!(
-                    "Finished transferring witness vector generator job {}, on batch {}, for circuit {}, at round {} in {:?}",
+                    "Finished transferring witness vector generator job {}, on batch {}, chain {}, for circuit {}, at round {} in {:?}",
                     metadata.id,
                     metadata.block_number,
+                    metadata.chain_id.as_u64(),
                     metadata.circuit_id,
                     metadata.aggregation_round,
                     start_time.elapsed()
@@ -83,9 +85,10 @@ impl JobSaver for WitnessVectorGeneratorJobSaver {
             Err(err) => {
                 tracing::error!("Witness vector generation failed: {:?}", err);
                 tracing::info!(
-                    "Started saving failure for witness vector generator job {}, on batch {}, for circuit {}, at round {}",
+                    "Started saving failure for witness vector generator job {}, on batch {}, chain {}, for circuit {}, at round {}",
                     metadata.id,
                     metadata.block_number,
+                    metadata.chain_id.as_u64(),
                     metadata.circuit_id,
                     metadata.aggregation_round
                 );
@@ -94,12 +97,13 @@ impl JobSaver for WitnessVectorGeneratorJobSaver {
                     .await
                     .context("failed to get db connection")?
                     .fri_prover_jobs_dal()
-                    .save_proof_error(metadata.id, err.to_string())
+                    .save_proof_error(metadata.id, metadata.chain_id, err.to_string())
                     .await;
                 tracing::info!(
-                    "Finished saving failure for witness vector generator job {}, on batch {}, for circuit {}, at round {} in {:?}",
+                    "Finished saving failure for witness vector generator job {}, on batch {}, chain {}, for circuit {}, at round {} in {:?}",
                     metadata.id,
                     metadata.block_number,
+                    metadata.chain_id.as_u64(),
                     metadata.circuit_id,
                     metadata.aggregation_round,
                     start_time.elapsed()
diff --git a/prover/crates/lib/prover_dal/.sqlx/query-d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c.json b/prover/crates/lib/prover_dal/.sqlx/query-28ab79139a5705b1b03d6349a0c763bdc06c27ed81d6097cf71011dc50ef7fe9.json
similarity index 57%
rename from prover/crates/lib/prover_dal/.sqlx/query-d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c.json
rename to prover/crates/lib/prover_dal/.sqlx/query-28ab79139a5705b1b03d6349a0c763bdc06c27ed81d6097cf71011dc50ef7fe9.json
index abb7a6a618e3..9f5d4958a993 100644
--- a/prover/crates/lib/prover_dal/.sqlx/query-d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c.json
+++ b/prover/crates/lib/prover_dal/.sqlx/query-28ab79139a5705b1b03d6349a0c763bdc06c27ed81d6097cf71011dc50ef7fe9.json
@@ -1,6 +1,6 @@
 {
   "db_name": "PostgreSQL",
-  "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n (l1_batch_number, chain_id) IN (\n SELECT\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id\n FROM\n prover_jobs_fri\n JOIN\n scheduler_witness_jobs_fri swj\n ON prover_jobs_fri.l1_batch_number = swj.l1_batch_number\n AND prover_jobs_fri.chain_id = swj.chain_id\n WHERE\n swj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = $1\n )\n RETURNING\n l1_batch_number,\n chain_id;\n ",
+  "query": "\n UPDATE scheduler_witness_jobs_fri\n SET\n status = 'queued'\n WHERE\n (l1_batch_number, chain_id) IN (\n SELECT\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.chain_id\n FROM\n prover_jobs_fri\n JOIN\n scheduler_witness_jobs_fri swj\n ON\n prover_jobs_fri.l1_batch_number = swj.l1_batch_number\n AND prover_jobs_fri.chain_id = swj.chain_id\n WHERE\n swj.status = 'waiting_for_proofs'\n AND prover_jobs_fri.status = 'successful'\n AND prover_jobs_fri.aggregation_round = $1\n )\n RETURNING\n l1_batch_number,\n chain_id;\n ",
   "describe": {
     "columns": [
       {
@@ -24,5 +24,5 @@
       false
     ]
   },
-  "hash": "d5e0fc9af2432e00cde95eedaa971e45a108407ee55900557c91691c5f95033c"
+  "hash": "28ab79139a5705b1b03d6349a0c763bdc06c27ed81d6097cf71011dc50ef7fe9"
 }
diff --git a/prover/crates/lib/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json b/prover/crates/lib/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json
deleted file mode 100644
index 3275df2a3d58..000000000000
--- a/prover/crates/lib/prover_dal/.sqlx/query-a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n l1_batch_number = $1\n AND status = 'successful'\n AND aggregation_round = $2\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int2"
-      ]
-    },
-    "nullable": [
-      false
-    ]
-  },
-  "hash": "a84ee70bec8c03bd51e1c6bad44c9a64904026506914abae2946e5d353d6a604"
-}
diff --git a/prover/crates/lib/prover_dal/.sqlx/query-a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a.json b/prover/crates/lib/prover_dal/.sqlx/query-b92af341238d1741463315bbe2dddce3ecd8e742a5eeff25a66e691a77d133d1.json
similarity index 67%
rename from prover/crates/lib/prover_dal/.sqlx/query-a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a.json
rename to prover/crates/lib/prover_dal/.sqlx/query-b92af341238d1741463315bbe2dddce3ecd8e742a5eeff25a66e691a77d133d1.json
index 3141b1edeb7f..4add72781f63 100644
--- a/prover/crates/lib/prover_dal/.sqlx/query-a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a.json
+++ b/prover/crates/lib/prover_dal/.sqlx/query-b92af341238d1741463315bbe2dddce3ecd8e742a5eeff25a66e691a77d133d1.json
@@ -1,6 +1,6 @@
 {
   "db_name": "PostgreSQL",
-  "query": "\n SELECT\n comp.l1_batch_number,\n comp.chain_id,\n (comp.updated_at - wit.created_at) AS time_taken,\n wit.created_at\n FROM\n proof_compression_jobs_fri AS comp\n JOIN witness_inputs_fri AS wit ON comp.l1_batch_number = wit.l1_batch_number AND comp.chain_id = wit.chain_id\n WHERE\n wit.created_at > $1\n ORDER BY\n time_taken DESC;\n ",
+  "query": "\n SELECT\n comp.l1_batch_number,\n comp.chain_id,\n (comp.updated_at - wit.created_at) AS time_taken,\n wit.created_at\n FROM\n proof_compression_jobs_fri AS comp\n JOIN\n witness_inputs_fri AS wit\n ON\n comp.l1_batch_number = wit.l1_batch_number\n AND comp.chain_id = wit.chain_id\n WHERE\n wit.created_at > $1\n ORDER BY\n time_taken DESC;\n ",
   "describe": {
     "columns": [
       {
@@ -36,5 +36,5 @@
       false
     ]
   },
-  "hash": "a1dd440737d96276005b48ac4f445a40a0e69c1b1065e05d41d180616ffb6a8a"
+  "hash": "b92af341238d1741463315bbe2dddce3ecd8e742a5eeff25a66e691a77d133d1"
 }
diff --git a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs
index 5faed9f54fa7..f19ed11e1191 100644
--- a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs
+++ b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs
@@ -800,6 +800,7 @@ impl FriProverDal<'_, '_> {
     pub async fn get_scheduler_proof_job_id(
         &mut self,
         l1_batch_number: L1BatchNumber,
+        chain_id: L2ChainId,
     ) -> Option<u32> {
         sqlx::query!(
             r#"
@@ -809,10 +810,12 @@ impl FriProverDal<'_, '_> {
                 prover_jobs_fri
             WHERE
                 l1_batch_number = $1
+                AND chain_id = $2
                 AND status = 'successful'
-                AND aggregation_round = $2
+                AND aggregation_round = $3
             "#,
             i64::from(l1_batch_number.0),
+            chain_id.as_u64() as i32,
             AggregationRound::Scheduler as i16,
         )
         .fetch_optional(self.storage.conn())
diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs
index befe27055c20..e9e009cb0f2b 100644
--- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs
+++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal/mod.rs
@@ -42,6 +42,7 @@ impl FriWitnessGeneratorDal<'_, '_> {
     pub async fn get_witness_job_attempts(
         &mut self,
         job_id: u32,
+        chain_id: L2ChainId,
         aggregation_round: AggregationRound,
     ) -> sqlx::Result<Option<u32>> {
         let table = match aggregation_round {
@@ -68,7 +69,9 @@ impl FriWitnessGeneratorDal<'_, '_> {
                 {table}
             WHERE
                 {job_id_column} = {job_id}
+                AND chain_id = {}
            "#,
+            chain_id.as_u64()
        );
        let attempts = sqlx::query(&query)
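A minimal sketch of the chain-aware lookup that the dynamically built query above performs, written against plain `sqlx` with runtime binds instead of `format!` interpolation. The table name, column names, numeric types, and the function itself are assumptions chosen for illustration; they are not the crate's actual DAL API.

// Sketch only; not part of the patch.
use sqlx::{PgPool, Row};

async fn witness_job_attempts(
    pool: &PgPool,
    l1_batch_number: i64,
    chain_id: i32,
) -> sqlx::Result<Option<i16>> {
    // Filter on both batch number and chain id, mirroring the composite key
    // introduced by the chain_id migration.
    let row = sqlx::query(
        "SELECT attempts FROM witness_inputs_fri WHERE l1_batch_number = $1 AND chain_id = $2",
    )
    .bind(l1_batch_number)
    .bind(chain_id)
    .fetch_optional(pool)
    .await?;
    // Assumes `attempts` is a SMALLINT column, hence i16 here.
    Ok(row.map(|r| r.get::<i16, _>("attempts")))
}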