Arrabbiata: implement absorb_state #2982

Merged 3 commits on Feb 3, 2025

4 changes: 2 additions & 2 deletions arrabbiata/src/main.rs
@@ -107,8 +107,8 @@ pub fn main() {
// verify and accumulate it at the next iteration.
env.commit_state();

// FIXME:
// Absorb all commitments in the sponge.
// Absorb the last program state.
env.absorb_state();

// FIXME:
// Coin challenges β and γ for the permutation argument
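For context, here is a minimal sketch of the prover-loop ordering that the main.rs hunk above establishes. `env` is the witness environment built in `main`; `n_iterations` and `coin_challenges` are hypothetical placeholders (the latter stands in for the remaining FIXME), and only `commit_state` and `absorb_state` are calls that actually exist in this codebase:

// Sketch only: `n_iterations` and `coin_challenges` are hypothetical.
for _i in 0..n_iterations {
    // Commit to the program state of the current iteration so the verifier
    // of the next iteration can verify and accumulate it.
    env.commit_state();

    // Absorb the digest of the last program state and the fresh commitments
    // into the prover sponge (the call added by this PR).
    env.absorb_state();

    // FIXME (unchanged by this PR): coin the challenges β and γ for the
    // permutation argument from the updated sponge state.
    // let (beta, gamma) = env.coin_challenges();
}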
100 changes: 93 additions & 7 deletions arrabbiata/src/witness.rs
@@ -1,3 +1,4 @@
use ark_ec::CurveConfig;
use ark_ff::PrimeField;
use ark_poly::Evaluations;
use kimchi::circuits::{domains::EvaluationDomains, gate::CurrOrNext};
@@ -6,7 +7,7 @@ use mina_poseidon::constants::SpongeConstants;
use num_bigint::{BigInt, BigUint};
use num_integer::Integer;
use o1_utils::field_helpers::FieldHelpers;
use poly_commitment::{ipa::SRS, PolyComm, SRS as _};
use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, PolyComm, SRS as _};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::time::Instant;

@@ -139,14 +140,38 @@ pub struct Env<
pub sponge_e1: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH],
pub sponge_e2: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH],

/// Sponge state used by the prover for the current iteration.
///
/// This sponge is used at the current iteration to absorb commitments of
/// the program state and generate the challenges for the current iteration.
/// The outputs of the sponge will be verified by the verifier of the next
/// iteration.
pub prover_sponge_state: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH],

/// Sponge state used by the verifier for the current iteration.
///
/// This sponge is used at the current iteration to build the verifier
/// circuit. The state must match the previous prover sponge state.
pub verifier_sponge_state: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH],

/// The current iteration of the IVC
pub current_iteration: u64,

/// The digest of the last program state, including the cross-terms
/// commitments.
/// The value is a 128 bits value, to be absorbed to initialize the sponge
/// state for both curves.
pub last_digest: BigInt,
/// The digest of the program state before executing the last iteration.
/// The value will be used to initialize the execution trace of the verifier
/// in the next iteration, in particular to verify that the challenges have
/// been generated correctly.
///
/// The value is a 128-bit value.
pub last_program_digest_before_execution: BigInt,

/// The digest of the program state after executing the last iteration.
/// The value will be used to initialize the sponge before committing to the
/// next iteration.
///
/// The value is a 128-bit value.
pub last_program_digest_after_execution: BigInt,

/// The coin folding combiner will be used to generate the combination of
/// folding instances
@@ -782,6 +807,8 @@ impl<
where
E1::BaseField: PrimeField,
E2::BaseField: PrimeField,
<<E1 as CommitmentCurve>::Params as CurveConfig>::BaseField: PrimeField,
<<E2 as CommitmentCurve>::Params as CurveConfig>::BaseField: PrimeField,
{
pub fn new(
srs_log2_size: usize,
@@ -864,6 +891,10 @@ where
// FIXME: challenges
let challenges: Vec<BigInt> = vec![];

let prover_sponge_state: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH] =
std::array::from_fn(|_| BigInt::from(0_u64));
let verifier_sponge_state: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH] =
std::array::from_fn(|_| BigInt::from(0_u64));
Self {
// -------
// Setup
@@ -892,8 +923,13 @@ where
current_instruction: IVC_STARTING_INSTRUCTION,
sponge_e1,
sponge_e2,
prover_sponge_state,
verifier_sponge_state,
current_iteration: 0,
last_digest: BigInt::from(0_u64),
// FIXME: set a correct value
last_program_digest_before_execution: BigInt::from(0_u64),
// FIXME: set a correct value
last_program_digest_after_execution: BigInt::from(0_u64),
r: BigInt::from(0_usize),
// Initialize the temporary accumulators with 0
temporary_accumulators: (
@@ -985,6 +1021,56 @@ where
}
}

/// Absorb the last committed program state in the correct sponge.
///
/// For a description of the messages to be given to the sponge, including
/// the expected instantiation, refer to the section "Message Passing" in
/// [crate::interpreter].
pub fn absorb_state(&mut self) {
if self.current_iteration % 2 == 0 {
let mut sponge = E1::create_new_sponge();
let previous_state: E1::BaseField = E1::BaseField::from_biguint(
&self
.last_program_digest_after_execution
.to_biguint()
.unwrap(),
)
.unwrap();
E1::absorb_fq(&mut sponge, previous_state);
self.previous_committed_state_e1
.iter()
.for_each(|comm| E1::absorb_curve_points(&mut sponge, &comm.chunks));
let state: Vec<BigInt> = sponge
.sponge
.state
.iter()
.map(|x| x.to_biguint().into())
.collect();
self.prover_sponge_state = state.try_into().unwrap()
} else {
let mut sponge = E2::create_new_sponge();
let previous_state: E2::BaseField = E2::BaseField::from_biguint(
&self
.last_program_digest_after_execution
.to_biguint()
.unwrap(),
)
.unwrap();
E2::absorb_fq(&mut sponge, previous_state);
self.previous_committed_state_e2
.iter()
.for_each(|comm| E2::absorb_curve_points(&mut sponge, &comm.chunks));

let state: Vec<BigInt> = sponge
.sponge
.state
.iter()
.map(|x| x.to_biguint().into())
.collect();
self.prover_sponge_state = state.try_into().unwrap()
}
}
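A hedged usage sketch of the even/odd dispatch above, assuming a hypothetical `make_test_env` helper in place of the `Env::new` constructor shown earlier; only methods and fields that appear in this diff (`commit_state`, `absorb_state`, `current_iteration`, `prover_sponge_state`) are used:

// Sketch only; `make_test_env` is hypothetical.
let mut env = make_test_env();

// Even iteration: absorb_state instantiates a fresh E1 sponge, absorbs the
// digest of the last program state, then the E1 commitments, and stores the
// resulting sponge state in `prover_sponge_state`.
env.commit_state();
env.absorb_state();
let _state_over_e1 = env.prover_sponge_state.clone();

// Odd iteration: the same message passing happens over E2, and
// `prover_sponge_state` is overwritten with the E2-based state.
env.current_iteration += 1;
env.commit_state();
env.absorb_state();
let _state_over_e2 = env.prover_sponge_state.clone();

The two branches are intentionally symmetric: E1 and E2 are distinct curve types with distinct base fields, so each side instantiates its own sponge and performs its own conversions between field elements and BigInt.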

/// Compute the output of the application on the previous output
// TODO: we should compute the hash of the previous commitments, only on
// CPU?