Skip to content

Commit

Permalink
Merge pull request #15 from telosnetwork/fix_no_default_features
Browse files Browse the repository at this point in the history
Enable compiling with no-std support
  • Loading branch information
albertog78 authored Dec 11, 2024
2 parents 0e36b06 + c7dd5c2 commit c787b8a
Show file tree
Hide file tree
Showing 101 changed files with 387 additions and 1,118 deletions.
29 changes: 3 additions & 26 deletions .github/workflows/continuous-integration-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,14 +43,6 @@ jobs:
CARGO_INCREMENTAL: 1
RUST_BACKTRACE: 1

- name: Check in starky subdirectory
run: cargo check --manifest-path starky/Cargo.toml
env:
RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0
RUST_LOG: 1
CARGO_INCREMENTAL: 1
RUST_BACKTRACE: 1

- name: Run cargo test
run: cargo test --workspace
env:
Expand Down Expand Up @@ -86,14 +78,6 @@ jobs:
CARGO_INCREMENTAL: 1
RUST_BACKTRACE: 1

- name: Check in starky subdirectory for wasm targets
run: cargo check --manifest-path starky/Cargo.toml --target wasm32-unknown-unknown --no-default-features
env:
RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0
RUST_LOG: 1
CARGO_INCREMENTAL: 1
RUST_BACKTRACE: 1

no_std:
name: Test Suite in no-std
runs-on: ubuntu-latest
Expand Down Expand Up @@ -121,14 +105,6 @@ jobs:
CARGO_INCREMENTAL: 1
RUST_BACKTRACE: 1

- name: Run cargo test in starky subdirectory (no-std)
run: cargo test --manifest-path starky/Cargo.toml --no-default-features --lib
env:
RUSTFLAGS: -Copt-level=3 -Cdebug-assertions -Coverflow-checks=y -Cdebuginfo=0
RUST_LOG: 1
CARGO_INCREMENTAL: 1
RUST_BACKTRACE: 1

lints:
name: Formatting and Clippy
runs-on: ubuntu-latest
Expand All @@ -151,5 +127,6 @@ jobs:
- name: Run cargo fmt
run: cargo fmt --all --check

- name: Run cargo clippy
run: cargo clippy --all-features --all-targets -- -D warnings -A incomplete-features
#TODO: reintroduce check
#- name: Run cargo clippy
# run: cargo clippy --all-features --all-targets -- -D warnings -A incomplete-features
2 changes: 1 addition & 1 deletion field/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ p3-monty-31 = { workspace = true }
p3-goldilocks = { workspace = true }
p3-baby-bear = { workspace = true }

lazy_static = { version = "1.5.0", optional = true }
lazy_static = {version = "1.5.0", default-features = false, features = ["spin_no_std"], optional = true }
rand_xoshiro = { version = "0.6.0", optional = true }

# Display math equations properly in documentation
Expand Down
9 changes: 3 additions & 6 deletions field/src/batch_util.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use p3_field::Field;
use p3_field::PackedField;
use p3_field::{Field, PackedField};

const fn pack_with_leftovers_split_point<P: PackedField>(slice: &[P::Scalar]) -> usize {
let n = slice.len();
Expand Down Expand Up @@ -30,8 +29,7 @@ pub fn batch_multiply_inplace<F: Field>(out: &mut [F], a: &[F]) {
assert_eq!(n, a.len(), "both arrays must have the same length");

// Split out slice of vectors, leaving leftovers as scalars
let (out_packed, out_leftovers) =
pack_slice_with_leftovers_mut::<F::Packing>(out);
let (out_packed, out_leftovers) = pack_slice_with_leftovers_mut::<F::Packing>(out);
let (a_packed, a_leftovers) = pack_slice_with_leftovers::<F::Packing>(a);

// Multiply packed and the leftovers
Expand All @@ -50,8 +48,7 @@ pub fn batch_add_inplace<F: Field>(out: &mut [F], a: &[F]) {
assert_eq!(n, a.len(), "both arrays must have the same length");

// Split out slice of vectors, leaving leftovers as scalars
let (out_packed, out_leftovers) =
pack_slice_with_leftovers_mut::<F::Packing>(out);
let (out_packed, out_leftovers) = pack_slice_with_leftovers_mut::<F::Packing>(out);
let (a_packed, a_leftovers) = pack_slice_with_leftovers::<F::Packing>(a);

// Add packed and the leftovers
Expand Down
6 changes: 4 additions & 2 deletions field/src/extension/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,19 @@ use p3_field::{AbstractExtensionField, Field};
use crate::types::HasExtension;

/// Flatten the slice by sending every extension field element to its D-sized canonical representation.
pub fn flatten<F: HasExtension<D>, const D: usize>(l: &[F::Extension]) -> Vec<F>
pub fn flatten<F, const D: usize>(l: &[F::Extension]) -> Vec<F>
where
F: Field,
F: HasExtension<D>,
{
l.iter().flat_map(|x| x.as_base_slice().to_vec()).collect()
}

/// Batch every D-sized chunks into extension field elements.
pub fn unflatten<F: HasExtension<D>, const D: usize>(l: &[F]) -> Vec<F::Extension>
pub fn unflatten<F, const D: usize>(l: &[F]) -> Vec<F::Extension>
where
F: Field,
F: HasExtension<D>,
{
debug_assert_eq!(l.len() % D, 0);
l.chunks_exact(D)
Expand Down
3 changes: 1 addition & 2 deletions field/src/fft.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
use alloc::vec::Vec;
use core::cmp::{max, min};

use p3_field::{Field, PackedValue, TwoAdicField};
use p3_field::{Field, PackedField, PackedValue, TwoAdicField};
use plonky2_util::{log2_strict, reverse_index_bits_in_place};
use unroll::unroll_for_loops;

use p3_field::PackedField;
use crate::polynomial::{PolynomialCoeffs, PolynomialValues};

pub type FftRootTable<F> = Vec<Vec<F>>;
Expand Down
5 changes: 1 addition & 4 deletions field/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,7 @@
#![deny(missing_debug_implementations)]
#![feature(specialization)]
#![cfg_attr(
all(
target_arch = "x86_64",
target_feature = "avx512f"
),
all(target_arch = "x86_64", target_feature = "avx512f"),
feature(stdarch_x86_avx512)
)]

Expand Down
6 changes: 4 additions & 2 deletions field/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,9 @@ use p3_baby_bear::BabyBear;
use p3_field::extension::{
BinomialExtensionField, BinomiallyExtendable, HasTwoAdicBionmialExtension,
};
use p3_field::{AbstractExtensionField, AbstractField, ExtensionField, PrimeField32, PrimeField64, TwoAdicField};
use p3_field::{
AbstractExtensionField, AbstractField, ExtensionField, PrimeField32, PrimeField64, TwoAdicField,
};
use p3_goldilocks::Goldilocks;
use rand::rngs::OsRng;
use rand::RngCore;
Expand Down Expand Up @@ -127,4 +129,4 @@ impl Sample for BabyBear {
use rand::Rng;
Self::from_canonical_u32(rng.gen_range(0..Self::ORDER_U32))
}
}
}
3 changes: 1 addition & 2 deletions field/src/zero_poly_coset.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
use alloc::vec::Vec;

use p3_field::{batch_multiplicative_inverse, Field, TwoAdicField};
use p3_field::{batch_multiplicative_inverse, Field, PackedField, TwoAdicField};

use p3_field::PackedField;
use crate::types::two_adic_subgroup;

/// Precomputations of the evaluation of `Z_H(X) = X^n - 1` on a coset `gK` with `H <= K`.
Expand Down
1 change: 1 addition & 0 deletions maybe_rayon/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
#![no_std]
#[cfg(not(feature = "parallel"))]
extern crate alloc;

Expand Down
4 changes: 2 additions & 2 deletions plonky2/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,15 @@ keywords.workspace = true
categories.workspace = true

[features]
default = ["gate_testing", "parallel", "rand_chacha"]
default = ["gate_testing", "parallel"]
gate_testing = []
parallel = ["hashbrown/rayon", "plonky2_maybe_rayon/parallel"]
std = ["anyhow/std", "rand/std", "itertools/use_std"]
timing = ["std", "dep:web-time"]
disable-randomness = ["plonky2_field/disable-randomness"]

[dependencies]
lazy_static = "1.5.0"
lazy_static = {version = "1.5.0", default-features = false, features = ["spin_no_std"]}
ahash = { workspace = true }
anyhow = { workspace = true }
hashbrown = { workspace = true }
Expand Down
5 changes: 2 additions & 3 deletions plonky2/benches/ffts.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use p3_baby_bear::BabyBear;
use p3_field::TwoAdicField;
use p3_goldilocks::Goldilocks;
use tynm::type_name;

use plonky2::field::polynomial::PolynomialCoeffs;
use plonky2_field::types::Sample;
use tynm::type_name;

mod allocator;

Expand Down
7 changes: 3 additions & 4 deletions plonky2/benches/field_arithmetic.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use p3_baby_bear::BabyBear;
use p3_field::{batch_multiplicative_inverse, TwoAdicField};
use p3_field::extension::BinomialExtensionField;
use p3_field::{batch_multiplicative_inverse, TwoAdicField};
use p3_goldilocks::Goldilocks;
use tynm::type_name;

use plonky2_field::types::Sample;
use tynm::type_name;

mod allocator;

Expand Down
5 changes: 2 additions & 3 deletions plonky2/benches/hashing.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use p3_goldilocks::Goldilocks;
use tynm::type_name;

use plonky2::field::types::Sample;
use plonky2::hash::hash_types::{BytesHash, RichField};
use plonky2::hash::keccak::KeccakHash;
use plonky2::hash::poseidon_goldilocks::{PoseidonGoldilocks, SPONGE_WIDTH};
use plonky2::plonk::config::Hasher;
use tynm::type_name;

mod allocator;

Expand Down
5 changes: 2 additions & 3 deletions plonky2/benches/merkle.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use p3_goldilocks::Goldilocks;
use tynm::type_name;

use plonky2::hash::hash_types::RichField;
use plonky2::hash::keccak::KeccakHash;
use plonky2::hash::merkle_tree::MerkleTree;
use plonky2::hash::poseidon_goldilocks::Poseidon64Hash;
use plonky2::plonk::config::Hasher;
use tynm::type_name;

mod allocator;

Expand Down
12 changes: 2 additions & 10 deletions plonky2/benches/recursion.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
use anyhow::anyhow;
use criterion::{Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, Criterion};
use log::{info, Level};
use p3_baby_bear::BabyBear;
use p3_goldilocks::Goldilocks;
use tynm::type_name;

use plonky2::gates::noop::NoopGate;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
Expand All @@ -19,6 +17,7 @@ use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget};
use plonky2::plonk::prover::prove;
use plonky2::util::proving_process_info::ProvingProcessInfo;
use plonky2_field::types::HasExtension;
use tynm::type_name;

mod allocator;

Expand All @@ -39,7 +38,6 @@ fn dummy_proof<
log2_size: usize,
) -> anyhow::Result<ProofTuple<F, C, D, NUM_HASH_OUT_ELTS>>
where

{
// 'size' is in degree, but we want number of noop gates. A non-zero amount of padding will be added and size will be rounded to the next power of two. To hit our target size, we go just under the previous power of two and hope padding is less than half the proof.
let num_dummy_gates = match log2_size {
Expand Down Expand Up @@ -83,7 +81,6 @@ fn get_recursive_circuit_data<
)
where
InnerC::Hasher: AlgebraicHasher<F, NUM_HASH_OUT_ELTS>,

{
let mut builder = CircuitBuilder::<F, D, NUM_HASH_OUT_ELTS>::new(config.clone());
let input_proof_target = builder.add_virtual_proof_with_pis(input_proof_common_circuit_data);
Expand Down Expand Up @@ -120,7 +117,6 @@ fn recursive_proof<
) -> anyhow::Result<ProofWithPublicInputs<F, C, D, NUM_HASH_OUT_ELTS>>
where
C::Hasher: AlgebraicHasher<F, NUM_HASH_OUT_ELTS>,

{
let mut pw = PartialWitness::new();
pw.set_proof_with_pis_target(input_proof_target, input_proof);
Expand Down Expand Up @@ -148,7 +144,6 @@ pub(crate) fn bench_recursion<
config: &CircuitConfig,
) where
C::Hasher: AlgebraicHasher<F, NUM_HASH_OUT_ELTS>,

{
let inner = dummy_proof::<F, C, D, NUM_HASH_OUT_ELTS>(config, 12).unwrap();
let (_, _, common_data) = &inner;
Expand Down Expand Up @@ -211,7 +206,6 @@ pub(crate) fn bench_merge<
config: &CircuitConfig,
) where
C::Hasher: AlgebraicHasher<F, NUM_HASH_OUT_ELTS>,

{
let inner = dummy_proof::<F, C, D, NUM_HASH_OUT_ELTS>(config, 12).unwrap();
let (_, _, common_data) = &inner;
Expand Down Expand Up @@ -285,7 +279,6 @@ fn get_merge_circuit_data<
)
where
InnerC::Hasher: AlgebraicHasher<F, NUM_HASH_OUT_ELTS>,

{
let mut builder = CircuitBuilder::<F, D, NUM_HASH_OUT_ELTS>::new(config.clone());
let input_proof_target_one =
Expand Down Expand Up @@ -332,7 +325,6 @@ fn merge_proof<
) -> anyhow::Result<ProofWithPublicInputs<F, C, D, NUM_HASH_OUT_ELTS>>
where
C::Hasher: AlgebraicHasher<F, NUM_HASH_OUT_ELTS>,

{
let mut pw = PartialWitness::new();
pw.set_proof_with_pis_target(input_proof_target_one, input_proof);
Expand Down
3 changes: 1 addition & 2 deletions plonky2/benches/reverse_index_bits.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use p3_goldilocks::Goldilocks;

use plonky2::field::types::Sample;
use plonky2_util::{reverse_index_bits, reverse_index_bits_in_place};

Expand Down
3 changes: 1 addition & 2 deletions plonky2/benches/transpose.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use p3_goldilocks::Goldilocks;

use plonky2::field::types::Sample;
use plonky2::util::transpose;

Expand Down
Loading

0 comments on commit c787b8a

Please sign in to comment.