Skip to content

Commit

Permalink
Add plonky3 support (#1158)
Browse files Browse the repository at this point in the history
plonky3 backend integration for witness columns only
  • Loading branch information
Schaeff authored Jun 26, 2024
1 parent 924b9b0 commit 01acfd7
Show file tree
Hide file tree
Showing 18 changed files with 638 additions and 29 deletions.
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ members = [
"pil-analyzer",
"pipeline",
"pilopt",
"plonky3",
"asm-to-pil",
"backend",
"ast",
Expand Down
3 changes: 3 additions & 0 deletions backend/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ repository = { workspace = true }
default = []
halo2 = ["dep:halo2_proofs", "dep:halo2_curves", "dep:snark-verifier", "dep:halo2_solidity_verifier"]
estark-polygon = ["dep:pil-stark-prover"]
plonky3 = ["dep:powdr-plonky3"]

[dependencies]
powdr-ast.workspace = true
Expand All @@ -19,6 +20,8 @@ powdr-pil-analyzer.workspace = true
powdr-executor.workspace = true
powdr-parser-util.workspace = true

powdr-plonky3 = { path = "../plonky3", optional = true }

starky = { git = "https://github.com/0xEigenLabs/eigen-zkvm.git", rev = "cf405b2e2cecb8567cfd083a55936b71722276d5" }
pil-stark-prover = { git = "https://github.com/powdr-labs/pil-stark-prover.git", rev = "769b1153f3ae2d7cbab4c8acf33865ed13f8a823", optional = true }

Expand Down
7 changes: 7 additions & 0 deletions backend/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
mod estark;
#[cfg(feature = "halo2")]
mod halo2;
#[cfg(feature = "plonky3")]
mod plonky3;

mod composite;

Expand Down Expand Up @@ -30,6 +32,9 @@ pub enum BackendType {
EStarkStarky,
#[strum(serialize = "estark-dump")]
EStarkDump,
#[cfg(feature = "plonky3")]
#[strum(serialize = "plonky3")]
Plonky3,
}

pub type BackendOptions = String;
Expand All @@ -52,6 +57,8 @@ impl BackendType {
BackendType::EStarkPolygon => Box::new(estark::polygon_wrapper::Factory),
BackendType::EStarkStarky => Box::new(estark::starky_wrapper::Factory),
BackendType::EStarkDump => Box::new(estark::DumpFactory),
#[cfg(feature = "plonky3")]
BackendType::Plonky3 => Box::new(plonky3::Factory),
}
}
}
Expand Down
53 changes: 53 additions & 0 deletions backend/src/plonky3/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
use std::{io, path::Path};

use powdr_ast::analyzed::Analyzed;
use powdr_executor::witgen::WitgenCallback;
use powdr_number::FieldElement;
use powdr_plonky3::Plonky3Prover;

use crate::{Backend, BackendFactory, BackendOptions, Error, Proof};

/// Factory for the plonky3 backend; builds [`Plonky3Prover`]-based backends.
pub(crate) struct Factory;

impl<T: FieldElement> BackendFactory<T> for Factory {
fn create<'a>(
&self,
pil: &'a Analyzed<T>,
_fixed: &'a [(String, Vec<T>)],
_output_dir: Option<&'a Path>,
setup: Option<&mut dyn io::Read>,
verification_key: Option<&mut dyn io::Read>,
verification_app_key: Option<&mut dyn io::Read>,
_: BackendOptions,
) -> Result<Box<dyn crate::Backend<'a, T> + 'a>, Error> {
if setup.is_some() {
return Err(Error::NoSetupAvailable);
}
if verification_key.is_some() {
return Err(Error::NoVerificationAvailable);
}
if verification_app_key.is_some() {
return Err(Error::NoAggregationAvailable);
}
Ok(Box::new(Plonky3Prover::new(pil)))
}
}

impl<'a, T: FieldElement> Backend<'a, T> for Plonky3Prover<'a, T> {
fn verify(&self, proof: &[u8], instances: &[Vec<T>]) -> Result<(), Error> {
Ok(self.verify(proof, instances)?)
}

fn prove(
&self,
witness: &[(String, Vec<T>)],
prev_proof: Option<Proof>,
witgen_callback: WitgenCallback<T>,
) -> Result<Proof, Error> {
if prev_proof.is_some() {
return Err(Error::NoAggregationAvailable);
}

Ok(self.prove(witness, witgen_callback)?)
}
}
3 changes: 3 additions & 0 deletions book/src/backends/plonky3.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Plonky3

powdr partially supports [plonky3](https://github.com/Plonky3/Plonky3) with the Goldilocks field. Progress is tracked [here](https://github.com/powdr-labs/powdr/issues/1468).
1 change: 1 addition & 0 deletions cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ default-run = "powdr"
[features]
default = [] # halo2 is disabled by default
halo2 = ["powdr-backend/halo2", "powdr-pipeline/halo2"]
plonky3 = ["powdr-backend/plonky3"]
estark-polygon = ["powdr-backend/estark-polygon", "powdr-pipeline/estark-polygon"]

[dependencies]
Expand Down
9 changes: 4 additions & 5 deletions executor/src/witgen/mod.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use std::collections::{BTreeMap, HashMap};
use std::rc::Rc;
use std::sync::Arc;

use powdr_ast::analyzed::{
Expand Down Expand Up @@ -49,15 +48,15 @@ impl<T, F> QueryCallback<T> for F where F: Fn(&str) -> Result<Option<T>, String>

#[derive(Clone)]
pub struct WitgenCallback<T> {
analyzed: Rc<Analyzed<T>>,
fixed_col_values: Rc<Vec<(String, Vec<T>)>>,
analyzed: Arc<Analyzed<T>>,
fixed_col_values: Arc<Vec<(String, Vec<T>)>>,
query_callback: Arc<dyn QueryCallback<T>>,
}

impl<T: FieldElement> WitgenCallback<T> {
pub fn new(
analyzed: Rc<Analyzed<T>>,
fixed_col_values: Rc<Vec<(String, Vec<T>)>>,
analyzed: Arc<Analyzed<T>>,
fixed_col_values: Arc<Vec<(String, Vec<T>)>>,
query_callback: Option<Arc<dyn QueryCallback<T>>>,
) -> Self {
let query_callback = query_callback.unwrap_or_else(|| Arc::new(unused_query_callback()));
Expand Down
1 change: 1 addition & 0 deletions pipeline/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ repository = { workspace = true }
[features]
default = [] # halo2 is disabled by default
halo2 = ["powdr-backend/halo2"]
plonky3 = ["powdr-backend/plonky3"]
estark-polygon = ["powdr-backend/estark-polygon"]

[dependencies]
Expand Down
49 changes: 26 additions & 23 deletions pipeline/src/pipeline.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ use std::{
io::{self, BufReader},
marker::Send,
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
time::Instant,
};
Expand Down Expand Up @@ -63,11 +62,11 @@ pub struct Artifacts<T: FieldElement> {
/// An analyzed .pil file, with all dependencies imported, potentially from other files.
analyzed_pil: Option<Analyzed<T>>,
/// An optimized .pil file.
optimized_pil: Option<Rc<Analyzed<T>>>,
optimized_pil: Option<Arc<Analyzed<T>>>,
/// Fully evaluated fixed columns.
fixed_cols: Option<Rc<Columns<T>>>,
fixed_cols: Option<Arc<Columns<T>>>,
/// Generated witnesses.
witness: Option<Rc<Columns<T>>>,
witness: Option<Arc<Columns<T>>>,
/// The proof (if successful).
proof: Option<Proof>,
}
Expand Down Expand Up @@ -373,7 +372,7 @@ impl<T: FieldElement> Pipeline<T> {

Ok(Pipeline {
artifact: Artifacts {
optimized_pil: Some(Rc::new(analyzed)),
optimized_pil: Some(Arc::new(analyzed)),
..Default::default()
},
name,
Expand All @@ -394,7 +393,7 @@ impl<T: FieldElement> Pipeline<T> {

Pipeline {
artifact: Artifacts {
fixed_cols: Some(Rc::new(fixed)),
fixed_cols: Some(Arc::new(fixed)),
..self.artifact
},
..self
Expand All @@ -414,7 +413,7 @@ impl<T: FieldElement> Pipeline<T> {

Pipeline {
artifact: Artifacts {
witness: Some(Rc::new(witness)),
witness: Some(Arc::new(witness)),
..self.artifact
},
..self
Expand All @@ -430,7 +429,7 @@ impl<T: FieldElement> Pipeline<T> {
}
Pipeline {
artifact: Artifacts {
witness: Some(Rc::new(witness)),
witness: Some(Arc::new(witness)),
..self.artifact
},
..self
Expand Down Expand Up @@ -777,7 +776,7 @@ impl<T: FieldElement> Pipeline<T> {
Ok(self.artifact.analyzed_pil.as_ref().unwrap())
}

pub fn compute_optimized_pil(&mut self) -> Result<Rc<Analyzed<T>>, Vec<String>> {
pub fn compute_optimized_pil(&mut self) -> Result<Arc<Analyzed<T>>, Vec<String>> {
if let Some(ref optimized_pil) = self.artifact.optimized_pil {
return Ok(optimized_pil.clone());
}
Expand All @@ -790,16 +789,16 @@ impl<T: FieldElement> Pipeline<T> {
self.maybe_write_pil(&optimized, "_opt")?;
self.maybe_write_pil_object(&optimized, "_opt")?;

self.artifact.optimized_pil = Some(Rc::new(optimized));
self.artifact.optimized_pil = Some(Arc::new(optimized));

Ok(self.artifact.optimized_pil.as_ref().unwrap().clone())
}

pub fn optimized_pil(&self) -> Result<Rc<Analyzed<T>>, Vec<String>> {
pub fn optimized_pil(&self) -> Result<Arc<Analyzed<T>>, Vec<String>> {
Ok(self.artifact.optimized_pil.as_ref().unwrap().clone())
}

pub fn compute_fixed_cols(&mut self) -> Result<Rc<Columns<T>>, Vec<String>> {
pub fn compute_fixed_cols(&mut self) -> Result<Arc<Columns<T>>, Vec<String>> {
if let Some(ref fixed_cols) = self.artifact.fixed_cols {
return Ok(fixed_cols.clone());
}
Expand All @@ -813,16 +812,16 @@ impl<T: FieldElement> Pipeline<T> {
self.maybe_write_constants(&fixed_cols)?;
self.log(&format!("Took {}", start.elapsed().as_secs_f32()));

self.artifact.fixed_cols = Some(Rc::new(fixed_cols));
self.artifact.fixed_cols = Some(Arc::new(fixed_cols));

Ok(self.artifact.fixed_cols.as_ref().unwrap().clone())
}

pub fn fixed_cols(&self) -> Result<Rc<Columns<T>>, Vec<String>> {
pub fn fixed_cols(&self) -> Result<Arc<Columns<T>>, Vec<String>> {
Ok(self.artifact.fixed_cols.as_ref().unwrap().clone())
}

pub fn compute_witness(&mut self) -> Result<Rc<Columns<T>>, Vec<String>> {
pub fn compute_witness(&mut self) -> Result<Arc<Columns<T>>, Vec<String>> {
if let Some(ref witness) = self.artifact.witness {
return Ok(witness.clone());
}
Expand All @@ -849,12 +848,12 @@ impl<T: FieldElement> Pipeline<T> {

self.maybe_write_witness(&fixed_cols, &witness)?;

self.artifact.witness = Some(Rc::new(witness));
self.artifact.witness = Some(Arc::new(witness));

Ok(self.artifact.witness.as_ref().unwrap().clone())
}

pub fn witness(&self) -> Result<Rc<Columns<T>>, Vec<String>> {
pub fn witness(&self) -> Result<Arc<Columns<T>>, Vec<String>> {
Ok(self.artifact.witness.as_ref().unwrap().clone())
}

Expand Down Expand Up @@ -1083,11 +1082,13 @@ impl<T: FieldElement> Pipeline<T> {
.as_ref()
.map(|path| BufReader::new(fs::File::open(path).unwrap()));

let mut vkey_file = if let Some(ref path) = self.arguments.vkey_file {
BufReader::new(fs::File::open(path).unwrap())
} else {
panic!("Verification key should have been provided for verification")
};
let mut vkey_file = self
.arguments
.vkey_file
.as_ref()
.map(fs::File::open)
.map(Result::unwrap)
.map(BufReader::new);

let pil = self.compute_optimized_pil()?;
let fixed_cols = self.compute_fixed_cols()?;
Expand All @@ -1100,7 +1101,9 @@ impl<T: FieldElement> Pipeline<T> {
setup_file
.as_mut()
.map(|file| file as &mut dyn std::io::Read),
Some(&mut vkey_file),
vkey_file
.as_mut()
.map(|file| file as &mut dyn std::io::Read),
// We shouldn't need the app verification key for this
None,
self.arguments.backend_options.clone(),
Expand Down
29 changes: 29 additions & 0 deletions pipeline/src/test_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,35 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>) {
#[cfg(not(feature = "halo2"))]
pub fn gen_halo2_proof(_file_name: &str, _inputs: Vec<Bn254Field>) {}

#[cfg(feature = "plonky3")]
pub fn test_plonky3(file_name: &str, inputs: Vec<GoldilocksField>) {
    let tmp_dir = mktemp::Temp::new_dir().unwrap();

    // Build a pipeline over the test file, targeting the plonky3 backend.
    let mut pipeline = Pipeline::default()
        .with_tmp_output(&tmp_dir)
        .from_file(resolve_test_file(file_name))
        .with_prover_inputs(inputs)
        .with_backend(powdr_backend::BackendType::Plonky3, None);

    // Produce a proof.
    let proof = pipeline.compute_proof().cloned().unwrap();

    // Collect the public values (dropping their names) ...
    let public_values = pipeline.publics().clone().unwrap();
    let publics: Vec<GoldilocksField> =
        public_values.iter().map(|(_name, value)| *value).collect();

    // ... and check that the proof verifies against them.
    pipeline.verify(&proof, &[publics]).unwrap();
}

/// No-op stand-in used when the `plonky3` feature is disabled, so callers
/// compile either way. Parameters are named (rather than bare `_`) for
/// consistency with the `gen_halo2_proof` stub above.
#[cfg(not(feature = "plonky3"))]
pub fn test_plonky3(_file_name: &str, _inputs: Vec<GoldilocksField>) {}

// NOTE(review): no `#[cfg(feature = "plonky3")]` counterpart of
// `gen_plonky3_proof` is visible in this change, so with the feature enabled
// this symbol may not exist at all — confirm an enabled implementation exists
// (or is planned) before any test calls it.
#[cfg(not(feature = "plonky3"))]
pub fn gen_plonky3_proof(_: &str, _: Vec<GoldilocksField>) {}

/// Returns the analyzed PIL containing only the std library.
pub fn std_analyzed<T: FieldElement>() -> Analyzed<T> {
let mut pipeline = Pipeline::default().from_asm_string(String::new(), None);
Expand Down
9 changes: 8 additions & 1 deletion pipeline/tests/pil.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ use powdr_number::GoldilocksField;
use powdr_pipeline::test_util::{
assert_proofs_fail_for_invalid_witnesses, assert_proofs_fail_for_invalid_witnesses_estark,
assert_proofs_fail_for_invalid_witnesses_halo2,
assert_proofs_fail_for_invalid_witnesses_pilcom, gen_estark_proof, test_halo2,
assert_proofs_fail_for_invalid_witnesses_pilcom, gen_estark_proof, test_halo2, test_plonky3,
verify_test_file,
};

use test_log::test;

pub fn verify_pil(file_name: &str, inputs: Vec<GoldilocksField>) {
Expand Down Expand Up @@ -238,6 +239,12 @@ fn halo_without_lookup() {
gen_estark_proof(f, Default::default());
}

#[test]
fn add() {
    // Prove and verify pil/add.pil end-to-end with the plonky3 backend
    // (no-op when the `plonky3` feature is disabled).
    test_plonky3("pil/add.pil", Default::default());
}

#[test]
fn simple_div() {
let f = "pil/simple_div.pil";
Expand Down
Loading

0 comments on commit 01acfd7

Please sign in to comment.