Simplify types: remove option from CTL filters
CTL filters can already express perfectly well the behaviour that we
gave for `None`. No need to complicate anything.

Plus some random lint fixes that clippy demanded.
matthiasgoergens committed Apr 15, 2024
1 parent 53c5bc3 commit 0731fec
Showing 6 changed files with 40 additions and 92 deletions.
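
The change replaces `Option<Filter<F>>` with a plain `Filter<F>` whose `Default` is an always-on filter (no products, a single constant column of ones), so the old `None` ("keep every row") case becomes an ordinary filter value. Below is a minimal, self-contained sketch of that idea; `Col` and the `i64` "field" are simplified stand-ins for illustration, not the real `Column<F>`/`Filter<F>` types from `starky/src/lookup.rs`.

```rust
// Toy model of a CTL filter: a sum of pairwise column products plus a sum of
// "constant" columns, mirroring the `products`/`constants` split in `Filter<F>`.
enum Col {
    Constant(i64),  // a fixed value
    Single(usize),  // read one trace column at the given index
}

impl Col {
    fn eval(&self, row: &[i64]) -> i64 {
        match self {
            Col::Constant(c) => *c,
            Col::Single(i) => row[*i],
        }
    }
}

struct Filter {
    products: Vec<(Col, Col)>,
    constants: Vec<Col>,
}

impl Default for Filter {
    // The default filter is always on: it evaluates to 1 on every row,
    // which is exactly the behaviour `None` used to encode.
    fn default() -> Self {
        Self {
            products: vec![],
            constants: vec![Col::Constant(1)],
        }
    }
}

impl Filter {
    fn eval(&self, row: &[i64]) -> i64 {
        let products: i64 = self
            .products
            .iter()
            .map(|(a, b)| a.eval(row) * b.eval(row))
            .sum();
        let constants: i64 = self.constants.iter().map(|c| c.eval(row)).sum();
        products + constants
    }
}

fn main() {
    let row = [7, 0, 3];
    // Old `None` behaviour and new `Default` behaviour agree: the filter is 1.
    assert_eq!(Filter::default().eval(&row), 1);
    // A non-trivial filter still works as before, e.g. "column 1 times column 2".
    let f = Filter {
        products: vec![(Col::Single(1), Col::Single(2))],
        constants: vec![],
    };
    assert_eq!(f.eval(&row), 0);
}
```

At call sites, `None` simply becomes `Filter::default()` (or `Default::default()`), as in the `permutation_stark.rs` hunk below.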
1 change: 0 additions & 1 deletion plonky2/src/gates/lookup.rs
@@ -5,7 +5,6 @@ use alloc::{
vec,
vec::Vec,
};
use core::usize;

use itertools::Itertools;
use keccak_hash::keccak;
1 change: 0 additions & 1 deletion plonky2/src/gates/lookup_table.rs
@@ -6,7 +6,6 @@ use alloc::{
vec,
vec::Vec,
};
use core::usize;
#[cfg(feature = "std")]
use std::sync::Arc;

1 change: 1 addition & 0 deletions plonky2/src/plonk/copy_constraint.rs
@@ -7,6 +7,7 @@ use crate::iop::target::Target;
#[derive(Debug)]
pub struct CopyConstraint {
pub pair: (Target, Target),
#[allow(dead_code)]
pub name: String,
}

58 changes: 14 additions & 44 deletions starky/src/cross_table_lookup.rs
@@ -67,12 +67,12 @@ pub type TableIdx = usize;
pub struct TableWithColumns<F: Field> {
table: TableIdx,
columns: Vec<Column<F>>,
filter: Option<Filter<F>>,
filter: Filter<F>,
}

impl<F: Field> TableWithColumns<F> {
/// Generates a new `TableWithColumns` given a `table` index, a linear combination of columns `columns` and a `filter`.
pub fn new(table: TableIdx, columns: Vec<Column<F>>, filter: Option<Filter<F>>) -> Self {
pub fn new(table: TableIdx, columns: Vec<Column<F>>, filter: Filter<F>) -> Self {
Self {
table,
columns,
@@ -163,7 +163,7 @@ pub struct CtlZData<'a, F: Field> {
pub(crate) columns: Vec<&'a [Column<F>]>,
/// Vector of filter columns for the current table.
/// Each filter evaluates to either 1 or 0.
pub(crate) filter: Vec<Option<Filter<F>>>,
pub(crate) filter: Vec<Filter<F>>,
}

impl<'a, F: Field> CtlZData<'a, F> {
@@ -173,7 +173,7 @@ impl<'a, F: Field> CtlZData<'a, F> {
z: PolynomialValues<F>,
challenge: GrandProductChallenge<F>,
columns: Vec<&'a [Column<F>]>,
filter: Vec<Option<Filter<F>>>,
filter: Vec<Filter<F>>,
) -> Self {
Self {
helper_columns,
@@ -404,7 +404,7 @@ fn ctl_helper_zs_cols<F: Field, const N: usize>(
.map(|(table, group)| {
let columns_filters = group
.map(|table| (&table.columns[..], &table.filter))
.collect::<Vec<(&[Column<F>], &Option<Filter<F>>)>>();
.collect::<Vec<(&[Column<F>], &Filter<F>)>>();
(
table,
partial_sums(
@@ -484,7 +484,7 @@ where
/// Column linear combinations of the `CrossTableLookup`s.
pub(crate) columns: Vec<&'a [Column<F>]>,
/// Filter that evaluates to either 1 or 0.
pub(crate) filter: Vec<Option<Filter<F>>>,
pub(crate) filter: Vec<Filter<F>>,
}

impl<'a, F: RichField + Extendable<D>, const D: usize>
@@ -682,16 +682,8 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
let combin0 = challenges.combine(&evals[0]);
let combin1 = challenges.combine(&evals[1]);

let f0 = if let Some(filter0) = &filter[0] {
filter0.eval_filter(local_values, next_values)
} else {
P::ONES
};
let f1 = if let Some(filter1) = &filter[1] {
filter1.eval_filter(local_values, next_values)
} else {
P::ONES
};
let f0 = filter[0].eval_filter(local_values, next_values);
let f1 = filter[1].eval_filter(local_values, next_values);

consumer
.constraint_last_row(combin0 * combin1 * *local_z - f0 * combin1 - f1 * combin0);
@@ -700,11 +692,7 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
);
} else {
let combin0 = challenges.combine(&evals[0]);
let f0 = if let Some(filter0) = &filter[0] {
filter0.eval_filter(local_values, next_values)
} else {
P::ONES
};
let f0 = filter[0].eval_filter(local_values, next_values);
consumer.constraint_last_row(combin0 * *local_z - f0);
consumer.constraint_transition(combin0 * (*local_z - *next_z) - f0);
}
@@ -726,7 +714,7 @@ pub struct CtlCheckVarsTarget<F: Field, const D: usize> {
/// Column linear combinations of the `CrossTableLookup`s.
pub(crate) columns: Vec<Vec<Column<F>>>,
/// Filter that evaluates to either 1 or 0.
pub(crate) filter: Vec<Option<Filter<F>>>,
pub(crate) filter: Vec<Filter<F>>,
}

impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<F, D> {
@@ -856,8 +844,6 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

let one = builder.one_extension();

for lookup_vars in ctl_vars {
let CtlCheckVarsTarget {
helper_columns,
@@ -906,16 +892,8 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
let combin0 = challenges.combine_circuit(builder, &evals[0]);
let combin1 = challenges.combine_circuit(builder, &evals[1]);

let f0 = if let Some(filter0) = &filter[0] {
filter0.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};
let f1 = if let Some(filter1) = &filter[1] {
filter1.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};
let f0 = filter[0].eval_filter_circuit(builder, local_values, next_values);
let f1 = filter[1].eval_filter_circuit(builder, local_values, next_values);

let combined = builder.mul_sub_extension(combin1, *local_z, f1);
let combined = builder.mul_extension(combined, combin0);
@@ -928,11 +906,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
consumer.constraint_last_row(builder, constr);
} else {
let combin0 = challenges.combine_circuit(builder, &evals[0]);
let f0 = if let Some(filter0) = &filter[0] {
filter0.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};
let f0 = filter[0].eval_filter_circuit(builder, local_values, next_values);

let constr = builder.mul_sub_extension(combin0, *local_z, f0);
consumer.constraint_last_row(builder, constr);
@@ -1121,11 +1095,7 @@ pub mod debug_utils {
) {
let trace = &trace_poly_values[table.table];
for i in 0..trace[0].len() {
let filter = if let Some(combin) = &table.filter {
combin.eval_table(trace, i)
} else {
F::ONE
};
let filter = table.filter.eval_table(trace, i);
if filter.is_one() {
let row = table
.columns
69 changes: 24 additions & 45 deletions starky/src/lookup.rs
@@ -39,6 +39,16 @@ pub struct Filter<F: Field> {
constants: Vec<Column<F>>,
}

/// The default filter is always on.
impl<F: Field> Default for Filter<F> {
fn default() -> Self {
Self {
products: vec![],
constants: vec![Column::constant(F::ONE)],
}
}
}

impl<F: Field> Filter<F> {
/// Returns a filter from the provided `products` and `constants` vectors.
pub fn new(products: Vec<(Column<F>, Column<F>)>, constants: Vec<Column<F>>) -> Self {
@@ -396,7 +406,7 @@ impl<F: Field> Column<F> {
}
}

pub(crate) type ColumnFilter<'a, F> = (&'a [Column<F>], &'a Option<Filter<F>>);
pub(crate) type ColumnFilter<'a, F> = (&'a [Column<F>], &'a Filter<F>);

/// A [`Lookup`] defines a set of `columns` whose values should appear in a
/// `table_column` (i.e. the lookup table associated to these looking columns),
@@ -423,7 +433,7 @@ pub struct Lookup<F: Field> {

/// Columns to filter some elements. There is at most one filter
/// column per column to lookup.
pub filter_columns: Vec<Option<Filter<F>>>,
pub filter_columns: Vec<Filter<F>>,
}

impl<F: Field> Lookup<F> {
@@ -650,7 +660,7 @@ pub(crate) fn lookup_helper_columns<F: Field>(

/// Given data associated to a lookup, check the associated helper polynomials.
pub(crate) fn eval_helper_columns<F, FE, P, const D: usize, const D2: usize>(
filter: &[Option<Filter<F>>],
filter: &[Filter<F>],
columns: &[Vec<P>],
local_values: &[P],
next_values: &[P],
@@ -674,26 +684,14 @@ pub(crate) fn eval_helper_columns<F, FE, P, const D: usize, const D2: usize>(
let combin0 = challenges.combine(&chunk[0]);
let combin1 = challenges.combine(chunk[1].iter());

let f0 = if let Some(filter0) = &fs[0] {
filter0.eval_filter(local_values, next_values)
} else {
P::ONES
};
let f1 = if let Some(filter1) = &fs[1] {
filter1.eval_filter(local_values, next_values)
} else {
P::ONES
};
let f0 = fs[0].eval_filter(local_values, next_values);
let f1 = fs[1].eval_filter(local_values, next_values);

consumer.constraint(combin1 * combin0 * h - f0 * combin1 - f1 * combin0);
}
1 => {
let combin = challenges.combine(&chunk[0]);
let f0 = if let Some(filter1) = &fs[0] {
filter1.eval_filter(local_values, next_values)
} else {
P::ONES
};
let f0 = fs[0].eval_filter(local_values, next_values);
consumer.constraint(combin * h - f0);
}

@@ -707,7 +705,7 @@
/// Given data associated to a lookup (either a CTL or a range-check), check the associated helper polynomials.
pub(crate) fn eval_helper_columns_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
filter: &[Option<Filter<F>>],
filter: &[Filter<F>],
columns: &[Vec<ExtensionTarget<D>>],
local_values: &[ExtensionTarget<D>],
next_values: &[ExtensionTarget<D>],
@@ -722,22 +720,13 @@ pub(crate) fn eval_helper_columns_circuit<F: RichField + Extendable<D>, const D:
.chunks(chunk_size)
.zip(filter.chunks(chunk_size).zip(helper_columns))
{
let one = builder.one_extension();
match chunk.len() {
2 => {
let combin0 = challenges.combine_circuit(builder, &chunk[0]);
let combin1 = challenges.combine_circuit(builder, &chunk[1]);

let f0 = if let Some(filter0) = &fs[0] {
filter0.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};
let f1 = if let Some(filter1) = &fs[1] {
filter1.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};
let f0 = fs[0].eval_filter_circuit(builder, local_values, next_values);
let f1 = fs[1].eval_filter_circuit(builder, local_values, next_values);

let constr = builder.mul_sub_extension(combin0, h, f0);
let constr = builder.mul_extension(constr, combin1);
@@ -748,11 +737,7 @@ pub(crate) fn eval_helper_columns_circuit<F: RichField + Extendable<D>, const D:
}
1 => {
let combin = challenges.combine_circuit(builder, &chunk[0]);
let f0 = if let Some(filter1) = &fs[0] {
filter1.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};
let f0 = fs[0].eval_filter_circuit(builder, local_values, next_values);
let constr = builder.mul_sub_extension(combin, h, f0);
consumer.constraint(builder, constr);
}
@@ -788,13 +773,10 @@ pub(crate) fn get_helper_cols<F: Field>(
let mut filter_col = Vec::with_capacity(degree);
let first_combined = (0..degree)
.map(|d| {
let f = if let Some(filter) = first_filter {
let f = filter.eval_table(trace, d);
let f = {
let f = first_filter.eval_table(trace, d);
filter_col.push(f);
f
} else {
filter_col.push(F::ONE);
F::ONE
};
if f.is_one() {
let evals = first_col
@@ -821,13 +803,10 @@ pub(crate) fn get_helper_cols<F: Field>(
let mut filter_col = Vec::with_capacity(degree);
let mut combined = (0..degree)
.map(|d| {
let f = if let Some(filter) = filt {
let f = filter.eval_table(trace, d);
let f = {
let f = filt.eval_table(trace, d);
filter_col.push(f);
f
} else {
filter_col.push(F::ONE);
F::ONE
};
if f.is_one() {
let evals = col
2 changes: 1 addition & 1 deletion starky/src/permutation_stark.rs
@@ -72,7 +72,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for PermutationSt
columns: vec![Column::single(0)],
table_column: Column::single(1),
frequencies_column: Column::single(2),
filter_columns: vec![None; 1],
filter_columns: vec![Default::default()],
}]
}

