diff --git a/.noir-sync-commit b/.noir-sync-commit index 0421ede08e4..b6e1166fe48 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -e9d3ccf5bae50241d8183ba5972dce62fbd700a4 +31640e91ba75b9c5200ea66d1f54cc5185e0d196 diff --git a/noir/noir-repo/.github/workflows/memory_report.yml b/noir/noir-repo/.github/workflows/memory_report.yml deleted file mode 100644 index c31c750e6fc..00000000000 --- a/noir/noir-repo/.github/workflows/memory_report.yml +++ /dev/null @@ -1,88 +0,0 @@ -name: Report Peak Memory - -on: - push: - branches: - - master - pull_request: - -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - generate_memory_report: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write - - steps: - - uses: actions/checkout@v4 - - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Generate Memory report - working-directory: ./test_programs - run: | - chmod +x memory_report.sh - ./memory_report.sh - mv memory_report.json ../memory_report.json - - - name: Parse memory report - id: memory_report - uses: 
noir-lang/noir-bench-report@ccb0d806a91d3bd86dba0ba3d580a814eed5673c - with: - report: memory_report.json - header: | - # Memory Report - memory_report: true - - - name: Add memory report to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: memory - message: ${{ steps.memory_report.outputs.markdown }} \ No newline at end of file diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 2f19ed704b2..96ceb94fcdd 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -13,7 +13,7 @@ dependencies = [ "criterion", "flate2", "fxhash", - "pprof 0.13.0", + "pprof", "serde", "serde-big-array", "serde-generate", @@ -158,6 +158,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "aligned-vec" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e0966165eaf052580bd70eb1b32cb3d6245774c0104d1b2793e9650bf83b52a" +dependencies = [ + "equator", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -604,7 +613,7 @@ dependencies = [ "lazy_static", "noir_grumpkin", "num-bigint", - "pprof 0.12.1", + "pprof", ] [[package]] @@ -1417,6 +1426,26 @@ dependencies = [ "log", ] +[[package]] +name = "equator" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c35da53b5a021d2484a7cc49b2ac7f2d840f8236a286f84202369bd338d761ea" +dependencies = [ + "equator-macro", +] + +[[package]] +name = "equator-macro" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "equivalent" version = "1.0.1" @@ -2807,7 +2836,7 @@ dependencies = [ "notify", "notify-debouncer-full", "paste", - "pprof 0.13.0", + "pprof", "predicates 2.1.5", "prettytable-rs", 
"proptest", @@ -3579,32 +3608,11 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "pprof" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978385d59daf9269189d052ca8a84c1acfd0715c0599a5d5188d4acc078ca46a" -dependencies = [ - "backtrace", - "cfg-if 1.0.0", - "criterion", - "findshlibs", - "inferno", - "libc", - "log", - "nix 0.26.4", - "once_cell", - "parking_lot 0.12.3", - "smallvec", - "symbolic-demangle", - "tempfile", - "thiserror", -] - -[[package]] -name = "pprof" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef5c97c51bd34c7e742402e216abdeb44d415fbe6ae41d56b114723e953711cb" +checksum = "ebbe2f8898beba44815fdc9e5a4ae9c929e21c5dc29b0c774a15555f7f58d6d0" dependencies = [ + "aligned-vec", "backtrace", "cfg-if 1.0.0", "criterion", diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 94ebe54fde1..4ce0ddd999f 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -126,7 +126,7 @@ codespan-reporting = "0.11.1" criterion = "0.5.0" # Note that using the "frame-pointer" feature breaks framegraphs on linux # https://github.com/tikv/pprof-rs/pull/172 -pprof = { version = "0.13", features = ["flamegraph", "criterion"] } +pprof = { version = "0.14", features = ["flamegraph", "criterion"] } cfg-if = "1.0.0" dirs = "4" diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml index 8829692b9b4..825a0ef0481 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -30,7 +30,7 @@ num-bigint.workspace = true [dev-dependencies] ark-std.workspace = true criterion = "0.5.0" -pprof = { version = "0.12", features = [ +pprof = { version = "0.14", features = [ "flamegraph", "frame-pointer", "criterion", diff --git 
a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs index 67f7cf2dc34..0a6e8824223 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs @@ -69,6 +69,8 @@ pub(super) fn compile_array_copy_procedure( BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, 1_usize.into(), ); + // Decrease the original ref count now that this copy is no longer pointing to it + ctx.codegen_usize_op(rc.address, rc.address, BrilligBinaryOp::Sub, 1); } }); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 0479f8da0b7..0ae61404442 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -441,29 +441,38 @@ impl FunctionBuilder { /// Insert instructions to increment the reference count of any array(s) stored /// within the given value. If the given value is not an array and does not contain /// any arrays, this does nothing. - pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) { - self.update_array_reference_count(value, true); + /// + /// Returns whether a reference count instruction was issued. + pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) -> bool { + self.update_array_reference_count(value, true) } /// Insert instructions to decrement the reference count of any array(s) stored /// within the given value. If the given value is not an array and does not contain /// any arrays, this does nothing. 
- pub(crate) fn decrement_array_reference_count(&mut self, value: ValueId) { - self.update_array_reference_count(value, false); + /// + /// Returns whether a reference count instruction was issued. + pub(crate) fn decrement_array_reference_count(&mut self, value: ValueId) -> bool { + self.update_array_reference_count(value, false) } /// Increment or decrement the given value's reference count if it is an array. /// If it is not an array, this does nothing. Note that inc_rc and dec_rc instructions /// are ignored outside of unconstrained code. - fn update_array_reference_count(&mut self, value: ValueId, increment: bool) { + /// + /// Returns whether a reference count instruction was issued. + fn update_array_reference_count(&mut self, value: ValueId, increment: bool) -> bool { match self.type_of_value(value) { - Type::Numeric(_) => (), - Type::Function => (), + Type::Numeric(_) => false, + Type::Function => false, Type::Reference(element) => { if element.contains_an_array() { let reference = value; let value = self.insert_load(reference, element.as_ref().clone()); self.update_array_reference_count(value, increment); + true + } else { + false } } Type::Array(..) | Type::Slice(..) => { @@ -474,6 +483,7 @@ impl FunctionBuilder { } else { self.insert_dec_rc(value); } + true } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index a0c23ad70aa..6ebd2aa1105 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -129,7 +129,7 @@ impl<'f> FunctionInserter<'f> { // another MakeArray instruction. Note that this assumes the function inserter is inserting // in control-flow order. Otherwise we could refer to ValueIds defined later in the program. 
let make_array = if let Instruction::MakeArray { elements, typ } = &instruction { - if self.array_is_constant(elements) { + if self.array_is_constant(elements) && self.function.runtime().is_acir() { if let Some(fetched_value) = self.get_cached_array(elements, typ) { assert_eq!(results.len(), 1); self.values.insert(results[0], fetched_value); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index e34e0070d4b..76409f6a20a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -429,7 +429,7 @@ impl Instruction { /// conditional on whether the caller wants the predicate to be taken into account or not. pub(crate) fn can_be_deduplicated( &self, - dfg: &DataFlowGraph, + function: &Function, deduplicate_with_predicate: bool, ) -> bool { use Instruction::*; @@ -443,7 +443,7 @@ impl Instruction { | IncrementRc { .. } | DecrementRc { .. } => false, - Call { func, .. } => match dfg[*func] { + Call { func, .. } => match function.dfg[*func] { Value::Intrinsic(intrinsic) => { intrinsic.can_be_deduplicated(deduplicate_with_predicate) } @@ -453,8 +453,11 @@ impl Instruction { // We can deduplicate these instructions if we know the predicate is also the same. Constrain(..) | RangeCheck { .. } => deduplicate_with_predicate, - // This should never be side-effectful - MakeArray { .. } => true, + // Arrays can be mutated in unconstrained code so code that handles this case must + // take care to track whether the array was possibly mutated or not before + // deduplicating. Since we don't know if the containing pass checks for this, we + // can only assume these are safe to deduplicate in constrained code. + MakeArray { .. } => function.runtime().is_acir(), // These can have different behavior depending on the EnableSideEffectsIf context. 
// Replacing them with a similar instruction potentially enables replacing an instruction @@ -467,7 +470,7 @@ impl Instruction { | IfElse { .. } | ArrayGet { .. } | ArraySet { .. } => { - deduplicate_with_predicate || !self.requires_acir_gen_predicate(dfg) + deduplicate_with_predicate || !self.requires_acir_gen_predicate(&function.dfg) } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 4c08b533d99..a8db5e2ff94 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -841,27 +841,27 @@ mod tests { #[test] fn simplify_derive_generators_has_correct_type() { - let src = " + let src = r#" brillig(inline) fn main f0 { b0(): - v0 = make_array [u8 68, u8 69, u8 70, u8 65, u8 85, u8 76, u8 84, u8 95, u8 68, u8 79, u8 77, u8 65, u8 73, u8 78, u8 95, u8 83, u8 69, u8 80, u8 65, u8 82, u8 65, u8 84, u8 79, u8 82] : [u8; 24] + v0 = make_array b"DEFAULT_DOMAIN_SEPARATOR" // This call was previously incorrectly simplified to something that returned `[Field; 3]` v2 = call derive_pedersen_generators(v0, u32 0) -> [(Field, Field, u1); 1] return v2 } - "; + "#; let ssa = Ssa::from_str(src).unwrap(); - let expected = " + let expected = r#" brillig(inline) fn main f0 { b0(): - v15 = make_array [u8 68, u8 69, u8 70, u8 65, u8 85, u8 76, u8 84, u8 95, u8 68, u8 79, u8 77, u8 65, u8 73, u8 78, u8 95, u8 83, u8 69, u8 80, u8 65, u8 82, u8 65, u8 84, u8 79, u8 82] : [u8; 24] + v15 = make_array b"DEFAULT_DOMAIN_SEPARATOR" v19 = make_array [Field 3728882899078719075161482178784387565366481897740339799480980287259621149274, Field -9903063709032878667290627648209915537972247634463802596148419711785767431332, u1 0] : [(Field, Field, u1); 1] return v19 } - "; + "#; assert_normalized_ssa_equals(ssa, expected); } } diff --git 
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs index 5a451301d24..aa2952d5abc 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -5,8 +5,11 @@ use std::{ }; use acvm::acir::AcirField; +use im::Vector; use iter_extended::vecmap; +use crate::ssa::ir::types::{NumericType, Type}; + use super::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -220,6 +223,28 @@ fn display_instruction_inner( ) } Instruction::MakeArray { elements, typ } => { + // If the array is a byte array, we check if all the bytes are printable ascii characters + // and, if so, we print the array as a string literal (easier to understand). + // It could happen that the byte array is a random byte sequence that happens to be printable + // (it didn't come from a string literal) but this still reduces the noise in the output + // and actually represents the same value. 
+ let (element_types, is_slice) = match typ { + Type::Array(types, _) => (types, false), + Type::Slice(types) => (types, true), + _ => panic!("Expected array or slice type for MakeArray"), + }; + if element_types.len() == 1 + && element_types[0] == Type::Numeric(NumericType::Unsigned { bit_size: 8 }) + { + if let Some(string) = try_byte_array_to_string(elements, function) { + if is_slice { + return writeln!(f, "make_array &b{:?}", string); + } else { + return writeln!(f, "make_array b{:?}", string); + } + } + } + write!(f, "make_array [")?; for (i, element) in elements.iter().enumerate() { @@ -234,6 +259,25 @@ fn display_instruction_inner( } } +fn try_byte_array_to_string(elements: &Vector, function: &Function) -> Option { + let mut string = String::new(); + for element in elements { + let element = function.dfg.get_numeric_constant(*element)?; + let element = element.try_to_u32()?; + if element > 0xFF { + return None; + } + let byte = element as u8; + if byte.is_ascii_alphanumeric() || byte.is_ascii_punctuation() || byte.is_ascii_whitespace() + { + string.push(byte as char); + } else { + return None; + } + } + Some(string) +} + fn result_types(function: &Function, results: &[ValueId]) -> String { let types = vecmap(results, |result| function.dfg.type_of_value(*result).to_string()); if types.is_empty() { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs index 16f4b8d2431..4e4f7e8aa62 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -190,6 +190,15 @@ impl Type { } } + /// Retrieves the array or slice type within this type, or panics if there is none. 
+ pub(crate) fn get_contained_array(&self) -> &Type { + match self { + Type::Numeric(_) | Type::Function => panic!("Expected an array type"), + Type::Array(_, _) | Type::Slice(_) => self, + Type::Reference(element) => element.get_contained_array(), + } + } + pub(crate) fn element_types(self) -> Arc> { match self { Type::Array(element_types, _) | Type::Slice(element_types) => element_types, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index d1d84c305d2..776ff93abba 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -159,7 +159,7 @@ impl Function { } context.visited_blocks.insert(block); - context.fold_constants_in_block(&mut self.dfg, &mut dom, block); + context.fold_constants_in_block(self, &mut dom, block); } } } @@ -266,18 +266,19 @@ impl<'brillig> Context<'brillig> { fn fold_constants_in_block( &mut self, - dfg: &mut DataFlowGraph, + function: &mut Function, dom: &mut DominatorTree, block: BasicBlockId, ) { - let instructions = dfg[block].take_instructions(); + let instructions = function.dfg[block].take_instructions(); // Default side effect condition variable with an enabled state. 
- let mut side_effects_enabled_var = dfg.make_constant(FieldElement::one(), Type::bool()); + let mut side_effects_enabled_var = + function.dfg.make_constant(FieldElement::one(), Type::bool()); for instruction_id in instructions { self.fold_constants_into_instruction( - dfg, + function, dom, block, instruction_id, @@ -289,13 +290,14 @@ impl<'brillig> Context<'brillig> { fn fold_constants_into_instruction( &mut self, - dfg: &mut DataFlowGraph, + function: &mut Function, dom: &mut DominatorTree, mut block: BasicBlockId, id: InstructionId, side_effects_enabled_var: &mut ValueId, ) { let constraint_simplification_mapping = self.get_constraint_map(*side_effects_enabled_var); + let dfg = &mut function.dfg; let instruction = Self::resolve_instruction(id, block, dfg, dom, constraint_simplification_mapping); @@ -308,6 +310,15 @@ impl<'brillig> Context<'brillig> { { match cache_result { CacheResult::Cached(cached) => { + // We track whether we may mutate MakeArray instructions before we deduplicate + // them but we still need to issue an extra inc_rc in case they're mutated afterward. + if matches!(instruction, Instruction::MakeArray { .. }) { + let value = *cached.last().unwrap(); + let inc_rc = Instruction::IncrementRc { value }; + let call_stack = dfg.get_call_stack(id); + dfg.insert_instruction_and_results(inc_rc, block, None, call_stack); + } + Self::replace_result_ids(dfg, &old_results, cached); return; } @@ -321,24 +332,17 @@ impl<'brillig> Context<'brillig> { } }; - let new_results = // First try to inline a call to a brillig function with all constant arguments. - Self::try_inline_brillig_call_with_all_constants( + let new_results = Self::try_inline_brillig_call_with_all_constants( &instruction, &old_results, block, dfg, self.brillig_info, ) + // Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs. 
.unwrap_or_else(|| { - // Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs. - Self::push_instruction( - id, - instruction.clone(), - &old_results, - block, - dfg, - ) + Self::push_instruction(id, instruction.clone(), &old_results, block, dfg) }); Self::replace_result_ids(dfg, &old_results, &new_results); @@ -346,7 +350,7 @@ impl<'brillig> Context<'brillig> { self.cache_instruction( instruction.clone(), new_results, - dfg, + function, *side_effects_enabled_var, block, ); @@ -433,7 +437,7 @@ impl<'brillig> Context<'brillig> { &mut self, instruction: Instruction, instruction_results: Vec, - dfg: &DataFlowGraph, + function: &Function, side_effects_enabled_var: ValueId, block: BasicBlockId, ) { @@ -442,11 +446,11 @@ impl<'brillig> Context<'brillig> { // to map from the more complex to the simpler value. if let Instruction::Constrain(lhs, rhs, _) = instruction { // These `ValueId`s should be fully resolved now. - if let Some((complex, simple)) = simplify(dfg, lhs, rhs) { + if let Some((complex, simple)) = simplify(&function.dfg, lhs, rhs) { self.get_constraint_map(side_effects_enabled_var) .entry(complex) .or_default() - .add(dfg, simple, block); + .add(&function.dfg, simple, block); } } } @@ -463,7 +467,7 @@ impl<'brillig> Context<'brillig> { // we can simplify the operation when we take into account the predicate. if let Instruction::ArraySet { index, value, .. 
} = &instruction { let use_predicate = - self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg); + self.use_constraint_info && instruction.requires_acir_gen_predicate(&function.dfg); let predicate = use_predicate.then_some(side_effects_enabled_var); let array_get = Instruction::ArrayGet { array: instruction_results[0], index: *index }; @@ -476,12 +480,19 @@ impl<'brillig> Context<'brillig> { .cache(block, vec![*value]); } + self.remove_possibly_mutated_cached_make_arrays(&instruction, function); + // If the instruction doesn't have side-effects and if it won't interact with enable_side_effects during acir_gen, // we cache the results so we can reuse them if the same instruction appears again later in the block. // Others have side effects representing failure, which are implicit in the ACIR code and can also be deduplicated. - if instruction.can_be_deduplicated(dfg, self.use_constraint_info) { + let can_be_deduplicated = + instruction.can_be_deduplicated(function, self.use_constraint_info); + + // We also allow deduplicating MakeArray instructions that we have tracked which haven't + // been mutated. + if can_be_deduplicated || matches!(instruction, Instruction::MakeArray { .. }) { let use_predicate = - self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg); + self.use_constraint_info && instruction.requires_acir_gen_predicate(&function.dfg); let predicate = use_predicate.then_some(side_effects_enabled_var); self.cached_instruction_results @@ -688,6 +699,26 @@ impl<'brillig> Context<'brillig> { } } } + + fn remove_possibly_mutated_cached_make_arrays( + &mut self, + instruction: &Instruction, + function: &Function, + ) { + use Instruction::{ArraySet, Store}; + + // Should we consider calls to slice_push_back and similar to be mutating operations as well? + if let Store { value: array, .. } | ArraySet { array, .. } = instruction { + let instruction = match &function.dfg[*array] { + Value::Instruction { instruction, .. 
} => &function.dfg[*instruction], + _ => return, + }; + + if matches!(instruction, Instruction::MakeArray { .. }) { + self.cached_instruction_results.remove(instruction); + } + } + } } impl ResultCache { @@ -1148,6 +1179,7 @@ mod test { // fn main f0 { // b0(v0: u64): // v1 = make_array [v0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0] + // inc_rc v1 // v5 = call keccakf1600(v1) // } let ssa = ssa.fold_constants(); @@ -1157,7 +1189,107 @@ mod test { let main = ssa.main(); let instructions = main.dfg[main.entry_block()].instructions(); let ending_instruction_count = instructions.len(); - assert_eq!(ending_instruction_count, 2); + assert_eq!(ending_instruction_count, 3); + } + + #[test] + fn deduplicate_across_blocks() { + // fn main f0 { + // b0(v0: u1): + // v1 = not v0 + // jmp b1() + // b1(): + // v2 = not v0 + // return v2 + // } + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id); + let b1 = builder.insert_block(); + + let v0 = builder.add_parameter(Type::bool()); + let _v1 = builder.insert_not(v0); + builder.terminate_with_jmp(b1, Vec::new()); + + builder.switch_to_block(b1); + let v2 = builder.insert_not(v0); + builder.terminate_with_return(vec![v2]); + + let ssa = builder.finish(); + let main = ssa.main(); + assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); + assert_eq!(main.dfg[b1].instructions().len(), 1); + + // Expected output: + // + // fn main f0 { + // b0(v0: u1): + // v1 = not v0 + // jmp b1() + // b1(): + // return v1 + // } + let ssa = ssa.fold_constants_using_constraints(); + let main = ssa.main(); + assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); + assert_eq!(main.dfg[b1].instructions().len(), 0); + } + + #[test] + fn deduplicate_across_non_dominated_blocks() { + let src = " + brillig(inline) fn main f0 { + b0(v0: u32): + v2 
= lt u32 1000, v0 + jmpif v2 then: b1, else: b2 + b1(): + v4 = shl v0, u32 1 + v5 = lt v0, v4 + constrain v5 == u1 1 + jmp b2() + b2(): + v7 = lt u32 1000, v0 + jmpif v7 then: b3, else: b4 + b3(): + v8 = shl v0, u32 1 + v9 = lt v0, v8 + constrain v9 == u1 1 + jmp b4() + b4(): + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + // v4 has been hoisted, although: + // - v5 has not yet been removed since it was encountered earlier in the program + // - v8 hasn't been recognized as a duplicate of v6 yet since they still reference v4 and + // v5 respectively + let expected = " + brillig(inline) fn main f0 { + b0(v0: u32): + v2 = lt u32 1000, v0 + v4 = shl v0, u32 1 + jmpif v2 then: b1, else: b2 + b1(): + v5 = shl v0, u32 1 + v6 = lt v0, v5 + constrain v6 == u1 1 + jmp b2() + b2(): + jmpif v2 then: b3, else: b4 + b3(): + v8 = lt v0, v4 + constrain v8 == u1 1 + jmp b4() + b4(): + return + } + "; + + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, expected); } #[test] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs index 2e5c44cce76..290d2a33846 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs @@ -189,8 +189,7 @@ impl<'f> LoopInvariantContext<'f> { !self.defined_in_loop.contains(&value) || self.loop_invariants.contains(&value); }); - let can_be_deduplicated = instruction - .can_be_deduplicated(&self.inserter.function.dfg, false) + let can_be_deduplicated = instruction.can_be_deduplicated(self.inserter.function, false) || self.can_be_deduplicated_from_upper_bound(&instruction); is_loop_invariant && can_be_deduplicated diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs index d89bc1e9e28..5b66810c641 100644 --- 
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs @@ -62,6 +62,7 @@ impl<'a> Lexer<'a> { Some('-') if self.peek_char() == Some('>') => self.double_char_token(Token::Arrow), Some('-') => self.single_char_token(Token::Dash), Some('"') => self.eat_string_literal(), + Some('b') if self.peek_char() == Some('"') => self.eat_byte_string_literal(), Some(ch) if ch.is_ascii_alphanumeric() || ch == '_' => self.eat_alpha_numeric(ch), Some(char) => Err(LexerError::UnexpectedCharacter { char, @@ -180,8 +181,23 @@ impl<'a> Lexer<'a> { fn eat_string_literal(&mut self) -> SpannedTokenResult { let start = self.position; - let mut string = String::new(); + let string = self.eat_string(start)?; + let str_literal_token = Token::Str(string); + let end = self.position; + Ok(str_literal_token.into_span(start, end)) + } + + fn eat_byte_string_literal(&mut self) -> SpannedTokenResult { + let start = self.position; + self.next_char(); // skip the b + let string = self.eat_string(start)?; + let str_literal_token = Token::ByteStr(string); + let end = self.position; + Ok(str_literal_token.into_span(start, end)) + } + fn eat_string(&mut self, start: u32) -> Result { + let mut string = String::new(); while let Some(next) = self.next_char() { let char = match next { '"' => break, @@ -206,11 +222,7 @@ impl<'a> Lexer<'a> { string.push(char); } - - let str_literal_token = Token::Str(string); - - let end = self.position; - Ok(str_literal_token.into_span(start, end)) + Ok(string) } fn eat_while bool>( diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs index 506d2df3dea..24a5ff43071 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs @@ -4,7 +4,10 @@ use std::{ }; use super::{ - ir::{instruction::BinaryOp, types::Type}, + ir::{ + 
instruction::BinaryOp, + types::{NumericType, Type}, + }, Ssa, }; @@ -448,12 +451,39 @@ impl<'a> Parser<'a> { } if self.eat_keyword(Keyword::MakeArray)? { - self.eat_or_error(Token::LeftBracket)?; - let elements = self.parse_comma_separated_values()?; - self.eat_or_error(Token::RightBracket)?; - self.eat_or_error(Token::Colon)?; - let typ = self.parse_type()?; - return Ok(ParsedInstruction::MakeArray { target, elements, typ }); + if self.eat(Token::Ampersand)? { + let Some(string) = self.eat_byte_str()? else { + return self.expected_byte_string(); + }; + let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 }); + let typ = Type::Slice(Arc::new(vec![u8.clone()])); + let elements = string + .bytes() + .map(|byte| ParsedValue::NumericConstant { + constant: FieldElement::from(byte as u128), + typ: u8.clone(), + }) + .collect(); + return Ok(ParsedInstruction::MakeArray { target, elements, typ }); + } else if let Some(string) = self.eat_byte_str()? { + let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 }); + let typ = Type::Array(Arc::new(vec![u8.clone()]), string.len() as u32); + let elements = string + .bytes() + .map(|byte| ParsedValue::NumericConstant { + constant: FieldElement::from(byte as u128), + typ: u8.clone(), + }) + .collect(); + return Ok(ParsedInstruction::MakeArray { target, elements, typ }); + } else { + self.eat_or_error(Token::LeftBracket)?; + let elements = self.parse_comma_separated_values()?; + self.eat_or_error(Token::RightBracket)?; + self.eat_or_error(Token::Colon)?; + let typ = self.parse_type()?; + return Ok(ParsedInstruction::MakeArray { target, elements, typ }); + } } if self.eat_keyword(Keyword::Not)? 
{ @@ -796,6 +826,18 @@ impl<'a> Parser<'a> { } } + fn eat_byte_str(&mut self) -> ParseResult> { + if matches!(self.token.token(), Token::ByteStr(..)) { + let token = self.bump()?; + match token.into_token() { + Token::ByteStr(string) => Ok(Some(string)), + _ => unreachable!(), + } + } else { + Ok(None) + } + } + fn eat(&mut self, token: Token) -> ParseResult { if self.token.token() == &token { self.bump()?; @@ -848,6 +890,13 @@ impl<'a> Parser<'a> { }) } + fn expected_byte_string(&mut self) -> ParseResult { + Err(ParserError::ExpectedByteString { + found: self.token.token().clone(), + span: self.token.to_span(), + }) + } + fn expected_identifier(&mut self) -> ParseResult { Err(ParserError::ExpectedIdentifier { found: self.token.token().clone(), @@ -911,6 +960,8 @@ pub(crate) enum ParserError { ExpectedInstructionOrTerminator { found: Token, span: Span }, #[error("Expected a string literal or 'data', found '{found}'")] ExpectedStringOrData { found: Token, span: Span }, + #[error("Expected a byte string literal, found '{found}'")] + ExpectedByteString { found: Token, span: Span }, #[error("Expected a value, found '{found}'")] ExpectedValue { found: Token, span: Span }, #[error("Multiple return values only allowed for call")] @@ -928,6 +979,7 @@ impl ParserError { | ParserError::ExpectedType { span, .. } | ParserError::ExpectedInstructionOrTerminator { span, .. } | ParserError::ExpectedStringOrData { span, .. } + | ParserError::ExpectedByteString { span, .. } | ParserError::ExpectedValue { span, .. } => *span, ParserError::MultipleReturnValuesOnlyAllowedForCall { second_target, .. 
} => { second_target.span diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs index 593b66d0c98..6318f9dc56e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs @@ -89,6 +89,30 @@ fn test_make_composite_array() { assert_ssa_roundtrip(src); } +#[test] +fn test_make_byte_array_with_string_literal() { + let src = " + acir(inline) fn main f0 { + b0(): + v9 = make_array b\"Hello world!\" + return v9 + } + "; + assert_ssa_roundtrip(src); +} + +#[test] +fn test_make_byte_slice_with_string_literal() { + let src = " + acir(inline) fn main f0 { + b0(): + v9 = make_array &b\"Hello world!\" + return v9 + } + "; + assert_ssa_roundtrip(src); +} + #[test] fn test_block_parameters() { let src = " @@ -228,14 +252,14 @@ fn test_constrain_with_static_message() { #[test] fn test_constrain_with_dynamic_message() { - let src = " + let src = r#" acir(inline) fn main f0 { b0(v0: Field, v1: Field): - v7 = make_array [u8 123, u8 120, u8 125, u8 32, u8 123, u8 121, u8 125] : [u8; 7] + v7 = make_array b"{x} {y}" constrain v0 == Field 1, data v7, u32 2, v0, v1 return } - "; + "#; assert_ssa_roundtrip(src); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs index d8dd4ec011e..83a2a1d1ed2 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs @@ -30,6 +30,7 @@ pub(crate) enum Token { Ident(String), Int(FieldElement), Str(String), + ByteStr(String), Keyword(Keyword), IntType(IntType), /// = @@ -79,6 +80,7 @@ impl Display for Token { Token::Ident(ident) => write!(f, "{}", ident), Token::Int(int) => write!(f, "{}", int), Token::Str(string) => write!(f, "{string:?}"), + Token::ByteStr(string) => write!(f, "{string:?}"), 
Token::Keyword(keyword) => write!(f, "{}", keyword), Token::IntType(int_type) => write!(f, "{}", int_type), Token::Assign => write!(f, "="), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 8a09dba64c4..116e0de4ecd 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -20,7 +20,7 @@ use crate::ssa::ir::value::ValueId; use super::value::{Tree, Value, Values}; use super::SSA_WORD_SIZE; -use fxhash::FxHashMap as HashMap; +use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; /// The FunctionContext is the main context object for translating a /// function into SSA form during the SSA-gen pass. @@ -159,7 +159,8 @@ impl<'a> FunctionContext<'a> { let parameter_value = Self::map_type(parameter_type, |typ| { let value = self.builder.add_parameter(typ); if mutable { - self.new_mutable_variable(value) + // This will wrap any `mut var: T` in a reference and increase the rc of an array if needed + self.new_mutable_variable(value, true) } else { value.into() } @@ -170,9 +171,17 @@ impl<'a> FunctionContext<'a> { /// Allocate a single slot of memory and store into it the given initial value of the variable. /// Always returns a Value::Mutable wrapping the allocate instruction. 
- pub(super) fn new_mutable_variable(&mut self, value_to_store: ValueId) -> Value { + pub(super) fn new_mutable_variable( + &mut self, + value_to_store: ValueId, + increment_array_rc: bool, + ) -> Value { let element_type = self.builder.current_function.dfg.type_of_value(value_to_store); - self.builder.increment_array_reference_count(value_to_store); + + if increment_array_rc { + self.builder.increment_array_reference_count(value_to_store); + } + let alloc = self.builder.insert_allocate(element_type); self.builder.insert_store(alloc, value_to_store); let typ = self.builder.type_of_value(value_to_store); @@ -904,35 +913,52 @@ impl<'a> FunctionContext<'a> { } } - /// Increments the reference count of all parameters. Returns the entry block of the function. + /// Increments the reference count of mutable reference array parameters. + /// Any mutable-value (`mut a: [T; N]` versus `a: &mut [T; N]`) are already incremented + /// by `FunctionBuilder::add_parameter_to_scope`. + /// Returns each array id that was incremented. /// /// This is done on parameters rather than call arguments so that we can optimize out /// paired inc/dec instructions within brillig functions more easily. - pub(crate) fn increment_parameter_rcs(&mut self) -> BasicBlockId { + pub(crate) fn increment_parameter_rcs(&mut self) -> HashSet<ValueId> { let entry = self.builder.current_function.entry_block(); let parameters = self.builder.current_function.dfg.block_parameters(entry).to_vec(); + let mut incremented = HashSet::default(); + let mut seen_array_types = HashSet::default(); + for parameter in parameters { // Avoid reference counts for immutable arrays that aren't behind references.
- if self.builder.current_function.dfg.value_is_reference(parameter) { - self.builder.increment_array_reference_count(parameter); + let typ = self.builder.current_function.dfg.type_of_value(parameter); + + if let Type::Reference(element) = typ { + if element.contains_an_array() { + // If we haven't already seen this array type, the value may be possibly + // aliased, so issue an inc_rc for it. + if !seen_array_types.insert(element.get_contained_array().clone()) + && self.builder.increment_array_reference_count(parameter) + { + incremented.insert(parameter); + } + } } } - entry + incremented } /// Ends a local scope of a function. /// This will issue DecrementRc instructions for any arrays in the given starting scope /// block's parameters. Arrays that are also used in terminator instructions for the scope are /// ignored. - pub(crate) fn end_scope(&mut self, scope: BasicBlockId, terminator_args: &[ValueId]) { - let mut dropped_parameters = - self.builder.current_function.dfg.block_parameters(scope).to_vec(); - - dropped_parameters.retain(|parameter| !terminator_args.contains(parameter)); + pub(crate) fn end_scope( + &mut self, + mut incremented_params: HashSet<ValueId>, + terminator_args: &[ValueId], + ) { + incremented_params.retain(|parameter| !terminator_args.contains(parameter)); - for parameter in dropped_parameters { + for parameter in incremented_params { if self.builder.current_function.dfg.value_is_reference(parameter) { self.builder.decrement_array_reference_count(parameter); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index d28236bd360..2fe0a38af00 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -125,10 +125,10 @@ impl<'a> FunctionContext<'a> { /// Codegen a function's body and set its return value to that of its last parameter.
/// For functions returning nothing, this will be an empty list. fn codegen_function_body(&mut self, body: &Expression) -> Result<(), RuntimeError> { - let entry_block = self.increment_parameter_rcs(); + let incremented_params = self.increment_parameter_rcs(); let return_value = self.codegen_expression(body)?; let results = return_value.into_value_list(self); - self.end_scope(entry_block, &results); + self.end_scope(incremented_params, &results); self.builder.terminate_with_return(results); Ok(()) @@ -195,8 +195,7 @@ impl<'a> FunctionContext<'a> { fn codegen_literal(&mut self, literal: &ast::Literal) -> Result<Values, RuntimeError> { match literal { ast::Literal::Array(array) => { - let elements = - try_vecmap(&array.contents, |element| self.codegen_expression(element))?; + let elements = self.codegen_array_elements(&array.contents)?; let typ = Self::convert_type(&array.typ).flatten(); Ok(match array.typ { @@ -207,8 +206,7 @@ impl<'a> FunctionContext<'a> { }) } ast::Literal::Slice(array) => { - let elements = - try_vecmap(&array.contents, |element| self.codegen_expression(element))?; + let elements = self.codegen_array_elements(&array.contents)?; let typ = Self::convert_type(&array.typ).flatten(); Ok(match array.typ { @@ -245,18 +243,33 @@ impl<'a> FunctionContext<'a> { } } + fn codegen_array_elements( + &mut self, + elements: &[Expression], + ) -> Result<Vec<(Values, bool)>, RuntimeError> { + try_vecmap(elements, |element| { + let value = self.codegen_expression(element)?; + Ok((value, element.is_array_or_slice_literal())) + }) + } + fn codegen_string(&mut self, string: &str) -> Values { let elements = vecmap(string.as_bytes(), |byte| { - self.builder.numeric_constant(*byte as u128, Type::unsigned(8)).into() + let char = self.builder.numeric_constant(*byte as u128, Type::unsigned(8)); + (char.into(), false) }); let typ = Self::convert_non_tuple_type(&ast::Type::String(elements.len() as u32)); self.codegen_array(elements, typ) } // Codegen an array but make sure that we do not have a nested slice + /// +
/// The bool aspect of each array element indicates whether the element is an array constant + /// or not. If it is, we avoid incrementing the reference count because we consider the + /// constant to be moved into this larger array constant. fn codegen_array_checked( &mut self, - elements: Vec<Values>, + elements: Vec<(Values, bool)>, typ: Type, ) -> Result<Values, RuntimeError> { if typ.is_nested_slice() { @@ -273,11 +286,15 @@ impl<'a> FunctionContext<'a> { /// stored next to the other fields in memory. So an array such as [(1, 2), (3, 4)] is /// stored the same as the array [1, 2, 3, 4]. /// + /// The bool aspect of each array element indicates whether the element is an array constant + /// or not. If it is, we avoid incrementing the reference count because we consider the + /// constant to be moved into this larger array constant. + /// /// The value returned from this function is always that of the allocate instruction. - fn codegen_array(&mut self, elements: Vec<Values>, typ: Type) -> Values { + fn codegen_array(&mut self, elements: Vec<(Values, bool)>, typ: Type) -> Values { let mut array = im::Vector::new(); - for element in elements { + for (element, is_array_constant) in elements { element.for_each(|element| { let element = element.eval(self); @@ -286,7 +303,10 @@ impl<'a> FunctionContext<'a> { // pessimistic reference count (since some are likely moved rather than shared) // which is important for Brillig's copy on write optimization. This has no // effect in ACIR code.
- self.builder.increment_array_reference_count(element); + if !is_array_constant { + self.builder.increment_array_reference_count(element); + } + array.push_back(element); }); } @@ -662,14 +682,22 @@ impl<'a> FunctionContext<'a> { fn codegen_let(&mut self, let_expr: &ast::Let) -> Result<Values, RuntimeError> { let mut values = self.codegen_expression(&let_expr.expression)?; + // Don't mutate the reference count if we're assigning an array literal to a Let: + // `let mut foo = [1, 2, 3];` + // we consider the array to be moved, so we should have an initial rc of just 1. + let should_inc_rc = !let_expr.expression.is_array_or_slice_literal(); + values = values.map(|value| { let value = value.eval(self); Tree::Leaf(if let_expr.mutable { - self.new_mutable_variable(value) + self.new_mutable_variable(value, should_inc_rc) } else { - // `new_mutable_variable` already increments rcs internally - self.builder.increment_array_reference_count(value); + // `new_mutable_variable` increments rcs internally so we have to + // handle it separately for the immutable case + if should_inc_rc { + self.builder.increment_array_reference_count(value); + } value::Value::Normal(value) }) }); @@ -728,10 +756,14 @@ impl<'a> FunctionContext<'a> { fn codegen_assign(&mut self, assign: &ast::Assign) -> Result<Values, RuntimeError> { let lhs = self.extract_current_value(&assign.lvalue)?; let rhs = self.codegen_expression(&assign.expression)?; + let should_inc_rc = !assign.expression.is_array_or_slice_literal(); rhs.clone().for_each(|value| { let value = value.eval(self); + - self.builder.increment_array_reference_count(value); + + if should_inc_rc { + self.builder.increment_array_reference_count(value); + } }); self.assign_new_value(lhs, rhs); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs index 8f6817dc15d..5d9b66f4f96 100644 ---
b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -48,6 +48,12 @@ pub enum Expression { Continue, } +impl Expression { + pub fn is_array_or_slice_literal(&self) -> bool { + matches!(self, Expression::Literal(Literal::Array(_) | Literal::Slice(_))) + } +} + /// A definition is either a local (variable), function, or is a built-in /// function that will be generated or referenced by the compiler later. #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md index f3badde62be..41a823646dd 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md @@ -79,7 +79,7 @@ fn main() { You can construct a U128 from its limbs: ```rust fn main(x: u64, y: u64) { - let x = U128::from_u64s_be(x,y); + let z = U128::from_u64s_be(x,y); assert(z.hi == x as Field); assert(z.lo == y as Field); } diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml new file mode 100644 index 00000000000..16a708743ed --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "array_dedup_regression" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml new file mode 100644 index 00000000000..3aea0c58ce5 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml @@ -0,0 +1 @@ +x = 0 diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr new file 
mode 100644 index 00000000000..5506d55b9e7 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr @@ -0,0 +1,21 @@ +unconstrained fn main(x: u32) { + let a1 = [1, 2, 3, 4, 5]; + + for i in 0..5 { + let mut a2 = [1, 2, 3, 4, 5]; + a2[x + i] = 128; + println(a2); + + if i != 0 { + assert(a2[x + i - 1] != 128); + } + } + + // Can't use `== [1, 2, 3, 4, 5]` here, that make_array may get + // deduplicated to equal a1 in the bugged version + assert_eq(a1[0], 1); + assert_eq(a1[1], 2); + assert_eq(a1[2], 3); + assert_eq(a1[3], 4); + assert_eq(a1[4], 5); +} diff --git a/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr b/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr index 86b5c812472..68b9f2407b9 100644 --- a/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr @@ -1,10 +1,19 @@ +use std::mem::array_refcount; + fn main() { let mut array = [0, 1, 2]; assert_refcount(array, 2); - borrow(array, std::mem::array_refcount(array)); - borrow_mut(&mut array, std::mem::array_refcount(array)); - copy_mut(array, std::mem::array_refcount(array)); + borrow(array, array_refcount(array)); + borrow_mut(&mut array, array_refcount(array)); + copy_mut(array, array_refcount(array)); + + borrow_mut_two(&mut array, &mut array, array_refcount(array)); + + let mut u32_array = [0, 1, 2]; + let rc1 = array_refcount(array); + let rc2 = array_refcount(u32_array); + borrow_mut_two_separate(&mut array, &mut u32_array, rc1, rc2); } fn borrow(array: [Field; 3], rc_before_call: u32) { @@ -13,19 +22,48 @@ fn borrow(array: [Field; 3], rc_before_call: u32) { } fn borrow_mut(array: &mut [Field; 3], rc_before_call: u32) { - assert_refcount(*array, rc_before_call + 1); - array[0] = 5; + // Optimization: inc_rc isn't needed since there is only one array (`array`) + // of the same type that `array` can be 
modified through + assert_refcount(*array, rc_before_call + 0); + array[0] = 3; println(array[0]); } fn copy_mut(mut array: [Field; 3], rc_before_call: u32) { assert_refcount(array, rc_before_call + 1); - array[0] = 6; + array[0] = 4; println(array[0]); } -fn assert_refcount(array: [Field; 3], expected: u32) { - let count = std::mem::array_refcount(array); +/// Borrow the same array mutably through both parameters, inc_rc is necessary here, although +/// only one is needed to bring the rc from 1 to 2. +fn borrow_mut_two(array1: &mut [Field; 3], array2: &mut [Field; 3], rc_before_call: u32) { + assert_refcount(*array1, rc_before_call + 1); + assert_refcount(*array2, rc_before_call + 1); + array1[0] = 5; + array2[0] = 6; + println(array1[0]); // array1 & 2 alias, so this should also print 6 + println(array2[0]); +} + +/// Borrow a different array: we should be able to reason that these types cannot be mutably +/// aliased since they're different types so we don't need any inc_rc instructions. +fn borrow_mut_two_separate( + array1: &mut [Field; 3], + array2: &mut [u32; 3], + rc_before_call1: u32, + rc_before_call2: u32, +) { + assert_refcount(*array1, rc_before_call1 + 0); + assert_refcount(*array2, rc_before_call2 + 0); + array1[0] = 7; + array2[0] = 8; + println(array1[0]); + println(array2[0]); +} + +fn assert_refcount<T>(array: [T; 3], expected: u32) { + let count = array_refcount(array); // All refcounts are zero when running this as a constrained program if std::runtime::is_unconstrained() { diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs index f0334eaf713..41b3c0c9cf7 100644 --- a/noir/noir-repo/tooling/nargo_cli/build.rs +++ b/noir/noir-repo/tooling/nargo_cli/build.rs @@ -60,13 +60,9 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [ ]; /// Tests which aren't expected to work with the default inliner cases.
-const INLINER_MIN_OVERRIDES: [(&str, i64); 2] = [ +const INLINER_MIN_OVERRIDES: [(&str, i64); 1] = [ // 0 works if PoseidonHasher::write is tagged as `inline_always`, otherwise 22. ("eddsa", 0), - // (#6583): The RcTracker in the DIE SSA pass is removing inc_rcs that are still needed. - // This triggers differently depending on the optimization level (although all are wrong), - // so we arbitrarily only run with the inlined versions. - ("reference_counts", 0), ]; /// Some tests are expected to have warnings