From 7d2b615aa5a72f8517b5c4768754e584d85dcd1b Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Fri, 8 Nov 2024 15:18:17 +0000 Subject: [PATCH 01/19] Experiment with a generic ValueId --- .../noirc_evaluator/src/ssa/ir/basic_block.rs | 8 +- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 99 +++++++++++-------- .../noirc_evaluator/src/ssa/ir/instruction.rs | 78 +++++++-------- .../src/ssa/ir/instruction/binary.rs | 9 +- .../src/ssa/ir/instruction/cast.rs | 6 +- .../src/ssa/ir/instruction/constrain.rs | 6 +- compiler/noirc_evaluator/src/ssa/ir/map.rs | 6 +- .../noirc_evaluator/src/ssa/ir/printer.rs | 2 +- compiler/noirc_evaluator/src/ssa/ir/value.rs | 97 +++++++++++++++++- .../noirc_evaluator/src/ssa/opt/array_set.rs | 33 ++++--- compiler/noirc_evaluator/src/ssa/opt/rc.rs | 2 +- 11 files changed, 231 insertions(+), 115 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/ir/basic_block.rs b/compiler/noirc_evaluator/src/ssa/ir/basic_block.rs index a7c637dedd0..29b4002841c 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/basic_block.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/basic_block.rs @@ -2,7 +2,7 @@ use super::{ dfg::CallStack, instruction::{InstructionId, TerminatorInstruction}, map::Id, - value::ValueId, + value::{Unresolved, ValueId}, }; use serde::{Deserialize, Serialize}; @@ -13,9 +13,9 @@ use serde::{Deserialize, Serialize}; /// This means that if one instruction is executed in a basic /// block, then all instructions are executed. ie single-entry single-exit. #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] -pub(crate) struct BasicBlock { +pub(crate) struct BasicBlock { /// Parameters to the basic block. - parameters: Vec, + parameters: Vec>, /// Instructions in the basic block. instructions: Vec, @@ -24,7 +24,7 @@ pub(crate) struct BasicBlock { /// /// This will be a control flow instruction. This is only /// None if the block is still being constructed. - terminator: Option, + terminator: Option>, } /// An identifier for a Basic Block. diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 2be9ffa9afa..93dfee28fa1 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -10,7 +10,7 @@ use super::{ }, map::DenseMap, types::Type, - value::{Value, ValueId}, + value::{RawValueId, ResolvedValueId, Value, ValueId}, }; use acvm::{acir::AcirField, FieldElement}; @@ -78,7 +78,7 @@ pub(crate) struct DataFlowGraph { /// and has no material effect on the SSA itself, however in practice the IDs can get out of /// sync and may need this resolution before they can be compared. #[serde(skip)] - replaced_value_ids: HashMap, + replaced_value_ids: HashMap, /// Source location of each instruction for debugging and issuing errors. 
/// @@ -119,8 +119,8 @@ impl DataFlowGraph { let parameters = self.blocks[block].parameters(); let parameters = vecmap(parameters.iter().enumerate(), |(position, param)| { - let typ = self.values[*param].get_type().clone(); - self.values.insert(Value::Param { block: new_block, position, typ }) + let typ = self.values[param.raw()].get_type().clone(); + self.values.insert(Value::Param { block: new_block, position, typ }).into() }); self.blocks[new_block].set_parameters(parameters); @@ -139,7 +139,7 @@ impl DataFlowGraph { /// Iterate over every Value in this DFG in no particular order, including unused Values pub(crate) fn values_iter(&self) -> impl ExactSizeIterator { - self.values.iter() + self.values.iter().map(|(id, value)| (id.into(), value)) } /// Returns the parameters of the given block @@ -213,7 +213,7 @@ impl DataFlowGraph { /// Insert a value into the dfg's storage and return an id to reference it. /// Until the value is used in an instruction it is unreachable. pub(crate) fn make_value(&mut self, value: Value) -> ValueId { - self.values.insert(value) + self.values.insert(value).into() } /// Set the value of value_to_replace to refer to the value referred to by new_value. @@ -222,16 +222,16 @@ impl DataFlowGraph { /// values since other instructions referring to the same ValueId need /// not be modified to refer to a new ValueId. pub(crate) fn set_value_from_id(&mut self, value_to_replace: ValueId, new_value: ValueId) { - if value_to_replace != new_value { - self.replaced_value_ids.insert(value_to_replace, self.resolve(new_value)); - let new_value = self.values[new_value].clone(); - self.values[value_to_replace] = new_value; + if value_to_replace.unresolved_eq(&new_value) { + self.replaced_value_ids.insert(value_to_replace.raw(), self.resolve(new_value).into()); + let new_value = self.values[new_value.raw()].clone(); + self.values[value_to_replace.raw()] = new_value; } } /// Set the type of value_id to the target_type. pub(crate) fn set_type_of_value(&mut self, value_id: ValueId, target_type: Type) { - let value = &mut self.values[value_id]; + let value = &mut self.values[value_id.raw()]; match value { Value::Instruction { typ, .. } | Value::Param { typ, .. } @@ -248,10 +248,10 @@ impl DataFlowGraph { /// `ValueId`, this function will return the `ValueId` from which the substitution was taken. /// If `original_value_id`'s underlying `Value` has not been substituted, the same `ValueId` /// is returned. - pub(crate) fn resolve(&self, original_value_id: ValueId) -> ValueId { - match self.replaced_value_ids.get(&original_value_id) { + pub(crate) fn resolve(&self, original_value_id: ValueId) -> ResolvedValueId { + match self.replaced_value_ids.get(original_value_id.as_ref()) { Some(id) => self.resolve(*id), - None => original_value_id, + None => original_value_id.resolved(), } } @@ -261,7 +261,7 @@ impl DataFlowGraph { if let Some(id) = self.constants.get(&(constant, typ.clone())) { return *id; } - let id = self.values.insert(Value::NumericConstant { constant, typ: typ.clone() }); + let id = self.values.insert(Value::NumericConstant { constant, typ: typ.clone() }).into(); self.constants.insert((constant, typ), id); id } @@ -277,7 +277,7 @@ impl DataFlowGraph { if let Some(existing) = self.functions.get(&function) { return *existing; } - self.values.insert(Value::Function(function)) + self.values.insert(Value::Function(function)).into() } /// Gets or creates a ValueId for the given FunctionId. 
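// --- Aside (not part of the patch): a minimal, self-contained sketch of the
// resolution-marker pattern the hunks above introduce. `Unresolved`, `Resolved`
// and `resolve` mirror names from the patch; `TaggedId` and everything else here
// are illustrative stand-ins only, not the compiler's real types.
use std::collections::HashMap;
use std::marker::PhantomData;

struct Unresolved;
struct Resolved;

// An index tagged with a zero-sized marker recording whether it may still be stale.
struct TaggedId<R> {
    index: usize,
    _marker: PhantomData<R>,
}

impl<R> TaggedId<R> {
    fn new(index: usize) -> Self {
        Self { index, _marker: PhantomData }
    }
}

impl TaggedId<Unresolved> {
    // Follow replacement links to a fixed point, then promote to the resolved marker,
    // much like `DataFlowGraph::resolve` walking `replaced_value_ids` above.
    fn resolve(&self, replaced: &HashMap<usize, usize>) -> TaggedId<Resolved> {
        let mut index = self.index;
        while let Some(next) = replaced.get(&index) {
            index = *next;
        }
        TaggedId::new(index)
    }
}

fn main() {
    // v1 was replaced by v3, and v3 by v7: resolving v1 must land on v7.
    let replaced: HashMap<usize, usize> = HashMap::from([(1, 3), (3, 7)]);
    let id: TaggedId<Unresolved> = TaggedId::new(1);
    let resolved = id.resolve(&replaced);
    assert_eq!(resolved.index, 7);
    println!("v{} resolves to v{}", id.index, resolved.index);
}
// --- end of aside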
@@ -285,7 +285,7 @@ impl DataFlowGraph { if let Some(existing) = self.foreign_functions.get(function) { return *existing; } - self.values.insert(Value::ForeignFunction(function.to_owned())) + self.values.insert(Value::ForeignFunction(function.to_owned())).into() } /// Gets or creates a ValueId for the given Intrinsic. @@ -293,7 +293,7 @@ impl DataFlowGraph { if let Some(existing) = self.get_intrinsic(intrinsic) { return *existing; } - let intrinsic_value_id = self.values.insert(Value::Intrinsic(intrinsic)); + let intrinsic_value_id = self.values.insert(Value::Intrinsic(intrinsic)).into(); self.intrinsics.insert(intrinsic, intrinsic_value_id); intrinsic_value_id } @@ -346,9 +346,19 @@ impl DataFlowGraph { } } + /// Look up a value by ID. + fn get_value(&self, value: ValueId) -> Value { + self.values[value.raw()] + } + + /// Resolve and get a value by ID + fn resolve_value(&self, original_value_id: ValueId) -> Value { + self.values[self.resolve(original_value_id).raw()] + } + /// Returns the type of a given value - pub(crate) fn type_of_value(&self, value: ValueId) -> Type { - self.values[value].get_type().clone() + pub(crate) fn type_of_value(&self, value: ValueId) -> Type { + self.get_value(value).get_type().clone() } /// Returns the maximum possible number of bits that `value` can potentially be. @@ -356,7 +366,7 @@ impl DataFlowGraph { /// Should `value` be a numeric constant then this function will return the exact number of bits required, /// otherwise it will return the minimum number of bits based on type information. pub(crate) fn get_value_max_num_bits(&self, value: ValueId) -> u32 { - match self[value] { + match self.get_value(value) { Value::Instruction { instruction, .. } => { if let Instruction::Cast(original_value, _) = self[instruction] { self.type_of_value(original_value).bit_size() @@ -373,7 +383,7 @@ impl DataFlowGraph { /// True if the type of this value is Type::Reference. /// Using this method over type_of_value avoids cloning the value's type. pub(crate) fn value_is_reference(&self, value: ValueId) -> bool { - matches!(self.values[value].get_type(), Type::Reference(_)) + matches!(self.get_value(value).get_type(), Type::Reference(_)) } /// Appends a result type to the instruction. 
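// --- Aside (not part of the patch): the hunk above makes read-only accessors such as
// `type_of_value` generic over the resolution marker, so both unresolved and resolved
// IDs can be passed without converting first. A minimal sketch of that shape; the
// names below (`TaggedId`, `Store`, `type_of`) are illustrative only.
use std::marker::PhantomData;

struct Unresolved;
struct Resolved;

struct TaggedId<R> {
    index: usize,
    _marker: PhantomData<R>,
}

struct Store {
    types: Vec<&'static str>,
}

impl Store {
    // Reads only need the underlying index, so they can accept any marker `R`.
    fn type_of<R>(&self, id: &TaggedId<R>) -> &'static str {
        self.types[id.index]
    }
}

fn main() {
    let store = Store { types: vec!["Field", "u32"] };
    let unresolved = TaggedId::<Unresolved> { index: 0, _marker: PhantomData };
    let resolved = TaggedId::<Resolved> { index: 1, _marker: PhantomData };
    // The same accessor serves both flavors of ID.
    println!("{} {}", store.type_of(&unresolved), store.type_of(&resolved));
}
// --- end of aside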
@@ -381,11 +391,14 @@ impl DataFlowGraph { let results = self.results.get_mut(&instruction_id).unwrap(); let expected_res_position = results.len(); - let value_id = self.values.insert(Value::Instruction { - typ, - position: expected_res_position, - instruction: instruction_id, - }); + let value_id = self + .values + .insert(Value::Instruction { + typ, + position: expected_res_position, + instruction: instruction_id, + }) + .into(); // Add value to the list of results for this instruction results.push(value_id); @@ -402,14 +415,13 @@ impl DataFlowGraph { let results = self.results.get_mut(&instruction_id).unwrap(); let res_position = results .iter() - .position(|&id| id == prev_value_id) + .position(|id| prev_value_id.unresolved_eq(id)) .expect("Result id not found while replacing"); - let value_id = self.values.insert(Value::Instruction { - typ, - position: res_position, - instruction: instruction_id, - }); + let value_id = self + .values + .insert(Value::Instruction { typ, position: res_position, instruction: instruction_id }) + .into(); // Replace the value in list of results for this instruction results[res_position] = value_id; @@ -431,7 +443,7 @@ impl DataFlowGraph { pub(crate) fn add_block_parameter(&mut self, block_id: BasicBlockId, typ: Type) -> ValueId { let block = &mut self.blocks[block_id]; let position = block.parameters().len(); - let parameter = self.values.insert(Value::Param { block: block_id, position, typ }); + let parameter = self.values.insert(Value::Param { block: block_id, position, typ }).into(); block.add_parameter(parameter); parameter } @@ -448,7 +460,7 @@ impl DataFlowGraph { &self, value: ValueId, ) -> Option<(FieldElement, Type)> { - match &self.values[self.resolve(value)] { + match &self.resolve_value(value) { Value::NumericConstant { constant, typ } => Some((*constant, typ.clone())), _ => None, } @@ -457,7 +469,7 @@ impl DataFlowGraph { /// Returns the Value::Array associated with this ValueId if it refers to an array constant. /// Otherwise, this returns None. pub(crate) fn get_array_constant(&self, value: ValueId) -> Option<(im::Vector, Type)> { - match &self.values[self.resolve(value)] { + match &self.resolve_value(value) { // Arrays are shared, so cloning them is cheap Value::Array { array, typ } => Some((array.clone(), typ.clone())), _ => None, @@ -513,7 +525,7 @@ impl DataFlowGraph { } pub(crate) fn get_value_call_stack(&self, value: ValueId) -> CallStack { - match &self.values[self.resolve(value)] { + match &self.resolve_value(value) { Value::Instruction { instruction, .. } => self.get_call_stack(*instruction), _ => im::Vector::new(), } @@ -521,7 +533,7 @@ impl DataFlowGraph { /// True if the given ValueId refers to a (recursively) constant value pub(crate) fn is_constant(&self, argument: ValueId) -> bool { - match &self[self.resolve(argument)] { + match &self.resolve_value(argument) { Value::Instruction { .. } | Value::Param { .. } => false, Value::Array { array, .. 
} => array.iter().all(|element| self.is_constant(*element)), _ => true, @@ -551,13 +563,20 @@ impl std::ops::IndexMut for DataFlowGraph { } } -impl std::ops::Index for DataFlowGraph { +impl std::ops::Index for DataFlowGraph { type Output = Value; - fn index(&self, id: ValueId) -> &Self::Output { + fn index(&self, id: RawValueId) -> &Self::Output { &self.values[id] } } +impl std::ops::Index for DataFlowGraph { + type Output = Value; + fn index(&self, id: ResolvedValueId) -> &Self::Output { + &self.values[id.raw()] + } +} + impl std::ops::Index for DataFlowGraph { type Output = BasicBlock; fn index(&self, id: BasicBlockId) -> &Self::Output { diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index f187a279b9b..d4e53ab9192 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -21,7 +21,7 @@ use super::{ dfg::{CallStack, DataFlowGraph}, map::Id, types::{NumericType, Type}, - value::{Value, ValueId}, + value::{Unresolved, Value, ValueId}, }; mod binary; @@ -189,27 +189,27 @@ pub(crate) enum Endian { #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] /// Instructions are used to perform tasks. /// The instructions that the IR is able to specify are listed below. -pub(crate) enum Instruction { +pub(crate) enum Instruction { /// Binary Operations like +, -, *, /, ==, != - Binary(Binary), + Binary(Binary), /// Converts `Value` into Typ - Cast(ValueId, Type), + Cast(ValueId, Type), /// Computes a bit wise not - Not(ValueId), + Not(ValueId), /// Truncates `value` to `bit_size` - Truncate { value: ValueId, bit_size: u32, max_bit_size: u32 }, + Truncate { value: ValueId, bit_size: u32, max_bit_size: u32 }, /// Constrains two values to be equal to one another. - Constrain(ValueId, ValueId, Option), + Constrain(ValueId, ValueId, Option>), /// Range constrain `value` to `max_bit_size` - RangeCheck { value: ValueId, max_bit_size: u32, assert_message: Option }, + RangeCheck { value: ValueId, max_bit_size: u32, assert_message: Option }, /// Performs a function call with a list of its arguments. - Call { func: ValueId, arguments: Vec }, + Call { func: ValueId, arguments: Vec> }, /// Allocates a region of memory. Note that this is not concerned with /// the type of memory, the type of element is determined when loading this memory. @@ -217,10 +217,10 @@ pub(crate) enum Instruction { Allocate, /// Loads a value from memory. - Load { address: ValueId }, + Load { address: ValueId }, /// Writes a value to memory. - Store { address: ValueId, value: ValueId }, + Store { address: ValueId, value: ValueId }, /// Provides a context for all instructions that follow up until the next /// `EnableSideEffectsIf` is encountered, for stating a condition that determines whether @@ -239,29 +239,29 @@ pub(crate) enum Instruction { /// This instruction is only emitted after the cfg flattening pass, and is used to annotate /// instruction regions with an condition that corresponds to their position in the CFG's /// if-branching structure. - EnableSideEffectsIf { condition: ValueId }, + EnableSideEffectsIf { condition: ValueId }, /// Retrieve a value from an array at the given index - ArrayGet { array: ValueId, index: ValueId }, + ArrayGet { array: ValueId, index: ValueId }, /// Creates a new array with the new value at the given index. All other elements are identical /// to those in the given array. This will not modify the original array unless `mutable` is /// set. 
This flag is off by default and only enabled when optimizations determine it is safe. - ArraySet { array: ValueId, index: ValueId, value: ValueId, mutable: bool }, + ArraySet { array: ValueId, index: ValueId, value: ValueId, mutable: bool }, /// An instruction to increment the reference count of a value. /// /// This currently only has an effect in Brillig code where array sharing and copy on write is /// implemented via reference counting. In ACIR code this is done with im::Vector and these /// IncrementRc instructions are ignored. - IncrementRc { value: ValueId }, + IncrementRc { value: ValueId }, /// An instruction to decrement the reference count of a value. /// /// This currently only has an effect in Brillig code where array sharing and copy on write is /// implemented via reference counting. In ACIR code this is done with im::Vector and these /// DecrementRc instructions are ignored. - DecrementRc { value: ValueId }, + DecrementRc { value: ValueId }, /// Merge two values returned from opposite branches of a conditional into one. /// @@ -276,10 +276,10 @@ pub(crate) enum Instruction { /// Where we save the result of !then_condition so that we have the same /// ValueId for it each time. IfElse { - then_condition: ValueId, - then_value: ValueId, - else_condition: ValueId, - else_value: ValueId, + then_condition: ValueId, + then_value: ValueId, + else_condition: ValueId, + else_value: ValueId, }, } @@ -339,7 +339,7 @@ impl Instruction { | IncrementRc { .. } | DecrementRc { .. } => false, - Call { func, .. } => match dfg[*func] { + Call { func, .. } => match dfg[func.raw()] { Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), _ => false, }, @@ -394,7 +394,7 @@ impl Instruction { | RangeCheck { .. } => false, // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. - Call { func, .. } => match dfg[*func] { + Call { func, .. } => match dfg[func.raw()] { // Explicitly allows removal of unused ec operations, even if they can fail Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)) | Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::EmbeddedCurveAdd)) => true, @@ -429,7 +429,7 @@ impl Instruction { Instruction::EnableSideEffectsIf { .. } | Instruction::ArraySet { .. } => true, - Instruction::Call { func, .. } => match dfg[*func] { + Instruction::Call { func, .. } => match dfg[func.raw()] { Value::Function(_) => true, Value::Intrinsic(intrinsic) => { matches!(intrinsic, Intrinsic::SliceInsert | Intrinsic::SliceRemove) @@ -740,7 +740,7 @@ impl Instruction { let then_value = dfg.resolve(*then_value); let else_value = dfg.resolve(*else_value); if then_value == else_value { - return SimplifiedTo(then_value); + return SimplifiedTo(then_value.into()); } if matches!(&typ, Type::Numeric(_)) { @@ -752,8 +752,8 @@ impl Instruction { block, then_condition, else_condition, - then_value, - else_value, + then_value.into(), + else_value.into(), ); SimplifiedTo(result) } else { @@ -781,7 +781,7 @@ impl Instruction { /// - If the array value is from a previous array-set, we recur. fn try_optimize_array_get_from_previous_set( dfg: &DataFlowGraph, - mut array_id: Id, + mut array_id: ValueId, target_index: FieldElement, ) -> SimplifyResult { let mut elements = None; @@ -789,7 +789,7 @@ fn try_optimize_array_get_from_previous_set( // Arbitrary number of maximum tries just to prevent this optimization from taking too long. let max_tries = 5; for _ in 0..max_tries { - match &dfg[array_id] { + match &dfg[array_id.raw()] { Value::Instruction { instruction, .. 
} => { match &dfg[*instruction] { Instruction::ArraySet { array, index, value, .. } => { @@ -856,13 +856,13 @@ fn try_optimize_array_set_from_previous_get( target_index: ValueId, target_value: ValueId, ) -> SimplifyResult { - let array_from_get = match &dfg[target_value] { + let array_from_get = match &dfg[target_value.raw()] { Value::Instruction { instruction, .. } => match &dfg[*instruction] { Instruction::ArrayGet { array, index } => { - if *array == array_id && *index == target_index { + if array_id.unresolved_eq(array) && target_index.unresolved_eq(index) { // If array and index match from the value, we can immediately simplify return SimplifyResult::SimplifiedTo(array_id); - } else if *index == target_index { + } else if target_index.unresolved_eq(index) { *array } else { return SimplifyResult::None; @@ -889,7 +889,7 @@ fn try_optimize_array_set_from_previous_get( // Arbitrary number of maximum tries just to prevent this optimization from taking too long. let max_tries = 5; for _ in 0..max_tries { - match &dfg[array_id] { + match &dfg[array_id.raw()] { Value::Instruction { instruction, .. } => match &dfg[*instruction] { Instruction::ArraySet { array, index, .. } => { let Some(index) = dfg.get_numeric_constant(*index) else { @@ -900,7 +900,7 @@ fn try_optimize_array_set_from_previous_get( return SimplifyResult::None; } - if *array == array_from_get { + if array_from_get.unresolved_eq(array) { return SimplifyResult::SimplifiedTo(original_array_id); } @@ -931,11 +931,11 @@ pub(crate) fn error_selector_from_type(typ: &ErrorType) -> ErrorSelector { } #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] -pub(crate) enum ConstrainError { +pub(crate) enum ConstrainError { // Static string errors are not handled inside the program as data for efficiency reasons. StaticString(String), // These errors are handled by the program as data. - Dynamic(ErrorSelector, Vec), + Dynamic(ErrorSelector, Vec>), } impl From for ConstrainError { @@ -973,7 +973,7 @@ pub(crate) enum InstructionResultType { /// to split up instructions like this, as we are sure that these instructions /// will not be in the list of instructions for a basic block. #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] -pub(crate) enum TerminatorInstruction { +pub(crate) enum TerminatorInstruction { /// Control flow /// /// Jump If @@ -981,7 +981,7 @@ pub(crate) enum TerminatorInstruction { /// If the condition is true: jump to the specified `then_destination`. /// Otherwise, jump to the specified `else_destination`. JmpIf { - condition: ValueId, + condition: ValueId, then_destination: BasicBlockId, else_destination: BasicBlockId, call_stack: CallStack, @@ -992,7 +992,7 @@ pub(crate) enum TerminatorInstruction { /// Jumps to specified `destination` with `arguments`. /// The CallStack here is expected to be used to issue an error when the start range of /// a for loop cannot be deduced at compile-time. - Jmp { destination: BasicBlockId, arguments: Vec, call_stack: CallStack }, + Jmp { destination: BasicBlockId, arguments: Vec>, call_stack: CallStack }, /// Return from the current function with the given return values. /// @@ -1001,7 +1001,7 @@ pub(crate) enum TerminatorInstruction { /// unconditionally jump to a single exit block with the return values /// as the block arguments. Then the exit block can terminate in a return /// instruction returning these values. 
- Return { return_values: Vec, call_stack: CallStack }, + Return { return_values: Vec>, call_stack: CallStack }, } impl TerminatorInstruction { diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs index 487370488b9..3ca827a5f29 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs @@ -2,7 +2,8 @@ use acvm::{acir::AcirField, FieldElement}; use serde::{Deserialize, Serialize}; use super::{ - DataFlowGraph, Instruction, InstructionResultType, NumericType, SimplifyResult, Type, ValueId, + DataFlowGraph, Instruction, InstructionResultType, NumericType, SimplifyResult, Type, + Unresolved, ValueId, }; /// Binary Operations allowed in the IR. @@ -66,11 +67,11 @@ impl std::fmt::Display for BinaryOp { /// A binary instruction in the IR. #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] -pub(crate) struct Binary { +pub(crate) struct Binary { /// Left hand side of the binary operation - pub(crate) lhs: ValueId, + pub(crate) lhs: ValueId, /// Right hand side of the binary operation - pub(crate) rhs: ValueId, + pub(crate) rhs: ValueId, /// The binary operation to apply pub(crate) operator: BinaryOp, } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs index ed588def1d7..7bcc0783e54 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs @@ -19,12 +19,12 @@ pub(super) fn simplify_cast( } } - if let Some(constant) = dfg.get_numeric_constant(value) { + if let Some(constant) = dfg.get_numeric_constant(value.into()) { let src_typ = dfg.type_of_value(value); match (src_typ, dst_typ) { (Type::Numeric(NumericType::NativeField), Type::Numeric(NumericType::NativeField)) => { // Field -> Field: use src value - SimplifiedTo(value) + SimplifiedTo(value.into()) } ( Type::Numeric(NumericType::Unsigned { .. } | NumericType::Signed { .. 
}), @@ -69,7 +69,7 @@ pub(super) fn simplify_cast( _ => None, } } else if *dst_typ == dfg.type_of_value(value) { - SimplifiedTo(value) + SimplifiedTo(value.into()) } else { None } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs index 66f50440d64..36bc50cd9cf 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs @@ -116,7 +116,7 @@ pub(super) fn decompose_constrain( decompose_constrain(value, reversed_constant, msg, dfg) } - _ => vec![Instruction::Constrain(lhs, rhs, msg.clone())], + _ => vec![Instruction::Constrain(lhs.into(), rhs.into(), msg.clone())], } } @@ -134,10 +134,10 @@ pub(super) fn decompose_constrain( vec![Instruction::Constrain(*original_lhs, *original_rhs, msg.clone())] } - _ => vec![Instruction::Constrain(lhs, rhs, msg.clone())], + _ => vec![Instruction::Constrain(lhs.into(), rhs.into(), msg.clone())], } } - _ => vec![Instruction::Constrain(lhs, rhs, msg.clone())], + _ => vec![Instruction::Constrain(lhs.into(), rhs.into(), msg.clone())], } } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/map.rs b/compiler/noirc_evaluator/src/ssa/ir/map.rs index 23f5380f030..29e9873acbd 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/map.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/map.rs @@ -99,7 +99,7 @@ impl std::fmt::Display for Id { } } -impl std::fmt::Display for Id { +impl std::fmt::Display for Id> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "v{}", self.index) } @@ -132,10 +132,10 @@ impl FromStr for Id { } } -impl FromStr for Id { +impl FromStr for Id> { type Err = IdDisplayFromStrErr; fn from_str(s: &str) -> Result { - id_from_str_helper::(s, 'v') + id_from_str_helper::>(s, 'v') } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/compiler/noirc_evaluator/src/ssa/ir/printer.rs index 2b564c14aa7..a52ccd24d3e 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -220,7 +220,7 @@ pub(crate) fn try_to_extract_string_from_error_payload( ((error_selector == STRING_ERROR_SELECTOR) && (values.len() == 1)) .then_some(()) .and_then(|()| { - let Value::Array { array: values, .. } = &dfg[values[0]] else { + let Value::Array { array: values, .. } = &dfg[values[0].raw()] else { return None; }; let fields: Option> = diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index 795d45c75e9..3433c776916 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -1,3 +1,5 @@ +use std::{hash::Hash, marker::PhantomData}; + use acvm::FieldElement; use serde::{Deserialize, Serialize}; @@ -10,12 +12,99 @@ use super::{ types::Type, }; -pub(crate) type ValueId = Id; +#[derive(Debug, Clone, Serialize, Deserialize)] +pub(crate) struct Unresolved; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +pub(crate) struct Resolved; + +/// A resolved value ID is something we can directly compare. +pub(crate) type ResolvedValueId = ValueId; + +/// A raw value ID that can be used as a key in maps. +pub(crate) type RawValueId = Id; + +/// A value ID that can either be unresolved or resolved. Before it's resolved it's +/// generally not safe to compare IDs with each other, as they might have been replaced +/// during SSA passes, without having updated all the other occurrences. 
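// --- Aside (not part of the patch): a plausible reason the `Copy`/`Clone` impls
// further down are written by hand rather than derived. `#[derive(Clone, Copy)]` on a
// struct that is generic over a marker `R` adds `R: Clone`/`R: Copy` bounds even though
// `PhantomData<R>` is always copyable, so the derived impls would not apply to markers
// that lack those traits. Manual impls avoid the extra bounds. Names below are
// illustrative only.
use std::marker::PhantomData;

// A marker with no derives at all, standing in for `Unresolved`/`Resolved`.
struct Marker;

struct TaggedId<R> {
    index: usize,
    _marker: PhantomData<R>,
}

// Manual impls place no bounds on `R`, so `TaggedId<Marker>` is still `Copy`.
impl<R> Copy for TaggedId<R> {}

impl<R> Clone for TaggedId<R> {
    fn clone(&self) -> Self {
        *self
    }
}

fn main() {
    let a: TaggedId<Marker> = TaggedId { index: 7, _marker: PhantomData };
    let b = a; // copies rather than moves
    println!("{} {}", a.index, b.index); // `a` remains usable because the type is `Copy`
}
// --- end of aside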
+#[derive(Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[serde(transparent)] +pub(crate) struct ValueId { + id: Id>, + #[serde(skip)] + _marker: PhantomData, +} + +impl ValueId { + pub fn new(id: Id>) -> Self { + Self { id, _marker: PhantomData } + } + + /// Access the underlying raw ID for indexing into data structures. + pub fn raw(&self) -> RawValueId { + Id::new(self.id.to_usize()) + } +} + +impl ValueId { + /// Be careful when using this comparison. + /// Sure the IDs don't have to be resolved first? + pub fn unresolved_eq(&self, other: &Self) -> bool { + self.id == other.id + } + /// Promote an unresolved ID into a resolved one. + pub fn resolved(self) -> ValueId { + ValueId::new(Id::new(self.id.to_usize())) + } +} + +impl Copy for ValueId {} + +impl Clone for ValueId { + fn clone(&self) -> Self { + *self + } +} + +impl std::fmt::Debug for ValueId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.id.fmt(f) + } +} + +impl std::fmt::Display for ValueId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.id.fmt(f) + } +} + +/// The underlying ID is often used to index into maps, but in general +/// we have to be careful when we use this method and how we compare +/// the raw IDs. +impl AsRef>> for ValueId { + fn as_ref(&self) -> &Id> { + &self.id + } +} + +/// Demote a resolved ID into an unresolved one. +impl Into> for ValueId { + fn into(self) -> ValueId { + ValueId::new(self.raw()) + } +} + +/// Wrap an `Id` into an equivalent `ValueId`` +impl Into> for Id> { + fn into(self) -> ValueId { + ValueId::new(self) + } +} /// Value is the most basic type allowed in the IR. /// Transition Note: A Id is similar to `NodeId` in our previous IR. #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] -pub(crate) enum Value { +pub(crate) enum Value { /// This value was created due to an instruction /// /// instruction -- This is the instruction which defined it @@ -37,7 +126,7 @@ pub(crate) enum Value { NumericConstant { constant: FieldElement, typ: Type }, /// Represents a constant array value - Array { array: im::Vector, typ: Type }, + Array { array: im::Vector>, typ: Type }, /// This Value refers to a function in the IR. /// Functions always have the type Type::Function. @@ -56,7 +145,7 @@ pub(crate) enum Value { ForeignFunction(String), } -impl Value { +impl Value { /// Retrieves the type of this Value pub(crate) fn get_type(&self) -> &Type { match self { diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs index 7d9694d4872..3149093d58c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs @@ -7,7 +7,7 @@ use crate::ssa::{ function::{Function, RuntimeType}, instruction::{Instruction, InstructionId, TerminatorInstruction}, types::Type::{Array, Slice}, - value::ValueId, + value::RawValueId, }, ssa_gen::Ssa, }; @@ -51,12 +51,12 @@ impl Function { struct Context<'f> { dfg: &'f DataFlowGraph, is_brillig_runtime: bool, - array_to_last_use: HashMap, + array_to_last_use: HashMap, instructions_that_can_be_made_mutable: HashSet, // Mapping of an array that comes from a load and whether the address // it was loaded from is a reference parameter. - arrays_from_load: HashMap, - inner_nested_arrays: HashMap, + arrays_from_load: HashMap, + inner_nested_arrays: HashMap, } impl<'f> Context<'f> { @@ -81,24 +81,28 @@ impl<'f> Context<'f> { Instruction::ArrayGet { array, .. 
} => { let array = self.dfg.resolve(*array); - if let Some(existing) = self.array_to_last_use.insert(array, *instruction_id) { + if let Some(existing) = + self.array_to_last_use.insert(array.raw(), *instruction_id) + { self.instructions_that_can_be_made_mutable.remove(&existing); } } Instruction::ArraySet { array, value, .. } => { let array = self.dfg.resolve(*array); - if let Some(existing) = self.array_to_last_use.insert(array, *instruction_id) { + if let Some(existing) = + self.array_to_last_use.insert(array.raw(), *instruction_id) + { self.instructions_that_can_be_made_mutable.remove(&existing); } if self.is_brillig_runtime { let value = self.dfg.resolve(*value); - if let Some(existing) = self.inner_nested_arrays.get(&value) { + if let Some(existing) = self.inner_nested_arrays.get(&value.raw()) { self.instructions_that_can_be_made_mutable.remove(existing); } let result = self.dfg.instruction_results(*instruction_id)[0]; - self.inner_nested_arrays.insert(result, *instruction_id); + self.inner_nested_arrays.insert(result.raw(), *instruction_id); } // If the array we are setting does not come from a load we can safely mark it mutable. @@ -117,7 +121,7 @@ impl<'f> Context<'f> { array_in_terminator = true; } }); - if let Some(is_from_param) = self.arrays_from_load.get(&array) { + if let Some(is_from_param) = self.arrays_from_load.get(&array.raw()) { // If the array was loaded from a reference parameter, we cannot // safely mark that array mutable as it may be shared by another value. if !is_from_param && is_return_block { @@ -134,7 +138,7 @@ impl<'f> Context<'f> { let argument = self.dfg.resolve(*argument); if let Some(existing) = - self.array_to_last_use.insert(argument, *instruction_id) + self.array_to_last_use.insert(argument.raw(), *instruction_id) { self.instructions_that_can_be_made_mutable.remove(&existing); } @@ -144,9 +148,12 @@ impl<'f> Context<'f> { Instruction::Load { address } => { let result = self.dfg.instruction_results(*instruction_id)[0]; if matches!(self.dfg.type_of_value(result), Array { .. } | Slice { .. 
}) { - let is_reference_param = - self.dfg.block_parameters(block_id).contains(address); - self.arrays_from_load.insert(result, is_reference_param); + let is_reference_param = self + .dfg + .block_parameters(block_id) + .iter() + .any(|p| p.unresolved_eq(address)); + self.arrays_from_load.insert(result.raw(), is_reference_param); } } _ => (), diff --git a/compiler/noirc_evaluator/src/ssa/opt/rc.rs b/compiler/noirc_evaluator/src/ssa/opt/rc.rs index c3606ac4311..841eead5ad1 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/rc.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/rc.rs @@ -136,7 +136,7 @@ pub(crate) fn pop_rc_for( let typ = function.dfg.type_of_value(value); let rcs = inc_rcs.get_mut(&typ)?; - let position = rcs.iter().position(|inc_rc| inc_rc.array == value)?; + let position = rcs.iter().position(|inc_rc| inc_rc.array.unresolved_eq(&value))?; Some(rcs.remove(position)) } From 73664a603204e80577d59e3cce4207ef2f62127c Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Fri, 8 Nov 2024 20:42:49 +0000 Subject: [PATCH 02/19] More fixes --- .../brillig_gen/constant_allocation.rs | 11 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 37 +- .../src/ssa/function_builder/data_bus.rs | 21 +- .../src/ssa/function_builder/mod.rs | 13 +- .../src/ssa/ir/function_inserter.rs | 70 +-- .../noirc_evaluator/src/ssa/ir/instruction.rs | 505 +++++++++--------- .../src/ssa/ir/instruction/binary.rs | 6 +- .../src/ssa/ir/instruction/call.rs | 15 +- compiler/noirc_evaluator/src/ssa/ir/value.rs | 20 +- .../src/ssa/opt/as_slice_length.rs | 2 +- .../src/ssa/opt/assert_constant.rs | 4 +- .../src/ssa/opt/constant_folding.rs | 118 ++-- .../src/ssa/opt/defunctionalize.rs | 21 +- .../ssa/opt/flatten_cfg/capacity_tracker.rs | 54 +- .../src/ssa/opt/flatten_cfg/value_merger.rs | 54 +- .../noirc_evaluator/src/ssa/opt/inlining.rs | 29 +- .../src/ssa/opt/mem2reg/alias_set.rs | 15 +- .../src/ssa/opt/mem2reg/block.rs | 28 +- .../src/ssa/opt/normalize_value_ids.rs | 10 +- .../src/ssa/opt/remove_enable_side_effects.rs | 9 +- .../src/ssa/opt/remove_if_else.rs | 24 +- .../src/ssa/opt/resolve_is_unconstrained.rs | 2 +- .../src/ssa/opt/runtime_separation.rs | 12 +- .../noirc_evaluator/src/ssa/opt/unrolling.rs | 4 +- .../src/ssa/ssa_gen/context.rs | 3 +- 25 files changed, 583 insertions(+), 504 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs index f9ded224b33..5428cff8dc5 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs @@ -11,7 +11,7 @@ use crate::ssa::ir::{ function::Function, instruction::InstructionId, post_order::PostOrder, - value::{Value, ValueId}, + value::{RawValueId, Value, ValueId}, }; use super::variable_liveness::{collect_variables_of_value, variables_used_in_instruction}; @@ -23,7 +23,7 @@ pub(crate) enum InstructionLocation { } pub(crate) struct ConstantAllocation { - constant_usage: HashMap>>, + constant_usage: HashMap>>, allocation_points: HashMap>>, dominator_tree: DominatorTree, blocks_within_loops: HashSet, @@ -68,7 +68,7 @@ impl ConstantAllocation { |block_id: BasicBlockId, value_id: ValueId, location: InstructionLocation| { if is_constant_value(value_id, &func.dfg) { self.constant_usage - .entry(value_id) + .entry(value_id.raw()) .or_default() .entry(block_id) .or_default() @@ -101,8 +101,9 @@ impl ConstantAllocation { fn decide_allocation_points(&mut self, func: &Function) { 
for (constant_id, usage_in_blocks) in self.constant_usage.iter() { let block_ids: Vec<_> = usage_in_blocks.iter().map(|(block_id, _)| *block_id).collect(); + let constant_id = constant_id.into(); - let allocation_point = self.decide_allocation_point(*constant_id, &block_ids, func); + let allocation_point = self.decide_allocation_point(constant_id, &block_ids, func); // If the allocation point is one of the places where it's used, we take the first usage in the allocation point. // Otherwise, we allocate it at the terminator of the allocation point. @@ -121,7 +122,7 @@ impl ConstantAllocation { .or_default() .entry(location) .or_default() - .push(*constant_id); + .push(constant_id); } } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index ea44ffeb7fa..a42a6223581 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -11,6 +11,7 @@ use super::ir::dfg::CallStack; use super::ir::function::FunctionId; use super::ir::instruction::{ConstrainError, ErrorType}; use super::ir::printer::try_to_extract_string_from_error_payload; +use super::ir::value::RawValueId; use super::{ ir::{ dfg::DataFlowGraph, @@ -149,7 +150,7 @@ struct Context<'a> { /// AcirVar per SSA value. Before creating an `AcirVar` /// for an SSA value, we check this map. If an `AcirVar` /// already exists for this Value, we return the `AcirVar`. - ssa_values: HashMap, AcirValue>, + ssa_values: HashMap, /// The `AcirVar` that describes the condition belonging to the most recently invoked /// `SideEffectsEnabled` instruction. @@ -169,13 +170,13 @@ struct Context<'a> { /// Maps SSA values to BlockId /// A BlockId is an ACIR structure which identifies a memory block /// Each acir memory block corresponds to a different SSA array. - memory_blocks: HashMap, BlockId>, + memory_blocks: HashMap, /// Maps SSA values to a BlockId used internally /// A BlockId is an ACIR structure which identifies a memory block /// Each memory blocks corresponds to a different SSA value /// which utilizes this internal memory for ACIR generation. - internal_memory_blocks: HashMap, BlockId>, + internal_memory_blocks: HashMap, /// Maps an internal memory block to its length /// @@ -574,7 +575,7 @@ impl<'a> Context<'a> { "The dynamic array type is created in Acir gen and therefore cannot be a block parameter" ), } - self.ssa_values.insert(*param_id, value); + self.ssa_values.insert(param_id.raw(), value); } let end_witness = self.acir_context.current_witness_index().0; let witnesses = (start_witness..=end_witness).map(Witness::from).collect(); @@ -613,12 +614,12 @@ impl<'a> Context<'a> { /// Get the BlockId corresponding to the ValueId /// If there is no matching BlockId, we create a new one. fn block_id(&mut self, value: &ValueId) -> BlockId { - if let Some(block_id) = self.memory_blocks.get(value) { + if let Some(block_id) = self.memory_blocks.get(value.as_ref()) { return *block_id; } let block_id = BlockId(self.max_block_id); self.max_block_id += 1; - self.memory_blocks.insert(*value, block_id); + self.memory_blocks.insert(value.raw(), block_id); block_id } @@ -628,12 +629,12 @@ impl<'a> Context<'a> { /// only be computed dynamically, such as the type structure /// of non-homogenous arrays. 
fn internal_block_id(&mut self, value: &ValueId) -> BlockId { - if let Some(block_id) = self.internal_memory_blocks.get(value) { + if let Some(block_id) = self.internal_memory_blocks.get(value.as_ref()) { return *block_id; } let block_id = BlockId(self.max_block_id); self.max_block_id += 1; - self.internal_memory_blocks.insert(*value, block_id); + self.internal_memory_blocks.insert(value.raw(), block_id); block_id } @@ -784,7 +785,7 @@ impl<'a> Context<'a> { match instruction { Instruction::Call { func, arguments } => { - let function_value = &dfg[*func]; + let function_value = &dfg[func.raw()]; match function_value { Value::Function(id) => { let func = &ssa.functions[id]; @@ -925,7 +926,7 @@ impl<'a> Context<'a> { for (result_id, output) in result_ids.iter().zip(output_values) { if let AcirValue::Array(_) = &output { let array_id = dfg.resolve(*result_id); - let block_id = self.block_id(&array_id); + let block_id = self.block_id(&array_id.into()); let array_typ = dfg.type_of_value(array_id); let len = if matches!(array_typ, Type::Array(_, _)) { array_typ.flattened_size() @@ -937,7 +938,7 @@ impl<'a> Context<'a> { // Do nothing for AcirValue::DynamicArray and AcirValue::Var // A dynamic array returned from a function call should already be initialized // and a single variable does not require any extra initialization. - self.ssa_values.insert(*result_id, output); + self.ssa_values.insert(result_id.raw(), output); } Ok(()) } @@ -954,7 +955,7 @@ impl<'a> Context<'a> { if let Type::Slice(item_types) = typ { let len = match self .ssa_values - .get(&value_id) + .get(value_id.as_ref()) .expect("ICE: Unknown slice input to brillig") { AcirValue::DynamicArray(AcirDynamicArray { len, .. }) => *len, @@ -1044,10 +1045,7 @@ impl<'a> Context<'a> { } }; // Ensure that array id is fully resolved. - let array = dfg.resolve(array); - - let array_id = dfg.resolve(array); - let array_typ = dfg.type_of_value(array_id); + let array_typ = dfg.type_of_value(array); // Compiler sanity checks assert!(!array_typ.is_nested_slice(), "ICE: Nested slice type has reached ACIR generation"); let (Type::Array(_, _) | Type::Slice(_)) = &array_typ else { @@ -1102,15 +1100,14 @@ impl<'a> Context<'a> { index: ValueId, store_value: Option, ) -> Result { - let array_id = dfg.resolve(array); - let array_typ = dfg.type_of_value(array_id); + let array_typ = dfg.type_of_value(array); // Compiler sanity checks assert!(!array_typ.is_nested_slice(), "ICE: Nested slice type has reached ACIR generation"); let (Type::Array(_, _) | Type::Slice(_)) = &array_typ else { unreachable!("ICE: expected array or slice type"); }; - match self.convert_value(array_id, dfg) { + match self.convert_value(array, dfg) { AcirValue::Var(acir_var, _) => { Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), @@ -2134,7 +2131,7 @@ impl<'a> Context<'a> { dfg: &DataFlowGraph, ) -> Result { let mut var = self.convert_numeric_value(value_id, dfg)?; - match &dfg[value_id] { + match &dfg[value_id.raw()] { Value::Instruction { instruction, .. 
} => { if matches!( &dfg[*instruction], diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index 5a62e9c8e9a..0d702e0a7f7 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -1,6 +1,9 @@ use std::{collections::BTreeMap, sync::Arc}; -use crate::ssa::ir::{types::Type, value::ValueId}; +use crate::ssa::ir::{ + types::Type, + value::{RawValueId, ValueId}, +}; use acvm::FieldElement; use fxhash::FxHashMap as HashMap; use noirc_frontend::ast; @@ -21,7 +24,7 @@ pub(crate) enum DatabusVisibility { pub(crate) struct DataBusBuilder { pub(crate) values: im::Vector, index: usize, - pub(crate) map: HashMap, + pub(crate) map: HashMap, pub(crate) databus: Option, call_data_id: Option, } @@ -60,7 +63,7 @@ pub(crate) struct CallData { /// The id to this calldata assigned by the user pub(crate) call_data_id: u32, pub(crate) array_id: ValueId, - pub(crate) index_map: HashMap, + pub(crate) index_map: HashMap, } #[derive(Clone, Default, Debug, Serialize, Deserialize)] @@ -81,17 +84,17 @@ impl DataBus { call_data_map.insert(f(*k), *v); } CallData { - array_id: f(cd.array_id), + array_id: f(cd.array_id.raw()).into(), index_map: call_data_map, call_data_id: cd.call_data_id, } }) .collect(); - DataBus { call_data, return_data: self.return_data.map(&mut f) } + DataBus { call_data, return_data: self.return_data.map(|rd| f(rd.raw()).into()) } } pub(crate) fn call_data_array(&self) -> Vec<(u32, ValueId)> { - self.call_data.iter().map(|cd| (cd.call_data_id, cd.array_id)).collect() + self.call_data.iter().map(|cd| (cd.call_data_id, cd.array_id.into())).collect() } /// Construct a databus from call_data and return_data data bus builders pub(crate) fn get_data_bus( @@ -114,14 +117,14 @@ impl FunctionBuilder { /// Insert a value into a data bus builder fn add_to_data_bus(&mut self, value: ValueId, databus: &mut DataBusBuilder) { assert!(databus.databus.is_none(), "initializing finalized call data"); - let typ = self.current_function.dfg[value].get_type().clone(); + let typ = self.current_function.dfg[value.raw()].get_type().clone(); match typ { Type::Numeric(_) => { databus.values.push_back(value); databus.index += 1; } Type::Array(typ, len) => { - databus.map.insert(value, databus.index); + databus.map.insert(value.raw(), databus.index); let mut index = 0; for _i in 0..len { @@ -226,7 +229,7 @@ impl FunctionBuilder { ) -> Vec { let ssa_param_sizes: Vec<_> = ssa_params .iter() - .map(|ssa_param| self.current_function.dfg[*ssa_param].get_type().flattened_size()) + .map(|ssa_param| self.current_function.dfg[ssa_param.raw()].get_type().flattened_size()) .collect(); let mut is_ssa_params_databus = Vec::with_capacity(ssa_params.len()); diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index aebb47ccf8e..2bc95dd4923 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -415,7 +415,7 @@ impl FunctionBuilder { } pub(crate) fn get_intrinsic_from_value(&mut self, value: ValueId) -> Option { - match self.current_function.dfg[value] { + match self.current_function.dfg[value.raw()] { Value::Intrinsic(intrinsic) => Some(intrinsic), _ => None, } @@ -464,7 +464,7 @@ impl std::ops::Index for FunctionBuilder { type Output = Value; fn index(&self, id: ValueId) -> &Self::Output { - 
&self.current_function.dfg[id] + &self.current_function.dfg[id.raw()] } } @@ -489,6 +489,7 @@ mod tests { use std::sync::Arc; use acvm::{acir::AcirField, FieldElement}; + use iter_extended::vecmap; use crate::ssa::ir::{ instruction::{Endian, Intrinsic}, @@ -506,8 +507,8 @@ mod tests { // let bits: [u1; 8] = x.to_le_bits(); let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id); - let one = builder.numeric_constant(FieldElement::one(), Type::bool()); - let zero = builder.numeric_constant(FieldElement::zero(), Type::bool()); + let one = builder.numeric_constant(FieldElement::one(), Type::bool()).resolved(); + let zero = builder.numeric_constant(FieldElement::zero(), Type::bool()).resolved(); let to_bits_id = builder.import_intrinsic_id(Intrinsic::ToBits(Endian::Little)); let input = builder.numeric_constant(FieldElement::from(7_u128), Type::field()); @@ -516,8 +517,8 @@ mod tests { let call_results = builder.insert_call(to_bits_id, vec![input, length], result_types).into_owned(); - let slice = match &builder.current_function.dfg[call_results[0]] { - Value::Array { array, .. } => array, + let slice = match &builder.current_function.dfg[call_results[0].raw()] { + Value::Array { array, .. } => vecmap(array, |v| v.resolved()), _ => panic!(), }; assert_eq!(slice[0], one); diff --git a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index 991ff22c902..eb2a2da7f29 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -7,7 +7,7 @@ use super::{ dfg::{CallStack, InsertInstructionResult}, function::Function, instruction::{Instruction, InstructionId}, - value::ValueId, + value::{RawValueId, ResolvedValueId, ValueId}, }; use fxhash::FxHashMap as HashMap; @@ -17,11 +17,11 @@ use fxhash::FxHashMap as HashMap; pub(crate) struct FunctionInserter<'f> { pub(crate) function: &'f mut Function, - values: HashMap, + values: HashMap, /// Map containing repeat array constants so that we do not initialize a new /// array unnecessarily. An extra tuple field is included as part of the key to /// distinguish between array/slice types. - const_arrays: HashMap<(im::Vector, Type), ValueId>, + const_arrays: HashMap<(im::Vector, Type), ValueId>, } impl<'f> FunctionInserter<'f> { @@ -32,28 +32,26 @@ impl<'f> FunctionInserter<'f> { /// Resolves a ValueId to its new, updated value. /// If there is no updated value for this id, this returns the same /// ValueId that was passed in. 
- pub(crate) fn resolve(&mut self, mut value: ValueId) -> ValueId { - value = self.function.dfg.resolve(value); - match self.values.get(&value) { + pub(crate) fn resolve(&mut self, value: ValueId) -> ResolvedValueId { + let value = self.function.dfg.resolve(value); + match self.values.get(&value.raw()) { Some(value) => self.resolve(*value), - None => match &self.function.dfg[value] { + None => match &self.function.dfg[value.raw()] { super::value::Value::Array { array, typ } => { - let array = array.clone(); let typ = typ.clone(); - let new_array: im::Vector = - array.iter().map(|id| self.resolve(*id)).collect(); + let new_array: im::Vector = + array.into_iter().map(|id| self.resolve(*id)).collect(); + let array_and_typ = (new_array, typ); - if let Some(fetched_value) = - self.const_arrays.get(&(new_array.clone(), typ.clone())) - { - return *fetched_value; + if let Some(fetched_value) = self.const_arrays.get(&array_and_typ) { + return fetched_value.resolved(); }; - let new_array_clone = new_array.clone(); - let new_id = self.function.dfg.make_array(new_array, typ.clone()); - self.values.insert(value, new_id); - self.const_arrays.insert((new_array_clone, typ), new_id); - new_id + let new_array = array_and_typ.0.iter().map(|v| v.into()).collect(); + let new_id = self.function.dfg.make_array(new_array, array_and_typ.1.clone()); + self.values.insert(value.raw(), new_id); + self.const_arrays.insert(array_and_typ, new_id); + new_id.resolved() } _ => value, }, @@ -62,27 +60,29 @@ impl<'f> FunctionInserter<'f> { /// Insert a key, value pair if the key isn't already present in the map pub(crate) fn try_map_value(&mut self, key: ValueId, value: ValueId) { - if key == value { + if key.unresolved_eq(&value) { // This case is technically not needed since try_map_value isn't meant to change // existing entries, but we should never have a value in the map referring to itself anyway. - self.values.remove(&key); + self.values.remove(key.as_ref()); } else { - self.values.entry(key).or_insert(value); + self.values.entry(key.raw()).or_insert(value); } } /// Insert a key, value pair in the map pub(crate) fn map_value(&mut self, key: ValueId, value: ValueId) { - if key == value { - self.values.remove(&key); + if key.unresolved_eq(&value) { + self.values.remove(key.as_ref()); } else { - self.values.insert(key, value); + self.values.insert(key.raw(), value); } } + /// Maps an instruction, replacing any ValueId in the instruction with the + /// resolved version of that value id from this FunctionInserter's internal value mapping. pub(crate) fn map_instruction(&mut self, id: InstructionId) -> (Instruction, CallStack) { ( - self.function.dfg[id].clone().map_values(|id| self.resolve(id)), + self.function.dfg[id].clone().map_values(|id| self.resolve(id).into()), self.function.dfg.get_call_stack(id), ) } @@ -91,7 +91,7 @@ impl<'f> FunctionInserter<'f> { /// resolved version of that value id from this FunctionInserter's internal value mapping. pub(crate) fn map_terminator_in_place(&mut self, block: BasicBlockId) { let mut terminator = self.function.dfg[block].take_terminator(); - terminator.mutate_values(|value| self.resolve(value)); + terminator.mutate_values(|value| self.resolve(value).into()); self.function.dfg[block].set_terminator(terminator); } @@ -99,7 +99,7 @@ impl<'f> FunctionInserter<'f> { /// resolved version of that value id from this FunctionInserter's internal value mapping. 
pub(crate) fn map_data_bus_in_place(&mut self) { let data_bus = self.function.dfg.data_bus.clone(); - let data_bus = data_bus.map_values(|value| self.resolve(value)); + let data_bus = data_bus.map_values(|value| self.resolve(value).into()); self.function.dfg.data_bus = data_bus; } @@ -146,24 +146,24 @@ impl<'f> FunctionInserter<'f> { /// Modify the values HashMap to remember the mapping between an instruction result's previous /// ValueId (from the source_function) and its new ValueId in the destination function. pub(crate) fn insert_new_instruction_results( - values: &mut HashMap, - old_results: &[ValueId], + values: &mut HashMap, + old_results: &[ResolvedValueId], new_results: &InsertInstructionResult, ) { assert_eq!(old_results.len(), new_results.len()); match new_results { InsertInstructionResult::SimplifiedTo(new_result) => { - values.insert(old_results[0], *new_result); + values.insert(old_results[0].raw(), *new_result); } InsertInstructionResult::SimplifiedToMultiple(new_results) => { for (old_result, new_result) in old_results.iter().zip(new_results) { - values.insert(*old_result, *new_result); + values.insert(old_result.raw(), *new_result); } } InsertInstructionResult::Results(_, new_results) => { for (old_result, new_result) in old_results.iter().zip(*new_results) { - values.insert(*old_result, *new_result); + values.insert(old_result.raw(), *new_result); } } InsertInstructionResult::InstructionRemoved => (), @@ -174,7 +174,7 @@ impl<'f> FunctionInserter<'f> { let old_parameters = self.function.dfg.block_parameters(block); for (param, new_param) in old_parameters.iter().zip(new_values) { - self.values.entry(*param).or_insert(*new_param); + self.values.entry(param.raw()).or_insert(*new_param); } } @@ -188,7 +188,7 @@ impl<'f> FunctionInserter<'f> { for (param, new_param) in old_parameters.iter().zip(new_parameters) { // Don't overwrite any existing entries to avoid overwriting the induction variable - self.values.entry(*param).or_insert(*new_param); + self.values.entry(param.raw()).or_insert(*new_param); } } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index d4e53ab9192..a884aa6eef4 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -284,244 +284,6 @@ pub(crate) enum Instruction { } impl Instruction { - /// Returns a binary instruction with the given operator, lhs, and rhs - pub(crate) fn binary(operator: BinaryOp, lhs: ValueId, rhs: ValueId) -> Instruction { - Instruction::Binary(Binary { lhs, operator, rhs }) - } - - /// Returns the type that this instruction will return. - pub(crate) fn result_type(&self) -> InstructionResultType { - match self { - Instruction::Binary(binary) => binary.result_type(), - Instruction::Cast(_, typ) => InstructionResultType::Known(typ.clone()), - Instruction::Not(value) - | Instruction::Truncate { value, .. } - | Instruction::ArraySet { array: value, .. } - | Instruction::IfElse { then_value: value, .. } => { - InstructionResultType::Operand(*value) - } - Instruction::Constrain(..) - | Instruction::Store { .. } - | Instruction::IncrementRc { .. } - | Instruction::DecrementRc { .. } - | Instruction::RangeCheck { .. } - | Instruction::EnableSideEffectsIf { .. } => InstructionResultType::None, - Instruction::Allocate { .. } - | Instruction::Load { .. } - | Instruction::ArrayGet { .. } - | Instruction::Call { .. 
} => InstructionResultType::Unknown, - } - } - - /// True if this instruction requires specifying the control type variables when - /// inserting this instruction into a DataFlowGraph. - pub(crate) fn requires_ctrl_typevars(&self) -> bool { - matches!(self.result_type(), InstructionResultType::Unknown) - } - - /// Indicates if the instruction can be safely replaced with the results of another instruction with the same inputs. - /// If `deduplicate_with_predicate` is set, we assume we're deduplicating with the instruction - /// and its predicate, rather than just the instruction. Setting this means instructions that - /// rely on predicates can be deduplicated as well. - pub(crate) fn can_be_deduplicated( - &self, - dfg: &DataFlowGraph, - deduplicate_with_predicate: bool, - ) -> bool { - use Instruction::*; - - match self { - // These either have side-effects or interact with memory - EnableSideEffectsIf { .. } - | Allocate - | Load { .. } - | Store { .. } - | IncrementRc { .. } - | DecrementRc { .. } => false, - - Call { func, .. } => match dfg[func.raw()] { - Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), - _ => false, - }, - - // We can deduplicate these instructions if we know the predicate is also the same. - Constrain(..) | RangeCheck { .. } => deduplicate_with_predicate, - - // These can have different behavior depending on the EnableSideEffectsIf context. - // Replacing them with a similar instruction potentially enables replacing an instruction - // with one that was disabled. See - // https://github.com/noir-lang/noir/pull/4716#issuecomment-2047846328. - Binary(_) - | Cast(_, _) - | Not(_) - | Truncate { .. } - | IfElse { .. } - | ArrayGet { .. } - | ArraySet { .. } => { - deduplicate_with_predicate || !self.requires_acir_gen_predicate(dfg) - } - } - } - - pub(crate) fn can_eliminate_if_unused(&self, dfg: &DataFlowGraph) -> bool { - use Instruction::*; - match self { - Binary(binary) => { - if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) { - if let Some(rhs) = dfg.get_numeric_constant(binary.rhs) { - rhs != FieldElement::zero() - } else { - false - } - } else { - true - } - } - Cast(_, _) - | Not(_) - | Truncate { .. } - | Allocate - | Load { .. } - | ArrayGet { .. } - | IfElse { .. } - | ArraySet { .. } => true, - - Constrain(..) - | Store { .. } - | EnableSideEffectsIf { .. } - | IncrementRc { .. } - | DecrementRc { .. } - | RangeCheck { .. } => false, - - // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. - Call { func, .. } => match dfg[func.raw()] { - // Explicitly allows removal of unused ec operations, even if they can fail - Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)) - | Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::EmbeddedCurveAdd)) => true, - Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), - - // All foreign functions are treated as having side effects. - // This is because they can be used to pass information - // from the ACVM to the external world during execution. - Value::ForeignFunction(_) => false, - - // We must assume that functions contain a side effect as we cannot inspect more deeply. 
- Value::Function(_) => false, - - _ => false, - }, - } - } - - /// If true the instruction will depends on enable_side_effects context during acir-gen - pub(crate) fn requires_acir_gen_predicate(&self, dfg: &DataFlowGraph) -> bool { - match self { - Instruction::Binary(binary) - if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) => - { - true - } - - Instruction::ArrayGet { array, index } => { - // `ArrayGet`s which read from "known good" indices from an array should not need a predicate. - !dfg.is_safe_index(*index, *array) - } - - Instruction::EnableSideEffectsIf { .. } | Instruction::ArraySet { .. } => true, - - Instruction::Call { func, .. } => match dfg[func.raw()] { - Value::Function(_) => true, - Value::Intrinsic(intrinsic) => { - matches!(intrinsic, Intrinsic::SliceInsert | Intrinsic::SliceRemove) - } - _ => false, - }, - Instruction::Cast(_, _) - | Instruction::Binary(_) - | Instruction::Not(_) - | Instruction::Truncate { .. } - | Instruction::Constrain(_, _, _) - | Instruction::RangeCheck { .. } - | Instruction::Allocate - | Instruction::Load { .. } - | Instruction::Store { .. } - | Instruction::IfElse { .. } - | Instruction::IncrementRc { .. } - | Instruction::DecrementRc { .. } => false, - } - } - - /// Maps each ValueId inside this instruction to a new ValueId, returning the new instruction. - /// Note that the returned instruction is fresh and will not have an assigned InstructionId - /// until it is manually inserted in a DataFlowGraph later. - pub(crate) fn map_values(&self, mut f: impl FnMut(ValueId) -> ValueId) -> Instruction { - match self { - Instruction::Binary(binary) => Instruction::Binary(Binary { - lhs: f(binary.lhs), - rhs: f(binary.rhs), - operator: binary.operator, - }), - Instruction::Cast(value, typ) => Instruction::Cast(f(*value), typ.clone()), - Instruction::Not(value) => Instruction::Not(f(*value)), - Instruction::Truncate { value, bit_size, max_bit_size } => Instruction::Truncate { - value: f(*value), - bit_size: *bit_size, - max_bit_size: *max_bit_size, - }, - Instruction::Constrain(lhs, rhs, assert_message) => { - // Must map the `lhs` and `rhs` first as the value `f` is moved with the closure - let lhs = f(*lhs); - let rhs = f(*rhs); - let assert_message = assert_message.as_ref().map(|error| match error { - ConstrainError::Dynamic(selector, payload_values) => ConstrainError::Dynamic( - *selector, - payload_values.iter().map(|&value| f(value)).collect(), - ), - _ => error.clone(), - }); - Instruction::Constrain(lhs, rhs, assert_message) - } - Instruction::Call { func, arguments } => Instruction::Call { - func: f(*func), - arguments: vecmap(arguments.iter().copied(), f), - }, - Instruction::Allocate => Instruction::Allocate, - Instruction::Load { address } => Instruction::Load { address: f(*address) }, - Instruction::Store { address, value } => { - Instruction::Store { address: f(*address), value: f(*value) } - } - Instruction::EnableSideEffectsIf { condition } => { - Instruction::EnableSideEffectsIf { condition: f(*condition) } - } - Instruction::ArrayGet { array, index } => { - Instruction::ArrayGet { array: f(*array), index: f(*index) } - } - Instruction::ArraySet { array, index, value, mutable } => Instruction::ArraySet { - array: f(*array), - index: f(*index), - value: f(*value), - mutable: *mutable, - }, - Instruction::IncrementRc { value } => Instruction::IncrementRc { value: f(*value) }, - Instruction::DecrementRc { value } => Instruction::DecrementRc { value: f(*value) }, - Instruction::RangeCheck { value, max_bit_size, assert_message } 
=> { - Instruction::RangeCheck { - value: f(*value), - max_bit_size: *max_bit_size, - assert_message: assert_message.clone(), - } - } - Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { - Instruction::IfElse { - then_condition: f(*then_condition), - then_value: f(*then_value), - else_condition: f(*else_condition), - else_value: f(*else_value), - } - } - } - } - /// Applies a function to each input value this instruction holds. pub(crate) fn for_each_value(&self, mut f: impl FnMut(ValueId) -> T) { match self { @@ -762,6 +524,255 @@ impl Instruction { } } } + + /// Pretend the value IDs have been resolved. + #[cfg(test)] + pub(crate) fn resolved(&self) -> Instruction { + self.map_values(|v| v.resolved()) + } +} + +impl Instruction { + /// Returns a binary instruction with the given operator, lhs, and rhs + pub(crate) fn binary(operator: BinaryOp, lhs: ValueId, rhs: ValueId) -> Instruction { + Instruction::Binary(Binary { lhs, operator, rhs }) + } + + /// Returns the type that this instruction will return. + pub(crate) fn result_type(&self) -> InstructionResultType { + match self { + Instruction::Binary(binary) => binary.result_type(), + Instruction::Cast(_, typ) => InstructionResultType::Known(typ.clone()), + Instruction::Not(value) + | Instruction::Truncate { value, .. } + | Instruction::ArraySet { array: value, .. } + | Instruction::IfElse { then_value: value, .. } => { + InstructionResultType::Operand(*value) + } + Instruction::Constrain(..) + | Instruction::Store { .. } + | Instruction::IncrementRc { .. } + | Instruction::DecrementRc { .. } + | Instruction::RangeCheck { .. } + | Instruction::EnableSideEffectsIf { .. } => InstructionResultType::None, + Instruction::Allocate { .. } + | Instruction::Load { .. } + | Instruction::ArrayGet { .. } + | Instruction::Call { .. } => InstructionResultType::Unknown, + } + } + + /// True if this instruction requires specifying the control type variables when + /// inserting this instruction into a DataFlowGraph. + pub(crate) fn requires_ctrl_typevars(&self) -> bool { + matches!(self.result_type(), InstructionResultType::Unknown) + } + + /// Maps each ValueId inside this instruction to a new ValueId, returning the new instruction. + /// Note that the returned instruction is fresh and will not have an assigned InstructionId + /// until it is manually inserted in a DataFlowGraph later. 
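The doc comment above introduces the generic `map_values` below: because the mapping closure may hand back ids in a different resolution state, a single pass can rebuild an instruction while switching the resolution marker on all of its operands. Here is a minimal, self-contained sketch of that pattern; the `Id`, `Instr`, `Resolved` and `Unresolved` types are toy stand-ins for illustration, not the compiler's own definitions.

use std::marker::PhantomData;

struct Unresolved;
struct Resolved;

struct Id<R>(u32, PhantomData<R>);

impl<R> Id<R> {
    fn new(index: u32) -> Self {
        Id(index, PhantomData)
    }
}

// Manual impls so `Id<R>` is copyable for every marker type `R`.
impl<R> Clone for Id<R> {
    fn clone(&self) -> Self {
        Id(self.0, PhantomData)
    }
}
impl<R> Copy for Id<R> {}

// A toy instruction that is generic over the resolution state of its operands.
enum Instr<R = Unresolved> {
    Add(Id<R>, Id<R>),
    Not(Id<R>),
}

impl<R> Instr<R> {
    // Rebuild the instruction, mapping every operand; the mapping is free to
    // change the resolution marker, e.g. from `Unresolved` to `Resolved`.
    fn map_values<S>(&self, mut f: impl FnMut(Id<R>) -> Id<S>) -> Instr<S> {
        match self {
            Instr::Add(lhs, rhs) => Instr::Add(f(*lhs), f(*rhs)),
            Instr::Not(value) => Instr::Not(f(*value)),
        }
    }
}

fn main() {
    let raw: Instr<Unresolved> = Instr::Add(Id::new(1), Id::new(2));
    // Pretend resolution keeps the same index; only the type-level state changes.
    let resolved: Instr<Resolved> = raw.map_values(|id| Id::new(id.0));
    match resolved {
        Instr::Add(lhs, rhs) => println!("add v{} v{}", lhs.0, rhs.0),
        Instr::Not(value) => println!("not v{}", value.0),
    }
}

The manual `Clone`/`Copy` impls in the sketch keep the id copyable for every marker type without requiring the marker itself to be `Copy`, which is the usual trick when a derive would add an unwanted bound.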
+ pub(crate) fn map_values( + &self, + mut f: impl FnMut(ValueId) -> ValueId, + ) -> Instruction { + match self { + Instruction::Binary(binary) => Instruction::Binary(Binary { + lhs: f(binary.lhs), + rhs: f(binary.rhs), + operator: binary.operator, + }), + Instruction::Cast(value, typ) => Instruction::Cast(f(*value), typ.clone()), + Instruction::Not(value) => Instruction::Not(f(*value)), + Instruction::Truncate { value, bit_size, max_bit_size } => Instruction::Truncate { + value: f(*value), + bit_size: *bit_size, + max_bit_size: *max_bit_size, + }, + Instruction::Constrain(lhs, rhs, assert_message) => { + // Must map the `lhs` and `rhs` first as the value `f` is moved with the closure + let lhs = f(*lhs); + let rhs = f(*rhs); + let assert_message = assert_message.as_ref().map(|error| match error { + ConstrainError::Dynamic(selector, payload_values) => ConstrainError::Dynamic( + *selector, + payload_values.iter().map(|&value| f(value)).collect(), + ), + ConstrainError::StaticString(s) => ConstrainError::StaticString(s.clone()), + }); + Instruction::Constrain(lhs, rhs, assert_message) + } + Instruction::Call { func, arguments } => Instruction::Call { + func: f(*func), + arguments: vecmap(arguments.iter().copied(), f), + }, + Instruction::Allocate => Instruction::Allocate, + Instruction::Load { address } => Instruction::Load { address: f(*address) }, + Instruction::Store { address, value } => { + Instruction::Store { address: f(*address), value: f(*value) } + } + Instruction::EnableSideEffectsIf { condition } => { + Instruction::EnableSideEffectsIf { condition: f(*condition) } + } + Instruction::ArrayGet { array, index } => { + Instruction::ArrayGet { array: f(*array), index: f(*index) } + } + Instruction::ArraySet { array, index, value, mutable } => Instruction::ArraySet { + array: f(*array), + index: f(*index), + value: f(*value), + mutable: *mutable, + }, + Instruction::IncrementRc { value } => Instruction::IncrementRc { value: f(*value) }, + Instruction::DecrementRc { value } => Instruction::DecrementRc { value: f(*value) }, + Instruction::RangeCheck { value, max_bit_size, assert_message } => { + Instruction::RangeCheck { + value: f(*value), + max_bit_size: *max_bit_size, + assert_message: assert_message.clone(), + } + } + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { + Instruction::IfElse { + then_condition: f(*then_condition), + then_value: f(*then_value), + else_condition: f(*else_condition), + else_value: f(*else_value), + } + } + } + } + + /// If true the instruction will depends on enable_side_effects context during acir-gen + pub(crate) fn requires_acir_gen_predicate(&self, dfg: &DataFlowGraph) -> bool { + match self { + Instruction::Binary(binary) + if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) => + { + true + } + + Instruction::ArrayGet { array, index } => { + // `ArrayGet`s which read from "known good" indices from an array should not need a predicate. + !dfg.is_safe_index(index.into(), array.into()) + } + + Instruction::EnableSideEffectsIf { .. } | Instruction::ArraySet { .. } => true, + + Instruction::Call { func, .. } => match dfg[func.raw()] { + Value::Function(_) => true, + Value::Intrinsic(intrinsic) => { + matches!(intrinsic, Intrinsic::SliceInsert | Intrinsic::SliceRemove) + } + _ => false, + }, + Instruction::Cast(_, _) + | Instruction::Binary(_) + | Instruction::Not(_) + | Instruction::Truncate { .. } + | Instruction::Constrain(_, _, _) + | Instruction::RangeCheck { .. 
} + | Instruction::Allocate + | Instruction::Load { .. } + | Instruction::Store { .. } + | Instruction::IfElse { .. } + | Instruction::IncrementRc { .. } + | Instruction::DecrementRc { .. } => false, + } + } + + /// Indicates if the instruction can be safely replaced with the results of another instruction with the same inputs. + /// If `deduplicate_with_predicate` is set, we assume we're deduplicating with the instruction + /// and its predicate, rather than just the instruction. Setting this means instructions that + /// rely on predicates can be deduplicated as well. + pub(crate) fn can_be_deduplicated( + &self, + dfg: &DataFlowGraph, + deduplicate_with_predicate: bool, + ) -> bool { + use Instruction::*; + + match self { + // These either have side-effects or interact with memory + EnableSideEffectsIf { .. } + | Allocate + | Load { .. } + | Store { .. } + | IncrementRc { .. } + | DecrementRc { .. } => false, + + Call { func, .. } => match dfg[func.raw()] { + Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), + _ => false, + }, + + // We can deduplicate these instructions if we know the predicate is also the same. + Constrain(..) | RangeCheck { .. } => deduplicate_with_predicate, + + // These can have different behavior depending on the EnableSideEffectsIf context. + // Replacing them with a similar instruction potentially enables replacing an instruction + // with one that was disabled. See + // https://github.com/noir-lang/noir/pull/4716#issuecomment-2047846328. + Binary(_) + | Cast(_, _) + | Not(_) + | Truncate { .. } + | IfElse { .. } + | ArrayGet { .. } + | ArraySet { .. } => { + deduplicate_with_predicate || !self.requires_acir_gen_predicate(dfg) + } + } + } + + pub(crate) fn can_eliminate_if_unused(&self, dfg: &DataFlowGraph) -> bool { + use Instruction::*; + match self { + Binary(binary) => { + if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) { + if let Some(rhs) = dfg.get_numeric_constant(binary.rhs.unresolved()) { + rhs != FieldElement::zero() + } else { + false + } + } else { + true + } + } + Cast(_, _) + | Not(_) + | Truncate { .. } + | Allocate + | Load { .. } + | ArrayGet { .. } + | IfElse { .. } + | ArraySet { .. } => true, + + Constrain(..) + | Store { .. } + | EnableSideEffectsIf { .. } + | IncrementRc { .. } + | DecrementRc { .. } + | RangeCheck { .. } => false, + + // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. + Call { func, .. } => match dfg[func.raw()] { + // Explicitly allows removal of unused ec operations, even if they can fail + Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)) + | Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::EmbeddedCurveAdd)) => true, + Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), + + // All foreign functions are treated as having side effects. + // This is because they can be used to pass information + // from the ACVM to the external world during execution. + Value::ForeignFunction(_) => false, + + // We must assume that functions contain a side effect as we cannot inspect more deeply. 
+ Value::Function(_) => false, + + _ => false, + }, + } + } } /// Given a chain of operations like: @@ -951,9 +962,9 @@ impl From for Box { } /// The possible return values for Instruction::return_types -pub(crate) enum InstructionResultType { +pub(crate) enum InstructionResultType { /// The result type of this instruction matches that of this operand - Operand(ValueId), + Operand(ValueId), /// The result type of this instruction is known to be this type - independent of its operands. Known(Type), @@ -1096,21 +1107,21 @@ impl TerminatorInstruction { /// Contains the result to Instruction::simplify, specifying how the instruction /// should be simplified. -pub(crate) enum SimplifyResult { +pub(crate) enum SimplifyResult { /// Replace this function's result with the given value - SimplifiedTo(ValueId), + SimplifiedTo(ValueId), /// Replace this function's results with the given values /// Used for when there are multiple return values from /// a function such as a tuple - SimplifiedToMultiple(Vec), + SimplifiedToMultiple(Vec>), /// Replace this function with an simpler but equivalent instruction. - SimplifiedToInstruction(Instruction), + SimplifiedToInstruction(Instruction), /// Replace this function with a set of simpler but equivalent instructions. /// This is currently only to be used for [`Instruction::Constrain`]. - SimplifiedToInstructionMultiple(Vec), + SimplifiedToInstructionMultiple(Vec>), /// Remove the instruction, it is unnecessary Remove, @@ -1119,8 +1130,8 @@ pub(crate) enum SimplifyResult { None, } -impl SimplifyResult { - pub(crate) fn instructions(self) -> Option> { +impl SimplifyResult { + pub(crate) fn instructions(self) -> Option>> { match self { SimplifyResult::SimplifiedToInstruction(instruction) => Some(vec![instruction]), SimplifyResult::SimplifiedToInstructionMultiple(instructions) => Some(instructions), diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs index 3ca827a5f29..9ab02f85c11 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs @@ -76,15 +76,17 @@ pub(crate) struct Binary { pub(crate) operator: BinaryOp, } -impl Binary { +impl Binary { /// The type of this Binary instruction's result - pub(crate) fn result_type(&self) -> InstructionResultType { + pub(crate) fn result_type(&self) -> InstructionResultType { match self.operator { BinaryOp::Eq | BinaryOp::Lt => InstructionResultType::Known(Type::bool()), _ => InstructionResultType::Operand(self.lhs), } } +} +impl Binary { /// Try to simplify this binary instruction, returning the new value if possible. 
pub(super) fn simplify(&self, dfg: &mut DataFlowGraph) -> SimplifyResult { let lhs = dfg.get_numeric_constant(self.lhs); diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 9dbd2c56993..6421243febe 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -14,9 +14,8 @@ use crate::ssa::{ basic_block::BasicBlockId, dfg::{CallStack, DataFlowGraph}, instruction::Intrinsic, - map::Id, types::Type, - value::{Value, ValueId}, + value::{RawValueId, Value, ValueId}, }, opt::flatten_cfg::value_merger::ValueMerger, }; @@ -40,7 +39,7 @@ pub(super) fn simplify_call( ctrl_typevars: Option>, call_stack: &CallStack, ) -> SimplifyResult { - let intrinsic = match &dfg[func] { + let intrinsic = match &dfg[func.raw()] { Value::Intrinsic(intrinsic) => *intrinsic, _ => return SimplifyResult::None, }; @@ -432,9 +431,9 @@ fn simplify_slice_push_back( .insert_instruction_and_results(set_last_slice_value_instr, block, None, call_stack.clone()) .first(); - let mut slice_sizes = HashMap::default(); - slice_sizes.insert(set_last_slice_value, slice_size / element_size); - slice_sizes.insert(new_slice, slice_size / element_size); + let mut slice_sizes: HashMap = HashMap::default(); + slice_sizes.insert(set_last_slice_value.raw(), slice_size / element_size); + slice_sizes.insert(new_slice.raw(), slice_size / element_size); let unknown = &mut HashMap::default(); let mut value_merger = @@ -636,7 +635,7 @@ fn constant_to_radix( } } -fn to_u8_vec(dfg: &DataFlowGraph, values: im::Vector>) -> Vec { +fn to_u8_vec(dfg: &DataFlowGraph, values: im::Vector) -> Vec { values .iter() .map(|id| { @@ -648,7 +647,7 @@ fn to_u8_vec(dfg: &DataFlowGraph, values: im::Vector>) -> Vec { .collect() } -fn array_is_constant(dfg: &DataFlowGraph, values: &im::Vector>) -> bool { +fn array_is_constant(dfg: &DataFlowGraph, values: &im::Vector) -> bool { values.iter().all(|value| dfg.get_numeric_constant(*value).is_some()) } diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index 3433c776916..95184266799 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -15,7 +15,7 @@ use super::{ #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) struct Unresolved; -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] pub(crate) struct Resolved; /// A resolved value ID is something we can directly compare. @@ -44,6 +44,11 @@ impl ValueId { pub fn raw(&self) -> RawValueId { Id::new(self.id.to_usize()) } + + /// Demote an ID into an unresolved one. + pub fn unresolved(self) -> ValueId { + ValueId::new(Id::new(self.id.to_usize())) + } } impl ValueId { @@ -94,10 +99,17 @@ impl Into> for ValueId { } } +/// Demote any ID into an unresolved one. 
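The conversion helpers in this file (`raw`, `resolved`, `unresolved` and the `Into` impls) carry the rest of the refactor: long-lived maps and sets are keyed on the raw index, while comparisons across resolution states go through `unresolved_eq`. Below is a small self-contained sketch of that idea; `RawId` and the `Id` struct are simplified stand-ins, not the actual `RawValueId`/`ValueId` definitions.

use std::collections::HashMap;
use std::marker::PhantomData;

type RawId = u32;

struct Unresolved;
struct Resolved;

// An id tagged with its resolution state. Only the raw index is ever hashed
// or compared, so maps can be keyed on `RawId` regardless of the tag.
struct Id<R> {
    index: RawId,
    _state: PhantomData<R>,
}

impl<R> Id<R> {
    fn new(index: RawId) -> Self {
        Id { index, _state: PhantomData }
    }

    // Strip the type-level tag.
    fn raw(&self) -> RawId {
        self.index
    }

    // Compare two ids while ignoring their resolution states.
    fn unresolved_eq<S>(&self, other: &Id<S>) -> bool {
        self.index == other.index
    }
}

fn main() {
    let unresolved: Id<Unresolved> = Id::new(7);
    let resolved: Id<Resolved> = Id::new(7);

    assert!(unresolved.unresolved_eq(&resolved));

    // Keying on the raw index sidesteps "which resolution state is the key in?".
    let mut sizes: HashMap<RawId, usize> = HashMap::new();
    sizes.insert(unresolved.raw(), 3);
    assert_eq!(sizes.get(&resolved.raw()), Some(&3));
    println!("ok");
}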
+impl Into> for &ValueId { + fn into(self) -> ValueId { + ValueId::new(self.raw()) + } +} + /// Wrap an `Id` into an equivalent `ValueId`` -impl Into> for Id> { - fn into(self) -> ValueId { - ValueId::new(self) +impl From>> for ValueId { + fn from(value: Id>) -> Self { + ValueId::new(value) } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs b/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs index 59917e8589b..e93792cef57 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs @@ -43,7 +43,7 @@ fn known_slice_lengths(func: &Function) -> HashMap { _ => continue, }; - match &func.dfg[*target_func] { + match &func.dfg[target_func.raw()] { Value::Intrinsic(Intrinsic::AsSlice) => { let array_typ = func.dfg.type_of_value(arguments[0]); if let Type::Array(_, length) = array_typ { diff --git a/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs b/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs index 348c78683a0..af0b9759574 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs @@ -68,9 +68,9 @@ fn check_instruction( let static_assert_id = function.dfg.import_intrinsic(Intrinsic::StaticAssert); match &function.dfg[instruction] { Instruction::Call { func, arguments } => { - if *func == assert_constant_id { + if assert_constant_id.unresolved_eq(func) { evaluate_assert_constant(function, instruction, arguments) - } else if *func == static_assert_id { + } else if static_assert_id.unresolved_eq(func) { evaluate_static_assert(function, instruction, arguments) } else { Ok(true) diff --git a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 3b86ded4a87..0d1e69034c2 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -31,7 +31,7 @@ use crate::ssa::{ function::Function, instruction::{Instruction, InstructionId}, types::Type, - value::{Value, ValueId}, + value::{RawValueId, Resolved, ResolvedValueId, Value, ValueId}, }, ssa_gen::Ssa, }; @@ -91,7 +91,8 @@ struct Context { /// HashMap from (Instruction, side_effects_enabled_var) to the results of the instruction. /// Stored as a two-level map to avoid cloning Instructions during the `.get` call. -type InstructionResultCache = HashMap, Vec>>; +type InstructionResultCache = + HashMap, HashMap, Vec>>; impl Context { fn fold_constants_in_block(&mut self, function: &mut Function, block: BasicBlockId) { @@ -107,8 +108,10 @@ impl Context { // We partition the maps of constrained values according to the side-effects flag at the point // at which the values are constrained. This prevents constraints which are only sometimes enforced // being used to modify the rest of the program. 
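The comment above is the reason the constraint-simplification state declared next is split per side-effects condition: a simplification learned while one predicate was active must not rewrite code guarded by another. The outer map is keyed on the active condition and the inner map rewrites a value id to a simpler one, with lookups chasing the chain to a fixed point. A self-contained sketch of that shape with plain integer ids; `chase` is a simplified stand-in for the pass's recursive `resolve_cache`, which also interleaves `dfg.resolve` at each step.

use std::collections::HashMap;

// Per-predicate substitution maps: the outer key is the active side-effects
// condition, the inner map rewrites one value id to a simpler one.
type Substitutions = HashMap<u32, HashMap<u32, u32>>;

// Follow a chain of recorded simplifications until no further entry applies.
fn chase(map: &HashMap<u32, u32>, mut id: u32) -> u32 {
    while let Some(next) = map.get(&id) {
        id = *next;
    }
    id
}

fn main() {
    let mut subs: Substitutions = HashMap::new();
    let predicate = 1;

    // Constraints observed while `predicate` was active: v9 == v5, v5 == v2.
    let map = subs.entry(predicate).or_default();
    map.insert(9, 5);
    map.insert(5, 2);

    // Under the same predicate, v9 simplifies all the way down to v2 ...
    assert_eq!(chase(&subs[&predicate], 9), 2);
    // ... but under a different predicate nothing has been learned about v9.
    assert_eq!(chase(subs.entry(0).or_default(), 9), 9);
    println!("ok");
}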
- let mut constraint_simplification_mappings: HashMap> = - HashMap::default(); + let mut constraint_simplification_mappings: HashMap< + RawValueId, + HashMap, + > = HashMap::default(); let mut side_effects_enabled_var = function.dfg.make_constant(FieldElement::one(), Type::bool()); @@ -131,11 +134,11 @@ impl Context { block: BasicBlockId, id: InstructionId, instruction_result_cache: &mut InstructionResultCache, - constraint_simplification_mappings: &mut HashMap>, + constraint_simplification_mappings: &mut HashMap>, side_effects_enabled_var: &mut ValueId, ) { let constraint_simplification_mapping = - constraint_simplification_mappings.entry(*side_effects_enabled_var).or_default(); + constraint_simplification_mappings.entry(side_effects_enabled_var.raw()).or_default(); let instruction = Self::resolve_instruction(id, dfg, constraint_simplification_mapping); let old_results = dfg.instruction_results(id).to_vec(); @@ -148,7 +151,13 @@ impl Context { } // Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs. - let new_results = Self::push_instruction(id, instruction.clone(), &old_results, block, dfg); + let new_results = Self::push_instruction( + id, + instruction.map_values(|v| v.unresolved()), + &old_results, + block, + dfg, + ); Self::replace_result_ids(dfg, &old_results, &new_results); @@ -164,7 +173,7 @@ impl Context { // If we just inserted an `Instruction::EnableSideEffectsIf`, we need to update `side_effects_enabled_var` // so that we use the correct set of constrained values in future. if let Instruction::EnableSideEffectsIf { condition } = instruction { - *side_effects_enabled_var = condition; + *side_effects_enabled_var = condition.into(); }; } @@ -172,8 +181,8 @@ impl Context { fn resolve_instruction( instruction_id: InstructionId, dfg: &DataFlowGraph, - constraint_simplification_mapping: &HashMap, - ) -> Instruction { + constraint_simplification_mapping: &HashMap, + ) -> Instruction { let instruction = dfg[instruction_id].clone(); // Alternate between resolving `value_id` in the `dfg` and checking to see if the resolved value @@ -183,11 +192,11 @@ impl Context { // constraints to the cache. fn resolve_cache( dfg: &DataFlowGraph, - cache: &HashMap, + cache: &HashMap, value_id: ValueId, - ) -> ValueId { + ) -> ResolvedValueId { let resolved_id = dfg.resolve(value_id); - match cache.get(&resolved_id) { + match cache.get(&resolved_id.raw()) { Some(cached_value) => resolve_cache(dfg, cache, *cached_value), None => resolved_id, } @@ -230,11 +239,11 @@ impl Context { fn cache_instruction( &self, - instruction: Instruction, + instruction: Instruction, instruction_results: Vec, dfg: &DataFlowGraph, instruction_result_cache: &mut InstructionResultCache, - constraint_simplification_mapping: &mut HashMap, + constraint_simplification_mapping: &mut HashMap, side_effects_enabled_var: ValueId, ) { if self.use_constraint_info { @@ -242,24 +251,24 @@ impl Context { // to map from the more complex to the simpler value. if let Instruction::Constrain(lhs, rhs, _) = instruction { // These `ValueId`s should be fully resolved now. - match (&dfg[lhs], &dfg[rhs]) { + match (&dfg[lhs.raw()], &dfg[rhs.raw()]) { // Ignore trivial constraints (Value::NumericConstant { .. }, Value::NumericConstant { .. }) => (), // Prefer replacing with constants where possible. (Value::NumericConstant { .. }, _) => { - constraint_simplification_mapping.insert(rhs, lhs); + constraint_simplification_mapping.insert(rhs.raw(), lhs.into()); } (_, Value::NumericConstant { .. 
}) => { - constraint_simplification_mapping.insert(lhs, rhs); + constraint_simplification_mapping.insert(lhs.raw(), rhs.into()); } // Otherwise prefer block parameters over instruction results. // This is as block parameters are more likely to be a single witness rather than a full expression. (Value::Param { .. }, Value::Instruction { .. }) => { - constraint_simplification_mapping.insert(rhs, lhs); + constraint_simplification_mapping.insert(rhs.raw(), lhs.into()); } (Value::Instruction { .. }, Value::Param { .. }) => { - constraint_simplification_mapping.insert(lhs, rhs); + constraint_simplification_mapping.insert(lhs.raw(), rhs.into()); } (_, _) => (), } @@ -276,7 +285,7 @@ impl Context { instruction_result_cache .entry(instruction) .or_default() - .insert(predicate, instruction_results); + .insert(predicate.map(|v| v.raw()), instruction_results); } } @@ -294,7 +303,7 @@ impl Context { fn get_cached<'a>( dfg: &DataFlowGraph, instruction_result_cache: &'a mut InstructionResultCache, - instruction: &Instruction, + instruction: &Instruction, side_effects_enabled_var: ValueId, ) -> Option<&'a Vec> { let results_for_instruction = instruction_result_cache.get(instruction); @@ -304,8 +313,10 @@ impl Context { return Some(results); } - let predicate = - instruction.requires_acir_gen_predicate(dfg).then_some(side_effects_enabled_var); + let predicate = instruction + .requires_acir_gen_predicate(dfg) + .then_some(side_effects_enabled_var) + .map(|v| v.raw()); results_for_instruction.and_then(|map| map.get(&predicate)) } @@ -321,7 +332,7 @@ mod test { instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, map::Id, types::Type, - value::{Value, ValueId}, + value::{RawValueId, Value, ValueId}, }, }; use acvm::{acir::AcirField, FieldElement}; @@ -428,11 +439,11 @@ mod test { let instructions = main.dfg[main.entry_block()].instructions(); assert_eq!(instructions.len(), 1); - let instruction = &main.dfg[instructions[0]]; assert_eq!( - instruction, - &Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + main.dfg[instructions[0]].resolved(), + Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + .resolved() ); } @@ -484,12 +495,18 @@ mod test { assert_eq!(instructions.len(), 2); assert_eq!( - &main.dfg[instructions[0]], - &Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + main.dfg[instructions[0]].resolved(), + Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + .resolved() ); assert_eq!( - &main.dfg[instructions[1]], - &Instruction::Truncate { value: ValueId::test_new(6), bit_size: 8, max_bit_size: 16 } + &main.dfg[instructions[1]].resolved(), + &Instruction::Truncate { + value: ValueId::from(RawValueId::test_new(6)), + bit_size: 8, + max_bit_size: 16 + } + .resolved() ); } @@ -522,13 +539,13 @@ mod test { assert_eq!(entry_block.instructions().len(), 1); let new_add_instr = entry_block.instructions().first().unwrap(); let new_add_instr_result = main.dfg.instruction_results(*new_add_instr)[0]; - assert_ne!(new_add_instr_result, v1); + assert_ne!(new_add_instr_result.resolved(), v1.resolved()); let return_value_id = match entry_block.unwrap_terminator() { TerminatorInstruction::Return { return_values, .. } => return_values[0], _ => unreachable!("Should have terminator instruction"), }; - let return_element = match &main.dfg[return_value_id] { + let return_element = match &main.dfg[return_value_id.raw()] { Value::Array { array, .. 
} => array[0], _ => unreachable!("Return type should be array"), }; @@ -578,7 +595,7 @@ mod test { assert_eq!(instructions.len(), 1); let instruction = &main.dfg[instructions[0]]; - assert_eq!(instruction, &Instruction::Cast(v0, Type::unsigned(32))); + assert_eq!(instruction.resolved(), Instruction::Cast(v0, Type::unsigned(32)).resolved()); } #[test] @@ -698,17 +715,26 @@ mod test { assert_eq!(instructions.len(), 6); assert_eq!( - main.dfg[instructions[0]], - Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Mul, rhs: v1 }) + main.dfg[instructions[0]].resolved(), + Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Mul, rhs: v1 }).resolved() + ); + assert_eq!(main.dfg[instructions[1]].resolved(), Instruction::Not(v2.resolved())); + assert_eq!( + main.dfg[instructions[2]].resolved(), + Instruction::Binary(Binary { lhs: v3, operator: BinaryOp::Mul, rhs: v4 }).resolved() ); - assert_eq!(main.dfg[instructions[1]], Instruction::Not(v2)); assert_eq!( - main.dfg[instructions[2]], - Instruction::Binary(Binary { lhs: v3, operator: BinaryOp::Mul, rhs: v4 }) + main.dfg[instructions[3]].resolved(), + Instruction::Constrain(v0, v_true, None).resolved() + ); + assert_eq!( + main.dfg[instructions[4]].resolved(), + Instruction::Constrain(v1, v_true, None).resolved() + ); + assert_eq!( + main.dfg[instructions[5]].resolved(), + Instruction::Constrain(v2, v_false, None).resolved() ); - assert_eq!(main.dfg[instructions[3]], Instruction::Constrain(v0, v_true, None)); - assert_eq!(main.dfg[instructions[4]], Instruction::Constrain(v1, v_true, None)); - assert_eq!(main.dfg[instructions[5]], Instruction::Constrain(v2, v_false, None)); } // Regression for #4600 @@ -871,7 +897,11 @@ mod test { let array1 = builder.array_constant(array_contents.clone().into(), typ.clone()); let array2 = builder.array_constant(array_contents.into(), typ.clone()); - assert_eq!(array1, array2, "arrays were assigned different value ids"); + assert_eq!( + array1.resolved(), + array2.resolved(), + "arrays were assigned different value ids" + ); let keccakf1600 = builder.import_intrinsic("keccakf1600").expect("keccakf1600 intrinsic should exist"); diff --git a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs index cfeb8751f25..60f7c3edc1a 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs @@ -16,7 +16,7 @@ use crate::ssa::{ function::{Function, FunctionId, Signature}, instruction::{BinaryOp, Instruction}, types::{NumericType, Type}, - value::{Value, ValueId}, + value::{RawValueId, Value, ValueId}, }, ssa_gen::Ssa, }; @@ -76,7 +76,7 @@ impl DefunctionalizationContext { /// Defunctionalize a single function fn defunctionalize(&mut self, func: &mut Function) { - let mut call_target_values = HashSet::new(); + let mut call_target_values: HashSet = HashSet::new(); for block_id in func.reachable_blocks() { let block = &func.dfg[block_id]; @@ -93,7 +93,7 @@ impl DefunctionalizationContext { _ => continue, }; - match func.dfg[target_func_id] { + match func.dfg[target_func_id.raw()] { // If the target is a function used as value Value::Param { .. } | Value::Instruction { .. 
} => { let mut arguments = arguments.clone(); @@ -112,12 +112,12 @@ impl DefunctionalizationContext { arguments.insert(0, target_func_id); } let func = apply_function_value_id; - call_target_values.insert(func); + call_target_values.insert(func.raw()); replacement_instruction = Some(Instruction::Call { func, arguments }); } Value::Function(..) => { - call_target_values.insert(target_func_id); + call_target_values.insert(target_func_id.raw()); } _ => {} } @@ -130,11 +130,11 @@ impl DefunctionalizationContext { // Change the type of all the values that are not call targets to NativeField let value_ids = vecmap(func.dfg.values_iter(), |(id, _)| id); for value_id in value_ids { - if let Type::Function = &func.dfg[value_id].get_type() { - match &func.dfg[value_id] { + if let Type::Function = &func.dfg[value_id.raw()].get_type() { + match &func.dfg[value_id.raw()] { // If the value is a static function, transform it to the function id Value::Function(id) => { - if !call_target_values.contains(&value_id) { + if !call_target_values.contains(value_id.as_ref()) { let new_value = func.dfg.make_constant(function_id_to_field(*id), Type::field()); func.dfg.set_value_from_id(value_id, new_value); @@ -193,7 +193,7 @@ fn find_functions_as_values(func: &Function) -> BTreeSet { let mut functions_as_values: BTreeSet = BTreeSet::new(); let mut process_value = |value_id: ValueId| { - if let Value::Function(id) = func.dfg[value_id] { + if let Value::Function(id) = func.dfg[value_id.raw()] { functions_as_values.insert(id); } }; @@ -229,7 +229,8 @@ fn find_dynamic_dispatches(func: &Function) -> BTreeSet { let instruction = &func.dfg[*instruction_id]; match instruction { Instruction::Call { func: target, arguments } => { - if let Value::Param { .. } | Value::Instruction { .. } = &func.dfg[*target] { + if let Value::Param { .. } | Value::Instruction { .. } = &func.dfg[target.raw()] + { let results = func.dfg.instruction_results(*instruction_id); dispatches.insert(Signature { params: vecmap(arguments, |param| func.dfg.type_of_value(*param)), diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs index ef208588718..6805ae2757f 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -2,7 +2,7 @@ use crate::ssa::ir::{ dfg::DataFlowGraph, instruction::{Instruction, Intrinsic}, types::Type, - value::{Value, ValueId}, + value::{RawValueId, Value, ValueId}, }; use acvm::{acir::AcirField, FieldElement}; @@ -21,42 +21,43 @@ impl<'a> SliceCapacityTracker<'a> { pub(crate) fn collect_slice_information( &self, instruction: &Instruction, - slice_sizes: &mut HashMap, + slice_sizes: &mut HashMap, results: &[ValueId], ) { match instruction { Instruction::ArrayGet { array, .. } => { let array_typ = self.dfg.type_of_value(*array); - let array_value = &self.dfg[*array]; + let array_value = &self.dfg[array.raw()]; if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() { // Initial insertion into the slice sizes map // Any other insertions should only occur if the value is already // a part of the map. - self.compute_slice_capacity(*array, slice_sizes); + self.compute_slice_capacity(array.raw(), slice_sizes); } } Instruction::ArraySet { array, value, .. 
} => { let array_typ = self.dfg.type_of_value(*array); - let array_value = &self.dfg[*array]; + let array = array.raw(); + let array_value = &self.dfg[array]; if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() { // Initial insertion into the slice sizes map // Any other insertions should only occur if the value is already // a part of the map. - self.compute_slice_capacity(*array, slice_sizes); + self.compute_slice_capacity(array, slice_sizes); } let value_typ = self.dfg.type_of_value(*value); // Compiler sanity check assert!(!value_typ.contains_slice_element(), "ICE: Nested slices are not allowed and should not have reached the flattening pass of SSA"); - if let Some(capacity) = slice_sizes.get(array) { - slice_sizes.insert(results[0], *capacity); + if let Some(capacity) = slice_sizes.get(&array) { + slice_sizes.insert(results[0].raw(), *capacity); } } Instruction::Call { func, arguments } => { - let func = &self.dfg[*func]; + let func = &self.dfg[func.raw()]; if let Value::Intrinsic(intrinsic) = func { let (argument_index, result_index) = match intrinsic { Intrinsic::SlicePushBack @@ -84,13 +85,15 @@ impl<'a> SliceCapacityTracker<'a> { for arg in &arguments[(argument_index + 1)..] { let element_typ = self.dfg.type_of_value(*arg); if element_typ.contains_slice_element() { - self.compute_slice_capacity(*arg, slice_sizes); + self.compute_slice_capacity(arg.raw(), slice_sizes); } } - if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { + if let Some(contents_capacity) = + slice_sizes.get(slice_contents.as_ref()) + { let new_capacity = *contents_capacity + 1; - slice_sizes.insert(result_slice, new_capacity); + slice_sizes.insert(result_slice.raw(), new_capacity); } } Intrinsic::SlicePopBack @@ -98,30 +101,33 @@ impl<'a> SliceCapacityTracker<'a> { | Intrinsic::SlicePopFront => { let slice_contents = arguments[argument_index]; - if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { + if let Some(contents_capacity) = + slice_sizes.get(slice_contents.as_ref()) + { // We use a saturating sub here as calling `pop_front` or `pop_back` // on a zero-length slice would otherwise underflow. 
let new_capacity = contents_capacity.saturating_sub(1); - slice_sizes.insert(result_slice, new_capacity); + slice_sizes.insert(result_slice.raw(), new_capacity); } } Intrinsic::ToBits(_) => { // Compiler sanity check assert!(matches!(self.dfg.type_of_value(result_slice), Type::Slice(_))); - slice_sizes.insert(result_slice, FieldElement::max_num_bits() as usize); + slice_sizes + .insert(result_slice.raw(), FieldElement::max_num_bits() as usize); } Intrinsic::ToRadix(_) => { // Compiler sanity check assert!(matches!(self.dfg.type_of_value(result_slice), Type::Slice(_))); slice_sizes - .insert(result_slice, FieldElement::max_num_bytes() as usize); + .insert(result_slice.raw(), FieldElement::max_num_bytes() as usize); } Intrinsic::AsSlice => { let array_size = self .dfg .try_get_array_length(arguments[argument_index]) .expect("ICE: Should be have an array length for AsSlice input"); - slice_sizes.insert(result_slice, array_size); + slice_sizes.insert(result_slice.raw(), array_size); } _ => {} } @@ -130,13 +136,13 @@ impl<'a> SliceCapacityTracker<'a> { Instruction::Store { address, value } => { let value_typ = self.dfg.type_of_value(*value); if value_typ.contains_slice_element() { - self.compute_slice_capacity(*value, slice_sizes); + self.compute_slice_capacity(value.raw(), slice_sizes); - let value_capacity = slice_sizes.get(value).unwrap_or_else(|| { + let value_capacity = slice_sizes.get(value.as_ref()).unwrap_or_else(|| { panic!("ICE: should have slice capacity set for value {value} being stored at {address}") }); - slice_sizes.insert(*address, *value_capacity); + slice_sizes.insert(address.raw(), *value_capacity); } } Instruction::Load { address } => { @@ -144,11 +150,11 @@ impl<'a> SliceCapacityTracker<'a> { if load_typ.contains_slice_element() { let result = results[0]; - let address_capacity = slice_sizes.get(address).unwrap_or_else(|| { + let address_capacity = slice_sizes.get(address.as_ref()).unwrap_or_else(|| { panic!("ICE: should have slice capacity set at address {address} being loaded into {result}") }); - slice_sizes.insert(result, *address_capacity); + slice_sizes.insert(result.raw(), *address_capacity); } } _ => {} @@ -158,8 +164,8 @@ impl<'a> SliceCapacityTracker<'a> { /// Computes the starting capacity of a slice which is still a `Value::Array` pub(crate) fn compute_slice_capacity( &self, - array_id: ValueId, - slice_sizes: &mut HashMap, + array_id: RawValueId, + slice_sizes: &mut HashMap, ) { if let Value::Array { array, typ } = &self.dfg[array_id] { // Compiler sanity check diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 75ee57dd4fa..34c9ad58b1c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -6,7 +6,7 @@ use crate::ssa::ir::{ dfg::{CallStack, DataFlowGraph, InsertInstructionResult}, instruction::{BinaryOp, Instruction}, types::Type, - value::{Value, ValueId}, + value::{RawValueId, Value, ValueId}, }; pub(crate) struct ValueMerger<'a> { @@ -17,9 +17,9 @@ pub(crate) struct ValueMerger<'a> { // Maps SSA array values with a slice type to their size. // This must be computed before merging values. 
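Both the capacity tracker above and the `ValueMerger` declared next keep slice sizes in maps keyed on the raw id, growing or shrinking an entry as slice intrinsics produce new slice values. A toy, self-contained version of that bookkeeping, with plain `u32` ids standing in for `RawValueId` and the same `saturating_sub` guard against pops on an already-empty slice:

use std::collections::HashMap;

// Toy tracker for slice capacities, keyed on a raw value id.
#[derive(Default)]
struct Capacities {
    sizes: HashMap<u32, usize>,
}

impl Capacities {
    fn set(&mut self, id: u32, capacity: usize) {
        self.sizes.insert(id, capacity);
    }

    // A push produces a new slice id with one more element of capacity.
    fn push(&mut self, old: u32, new: u32) {
        let old_capacity = self.sizes.get(&old).copied().unwrap_or(0);
        self.sizes.insert(new, old_capacity + 1);
    }

    // A pop produces a new slice id with one less element of capacity;
    // `saturating_sub` keeps a pop on an empty slice from underflowing.
    fn pop(&mut self, old: u32, new: u32) {
        let old_capacity = self.sizes.get(&old).copied().unwrap_or(0);
        self.sizes.insert(new, old_capacity.saturating_sub(1));
    }
}

fn main() {
    let mut caps = Capacities::default();
    caps.set(1, 0); // empty slice
    caps.push(1, 2); // capacity 1
    caps.pop(2, 3); // back to 0
    caps.pop(3, 4); // would underflow without saturating_sub
    assert_eq!(caps.sizes[&4], 0);
    println!("ok");
}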
- slice_sizes: &'a mut HashMap, + slice_sizes: &'a mut HashMap, - array_set_conditionals: &'a mut HashMap, + array_set_conditionals: &'a mut HashMap, call_stack: CallStack, } @@ -28,8 +28,8 @@ impl<'a> ValueMerger<'a> { pub(crate) fn new( dfg: &'a mut DataFlowGraph, block: BasicBlockId, - slice_sizes: &'a mut HashMap, - array_set_conditionals: &'a mut HashMap, + slice_sizes: &'a mut HashMap, + array_set_conditionals: &'a mut HashMap, current_condition: Option, call_stack: CallStack, ) -> Self { @@ -62,9 +62,12 @@ impl<'a> ValueMerger<'a> { let else_value = self.dfg.resolve(else_value); if then_value == else_value { - return then_value; + return then_value.into(); } + let then_value = then_value.into(); + let else_value = else_value.into(); + match self.dfg.type_of_value(then_value) { Type::Numeric(_) => Self::merge_numeric_values( self.dfg, @@ -102,7 +105,7 @@ impl<'a> ValueMerger<'a> { "Expected values merged to be of the same type but found {then_type} and {else_type}" ); - if then_value == else_value { + if then_value.unresolved_eq(&else_value) { return then_value; } @@ -220,14 +223,14 @@ impl<'a> ValueMerger<'a> { _ => panic!("Expected slice type"), }; - let then_len = self.slice_sizes.get(&then_value_id).copied().unwrap_or_else(|| { + let then_len = self.slice_sizes.get(&then_value_id.raw()).copied().unwrap_or_else(|| { let (slice, typ) = self.dfg.get_array_constant(then_value_id).unwrap_or_else(|| { panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size"); }); slice.len() / typ.element_types().len() }); - let else_len = self.slice_sizes.get(&else_value_id).copied().unwrap_or_else(|| { + let else_len = self.slice_sizes.get(&else_value_id.raw()).copied().unwrap_or_else(|| { let (slice, typ) = self.dfg.get_array_constant(else_value_id).unwrap_or_else(|| { panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); }); @@ -336,26 +339,28 @@ impl<'a> ValueMerger<'a> { // ancestor if it exists, alone with the path to it from each starting node. // This path will be the indices that were changed to create each result array. 
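The loop that follows implements the search described in the comment above: walk backwards from both `array_set` results until the two walks meet, remembering which indices each side changed, because only those positions have to be merged element by element. A simplified, self-contained sketch of that search with plain integer ids; it omits the truncation refinements the real loop applies when one walk lands inside the other's recorded path, and simply returns `None` when no common ancestor is found within the iteration budget.

use std::collections::HashMap;

// Each array id maps to the array it was built from and the index it changed,
// i.e. the result of an `array_set`. Root arrays have no parent.
type Parents = HashMap<u32, (u32, usize)>;

// Walk backwards from both result arrays, recording which indices each side
// changed, until the walks meet. Returns the set of differing indices, or
// None if no common ancestor was found within `max_iters` steps.
fn changed_indices(
    parents: &Parents,
    then_id: u32,
    else_id: u32,
    max_iters: usize,
) -> Option<Vec<usize>> {
    let (mut cur_then, mut cur_else) = (then_id, else_id);
    let (mut seen_then, mut seen_else) = (Vec::new(), Vec::new());

    for _ in 0..max_iters {
        if cur_then == else_id || cur_else == then_id || cur_then == cur_else {
            let mut indices: Vec<usize> =
                seen_then.iter().chain(&seen_else).map(|&(_, i)| i).collect();
            indices.sort_unstable();
            indices.dedup();
            return Some(indices);
        }
        if let Some(&(parent, index)) = parents.get(&cur_then) {
            seen_then.push((cur_then, index));
            cur_then = parent;
        }
        if let Some(&(parent, index)) = parents.get(&cur_else) {
            seen_else.push((cur_else, index));
            cur_else = parent;
        }
    }
    None
}

fn main() {
    // v10 is the original array; the "then" branch sets indices 1 then 3,
    // the "else" branch sets index 2.
    let parents: Parents =
        [(11, (10, 1)), (12, (11, 3)), (20, (10, 2))].into_iter().collect();
    assert_eq!(changed_indices(&parents, 12, 20, 10), Some(vec![1, 2, 3]));
    println!("ok");
}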
for _ in 0..max_iters { - if current_then == else_value { + if current_then.unresolved_eq(&else_value) { seen_else.clear(); found = true; break; } - if current_else == then_value { + if current_else.unresolved_eq(&then_value) { seen_then.clear(); found = true; break; } - if let Some(index) = seen_then.iter().position(|(elem, _, _, _)| *elem == current_else) + if let Some(index) = + seen_then.iter().position(|(elem, _, _, _)| current_else.unresolved_eq(elem)) { seen_else.truncate(index); found = true; break; } - if let Some(index) = seen_else.iter().position(|(elem, _, _, _)| *elem == current_then) + if let Some(index) = + seen_else.iter().position(|(elem, _, _, _)| current_then.unresolved_eq(elem)) { seen_then.truncate(index); found = true; @@ -366,10 +371,14 @@ impl<'a> ValueMerger<'a> { current_else = self.find_previous_array_set(current_else, &mut seen_else); } - let changed_indices: FxHashSet<_> = seen_then + let changed_indices: FxHashSet<(RawValueId, Type, RawValueId)> = seen_then .into_iter() - .map(|(_, index, typ, condition)| (index, typ, condition)) - .chain(seen_else.into_iter().map(|(_, index, typ, condition)| (index, typ, condition))) + .map(|(_, index, typ, condition)| (index.raw(), typ, condition.raw())) + .chain( + seen_else + .into_iter() + .map(|(_, index, typ, condition)| (index.raw(), typ, condition.raw())), + ) .collect(); if !found || changed_indices.len() >= array_length { @@ -381,11 +390,11 @@ impl<'a> ValueMerger<'a> { for (index, element_type, condition) in changed_indices { let typevars = Some(vec![element_type.clone()]); - let instruction = Instruction::EnableSideEffectsIf { condition }; + let instruction = Instruction::EnableSideEffectsIf { condition: condition.into() }; self.insert_instruction(instruction); let mut get_element = |array, typevars| { - let get = Instruction::ArrayGet { array, index }; + let get = Instruction::ArrayGet { array, index: index.into() }; self.dfg .insert_instruction_and_results( get, @@ -402,7 +411,8 @@ impl<'a> ValueMerger<'a> { let value = self.merge_values(then_condition, else_condition, then_element, else_element); - array = self.insert_array_set(array, index, value, Some(condition)).first(); + array = + self.insert_array_set(array, index.into(), value, Some(condition.into())).first(); } let instruction = Instruction::EnableSideEffectsIf { condition: current_condition }; @@ -444,7 +454,7 @@ impl<'a> ValueMerger<'a> { }; let result_value = result[result_index]; - self.array_set_conditionals.insert(result_value, condition); + self.array_set_conditionals.insert(result_value.raw(), condition); } result @@ -455,11 +465,11 @@ impl<'a> ValueMerger<'a> { result: ValueId, changed_indices: &mut Vec<(ValueId, ValueId, Type, ValueId)>, ) -> ValueId { - match &self.dfg[result] { + match &self.dfg[result.raw()] { Value::Instruction { instruction, .. } => match &self.dfg[*instruction] { Instruction::ArraySet { array, index, value, .. 
} => { let condition = - *self.array_set_conditionals.get(&result).unwrap_or_else(|| { + *self.array_set_conditionals.get(&result.raw()).unwrap_or_else(|| { panic!( "Expected to have conditional for array set {result}\n{:?}", self.array_set_conditionals diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index 2eb0f2eda0f..db7d8b16a80 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -14,7 +14,7 @@ use crate::ssa::{ dfg::{CallStack, InsertInstructionResult}, function::{Function, FunctionId, RuntimeType}, instruction::{Instruction, InstructionId, TerminatorInstruction}, - value::{Value, ValueId}, + value::{RawValueId, Value, ValueId}, }, ssa_gen::Ssa, }; @@ -115,7 +115,7 @@ struct PerFunctionContext<'function> { /// Maps ValueIds in the function being inlined to the new ValueIds to use in the function /// being inlined into. This mapping also contains the mapping from parameter values to /// argument values. - values: HashMap, + values: HashMap, /// Maps blocks in the source function to blocks in the function being inlined into, where /// each mapping is from the start of a source block to an inlined block in which the @@ -139,7 +139,7 @@ fn called_functions_vec(func: &Function) -> Vec { continue; }; - if let Value::Function(function_id) = func.dfg[*called_value_id] { + if let Value::Function(function_id) = func.dfg[called_value_id.raw()] { called_function_ids.push(function_id); } } @@ -387,7 +387,7 @@ impl InlineContext { for parameter in original_parameters { let typ = context.source_function.dfg.type_of_value(*parameter); let new_parameter = context.context.builder.add_block_parameter(entry_block, typ); - context.values.insert(*parameter, new_parameter); + context.values.insert(parameter.raw(), new_parameter); } context.blocks.insert(context.source_function.entry_block(), entry_block); @@ -425,7 +425,8 @@ impl InlineContext { let parameters = source_function.parameters(); assert_eq!(parameters.len(), arguments.len()); - context.values = parameters.iter().copied().zip(arguments.iter().copied()).collect(); + context.values = + parameters.iter().map(|p| p.raw()).zip(arguments.iter().copied()).collect(); let current_block = context.context.builder.current_block(); context.blocks.insert(source_function.entry_block(), current_block); @@ -457,11 +458,11 @@ impl<'function> PerFunctionContext<'function> { /// and blocks respectively. If these assertions trigger it means a value is being used before /// the instruction or block that defines the value is inserted. fn translate_value(&mut self, id: ValueId) -> ValueId { - if let Some(value) = self.values.get(&id) { + if let Some(value) = self.values.get(&id.raw()) { return *value; } - let new_value = match &self.source_function.dfg[id] { + let new_value = match &self.source_function.dfg[id.raw()] { value @ Value::Instruction { .. } => { unreachable!("All Value::Instructions should already be known during inlining after creating the original inlined instruction. 
Unknown value {id} = {value:?}") } @@ -482,7 +483,7 @@ impl<'function> PerFunctionContext<'function> { } }; - self.values.insert(id, new_value); + self.values.insert(id.raw(), new_value); new_value } @@ -512,7 +513,7 @@ impl<'function> PerFunctionContext<'function> { for parameter in original_parameters { let typ = self.source_function.dfg.type_of_value(*parameter); let new_parameter = self.context.builder.add_block_parameter(new_block, typ); - self.values.insert(*parameter, new_parameter); + self.values.insert(parameter.raw(), new_parameter); } self.blocks.insert(source_block, new_block); @@ -685,7 +686,7 @@ impl<'function> PerFunctionContext<'function> { call_stack.append(self.source_function.dfg.get_call_stack(id)); let results = self.source_function.dfg.instruction_results(id); - let results = vecmap(results, |id| self.source_function.dfg.resolve(*id)); + let results = vecmap(results, |id| self.source_function.dfg.resolve(*id).into()); let ctrl_typevars = instruction .requires_ctrl_typevars() @@ -700,7 +701,7 @@ impl<'function> PerFunctionContext<'function> { /// Modify the values HashMap to remember the mapping between an instruction result's previous /// ValueId (from the source_function) and its new ValueId in the destination function. fn insert_new_instruction_results( - values: &mut HashMap, + values: &mut HashMap, old_results: &[ValueId], new_results: InsertInstructionResult, ) { @@ -708,16 +709,16 @@ impl<'function> PerFunctionContext<'function> { match new_results { InsertInstructionResult::SimplifiedTo(new_result) => { - values.insert(old_results[0], new_result); + values.insert(old_results[0].raw(), new_result); } InsertInstructionResult::SimplifiedToMultiple(new_results) => { for (old_result, new_result) in old_results.iter().zip(new_results) { - values.insert(*old_result, new_result); + values.insert(old_result.raw(), new_result); } } InsertInstructionResult::Results(_, new_results) => { for (old_result, new_result) in old_results.iter().zip(new_results) { - values.insert(*old_result, *new_result); + values.insert(old_result.raw(), *new_result); } } InsertInstructionResult::InstructionRemoved => (), diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/alias_set.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/alias_set.rs index 5477025e429..23dbf5f743d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/alias_set.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/alias_set.rs @@ -1,6 +1,6 @@ use std::collections::BTreeSet; -use crate::ssa::ir::value::ValueId; +use crate::ssa::ir::value::{RawValueId, ValueId}; /// A set of possible aliases. Each ValueId in this set represents one possible value the reference /// holding this AliasSet may be aliased to. This struct wrapper is provided so that when we take @@ -10,7 +10,7 @@ use crate::ssa::ir::value::ValueId; /// "unknown which aliases this may refer to" - `None`. #[derive(Debug, Default, Clone)] pub(super) struct AliasSet { - aliases: Option>, + aliases: Option>, } impl AliasSet { @@ -20,7 +20,7 @@ impl AliasSet { pub(super) fn known(value: ValueId) -> AliasSet { let mut aliases = BTreeSet::new(); - aliases.insert(value); + aliases.insert(value.raw()); Self { aliases: Some(aliases) } } @@ -41,6 +41,7 @@ impl AliasSet { self.aliases .as_ref() .and_then(|aliases| (aliases.len() == 1).then(|| *aliases.first().unwrap())) + .map(|a| a.into()) } /// Unify this alias set with another. The result of this set is empty if either set is empty. 
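The `AliasSet` changes above keep its original shape: `None` still encodes "unknown, could alias anything", and only the stored element type moves to the raw id. A self-contained toy version of the structure for readers unfamiliar with the pass; the `unify` rule shown here is a plausible simplification (its real body sits outside this hunk), and plain `u32` ids stand in for `RawValueId`.

use std::collections::BTreeSet;

// Toy alias set: `None` means "unknown, could alias anything".
#[derive(Debug, Default, Clone)]
struct AliasSet {
    aliases: Option<BTreeSet<u32>>,
}

impl AliasSet {
    fn unknown() -> Self {
        AliasSet { aliases: None }
    }

    fn known(value: u32) -> Self {
        let mut aliases = BTreeSet::new();
        aliases.insert(value);
        AliasSet { aliases: Some(aliases) }
    }

    // If there is exactly one known alias, return it.
    fn single_alias(&self) -> Option<u32> {
        self.aliases
            .as_ref()
            .and_then(|aliases| (aliases.len() == 1).then(|| *aliases.first().unwrap()))
    }

    // One plausible unification rule: union the known aliases, and let
    // "unknown" on either side poison the result.
    fn unify(&mut self, other: &Self) {
        self.aliases = match (self.aliases.take(), &other.aliases) {
            (Some(mut mine), Some(theirs)) => {
                mine.extend(theirs.iter().copied());
                Some(mine)
            }
            _ => None,
        };
    }
}

fn main() {
    let mut set = AliasSet::known(1);
    assert_eq!(set.single_alias(), Some(1));

    set.unify(&AliasSet::known(2));
    assert_eq!(set.single_alias(), None); // two candidates now

    set.unify(&AliasSet::unknown());
    assert!(set.aliases.is_none()); // unknown is absorbing
}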
@@ -56,20 +57,20 @@ impl AliasSet { /// Inserts a new alias into this set if it is not unknown pub(super) fn insert(&mut self, new_alias: ValueId) { if let Some(aliases) = &mut self.aliases { - aliases.insert(new_alias); + aliases.insert(new_alias.raw()); } } /// Returns `Some(true)` if `f` returns true for any known alias in this set. /// If this alias set is unknown, None is returned. - pub(super) fn any(&self, f: impl FnMut(ValueId) -> bool) -> Option { - self.aliases.as_ref().map(|aliases| aliases.iter().copied().any(f)) + pub(super) fn any(&self, mut f: impl FnMut(ValueId) -> bool) -> Option { + self.aliases.as_ref().map(|aliases| aliases.iter().copied().any(|v| f(v.into()))) } pub(super) fn for_each(&self, mut f: impl FnMut(ValueId)) { if let Some(aliases) = &self.aliases { for alias in aliases { - f(*alias); + f((*alias).into()); } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs index 532785d2928..f15d2c737df 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use crate::ssa::ir::{ function::Function, instruction::{Instruction, InstructionId}, - value::ValueId, + value::{Resolved, ResolvedValueId, ValueId}, }; use super::alias_set::AliasSet; @@ -19,38 +19,38 @@ pub(super) struct Block { /// Maps a ValueId to the Expression it represents. /// Multiple ValueIds can map to the same Expression, e.g. /// dereferences to the same allocation. - pub(super) expressions: im::OrdMap, + pub(super) expressions: im::OrdMap, /// Each expression is tracked as to how many aliases it /// may have. If there is only 1, we can attempt to optimize /// out any known loads to that alias. Note that "alias" here /// includes the original reference as well. - pub(super) aliases: im::OrdMap, + pub(super) aliases: im::OrdMap, AliasSet>, /// Each allocate instruction result (and some reference block parameters) /// will map to a Reference value which tracks whether the last value stored /// to the reference is known. - pub(super) references: im::OrdMap, + pub(super) references: im::OrdMap, /// The last instance of a `Store` instruction to each address in this block - pub(super) last_stores: im::OrdMap, + pub(super) last_stores: im::OrdMap, } /// An `Expression` here is used to represent a canonical key /// into the aliases map since otherwise two dereferences of the /// same address will be given different ValueIds. #[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)] -pub(super) enum Expression { - Dereference(Box), - ArrayElement(Box), - Other(ValueId), +pub(super) enum Expression { + Dereference(Box>), + ArrayElement(Box>), + Other(ValueId), } /// Every reference's value is either Known and can be optimized away, or Unknown. 
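The `Expression` key above and the `ReferenceValue` enum that follows cooperate: distinct value ids that denote the same dereference canonicalize to a single `Expression`, and once a reference's value is `Known` its later loads can be folded away. A self-contained sketch of that lookup, with the block's separate `expressions`/`aliases`/`references` maps collapsed into one and plain integers standing in for `ResolvedValueId`:

use std::collections::BTreeMap;

// A canonical key for "what does this value refer to", so that two different
// value ids produced for the same dereference still land on the same entry.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum Expression {
    Dereference(Box<Expression>),
    ArrayElement(Box<Expression>),
    Other(u32),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ReferenceValue {
    Unknown,
    Known(u32),
}

fn main() {
    // v3 and v7 are two separate loads through the same reference v1: distinct
    // ids, but both canonicalize to `Dereference(Other(1))`.
    let key_for_v3 = Expression::Dereference(Box::new(Expression::Other(1)));
    let key_for_v7 = Expression::Dereference(Box::new(Expression::Other(1)));
    assert_eq!(key_for_v3, key_for_v7);

    let mut references: BTreeMap<Expression, ReferenceValue> = BTreeMap::new();
    references.insert(key_for_v3, ReferenceValue::Known(42));

    // A later lookup through the second id's expression finds the known value,
    // which is what lets the pass fold the repeated load away.
    assert_eq!(references.get(&key_for_v7), Some(&ReferenceValue::Known(42)));
    println!("ok");
}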
#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub(super) enum ReferenceValue { Unknown, - Known(ValueId), + Known(ResolvedValueId), } impl ReferenceValue { @@ -65,7 +65,7 @@ impl ReferenceValue { impl Block { /// If the given reference id points to a known value, return the value - pub(super) fn get_known_value(&self, address: ValueId) -> Option { + pub(super) fn get_known_value(&self, address: ResolvedValueId) -> Option { if let Some(expression) = self.expressions.get(&address) { if let Some(aliases) = self.aliases.get(expression) { // We could allow multiple aliases if we check that the reference @@ -81,15 +81,15 @@ impl Block { } /// If the given address is known, set its value to `ReferenceValue::Known(value)`. - pub(super) fn set_known_value(&mut self, address: ValueId, value: ValueId) { + pub(super) fn set_known_value(&mut self, address: ResolvedValueId, value: ResolvedValueId) { self.set_value(address, ReferenceValue::Known(value)); } - pub(super) fn set_unknown(&mut self, address: ValueId) { + pub(super) fn set_unknown(&mut self, address: ResolvedValueId) { self.set_value(address, ReferenceValue::Unknown); } - fn set_value(&mut self, address: ValueId, value: ReferenceValue) { + fn set_value(&mut self, address: ResolvedValueId, value: ReferenceValue) { let expression = self.expressions.entry(address).or_insert(Expression::Other(address)); let aliases = self.aliases.entry(expression.clone()).or_default(); diff --git a/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs b/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs index 6914bf87c5d..ae50f5902c2 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs @@ -6,7 +6,7 @@ use crate::ssa::{ function::{Function, FunctionId}, map::SparseMap, post_order::PostOrder, - value::{Value, ValueId}, + value::{RawValueId, Value, ValueId}, }, ssa_gen::Ssa, }; @@ -52,7 +52,7 @@ struct IdMaps { // Maps old value id -> new value id // Cleared in between each function. - values: HashMap, + values: HashMap, } impl Context { @@ -104,7 +104,7 @@ impl Context { for (old_result, new_result) in old_results.iter().zip(new_results.results().iter()) { let old_result = old_function.dfg.resolve(*old_result); - self.new_ids.values.insert(old_result, *new_result); + self.new_ids.values.insert(old_result.raw(), *new_result); } } @@ -145,7 +145,7 @@ impl IdMaps { let old_parameter = old_function.dfg.resolve(old_parameter); let typ = old_function.dfg.type_of_value(old_parameter); let new_parameter = new_function.dfg.add_block_parameter(new_id, typ); - self.values.insert(old_parameter, new_parameter); + self.values.insert(old_parameter.raw(), new_parameter); } } } @@ -156,7 +156,7 @@ impl IdMaps { old_function: &Function, old_value: ValueId, ) -> ValueId { - let old_value = old_function.dfg.resolve(old_value); + let old_value = old_function.dfg.resolve(old_value).raw(); match &old_function.dfg[old_value] { value @ Value::Instruction { instruction, .. } => { *self.values.get(&old_value).unwrap_or_else(|| { diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 012f6e6b27d..3d79788b401 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -82,7 +82,7 @@ impl Context { // continue with the new `Instruction::EnableSideEffectsIf`. 
if let Instruction::EnableSideEffectsIf { condition } = instruction { // If this instruction isn't changing the currently active condition then we can ignore it. - if active_condition == *condition { + if active_condition.unresolved_eq(condition) { continue; } @@ -155,7 +155,7 @@ impl Context { | Load { .. } => true, // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. - Call { func, .. } => match dfg[*func] { + Call { func, .. } => match dfg[func.raw()] { Value::Intrinsic(intrinsic) => match intrinsic { Intrinsic::SlicePushBack | Intrinsic::SlicePushFront @@ -270,8 +270,11 @@ mod test { let instructions = main.dfg[main.entry_block()].instructions(); assert_eq!(instructions.len(), 4); + let expected = Instruction::binary(BinaryOp::Mul, v0.resolved(), two.resolved()); for instruction in instructions.iter().take(4) { - assert_eq!(&main.dfg[*instruction], &Instruction::binary(BinaryOp::Mul, v0, two)); + let instruction = main.dfg[*instruction]; + let instruction = instruction.map_values(|v| v.resolved()); + assert_eq!(instruction, expected); } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs index c387e0b6234..0f964d8414c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -4,7 +4,7 @@ use acvm::{acir::AcirField, FieldElement}; use fxhash::FxHashMap as HashMap; use crate::ssa::ir::function::RuntimeType; -use crate::ssa::ir::value::ValueId; +use crate::ssa::ir::value::{RawValueId, ValueId}; use crate::ssa::{ ir::{ dfg::DataFlowGraph, @@ -48,14 +48,14 @@ impl Function { #[derive(Default)] struct Context { - slice_sizes: HashMap, + slice_sizes: HashMap, // Maps array_set result -> element that was overwritten by that instruction. // Used to undo array_sets while merging values prev_array_set_elem_values: HashMap, // Maps array_set result -> enable_side_effects_if value which was active during it. - array_set_conditionals: HashMap, + array_set_conditionals: HashMap, } impl Context { @@ -102,26 +102,26 @@ impl Context { // }; function.dfg.set_value_from_id(result, value); - self.array_set_conditionals.insert(result, current_conditional); + self.array_set_conditionals.insert(result.raw(), current_conditional); } Instruction::Call { func, arguments } => { - if let Value::Intrinsic(intrinsic) = function.dfg[*func] { + if let Value::Intrinsic(intrinsic) = function.dfg[func.raw()] { let results = function.dfg.instruction_results(instruction); match slice_capacity_change(&function.dfg, intrinsic, arguments, results) { SizeChange::None => (), SizeChange::SetTo(value, new_capacity) => { - self.slice_sizes.insert(value, new_capacity); + self.slice_sizes.insert(value.raw(), new_capacity); } SizeChange::Inc { old, new } => { let old_capacity = self.get_or_find_capacity(&function.dfg, old); - self.slice_sizes.insert(new, old_capacity + 1); + self.slice_sizes.insert(new.raw(), old_capacity + 1); } SizeChange::Dec { old, new } => { let old_capacity = self.get_or_find_capacity(&function.dfg, old); // We use a saturating sub here as calling `pop_front` or `pop_back` on a zero-length slice // would otherwise underflow. 
- self.slice_sizes.insert(new, old_capacity.saturating_sub(1)); + self.slice_sizes.insert(new.raw(), old_capacity.saturating_sub(1)); } } } @@ -131,10 +131,10 @@ impl Context { let results = function.dfg.instruction_results(instruction); let result = if results.len() == 2 { results[1] } else { results[0] }; - self.array_set_conditionals.insert(result, current_conditional); + self.array_set_conditionals.insert(result.raw(), current_conditional); let old_capacity = self.get_or_find_capacity(&function.dfg, *array); - self.slice_sizes.insert(result, old_capacity); + self.slice_sizes.insert(result.raw(), old_capacity); function.dfg[block].instructions_mut().push(instruction); } Instruction::EnableSideEffectsIf { condition } => { @@ -149,7 +149,7 @@ impl Context { } fn get_or_find_capacity(&mut self, dfg: &DataFlowGraph, value: ValueId) -> usize { - match self.slice_sizes.entry(value) { + match self.slice_sizes.entry(value.raw()) { Entry::Occupied(entry) => return *entry.get(), Entry::Vacant(entry) => { if let Some((array, typ)) = dfg.get_array_constant(value) { @@ -163,7 +163,7 @@ impl Context { } } - let dbg_value = &dfg[value]; + let dbg_value = &dfg[value.raw()]; unreachable!("No size for slice {value} = {dbg_value:?}") } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs b/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs index 3d40c88d704..ced5a428595 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs @@ -34,7 +34,7 @@ impl Function { _ => continue, }; - if let Value::Intrinsic(Intrinsic::IsUnconstrained) = &self.dfg[target_func] { + if let Value::Intrinsic(Intrinsic::IsUnconstrained) = &self.dfg[target_func.raw()] { is_unconstrained_calls.insert(instruction_id); } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/runtime_separation.rs b/compiler/noirc_evaluator/src/ssa/opt/runtime_separation.rs index 5628e12b9ae..d00216830ef 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/runtime_separation.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/runtime_separation.rs @@ -6,7 +6,7 @@ use crate::ssa::{ ir::{ function::{Function, FunctionId, RuntimeType}, instruction::Instruction, - value::{Value, ValueId}, + value::{RawValueId, Value}, }, ssa_gen::Ssa, }; @@ -117,7 +117,7 @@ impl RuntimeSeparatorContext { }; if let Some(mapped_func_id) = self.mapped_functions.get(called_func_id) { let mapped_value_id = func.dfg.import_function(*mapped_func_id); - func.dfg.set_value_from_id(*called_func_value_id, mapped_value_id); + func.dfg.set_value_from_id(called_func_value_id.into(), mapped_value_id); } } } @@ -126,16 +126,16 @@ impl RuntimeSeparatorContext { } // We only consider direct calls to functions since functions as values should have been resolved -fn called_functions_values(func: &Function) -> BTreeSet { +fn called_functions_values(func: &Function) -> BTreeSet { let mut called_function_ids = BTreeSet::default(); for block_id in func.reachable_blocks() { for instruction_id in func.dfg[block_id].instructions() { let Instruction::Call { func: called_value_id, .. 
} = &func.dfg[*instruction_id] else { continue; }; - - if let Value::Function(_) = func.dfg[*called_value_id] { - called_function_ids.insert(*called_value_id); + let called_value_id = called_value_id.raw(); + if let Value::Function(_) = func.dfg[called_value_id] { + called_function_ids.insert(called_value_id); } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 661109c1786..d23118b1af9 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -431,7 +431,7 @@ impl<'f> LoopIteration<'f> { ) -> Vec { let condition = self.inserter.resolve(condition); - match self.dfg().get_numeric_constant(condition) { + match self.dfg().get_numeric_constant(condition.into()) { Some(constant) => { let destination = if constant.is_zero() { else_destination } else { then_destination }; @@ -490,7 +490,7 @@ impl<'f> LoopIteration<'f> { let mut terminator = self.dfg()[self.source_block] .unwrap_terminator() .clone() - .map_values(|value| self.inserter.resolve(value)); + .map_values(|value| self.inserter.resolve(value).into()); terminator.mutate_blocks(|block| self.get_or_insert_block(block)); self.inserter.function.dfg.set_block_terminator(self.insert_block, terminator); diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 0c6041029da..20d636f970f 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -930,7 +930,8 @@ impl<'a> FunctionContext<'a> { let mut dropped_parameters = self.builder.current_function.dfg.block_parameters(scope).to_vec(); - dropped_parameters.retain(|parameter| !terminator_args.contains(parameter)); + dropped_parameters + .retain(|parameter| !terminator_args.iter().any(|arg| arg.unresolved_eq(parameter))); for parameter in dropped_parameters { self.builder.decrement_array_reference_count(parameter); From ebed1dea0a4ed58fcb6fb4d3fa7d95b9b4de66a9 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Fri, 8 Nov 2024 21:36:33 +0000 Subject: [PATCH 03/19] Index by ValueId --- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 23 +++++----- .../check_for_underconstrained_values.rs | 44 ++++++++++--------- .../src/ssa/function_builder/data_bus.rs | 10 ++--- .../src/ssa/function_builder/mod.rs | 6 +-- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 8 ++-- .../src/ssa/ir/function_inserter.rs | 2 +- .../noirc_evaluator/src/ssa/ir/instruction.rs | 12 ++--- .../src/ssa/ir/instruction/call.rs | 2 +- .../noirc_evaluator/src/ssa/ir/printer.rs | 2 +- compiler/noirc_evaluator/src/ssa/ir/value.rs | 5 +++ .../src/ssa/opt/as_slice_length.rs | 2 +- .../src/ssa/opt/constant_folding.rs | 4 +- .../src/ssa/opt/defunctionalize.rs | 11 +++-- .../ssa/opt/flatten_cfg/capacity_tracker.rs | 21 +++++---- .../src/ssa/opt/flatten_cfg/value_merger.rs | 2 +- .../noirc_evaluator/src/ssa/opt/inlining.rs | 4 +- .../src/ssa/opt/normalize_value_ids.rs | 10 ++--- .../src/ssa/opt/remove_enable_side_effects.rs | 2 +- .../src/ssa/opt/remove_if_else.rs | 4 +- .../src/ssa/opt/resolve_is_unconstrained.rs | 2 +- .../src/ssa/opt/runtime_separation.rs | 14 +++--- 21 files changed, 101 insertions(+), 89 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index a42a6223581..3a4b9981339 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ 
-19,7 +19,6 @@ use super::{ instruction::{ Binary, BinaryOp, Instruction, InstructionId, Intrinsic, TerminatorInstruction, }, - map::Id, types::{NumericType, Type}, value::{Value, ValueId}, }, @@ -785,7 +784,7 @@ impl<'a> Context<'a> { match instruction { Instruction::Call { func, arguments } => { - let function_value = &dfg[func.raw()]; + let function_value = &dfg[*func]; match function_value { Value::Function(id) => { let func = &ssa.functions[id]; @@ -1346,13 +1345,17 @@ impl<'a> Context<'a> { let results = dfg.instruction_results(instruction); let res_typ = dfg.type_of_value(results[0]); // Get operations to call-data parameters are replaced by a get to the call-data-bus array - let call_data = - self.data_bus.call_data.iter().find(|cd| cd.index_map.contains_key(&array)).cloned(); + let call_data = self + .data_bus + .call_data + .iter() + .find(|cd| cd.index_map.contains_key(array.as_ref())) + .cloned(); if let Some(call_data) = call_data { let call_data_block = self.ensure_array_is_initialized(call_data.array_id, dfg)?; let bus_index = self .acir_context - .add_constant(FieldElement::from(call_data.index_map[&array] as i128)); + .add_constant(FieldElement::from(call_data.index_map[array.as_ref()] as i128)); let mut current_index = self.acir_context.add_var(bus_index, var_index)?; let result = self.get_from_call_data(&mut current_index, call_data_block, &res_typ)?; self.define_result(dfg, instruction, result.clone()); @@ -1476,7 +1479,7 @@ impl<'a> Context<'a> { .expect("Array set does not have one result"); let result_block_id; if mutate_array { - self.memory_blocks.insert(*result_id, block_id); + self.memory_blocks.insert(result_id.raw(), block_id); result_block_id = block_id; } else { // Initialize the new array with the values from the old array @@ -1813,7 +1816,7 @@ impl<'a> Context<'a> { result: AcirValue, ) { let result_ids = dfg.instruction_results(instruction); - self.ssa_values.insert(result_ids[0], result); + self.ssa_values.insert(result_ids[0].raw(), result); } /// Remember the result of instruction returning a single numeric value @@ -1897,7 +1900,7 @@ impl<'a> Context<'a> { fn convert_value(&mut self, value_id: ValueId, dfg: &DataFlowGraph) -> AcirValue { let value_id = dfg.resolve(value_id); let value = &dfg[value_id]; - if let Some(acir_value) = self.ssa_values.get(&value_id) { + if let Some(acir_value) = self.ssa_values.get(&value_id.raw()) { return acir_value.clone(); } @@ -1924,7 +1927,7 @@ impl<'a> Context<'a> { unreachable!("ICE: Should have been in cache {value_id} {value:?}") } }; - self.ssa_values.insert(value_id, acir_value.clone()); + self.ssa_values.insert(value_id.raw(), acir_value.clone()); acir_value } @@ -2131,7 +2134,7 @@ impl<'a> Context<'a> { dfg: &DataFlowGraph, ) -> Result { let mut var = self.convert_numeric_value(value_id, dfg)?; - match &dfg[value_id.raw()] { + match &dfg[value_id] { Value::Instruction { instruction, .. 
} => { if matches!( &dfg[*instruction], diff --git a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs index 90eb79ccb69..67f3bf024f5 100644 --- a/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs +++ b/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs @@ -6,12 +6,14 @@ use crate::ssa::ir::basic_block::BasicBlockId; use crate::ssa::ir::function::RuntimeType; use crate::ssa::ir::function::{Function, FunctionId}; use crate::ssa::ir::instruction::{Instruction, InstructionId, Intrinsic}; -use crate::ssa::ir::value::{Value, ValueId}; +use crate::ssa::ir::value::{RawValueId, Value, ValueId}; use crate::ssa::ssa_gen::Ssa; use im::HashMap; use rayon::prelude::*; use std::collections::{BTreeMap, HashSet}; +type ValueIdHashSet = HashSet; + impl Ssa { /// Go through each top-level non-brillig function and detect if it has independent subgraphs #[tracing::instrument(level = "trace", skip(self))] @@ -44,7 +46,7 @@ fn check_for_underconstrained_values_within_function( context.compute_sets_of_connected_value_ids(function, all_functions); - let all_brillig_generated_values: HashSet = + let all_brillig_generated_values: ValueIdHashSet = context.brillig_return_to_argument.keys().copied().collect(); let connected_sets_indices = @@ -67,9 +69,9 @@ fn check_for_underconstrained_values_within_function( struct Context { visited_blocks: HashSet, block_queue: Vec, - value_sets: Vec>, - brillig_return_to_argument: HashMap>, - brillig_return_to_instruction_id: HashMap, + value_sets: Vec, + brillig_return_to_argument: HashMap>, + brillig_return_to_instruction_id: HashMap, } impl Context { @@ -114,7 +116,7 @@ impl Context { // If it's the case, then that set doesn't present an issue for parameter_or_return_value in variable_parameters_and_return_values { for (set_index, final_set) in self.value_sets.iter().enumerate() { - if final_set.contains(¶meter_or_return_value) { + if final_set.contains(¶meter_or_return_value.raw()) { connected_sets_indices.insert(set_index); } } @@ -125,8 +127,8 @@ impl Context { /// Find which brillig calls separate this set from others and return bug warnings about them fn find_disconnecting_brillig_calls_with_results_in_set( &self, - current_set: &HashSet, - all_brillig_generated_values: &HashSet, + current_set: &ValueIdHashSet, + all_brillig_generated_values: &ValueIdHashSet, function: &Function, ) -> Vec { let mut warnings = Vec::new(); @@ -136,8 +138,10 @@ impl Context { // Go through all brillig outputs in the set for brillig_output_in_set in intersection { // Get the inputs that correspond to the output - let inputs: HashSet = - self.brillig_return_to_argument[&brillig_output_in_set].iter().copied().collect(); + let inputs: ValueIdHashSet = self.brillig_return_to_argument[&brillig_output_in_set] + .iter() + .map(|v| v.raw()) + .collect(); // Check if any of them are not in the set let unused_inputs = inputs.difference(current_set).next().is_some(); @@ -170,13 +174,13 @@ impl Context { // Insert non-constant instruction arguments function.dfg[*instruction].for_each_value(|value_id| { if function.dfg.get_numeric_constant(value_id).is_none() { - instruction_arguments_and_results.insert(function.dfg.resolve(value_id)); + instruction_arguments_and_results.insert(function.dfg.resolve(value_id).raw()); } }); // And non-constant results for value_id in function.dfg.instruction_results(*instruction).iter() { if 
function.dfg.get_numeric_constant(*value_id).is_none() { - instruction_arguments_and_results.insert(function.dfg.resolve(*value_id)); + instruction_arguments_and_results.insert(function.dfg.resolve(*value_id).raw()); } } @@ -235,9 +239,9 @@ impl Context { ) { self.brillig_return_to_argument - .insert(*result, argument_ids.clone()); + .insert(result.raw(), argument_ids.clone()); self.brillig_return_to_instruction_id - .insert(*result, *instruction); + .insert(result.raw(), *instruction); } } RuntimeType::Acir(..) => { @@ -269,15 +273,15 @@ impl Context { /// Merge all small sets into larger ones based on whether the sets intersect or not /// /// If two small sets have a common ValueId, we merge them into one - fn merge_sets(current: &[HashSet]) -> Vec> { + fn merge_sets(current: &[ValueIdHashSet]) -> Vec { let mut new_set_id: usize = 0; - let mut updated_sets: HashMap> = HashMap::new(); - let mut value_dictionary: HashMap = HashMap::new(); - let mut parsed_value_set: HashSet = HashSet::new(); + let mut updated_sets: HashMap = HashMap::new(); + let mut value_dictionary: HashMap = HashMap::new(); + let mut parsed_value_set: ValueIdHashSet = HashSet::new(); for set in current.iter() { // Check if the set has any of the ValueIds we've encountered at previous iterations - let intersection: HashSet = + let intersection: ValueIdHashSet = set.intersection(&parsed_value_set).copied().collect(); parsed_value_set.extend(set.iter()); @@ -333,7 +337,7 @@ impl Context { /// Parallel version of merge_sets /// The sets are merged by chunks, and then the chunks are merged together - fn merge_sets_par(sets: &[HashSet]) -> Vec> { + fn merge_sets_par(sets: &[ValueIdHashSet]) -> Vec { let mut sets = sets.to_owned(); let mut len = sets.len(); let mut prev_len = len + 1; diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index 0d702e0a7f7..e7887f8501a 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -81,16 +81,16 @@ impl DataBus { .map(|cd| { let mut call_data_map = HashMap::default(); for (k, v) in cd.index_map.iter() { - call_data_map.insert(f(*k), *v); + call_data_map.insert(f(k.into()).raw(), *v); } CallData { - array_id: f(cd.array_id.raw()).into(), + array_id: f(cd.array_id).into(), index_map: call_data_map, call_data_id: cd.call_data_id, } }) .collect(); - DataBus { call_data, return_data: self.return_data.map(|rd| f(rd.raw()).into()) } + DataBus { call_data, return_data: self.return_data.map(|rd| f(rd)) } } pub(crate) fn call_data_array(&self) -> Vec<(u32, ValueId)> { @@ -117,7 +117,7 @@ impl FunctionBuilder { /// Insert a value into a data bus builder fn add_to_data_bus(&mut self, value: ValueId, databus: &mut DataBusBuilder) { assert!(databus.databus.is_none(), "initializing finalized call data"); - let typ = self.current_function.dfg[value.raw()].get_type().clone(); + let typ = self.current_function.dfg[value].get_type().clone(); match typ { Type::Numeric(_) => { databus.values.push_back(value); @@ -229,7 +229,7 @@ impl FunctionBuilder { ) -> Vec { let ssa_param_sizes: Vec<_> = ssa_params .iter() - .map(|ssa_param| self.current_function.dfg[ssa_param.raw()].get_type().flattened_size()) + .map(|ssa_param| self.current_function.dfg[*ssa_param].get_type().flattened_size()) .collect(); let mut is_ssa_params_databus = Vec::with_capacity(ssa_params.len()); diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs 
b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 2bc95dd4923..7f33052a5aa 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -415,7 +415,7 @@ impl FunctionBuilder { } pub(crate) fn get_intrinsic_from_value(&mut self, value: ValueId) -> Option { - match self.current_function.dfg[value.raw()] { + match self.current_function.dfg[value] { Value::Intrinsic(intrinsic) => Some(intrinsic), _ => None, } @@ -464,7 +464,7 @@ impl std::ops::Index for FunctionBuilder { type Output = Value; fn index(&self, id: ValueId) -> &Self::Output { - &self.current_function.dfg[id.raw()] + &self.current_function.dfg[id] } } @@ -517,7 +517,7 @@ mod tests { let call_results = builder.insert_call(to_bits_id, vec![input, length], result_types).into_owned(); - let slice = match &builder.current_function.dfg[call_results[0].raw()] { + let slice = match &builder.current_function.dfg[call_results[0]] { Value::Array { array, .. } => vecmap(array, |v| v.resolved()), _ => panic!(), }; diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 93dfee28fa1..57663249788 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -563,10 +563,12 @@ impl std::ops::IndexMut for DataFlowGraph { } } -impl std::ops::Index for DataFlowGraph { +/// Indexing the DFG by unresolved value IDs is all over the codebase, +/// but it's not obvious whether we should apply resolution. +impl std::ops::Index for DataFlowGraph { type Output = Value; - fn index(&self, id: RawValueId) -> &Self::Output { - &self.values[id] + fn index(&self, id: ValueId) -> &Self::Output { + &self.values[id.raw()] } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index eb2a2da7f29..3e507097901 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -36,7 +36,7 @@ impl<'f> FunctionInserter<'f> { let value = self.function.dfg.resolve(value); match self.values.get(&value.raw()) { Some(value) => self.resolve(*value), - None => match &self.function.dfg[value.raw()] { + None => match &self.function.dfg[value] { super::value::Value::Array { array, typ } => { let typ = typ.clone(); let new_array: im::Vector = diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index a884aa6eef4..5bbdb130975 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -658,7 +658,7 @@ impl Instruction { Instruction::EnableSideEffectsIf { .. } | Instruction::ArraySet { .. } => true, - Instruction::Call { func, .. } => match dfg[func.raw()] { + Instruction::Call { func, .. } => match dfg[func.unresolved()] { Value::Function(_) => true, Value::Intrinsic(intrinsic) => { matches!(intrinsic, Intrinsic::SliceInsert | Intrinsic::SliceRemove) @@ -700,7 +700,7 @@ impl Instruction { | IncrementRc { .. } | DecrementRc { .. } => false, - Call { func, .. } => match dfg[func.raw()] { + Call { func, .. } => match dfg[func.unresolved()] { Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), _ => false, }, @@ -755,7 +755,7 @@ impl Instruction { | RangeCheck { .. } => false, // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. - Call { func, .. } => match dfg[func.raw()] { + Call { func, .. 
} => match dfg[func.unresolved()] { // Explicitly allows removal of unused ec operations, even if they can fail Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)) | Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::EmbeddedCurveAdd)) => true, @@ -800,7 +800,7 @@ fn try_optimize_array_get_from_previous_set( // Arbitrary number of maximum tries just to prevent this optimization from taking too long. let max_tries = 5; for _ in 0..max_tries { - match &dfg[array_id.raw()] { + match &dfg[array_id] { Value::Instruction { instruction, .. } => { match &dfg[*instruction] { Instruction::ArraySet { array, index, value, .. } => { @@ -867,7 +867,7 @@ fn try_optimize_array_set_from_previous_get( target_index: ValueId, target_value: ValueId, ) -> SimplifyResult { - let array_from_get = match &dfg[target_value.raw()] { + let array_from_get = match &dfg[target_value] { Value::Instruction { instruction, .. } => match &dfg[*instruction] { Instruction::ArrayGet { array, index } => { if array_id.unresolved_eq(array) && target_index.unresolved_eq(index) { @@ -900,7 +900,7 @@ fn try_optimize_array_set_from_previous_get( // Arbitrary number of maximum tries just to prevent this optimization from taking too long. let max_tries = 5; for _ in 0..max_tries { - match &dfg[array_id.raw()] { + match &dfg[array_id] { Value::Instruction { instruction, .. } => match &dfg[*instruction] { Instruction::ArraySet { array, index, .. } => { let Some(index) = dfg.get_numeric_constant(*index) else { diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 6421243febe..0fee3e3bd97 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -39,7 +39,7 @@ pub(super) fn simplify_call( ctrl_typevars: Option>, call_stack: &CallStack, ) -> SimplifyResult { - let intrinsic = match &dfg[func.raw()] { + let intrinsic = match &dfg[func] { Value::Intrinsic(intrinsic) => *intrinsic, _ => return SimplifyResult::None, }; diff --git a/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/compiler/noirc_evaluator/src/ssa/ir/printer.rs index a52ccd24d3e..2b564c14aa7 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -220,7 +220,7 @@ pub(crate) fn try_to_extract_string_from_error_payload( ((error_selector == STRING_ERROR_SELECTOR) && (values.len() == 1)) .then_some(()) .and_then(|()| { - let Value::Array { array: values, .. } = &dfg[values[0].raw()] else { + let Value::Array { array: values, .. } = &dfg[values[0]] else { return None; }; let fields: Option> = diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index 95184266799..6b67c1f827a 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -112,6 +112,11 @@ impl From>> for ValueId { ValueId::new(value) } } +impl From<&Id>> for ValueId { + fn from(value: &Id>) -> Self { + ValueId::new(*value) + } +} /// Value is the most basic type allowed in the IR. /// Transition Note: A Id is similar to `NodeId` in our previous IR. 
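The `From` impls above hint at the shape of the new id type this series introduces: `ValueId` gains a type-state parameter so resolved and unresolved ids can be told apart at compile time, while `RawValueId` stays the plain index used as a map key. The generic parameters are partly lost in this capture (angle brackets were stripped), so what follows is only a minimal, self-contained sketch of that pattern, assuming a `PhantomData` marker and a bare integer index; names such as `RawValueId`, `ResolvedValueId`, `raw()`, `resolved()` and `unresolved_eq()` mirror the diff, but the real definitions live in `compiler/noirc_evaluator/src/ssa/ir/value.rs` (where `ValueId` wraps an arena `Id`, not a `u32`) and may differ.

use std::collections::HashMap;
use std::marker::PhantomData;

/// Marker types for the resolution state of a value id (sketch only).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Unresolved;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Resolved;

/// The untagged index used as a hash-map key; comparing raw ids ignores resolution state.
/// Assumption: modelled here as a plain u32 instead of the real arena `Id<Value>`.
pub type RawValueId = u32;

/// A value id tagged with whether it has been run through the DFG's replacement map.
/// The tag is a zero-sized marker, so it is erased at runtime.
#[derive(Debug, Clone, Copy)]
pub struct ValueId<R = Unresolved> {
    index: RawValueId,
    _marker: PhantomData<R>,
}

pub type ResolvedValueId = ValueId<Resolved>;

impl<R> ValueId<R> {
    pub fn new(index: RawValueId) -> Self {
        Self { index, _marker: PhantomData }
    }

    /// Strip the resolution tag, e.g. to key a HashMap or BTreeSet.
    pub fn raw(self) -> RawValueId {
        self.index
    }

    /// Compare ids without requiring either side to be resolved first.
    pub fn unresolved_eq<S>(&self, other: &ValueId<S>) -> bool {
        self.index == other.index
    }
}

impl ValueId<Unresolved> {
    /// Assert that this id needs no further resolution.
    pub fn resolved(self) -> ResolvedValueId {
        ValueId::new(self.index)
    }
}

/// Toy stand-in for the DFG's `replaced_value_ids` map: follow the
/// substitution chain until a fixed point is reached.
fn resolve(replacements: &HashMap<RawValueId, RawValueId>, id: ValueId) -> ResolvedValueId {
    match replacements.get(&id.raw()) {
        Some(next) => resolve(replacements, ValueId::new(*next)),
        None => ValueId::new(id.raw()),
    }
}

fn main() {
    let mut replacements = HashMap::new();
    replacements.insert(3, 7); // v3 was replaced by v7
    replacements.insert(7, 9); // which was in turn replaced by v9

    let v3: ValueId = ValueId::new(3);
    let r = resolve(&replacements, v3);
    assert_eq!(r.raw(), 9);
    assert!(v3.unresolved_eq(&v3));
    println!("v3 resolves to v{}", r.raw());
}

The point of the marker is that it costs nothing: indexing maps by `raw()` and comparing with `unresolved_eq` compile down to plain integer operations, while APIs that take a `ResolvedValueId` force callers through `resolve` first, which is exactly the discipline the passes above are being migrated to.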
diff --git a/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs b/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs index e93792cef57..59917e8589b 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs @@ -43,7 +43,7 @@ fn known_slice_lengths(func: &Function) -> HashMap { _ => continue, }; - match &func.dfg[target_func.raw()] { + match &func.dfg[*target_func] { Value::Intrinsic(Intrinsic::AsSlice) => { let array_typ = func.dfg.type_of_value(arguments[0]); if let Type::Array(_, length) = array_typ { diff --git a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 0d1e69034c2..88734c3c1d9 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -251,7 +251,7 @@ impl Context { // to map from the more complex to the simpler value. if let Instruction::Constrain(lhs, rhs, _) = instruction { // These `ValueId`s should be fully resolved now. - match (&dfg[lhs.raw()], &dfg[rhs.raw()]) { + match (&dfg[lhs], &dfg[rhs]) { // Ignore trivial constraints (Value::NumericConstant { .. }, Value::NumericConstant { .. }) => (), @@ -545,7 +545,7 @@ mod test { TerminatorInstruction::Return { return_values, .. } => return_values[0], _ => unreachable!("Should have terminator instruction"), }; - let return_element = match &main.dfg[return_value_id.raw()] { + let return_element = match &main.dfg[return_value_id] { Value::Array { array, .. } => array[0], _ => unreachable!("Return type should be array"), }; diff --git a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs index 60f7c3edc1a..e4e6ad0b2bb 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs @@ -93,7 +93,7 @@ impl DefunctionalizationContext { _ => continue, }; - match func.dfg[target_func_id.raw()] { + match func.dfg[target_func_id] { // If the target is a function used as value Value::Param { .. } | Value::Instruction { .. } => { let mut arguments = arguments.clone(); @@ -130,8 +130,8 @@ impl DefunctionalizationContext { // Change the type of all the values that are not call targets to NativeField let value_ids = vecmap(func.dfg.values_iter(), |(id, _)| id); for value_id in value_ids { - if let Type::Function = &func.dfg[value_id.raw()].get_type() { - match &func.dfg[value_id.raw()] { + if let Type::Function = &func.dfg[value_id].get_type() { + match &func.dfg[value_id] { // If the value is a static function, transform it to the function id Value::Function(id) => { if !call_target_values.contains(value_id.as_ref()) { @@ -193,7 +193,7 @@ fn find_functions_as_values(func: &Function) -> BTreeSet { let mut functions_as_values: BTreeSet = BTreeSet::new(); let mut process_value = |value_id: ValueId| { - if let Value::Function(id) = func.dfg[value_id.raw()] { + if let Value::Function(id) = func.dfg[value_id] { functions_as_values.insert(id); } }; @@ -229,8 +229,7 @@ fn find_dynamic_dispatches(func: &Function) -> BTreeSet { let instruction = &func.dfg[*instruction_id]; match instruction { Instruction::Call { func: target, arguments } => { - if let Value::Param { .. } | Value::Instruction { .. } = &func.dfg[target.raw()] - { + if let Value::Param { .. } | Value::Instruction { .. 
} = &func.dfg[*target] { let results = func.dfg.instruction_results(*instruction_id); dispatches.insert(Signature { params: vecmap(arguments, |param| func.dfg.type_of_value(*param)), diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs index 6805ae2757f..c864f0bd1bb 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -27,37 +27,36 @@ impl<'a> SliceCapacityTracker<'a> { match instruction { Instruction::ArrayGet { array, .. } => { let array_typ = self.dfg.type_of_value(*array); - let array_value = &self.dfg[array.raw()]; + let array_value = &self.dfg[*array]; if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() { // Initial insertion into the slice sizes map // Any other insertions should only occur if the value is already // a part of the map. - self.compute_slice_capacity(array.raw(), slice_sizes); + self.compute_slice_capacity(*array, slice_sizes); } } Instruction::ArraySet { array, value, .. } => { let array_typ = self.dfg.type_of_value(*array); - let array = array.raw(); - let array_value = &self.dfg[array]; + let array_value = &self.dfg[*array]; if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() { // Initial insertion into the slice sizes map // Any other insertions should only occur if the value is already // a part of the map. - self.compute_slice_capacity(array, slice_sizes); + self.compute_slice_capacity(*array, slice_sizes); } let value_typ = self.dfg.type_of_value(*value); // Compiler sanity check assert!(!value_typ.contains_slice_element(), "ICE: Nested slices are not allowed and should not have reached the flattening pass of SSA"); - if let Some(capacity) = slice_sizes.get(&array) { + if let Some(capacity) = slice_sizes.get(array.as_ref()) { slice_sizes.insert(results[0].raw(), *capacity); } } Instruction::Call { func, arguments } => { - let func = &self.dfg[func.raw()]; + let func = &self.dfg[*func]; if let Value::Intrinsic(intrinsic) = func { let (argument_index, result_index) = match intrinsic { Intrinsic::SlicePushBack @@ -85,7 +84,7 @@ impl<'a> SliceCapacityTracker<'a> { for arg in &arguments[(argument_index + 1)..] 
{ let element_typ = self.dfg.type_of_value(*arg); if element_typ.contains_slice_element() { - self.compute_slice_capacity(arg.raw(), slice_sizes); + self.compute_slice_capacity(*arg, slice_sizes); } } @@ -136,7 +135,7 @@ impl<'a> SliceCapacityTracker<'a> { Instruction::Store { address, value } => { let value_typ = self.dfg.type_of_value(*value); if value_typ.contains_slice_element() { - self.compute_slice_capacity(value.raw(), slice_sizes); + self.compute_slice_capacity(*value, slice_sizes); let value_capacity = slice_sizes.get(value.as_ref()).unwrap_or_else(|| { panic!("ICE: should have slice capacity set for value {value} being stored at {address}") @@ -164,7 +163,7 @@ impl<'a> SliceCapacityTracker<'a> { /// Computes the starting capacity of a slice which is still a `Value::Array` pub(crate) fn compute_slice_capacity( &self, - array_id: RawValueId, + array_id: ValueId, slice_sizes: &mut HashMap, ) { if let Value::Array { array, typ } = &self.dfg[array_id] { @@ -173,7 +172,7 @@ impl<'a> SliceCapacityTracker<'a> { if let Type::Slice(_) = typ { let element_size = typ.element_size(); let len = array.len() / element_size; - slice_sizes.insert(array_id, len); + slice_sizes.insert(array_id.raw(), len); } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 34c9ad58b1c..c948efcbc3f 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -465,7 +465,7 @@ impl<'a> ValueMerger<'a> { result: ValueId, changed_indices: &mut Vec<(ValueId, ValueId, Type, ValueId)>, ) -> ValueId { - match &self.dfg[result.raw()] { + match &self.dfg[result] { Value::Instruction { instruction, .. } => match &self.dfg[*instruction] { Instruction::ArraySet { array, index, value, .. } => { let condition = diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index db7d8b16a80..8ade47bceed 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -139,7 +139,7 @@ fn called_functions_vec(func: &Function) -> Vec { continue; }; - if let Value::Function(function_id) = func.dfg[called_value_id.raw()] { + if let Value::Function(function_id) = func.dfg[*called_value_id] { called_function_ids.push(function_id); } } @@ -462,7 +462,7 @@ impl<'function> PerFunctionContext<'function> { return *value; } - let new_value = match &self.source_function.dfg[id.raw()] { + let new_value = match &self.source_function.dfg[id] { value @ Value::Instruction { .. } => { unreachable!("All Value::Instructions should already be known during inlining after creating the original inlined instruction. Unknown value {id} = {value:?}") } diff --git a/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs b/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs index ae50f5902c2..b08547edd51 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs @@ -156,17 +156,17 @@ impl IdMaps { old_function: &Function, old_value: ValueId, ) -> ValueId { - let old_value = old_function.dfg.resolve(old_value).raw(); + let old_value = old_function.dfg.resolve(old_value); match &old_function.dfg[old_value] { value @ Value::Instruction { instruction, .. 
} => { - *self.values.get(&old_value).unwrap_or_else(|| { + *self.values.get(&old_value.raw()).unwrap_or_else(|| { let instruction = &old_function.dfg[*instruction]; unreachable!("Unmapped value with id {old_value}: {value:?}\n from instruction: {instruction:?}, SSA: {old_function}") }) } value @ Value::Param { .. } => { - *self.values.get(&old_value).unwrap_or_else(|| { + *self.values.get(&old_value.raw()).unwrap_or_else(|| { unreachable!("Unmapped value with id {old_value}: {value:?}") }) } @@ -180,7 +180,7 @@ impl IdMaps { new_function.dfg.make_constant(*constant, typ.clone()) } Value::Array { array, typ } => { - if let Some(value) = self.values.get(&old_value) { + if let Some(value) = self.values.get(&old_value.raw()) { return *value; } @@ -189,7 +189,7 @@ impl IdMaps { .map(|value| self.map_value(new_function, old_function, *value)) .collect(); let new_value = new_function.dfg.make_array(array, typ.clone()); - self.values.insert(old_value, new_value); + self.values.insert(old_value.raw(), new_value); new_value } Value::Intrinsic(intrinsic) => new_function.dfg.import_intrinsic(*intrinsic), diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 3d79788b401..cbee2cce7ad 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -155,7 +155,7 @@ impl Context { | Load { .. } => true, // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. - Call { func, .. } => match dfg[func.raw()] { + Call { func, .. } => match dfg[*func] { Value::Intrinsic(intrinsic) => match intrinsic { Intrinsic::SlicePushBack | Intrinsic::SlicePushFront diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs index 0f964d8414c..f8b4670c336 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -105,7 +105,7 @@ impl Context { self.array_set_conditionals.insert(result.raw(), current_conditional); } Instruction::Call { func, arguments } => { - if let Value::Intrinsic(intrinsic) = function.dfg[func.raw()] { + if let Value::Intrinsic(intrinsic) = function.dfg[*func] { let results = function.dfg.instruction_results(instruction); match slice_capacity_change(&function.dfg, intrinsic, arguments, results) { @@ -163,7 +163,7 @@ impl Context { } } - let dbg_value = &dfg[value.raw()]; + let dbg_value = &dfg[value]; unreachable!("No size for slice {value} = {dbg_value:?}") } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs b/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs index ced5a428595..3d40c88d704 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs @@ -34,7 +34,7 @@ impl Function { _ => continue, }; - if let Value::Intrinsic(Intrinsic::IsUnconstrained) = &self.dfg[target_func.raw()] { + if let Value::Intrinsic(Intrinsic::IsUnconstrained) = &self.dfg[target_func] { is_unconstrained_calls.insert(instruction_id); } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/runtime_separation.rs b/compiler/noirc_evaluator/src/ssa/opt/runtime_separation.rs index d00216830ef..0dd53da8aa6 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/runtime_separation.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/runtime_separation.rs @@ -6,7 
+6,7 @@ use crate::ssa::{ ir::{ function::{Function, FunctionId, RuntimeType}, instruction::Instruction, - value::{RawValueId, Value}, + value::{RawValueId, Value, ValueId}, }, ssa_gen::Ssa, }; @@ -112,12 +112,13 @@ impl RuntimeSeparatorContext { for (_function_id, func) in ssa.functions.iter_mut() { if matches!(func.runtime(), RuntimeType::Brillig(_)) { for called_func_value_id in called_functions_values(func).iter() { - let Value::Function(called_func_id) = &func.dfg[*called_func_value_id] else { + let called_func_value_id = called_func_value_id.into(); + let Value::Function(called_func_id) = &func.dfg[called_func_value_id] else { unreachable!("Value should be a function") }; if let Some(mapped_func_id) = self.mapped_functions.get(called_func_id) { let mapped_value_id = func.dfg.import_function(*mapped_func_id); - func.dfg.set_value_from_id(called_func_value_id.into(), mapped_value_id); + func.dfg.set_value_from_id(called_func_value_id, mapped_value_id); } } } @@ -133,9 +134,8 @@ fn called_functions_values(func: &Function) -> BTreeSet { let Instruction::Call { func: called_value_id, .. } = &func.dfg[*instruction_id] else { continue; }; - let called_value_id = called_value_id.raw(); - if let Value::Function(_) = func.dfg[called_value_id] { - called_function_ids.insert(called_value_id); + if let Value::Function(_) = func.dfg[*called_value_id] { + called_function_ids.insert(called_value_id.raw()); } } } @@ -147,7 +147,7 @@ fn called_functions(func: &Function) -> BTreeSet { called_functions_values(func) .into_iter() .map(|value_id| { - let Value::Function(func_id) = func.dfg[value_id] else { + let Value::Function(func_id) = func.dfg[ValueId::from(value_id)] else { unreachable!("Value should be a function") }; func_id From ee857025db107108aa319ad9f925dae360ae6502 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Sat, 9 Nov 2024 00:08:11 +0000 Subject: [PATCH 04/19] mem2reg --- .../brillig_gen/constant_allocation.rs | 8 +- .../brillig/brillig_gen/variable_liveness.rs | 84 +++++++------- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 2 +- compiler/noirc_evaluator/src/ssa/ir/value.rs | 16 +++ .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 108 ++++++++++-------- .../src/ssa/opt/mem2reg/alias_set.rs | 2 +- .../src/ssa/opt/mem2reg/block.rs | 59 +++++----- 7 files changed, 153 insertions(+), 126 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs index 5428cff8dc5..d68675d98cf 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs @@ -11,7 +11,7 @@ use crate::ssa::ir::{ function::Function, instruction::InstructionId, post_order::PostOrder, - value::{RawValueId, Value, ValueId}, + value::{RawValueId, ResolvedValueId, Value, ValueId}, }; use super::variable_liveness::{collect_variables_of_value, variables_used_in_instruction}; @@ -65,7 +65,7 @@ impl ConstantAllocation { fn collect_constant_usage(&mut self, func: &Function) { let mut record_if_constant = - |block_id: BasicBlockId, value_id: ValueId, location: InstructionLocation| { + |block_id: BasicBlockId, value_id: ResolvedValueId, location: InstructionLocation| { if is_constant_value(value_id, &func.dfg) { self.constant_usage .entry(value_id.raw()) @@ -166,8 +166,8 @@ impl ConstantAllocation { } } -pub(crate) fn is_constant_value(id: ValueId, dfg: &DataFlowGraph) -> bool { - matches!(&dfg[dfg.resolve(id)], 
Value::NumericConstant { .. } | Value::Array { .. }) +fn is_constant_value(id: ResolvedValueId, dfg: &DataFlowGraph) -> bool { + matches!(&dfg[id], Value::NumericConstant { .. } | Value::Array { .. }) } /// For a given function, finds all the blocks that are within loops diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index a18461bc0cd..a3c0c07b4e5 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -9,7 +9,7 @@ use crate::ssa::ir::{ function::Function, instruction::{Instruction, InstructionId}, post_order::PostOrder, - value::{Value, ValueId}, + value::{ResolvedValueId, Value, ValueId}, }; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; @@ -45,7 +45,10 @@ fn find_back_edges( } /// Collects the underlying variables inside a value id. It might be more than one, for example in constant arrays that are constructed with multiple vars. -pub(crate) fn collect_variables_of_value(value_id: ValueId, dfg: &DataFlowGraph) -> Vec { +pub(crate) fn collect_variables_of_value( + value_id: ValueId, + dfg: &DataFlowGraph, +) -> Vec { let value_id = dfg.resolve(value_id); let value = &dfg[value_id]; @@ -99,7 +102,7 @@ fn variables_used_in_block(block: &BasicBlock, dfg: &DataFlowGraph) -> Variables .collect(); // We consider block parameters used, so they live up to the block that owns them. - used.extend(block.parameters().iter()); + used.extend(block.parameters().iter().map(|p| p.resolved())); if let Some(terminator) = block.terminator() { terminator.for_each_value(|value_id| { @@ -110,7 +113,7 @@ fn variables_used_in_block(block: &BasicBlock, dfg: &DataFlowGraph) -> Variables used } -type Variables = HashSet; +type Variables = HashSet; fn compute_used_before_def( block: &BasicBlock, @@ -226,7 +229,7 @@ impl VariableLiveness { ) { let mut defined = self.compute_defined_variables(block_id, &func.dfg); - defined.extend(constants.allocated_in_block(block_id)); + defined.extend(constants.allocated_in_block(block_id).iter().map(|c| c.resolved())); let block: &BasicBlock = &func.dfg[block_id]; @@ -351,6 +354,12 @@ mod test { use crate::ssa::ir::map::Id; use crate::ssa::ir::types::Type; + use super::{ResolvedValueId, ValueId}; + + fn resolved_set(it: &[ValueId]) -> FxHashSet { + FxHashSet::from_iter(it.iter().map(|v| v.resolved())) + } + #[test] fn simple_back_propagation() { // brillig fn main f0 { @@ -419,30 +428,24 @@ mod test { let liveness = VariableLiveness::from_function(func, &constants); assert!(liveness.get_live_in(&func.entry_block()).is_empty()); - assert_eq!( - liveness.get_live_in(&b2), - &FxHashSet::from_iter([v3, v0, twenty_seven].into_iter()) - ); - assert_eq!( - liveness.get_live_in(&b1), - &FxHashSet::from_iter([v3, v1, twenty_seven].into_iter()) - ); - assert_eq!(liveness.get_live_in(&b3), &FxHashSet::from_iter([v3].into_iter())); + assert_eq!(liveness.get_live_in(&b2), &resolved_set(&[v3, v0, twenty_seven])); + assert_eq!(liveness.get_live_in(&b1), &resolved_set(&[v3, v1, twenty_seven])); + assert_eq!(liveness.get_live_in(&b3), &resolved_set(&[v3])); let block_1 = &func.dfg[b1]; let block_2 = &func.dfg[b2]; let block_3 = &func.dfg[b3]; assert_eq!( liveness.get_last_uses(&b1).get(&block_1.instructions()[0]), - Some(&FxHashSet::from_iter([v1, twenty_seven].into_iter())) + Some(&resolved_set(&[v1, twenty_seven])) ); assert_eq!( 
liveness.get_last_uses(&b2).get(&block_2.instructions()[0]), - Some(&FxHashSet::from_iter([v0, twenty_seven].into_iter())) + Some(&resolved_set(&[v0, twenty_seven])) ); assert_eq!( liveness.get_last_uses(&b3).get(&block_3.instructions()[0]), - Some(&FxHashSet::from_iter([v3].into_iter())) + Some(&resolved_set(&[v3])) ); } @@ -571,40 +574,31 @@ mod test { let liveness = VariableLiveness::from_function(func, &constants); assert!(liveness.get_live_in(&func.entry_block()).is_empty()); - assert_eq!( - liveness.get_live_in(&b1), - &FxHashSet::from_iter([v0, v1, v3, v4, twenty_seven, one].into_iter()) - ); - assert_eq!(liveness.get_live_in(&b3), &FxHashSet::from_iter([v3].into_iter())); - assert_eq!( - liveness.get_live_in(&b2), - &FxHashSet::from_iter([v0, v1, v3, v4, twenty_seven, one].into_iter()) - ); + assert_eq!(liveness.get_live_in(&b1), &resolved_set(&[v0, v1, v3, v4, twenty_seven, one])); + assert_eq!(liveness.get_live_in(&b3), &resolved_set(&[v3])); + assert_eq!(liveness.get_live_in(&b2), &resolved_set(&[v0, v1, v3, v4, twenty_seven, one])); assert_eq!( liveness.get_live_in(&b4), - &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7, twenty_seven, one].into_iter()) - ); - assert_eq!( - liveness.get_live_in(&b6), - &FxHashSet::from_iter([v0, v1, v3, v4, twenty_seven, one].into_iter()) + &resolved_set(&[v0, v1, v3, v4, v6, v7, twenty_seven, one]) ); + assert_eq!(liveness.get_live_in(&b6), &resolved_set(&[v0, v1, v3, v4, twenty_seven, one])); assert_eq!( liveness.get_live_in(&b5), - &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7, twenty_seven, one].into_iter()) + &resolved_set(&[v0, v1, v3, v4, v6, v7, twenty_seven, one]) ); assert_eq!( liveness.get_live_in(&b7), - &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7, twenty_seven, one].into_iter()) + &resolved_set(&[v0, v1, v3, v4, v6, v7, twenty_seven, one]) ); assert_eq!( liveness.get_live_in(&b8), - &FxHashSet::from_iter([v0, v1, v3, v4, v6, v7, twenty_seven, one].into_iter()) + &resolved_set(&[v0, v1, v3, v4, v6, v7, twenty_seven, one]) ); let block_3 = &func.dfg[b3]; assert_eq!( liveness.get_last_uses(&b3).get(&block_3.instructions()[0]), - Some(&FxHashSet::from_iter([v3].into_iter())) + Some(&resolved_set(&[v3])) ); } @@ -655,12 +649,20 @@ mod test { let liveness = VariableLiveness::from_function(func, &constants); // Entry point defines its own params and also b3's params. 
- assert_eq!(liveness.defined_block_params(&func.entry_block()), vec![v0, v1, v2]); - assert_eq!(liveness.defined_block_params(&b1), vec![]); - assert_eq!(liveness.defined_block_params(&b2), vec![]); - assert_eq!(liveness.defined_block_params(&b3), vec![]); + let resolved_params = |block_id| { + liveness + .defined_block_params(&block_id) + .iter() + .map(|p| p.resolved()) + .collect::() + }; + + assert_eq!(resolved_params(func.entry_block()), resolved_set(&[v0, v1, v2])); + assert_eq!(resolved_params(b1), resolved_set(&[])); + assert_eq!(resolved_params(b2), resolved_set(&[])); + assert_eq!(resolved_params(b3), resolved_set(&[])); - assert_eq!(liveness.get_live_in(&b1), &FxHashSet::from_iter([v1, v2].into_iter())); - assert_eq!(liveness.get_live_in(&b2), &FxHashSet::from_iter([v1, v2].into_iter())); + assert_eq!(liveness.get_live_in(&b1), &resolved_set(&[v1, v2])); + assert_eq!(liveness.get_live_in(&b2), &resolved_set(&[v1, v2])); } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 57663249788..e15e7662b11 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -573,7 +573,7 @@ impl std::ops::Index for DataFlowGraph { } impl std::ops::Index for DataFlowGraph { - type Output = Value; + type Output = Value; // The value can still contain unresolved IDs. fn index(&self, id: ResolvedValueId) -> &Self::Output { &self.values[id.raw()] } diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index 6b67c1f827a..e391388bb38 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -175,4 +175,20 @@ impl Value { Value::ForeignFunction { .. } => &Type::Function, } } + + pub(crate) fn map_values(self, f: impl Fn(ValueId) -> ValueId) -> Value { + match self { + Value::Instruction { instruction, position, typ } => { + Value::Instruction { instruction, position, typ } + } + Value::Param { block, position, typ } => Value::Param { block, position, typ }, + Value::NumericConstant { constant, typ } => Value::NumericConstant { constant, typ }, + Value::Array { array, typ } => { + Value::Array { array: array.into_iter().map(|v| f(v)).collect(), typ } + } + Value::Function(id) => Value::Function(id), + Value::Intrinsic(intrinsic) => Value::Intrinsic(intrinsic), + Value::ForeignFunction(s) => Value::ForeignFunction(s), + } + } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index a052abc5e16..be434e488e4 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -77,7 +77,7 @@ use crate::ssa::{ instruction::{Instruction, InstructionId, TerminatorInstruction}, post_order::PostOrder, types::Type, - value::ValueId, + value::{RawValueId, ResolvedValueId, ValueId}, }, ssa_gen::Ssa, }; @@ -122,16 +122,16 @@ struct PerFunctionContext<'f> { /// Track a value's last load across all blocks. /// If a value is not used in anymore loads we can remove the last store to that value. - last_loads: HashMap, + last_loads: HashMap, /// Track whether a reference was passed into another entry point /// This is needed to determine whether we can remove a store. - calls_reference_input: HashSet, + calls_reference_input: HashSet, /// Track whether a reference has been aliased, and store the respective /// instruction that aliased that reference. 
/// If that store has been set for removal, we can also remove this instruction. - aliased_references: HashMap>, + aliased_references: HashMap>, } impl<'f> PerFunctionContext<'f> { @@ -166,10 +166,10 @@ impl<'f> PerFunctionContext<'f> { } let mut all_terminator_values = HashSet::default(); - let mut per_func_block_params: HashSet = HashSet::default(); + let mut per_func_block_params: HashSet = HashSet::default(); for (block_id, _) in self.blocks.iter() { let block_params = self.inserter.function.dfg.block_parameters(*block_id); - per_func_block_params.extend(block_params.iter()); + per_func_block_params.extend(block_params.iter().map(|p| p.raw())); let terminator = self.inserter.function.dfg[*block_id].unwrap_terminator(); terminator.for_each_value(|value| { self.recursively_add_values(value, &mut all_terminator_values); @@ -180,6 +180,7 @@ impl<'f> PerFunctionContext<'f> { // This rule does not apply to reference parameters, which we must also check for before removing these stores. for (_, block) in self.blocks.iter() { for (store_address, store_instruction) in block.last_stores.iter() { + let store_address = &store_address.raw(); let store_alias_used = self.is_store_alias_used( store_address, block, @@ -207,41 +208,41 @@ impl<'f> PerFunctionContext<'f> { // an allocation did not come from an entry point or was passed to an entry point. fn is_store_alias_used( &self, - store_address: &ValueId, + store_address: &RawValueId, block: &Block, - all_terminator_values: &HashSet, - per_func_block_params: &HashSet, + all_terminator_values: &HashSet, + per_func_block_params: &HashSet, ) -> bool { let reference_parameters = self.reference_parameters(); if let Some(expression) = block.expressions.get(store_address) { if let Some(aliases) = block.aliases.get(expression) { let allocation_aliases_parameter = - aliases.any(|alias| reference_parameters.contains(&alias)); + aliases.any(|alias| reference_parameters.contains(alias.as_ref())); if allocation_aliases_parameter == Some(true) { return true; } let allocation_aliases_parameter = - aliases.any(|alias| per_func_block_params.contains(&alias)); + aliases.any(|alias| per_func_block_params.contains(alias.as_ref())); if allocation_aliases_parameter == Some(true) { return true; } let allocation_aliases_parameter = - aliases.any(|alias| self.calls_reference_input.contains(&alias)); + aliases.any(|alias| self.calls_reference_input.contains(alias.as_ref())); if allocation_aliases_parameter == Some(true) { return true; } let allocation_aliases_parameter = - aliases.any(|alias| all_terminator_values.contains(&alias)); + aliases.any(|alias| all_terminator_values.contains(alias.as_ref())); if allocation_aliases_parameter == Some(true) { return true; } let allocation_aliases_parameter = aliases.any(|alias| { - if let Some(alias_instructions) = self.aliased_references.get(&alias) { + if let Some(alias_instructions) = self.aliased_references.get(alias.as_ref()) { self.instructions_to_remove.is_disjoint(alias_instructions) } else { false @@ -260,16 +261,16 @@ impl<'f> PerFunctionContext<'f> { /// All references are mutable, so these inputs are shared with the function caller /// and thus stores should not be eliminated, even if the blocks in this function /// don't use them anywhere. 
- fn reference_parameters(&self) -> BTreeSet { + fn reference_parameters(&self) -> BTreeSet { let parameters = self.inserter.function.parameters().iter(); parameters .filter(|param| self.inserter.function.dfg.value_is_reference(**param)) - .copied() + .map(|v| v.raw()) .collect() } - fn recursively_add_values(&self, value: ValueId, set: &mut HashSet) { - set.insert(value); + fn recursively_add_values(&self, value: ValueId, set: &mut HashSet) { + set.insert(value.raw()); if let Some((elements, _)) = self.inserter.function.dfg.get_array_constant(value) { for array_element in elements { self.recursively_add_values(array_element, set); @@ -358,12 +359,12 @@ impl<'f> PerFunctionContext<'f> { let first = aliases.first(); let first = first.expect("All parameters alias at least themselves or we early return"); - let expression = Expression::Other(first); + let expression = Expression::Other(first.resolved()); let previous = references.aliases.insert(expression.clone(), aliases.clone()); assert!(previous.is_none()); aliases.for_each(|alias| { - let previous = references.expressions.insert(alias, expression.clone()); + let previous = references.expressions.insert(alias.raw(), expression.clone()); assert!(previous.is_none()); }); } @@ -375,10 +376,10 @@ impl<'f> PerFunctionContext<'f> { let reference_parameters = self.reference_parameters(); for (allocation, instruction) in &references.last_stores { - if let Some(expression) = references.expressions.get(allocation) { + if let Some(expression) = references.expressions.get(&allocation.raw()) { if let Some(aliases) = references.aliases.get(expression) { let allocation_aliases_parameter = - aliases.any(|alias| reference_parameters.contains(&alias)); + aliases.any(|alias| reference_parameters.contains(alias.as_ref())); // If `allocation_aliases_parameter` is known to be false if allocation_aliases_parameter == Some(false) { @@ -414,12 +415,12 @@ impl<'f> PerFunctionContext<'f> { // If the load is known, replace it with the known value and remove the load if let Some(value) = references.get_known_value(address) { let result = self.inserter.function.dfg.instruction_results(instruction)[0]; - self.inserter.map_value(result, value); + self.inserter.map_value(result, value.into()); self.instructions_to_remove.insert(instruction); } else { - references.mark_value_used(address, self.inserter.function); + references.mark_value_used(address.into(), self.inserter.function); - self.last_loads.insert(address, (instruction, block_id)); + self.last_loads.insert(address.raw(), (instruction, block_id)); } } Instruction::Store { address, value } => { @@ -434,12 +435,12 @@ impl<'f> PerFunctionContext<'f> { self.instructions_to_remove.insert(*last_store); } - if self.inserter.function.dfg.value_is_reference(value) { - if let Some(expression) = references.expressions.get(&value) { + if self.inserter.function.dfg.value_is_reference(value.into()) { + if let Some(expression) = references.expressions.get(&value.raw()) { if let Some(aliases) = references.aliases.get(expression) { aliases.for_each(|alias| { self.aliased_references - .entry(alias) + .entry(alias.raw()) .or_default() .insert(instruction); }); @@ -453,29 +454,29 @@ impl<'f> PerFunctionContext<'f> { Instruction::Allocate => { // Register the new reference let result = self.inserter.function.dfg.instruction_results(instruction)[0]; - references.expressions.insert(result, Expression::Other(result)); - references.aliases.insert(Expression::Other(result), AliasSet::known(result)); + let expr = 
Expression::Other(result.resolved()); + references.expressions.insert(result.raw(), expr); + references.aliases.insert(expr, AliasSet::known(result)); } Instruction::ArrayGet { array, .. } => { + let array = self.inserter.function.dfg.resolve(*array); let result = self.inserter.function.dfg.instruction_results(instruction)[0]; - references.mark_value_used(*array, self.inserter.function); + references.mark_value_used(array, self.inserter.function); if self.inserter.function.dfg.value_is_reference(result) { - let array = self.inserter.function.dfg.resolve(*array); let expression = Expression::ArrayElement(Box::new(Expression::Other(array))); - if let Some(aliases) = references.aliases.get_mut(&expression) { aliases.insert(result); } } } Instruction::ArraySet { array, value, .. } => { - references.mark_value_used(*array, self.inserter.function); + let array = self.inserter.function.dfg.resolve(*array); + references.mark_value_used(array, self.inserter.function); let element_type = self.inserter.function.dfg.type_of_value(*value); if Self::contains_references(&element_type) { let result = self.inserter.function.dfg.instruction_results(instruction)[0]; - let array = self.inserter.function.dfg.resolve(*array); let expression = Expression::ArrayElement(Box::new(Expression::Other(array))); @@ -483,7 +484,7 @@ impl<'f> PerFunctionContext<'f> { { aliases.clone() } else if let Some((elements, _)) = - self.inserter.function.dfg.get_array_constant(array) + self.inserter.function.dfg.get_array_constant(array.into()) { let aliases = references.collect_all_aliases(elements); self.set_aliases(references, array, aliases.clone()); @@ -494,17 +495,17 @@ impl<'f> PerFunctionContext<'f> { aliases.unify(&references.get_aliases_for_value(*value)); - references.expressions.insert(result, expression.clone()); + references.expressions.insert(result.raw(), expression.clone()); references.aliases.insert(expression, aliases); } } Instruction::Call { arguments, .. } => { for arg in arguments { if self.inserter.function.dfg.value_is_reference(*arg) { - if let Some(expression) = references.expressions.get(arg) { + if let Some(expression) = references.expressions.get(arg.as_ref()) { if let Some(aliases) = references.aliases.get(expression) { aliases.for_each(|alias| { - self.calls_reference_input.insert(alias); + self.calls_reference_input.insert(alias.raw()); }); } } @@ -518,12 +519,12 @@ impl<'f> PerFunctionContext<'f> { /// If `array` is an array constant that contains reference types, then insert each element /// as a potential alias to the array itself. 
- fn check_array_aliasing(&self, references: &mut Block, array: ValueId) { - if let Some((elements, typ)) = self.inserter.function.dfg.get_array_constant(array) { + fn check_array_aliasing(&self, references: &mut Block, array: ResolvedValueId) { + if let Some((elements, typ)) = self.inserter.function.dfg.get_array_constant(array.into()) { if Self::contains_references(&typ) { // TODO: Check if type directly holds references or holds arrays that hold references let expr = Expression::ArrayElement(Box::new(Expression::Other(array))); - references.expressions.insert(array, expr.clone()); + references.expressions.insert(array.raw(), expr.clone()); let aliases = references.aliases.entry(expr).or_default(); for element in elements { @@ -544,9 +545,9 @@ impl<'f> PerFunctionContext<'f> { } } - fn set_aliases(&self, references: &mut Block, address: ValueId, new_aliases: AliasSet) { + fn set_aliases(&self, references: &mut Block, address: ResolvedValueId, new_aliases: AliasSet) { let expression = - references.expressions.entry(address).or_insert(Expression::Other(address)); + references.expressions.entry(address.raw()).or_insert(Expression::Other(address)); let aliases = references.aliases.entry(expression.clone()).or_default(); *aliases = new_aliases; } @@ -556,7 +557,7 @@ impl<'f> PerFunctionContext<'f> { if self.inserter.function.dfg.value_is_reference(*value) { let value = self.inserter.function.dfg.resolve(*value); references.set_unknown(value); - references.mark_value_used(value, self.inserter.function); + references.mark_value_used(value.into(), self.inserter.function); } } } @@ -575,7 +576,8 @@ impl<'f> PerFunctionContext<'f> { fn update_data_bus(&mut self) { let databus = self.inserter.function.dfg.data_bus.clone(); - self.inserter.function.dfg.data_bus = databus.map_values(|t| self.inserter.resolve(t)); + self.inserter.function.dfg.data_bus = + databus.map_values(|t| self.inserter.resolve(t).into()); } fn handle_terminator(&mut self, block: BasicBlockId, references: &mut Block) { @@ -592,7 +594,7 @@ impl<'f> PerFunctionContext<'f> { if self.inserter.function.dfg.value_is_reference(*parameter) { let argument = self.inserter.function.dfg.resolve(*argument); - if let Some(expression) = references.expressions.get(&argument) { + if let Some(expression) = references.expressions.get(&argument.raw()) { if let Some(aliases) = references.aliases.get_mut(expression) { // The argument reference is possibly aliased by this block parameter aliases.insert(*parameter); @@ -667,7 +669,10 @@ mod tests { TerminatorInstruction::Return { return_values, .. } => return_values.first().unwrap(), _ => unreachable!(), }; - assert_eq!(func.dfg[*ret_val_id], func.dfg[two]); + assert_eq!( + func.dfg[*ret_val_id].map_values(|v| v.resolved()), + func.dfg[two].map_values(|v| v.resolved()) + ); } #[test] @@ -703,7 +708,10 @@ mod tests { TerminatorInstruction::Return { return_values, .. } => return_values.first().unwrap(), _ => unreachable!(), }; - assert_eq!(func.dfg[*ret_val_id], func.dfg[one]); + assert_eq!( + func.dfg[*ret_val_id].map_values(|v| v.resolved()), + func.dfg[one].map_values(|v| v.resolved()) + ); } #[test] @@ -740,7 +748,7 @@ mod tests { // Since the mem2reg pass simplifies as it goes, the id of the allocate instruction result // is most likely no longer v0. We have to retrieve the new id here. 
let allocate_id = func.dfg.instruction_results(instructions[0])[0]; - assert_eq!(ret_val_id, allocate_id); + assert_eq!(ret_val_id.resolved(), allocate_id.resolved()); } fn count_stores(block: BasicBlockId, dfg: &DataFlowGraph) -> usize { diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/alias_set.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/alias_set.rs index 68089bdb381..0dfc808310d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/alias_set.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/alias_set.rs @@ -79,6 +79,6 @@ impl AliasSet { /// The ordering is arbitrary (by lowest ValueId) so this method should only be /// used when you need an arbitrary ValueId from the alias set. pub(super) fn first(&self) -> Option { - self.aliases.as_ref().and_then(|aliases| aliases.first().copied()) + self.aliases.as_ref().and_then(|aliases| aliases.first()).map(|a| a.into()) } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs index f15d2c737df..df3b069178c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use crate::ssa::ir::{ function::Function, instruction::{Instruction, InstructionId}, - value::{Resolved, ResolvedValueId, ValueId}, + value::{RawValueId, Resolved, ResolvedValueId, ValueId}, }; use super::alias_set::AliasSet; @@ -19,18 +19,18 @@ pub(super) struct Block { /// Maps a ValueId to the Expression it represents. /// Multiple ValueIds can map to the same Expression, e.g. /// dereferences to the same allocation. - pub(super) expressions: im::OrdMap, + pub(super) expressions: im::OrdMap, /// Each expression is tracked as to how many aliases it /// may have. If there is only 1, we can attempt to optimize /// out any known loads to that alias. Note that "alias" here /// includes the original reference as well. - pub(super) aliases: im::OrdMap, AliasSet>, + pub(super) aliases: im::OrdMap, /// Each allocate instruction result (and some reference block parameters) /// will map to a Reference value which tracks whether the last value stored /// to the reference is known. - pub(super) references: im::OrdMap, + pub(super) references: im::OrdMap, /// The last instance of a `Store` instruction to each address in this block pub(super) last_stores: im::OrdMap, @@ -40,10 +40,10 @@ pub(super) struct Block { /// into the aliases map since otherwise two dereferences of the /// same address will be given different ValueIds. #[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)] -pub(super) enum Expression { - Dereference(Box>), - ArrayElement(Box>), - Other(ValueId), +pub(super) enum Expression { + Dereference(Box), + ArrayElement(Box), + Other(ValueId), } /// Every reference's value is either Known and can be optimized away, or Unknown. @@ -66,12 +66,13 @@ impl ReferenceValue { impl Block { /// If the given reference id points to a known value, return the value pub(super) fn get_known_value(&self, address: ResolvedValueId) -> Option { - if let Some(expression) = self.expressions.get(&address) { + if let Some(expression) = self.expressions.get(&address.raw()) { if let Some(aliases) = self.aliases.get(expression) { // We could allow multiple aliases if we check that the reference // value in each is equal. 
if let Some(alias) = aliases.single_alias() { - if let Some(ReferenceValue::Known(value)) = self.references.get(&alias) { + if let Some(ReferenceValue::Known(value)) = self.references.get(alias.as_ref()) + { return Some(*value); } } @@ -90,7 +91,8 @@ impl Block { } fn set_value(&mut self, address: ResolvedValueId, value: ReferenceValue) { - let expression = self.expressions.entry(address).or_insert(Expression::Other(address)); + let expression = + self.expressions.entry(address.raw()).or_insert(Expression::Other(address)); let aliases = self.aliases.entry(expression.clone()).or_default(); if aliases.is_unknown() { @@ -98,12 +100,12 @@ impl Block { // Now we have to invalidate every reference we know of self.invalidate_all_references(); } else if let Some(alias) = aliases.single_alias() { - self.references.insert(alias, value); + self.references.insert(alias.raw(), value); } else { // More than one alias. We're not sure which it refers to so we have to // conservatively invalidate all references it may refer to. aliases.for_each(|alias| { - if let Some(reference_value) = self.references.get_mut(&alias) { + if let Some(reference_value) = self.references.get_mut(&alias.raw()) { *reference_value = ReferenceValue::Unknown; } }); @@ -150,15 +152,15 @@ impl Block { pub(super) fn remember_dereference( &mut self, function: &Function, - address: ValueId, + address: ResolvedValueId, result: ValueId, ) { if function.dfg.value_is_reference(result) { if let Some(known_address) = self.get_known_value(address) { - self.expressions.insert(result, Expression::Other(known_address)); + self.expressions.insert(result.raw(), Expression::Other(known_address)); } else { let expression = Expression::Dereference(Box::new(Expression::Other(address))); - self.expressions.insert(result, expression); + self.expressions.insert(result.raw(), expression); // No known aliases to insert for this expression... can we find an alias // even if we don't have a known address? If not we'll have to invalidate all // known references if this reference is ever stored to. @@ -169,10 +171,10 @@ impl Block { /// Iterate through each known alias of the given address and apply the function `f` to each. fn for_each_alias_of( &mut self, - address: ValueId, + address: ResolvedValueId, mut f: impl FnMut(&mut Self, ValueId) -> T, ) { - if let Some(expr) = self.expressions.get(&address) { + if let Some(expr) = self.expressions.get(&address.raw()) { if let Some(aliases) = self.aliases.get(expr).cloned() { aliases.for_each(|alias| { f(self, alias); @@ -181,21 +183,20 @@ impl Block { } } - fn keep_last_stores_for(&mut self, address: ValueId, function: &Function) { - let address = function.dfg.resolve(address); + fn keep_last_stores_for(&mut self, address: ResolvedValueId, function: &Function) { self.keep_last_store(address, function); - self.for_each_alias_of(address, |t, alias| t.keep_last_store(alias, function)); + self.for_each_alias_of(address, |t, alias| { + t.keep_last_store(function.dfg.resolve(alias), function) + }); } - fn keep_last_store(&mut self, address: ValueId, function: &Function) { - let address = function.dfg.resolve(address); - + fn keep_last_store(&mut self, address: ResolvedValueId, function: &Function) { if let Some(instruction) = self.last_stores.remove(&address) { // Whenever we decide we want to keep a store instruction, we also need // to go through its stored value and mark that used as well. match &function.dfg[instruction] { Instruction::Store { value, .. 
} => { - self.mark_value_used(*value, function); + self.mark_value_used(function.dfg.resolve(*value), function); } other => { unreachable!("last_store held an id of a non-store instruction: {other:?}") @@ -204,14 +205,14 @@ impl Block { } } - pub(super) fn mark_value_used(&mut self, value: ValueId, function: &Function) { + pub(super) fn mark_value_used(&mut self, value: ResolvedValueId, function: &Function) { self.keep_last_stores_for(value, function); // We must do a recursive check for arrays since they're the only Values which may contain // other ValueIds. - if let Some((array, _)) = function.dfg.get_array_constant(value) { + if let Some((array, _)) = function.dfg.get_array_constant(value.into()) { for value in array { - self.mark_value_used(value, function); + self.mark_value_used(function.dfg.resolve(value), function); } } } @@ -229,7 +230,7 @@ impl Block { } pub(super) fn get_aliases_for_value(&self, value: ValueId) -> Cow { - if let Some(expression) = self.expressions.get(&value) { + if let Some(expression) = self.expressions.get(value.as_ref()) { if let Some(aliases) = self.aliases.get(expression) { return Cow::Borrowed(aliases); } From 998381cdb60b60d8bade9b0443ebc993fbeb6a54 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Sat, 9 Nov 2024 00:20:54 +0000 Subject: [PATCH 05/19] Resolution --- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 29 +++++++++++++------ .../src/ssa/ir/instruction/cast.rs | 2 +- compiler/noirc_evaluator/src/ssa/ir/value.rs | 20 +++++++++++-- .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 6 ++-- .../src/ssa/opt/mem2reg/block.rs | 2 +- .../noirc_evaluator/src/ssa/opt/unrolling.rs | 2 +- 6 files changed, 44 insertions(+), 17 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index e15e7662b11..bb2156085ce 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -10,7 +10,7 @@ use super::{ }, map::DenseMap, types::Type, - value::{RawValueId, ResolvedValueId, Value, ValueId}, + value::{RawValueId, Resolution, ResolvedValueId, Value, ValueId}, }; use acvm::{acir::AcirField, FieldElement}; @@ -352,8 +352,13 @@ impl DataFlowGraph { } /// Resolve and get a value by ID - fn resolve_value(&self, original_value_id: ValueId) -> Value { - self.values[self.resolve(original_value_id).raw()] + fn resolve_value(&self, original_value_id: ValueId) -> Value { + let id = if R::is_resolved() { + original_value_id.raw() + } else { + self.resolve(original_value_id.unresolved()).raw() + }; + self.values[id] } /// Returns the type of a given value @@ -365,7 +370,7 @@ impl DataFlowGraph { /// /// Should `value` be a numeric constant then this function will return the exact number of bits required, /// otherwise it will return the minimum number of bits based on type information. - pub(crate) fn get_value_max_num_bits(&self, value: ValueId) -> u32 { + pub(crate) fn get_value_max_num_bits(&self, value: ValueId) -> u32 { match self.get_value(value) { Value::Instruction { instruction, .. } => { if let Instruction::Cast(original_value, _) = self[instruction] { @@ -382,7 +387,7 @@ impl DataFlowGraph { /// True if the type of this value is Type::Reference. /// Using this method over type_of_value avoids cloning the value's type. 
- pub(crate) fn value_is_reference(&self, value: ValueId) -> bool { + pub(crate) fn value_is_reference(&self, value: ValueId) -> bool { matches!(self.get_value(value).get_type(), Type::Reference(_)) } @@ -450,15 +455,18 @@ impl DataFlowGraph { /// Returns the field element represented by this value if it is a numeric constant. /// Returns None if the given value is not a numeric constant. - pub(crate) fn get_numeric_constant(&self, value: ValueId) -> Option { + pub(crate) fn get_numeric_constant( + &self, + value: ValueId, + ) -> Option { self.get_numeric_constant_with_type(value).map(|(value, _typ)| value) } /// Returns the field element and type represented by this value if it is a numeric constant. /// Returns None if the given value is not a numeric constant. - pub(crate) fn get_numeric_constant_with_type( + pub(crate) fn get_numeric_constant_with_type( &self, - value: ValueId, + value: ValueId, ) -> Option<(FieldElement, Type)> { match &self.resolve_value(value) { Value::NumericConstant { constant, typ } => Some((*constant, typ.clone())), @@ -468,7 +476,10 @@ impl DataFlowGraph { /// Returns the Value::Array associated with this ValueId if it refers to an array constant. /// Otherwise, this returns None. - pub(crate) fn get_array_constant(&self, value: ValueId) -> Option<(im::Vector, Type)> { + pub(crate) fn get_array_constant( + &self, + value: ValueId, + ) -> Option<(im::Vector, Type)> { match &self.resolve_value(value) { // Arrays are shared, so cloning them is cheap Value::Array { array, typ } => Some((array.clone(), typ.clone())), diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs index 7bcc0783e54..e4a363abe60 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs @@ -19,7 +19,7 @@ pub(super) fn simplify_cast( } } - if let Some(constant) = dfg.get_numeric_constant(value.into()) { + if let Some(constant) = dfg.get_numeric_constant(value) { let src_typ = dfg.type_of_value(value); match (src_typ, dst_typ) { (Type::Numeric(NumericType::NativeField), Type::Numeric(NumericType::NativeField)) => { diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index e391388bb38..32f8e99a8ed 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -13,10 +13,26 @@ use super::{ }; #[derive(Debug, Clone, Serialize, Deserialize)] -pub(crate) struct Unresolved; +pub(crate) enum Unresolved {} #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] -pub(crate) struct Resolved; +pub(crate) enum Resolved {} + +pub(crate) trait Resolution { + fn is_resolved() -> bool; +} + +impl Resolution for Resolved { + fn is_resolved() -> bool { + true + } +} + +impl Resolution for Unresolved { + fn is_resolved() -> bool { + false + } +} /// A resolved value ID is something we can directly compare. 
pub(crate) type ResolvedValueId = ValueId; diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index be434e488e4..4fd480504f7 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -435,7 +435,7 @@ impl<'f> PerFunctionContext<'f> { self.instructions_to_remove.insert(*last_store); } - if self.inserter.function.dfg.value_is_reference(value.into()) { + if self.inserter.function.dfg.value_is_reference(value) { if let Some(expression) = references.expressions.get(&value.raw()) { if let Some(aliases) = references.aliases.get(expression) { aliases.for_each(|alias| { @@ -484,7 +484,7 @@ impl<'f> PerFunctionContext<'f> { { aliases.clone() } else if let Some((elements, _)) = - self.inserter.function.dfg.get_array_constant(array.into()) + self.inserter.function.dfg.get_array_constant(array) { let aliases = references.collect_all_aliases(elements); self.set_aliases(references, array, aliases.clone()); @@ -520,7 +520,7 @@ impl<'f> PerFunctionContext<'f> { /// If `array` is an array constant that contains reference types, then insert each element /// as a potential alias to the array itself. fn check_array_aliasing(&self, references: &mut Block, array: ResolvedValueId) { - if let Some((elements, typ)) = self.inserter.function.dfg.get_array_constant(array.into()) { + if let Some((elements, typ)) = self.inserter.function.dfg.get_array_constant(array) { if Self::contains_references(&typ) { // TODO: Check if type directly holds references or holds arrays that hold references let expr = Expression::ArrayElement(Box::new(Expression::Other(array))); diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs index df3b069178c..46ac59beb00 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs @@ -210,7 +210,7 @@ impl Block { // We must do a recursive check for arrays since they're the only Values which may contain // other ValueIds. 
- if let Some((array, _)) = function.dfg.get_array_constant(value.into()) { + if let Some((array, _)) = function.dfg.get_array_constant(value) { for value in array { self.mark_value_used(function.dfg.resolve(value), function); } diff --git a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index d23118b1af9..c695a21c5a4 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -431,7 +431,7 @@ impl<'f> LoopIteration<'f> { ) -> Vec { let condition = self.inserter.resolve(condition); - match self.dfg().get_numeric_constant(condition.into()) { + match self.dfg().get_numeric_constant(condition) { Some(constant) => { let destination = if constant.is_zero() { else_destination } else { then_destination }; From 6cbbd53ec5f0aea198651e0c550b7b3897579fd8 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Sat, 9 Nov 2024 01:04:35 +0000 Subject: [PATCH 06/19] Fix remaining issues --- .../src/brillig/brillig_gen/brillig_block.rs | 42 +++++++++++-------- .../brillig_gen/brillig_block_variables.rs | 30 ++++++------- .../src/brillig/brillig_gen/brillig_fn.rs | 4 +- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 10 ++--- .../src/ssa/ir/function_inserter.rs | 3 +- compiler/noirc_evaluator/src/ssa/ir/value.rs | 10 ++--- .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 17 ++++---- .../src/ssa/opt/remove_enable_side_effects.rs | 2 +- 8 files changed, 63 insertions(+), 55 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 0e9ebd8900d..1f4609fc9c3 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -9,6 +9,7 @@ use crate::brillig::brillig_ir::{ }; use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::instruction::ConstrainError; +use crate::ssa::ir::value::ResolvedValueId; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -41,7 +42,7 @@ pub(crate) struct BrilligBlock<'block> { /// Tracks the available variable during the codegen of the block pub(crate) variables: BlockVariables, /// For each instruction, the set of values that are not used anymore after it. - pub(crate) last_uses: HashMap>, + pub(crate) last_uses: HashMap>, } impl<'block> BrilligBlock<'block> { @@ -149,8 +150,8 @@ impl<'block> BrilligBlock<'block> { let target_block = &dfg[*destination_block]; for (src, dest) in arguments.iter().zip(target_block.parameters()) { // Destinations are block parameters so they should have been allocated previously. - let destination = - self.variables.get_allocation(self.function_context, *dest, dfg); + let dest = dfg.resolve(*dest); + let destination = self.variables.get_allocation(self.function_context, dest); let source = self.convert_ssa_value(*src, dfg); self.brillig_context .mov_instruction(destination.extract_register(), source.extract_register()); @@ -772,7 +773,7 @@ impl<'block> BrilligBlock<'block> { for dead_variable in dead_variables { self.variables.remove_variable( - dead_variable, + dead_variable.into(), self.function_context, self.brillig_context, ); @@ -1479,19 +1480,18 @@ impl<'block> BrilligBlock<'block> { Value::Param { .. } | Value::Instruction { .. } => { // All block parameters and instruction results should have already been // converted to registers so we fetch from the cache. 
- - self.variables.get_allocation(self.function_context, value_id, dfg) + self.variables.get_allocation(self.function_context, value_id) } Value::NumericConstant { constant, .. } => { // Constants might have been converted previously or not, so we get or create and // (re)initialize the value inside. - if self.variables.is_allocated(&value_id) { - self.variables.get_allocation(self.function_context, value_id, dfg) + if self.variables.is_allocated(value_id) { + self.variables.get_allocation(self.function_context, value_id) } else { let new_variable = self.variables.define_variable( self.function_context, self.brillig_context, - value_id, + value_id.into(), dfg, ); @@ -1501,13 +1501,13 @@ impl<'block> BrilligBlock<'block> { } } Value::Array { array, typ } => { - if self.variables.is_allocated(&value_id) { - self.variables.get_allocation(self.function_context, value_id, dfg) + if self.variables.is_allocated(value_id) { + self.variables.get_allocation(self.function_context, value_id) } else { let new_variable = self.variables.define_variable( self.function_context, self.brillig_context, - value_id, + value_id.into(), dfg, ); @@ -1548,13 +1548,13 @@ impl<'block> BrilligBlock<'block> { let new_variable = self.variables.define_variable( self.function_context, self.brillig_context, - value_id, + value_id.into(), dfg, ); self.brillig_context.const_instruction( new_variable.extract_single_addr(), - value_id.to_usize().into(), + value_id.raw().to_usize().into(), ); new_variable } @@ -1577,11 +1577,13 @@ impl<'block> BrilligBlock<'block> { let item_types = typ.clone().element_types(); // Find out if we are repeating the same item over and over - let first_item = data.iter().take(item_types.len()).copied().collect(); + let first_item: Vec = + data.iter().take(item_types.len()).map(|v| v.resolved()).collect(); let mut is_repeating = true; for item_index in (item_types.len()..data.len()).step_by(item_types.len()) { - let item: Vec<_> = (0..item_types.len()).map(|i| data[item_index + i]).collect(); + let item: Vec<_> = + (0..item_types.len()).map(|i| data[item_index + i].resolved()).collect(); if first_item != item { is_repeating = false; break; @@ -1597,7 +1599,11 @@ impl<'block> BrilligBlock<'block> { && item_types.iter().all(|typ| matches!(typ, Type::Numeric(_))) { self.initialize_constant_array_runtime( - item_types, first_item, item_count, pointer, dfg, + item_types, + vecmap(first_item, |v| v.into()), + item_count, + pointer, + dfg, ); } else { self.initialize_constant_array_comptime(data, dfg, pointer); @@ -1688,7 +1694,7 @@ impl<'block> BrilligBlock<'block> { fn initialize_constant_array_comptime( &mut self, - data: &im::Vector>, + data: &im::Vector, dfg: &DataFlowGraph, pointer: MemoryAddress, ) { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index 393d4c967c2..46425ad429b 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -13,7 +13,7 @@ use crate::{ ssa::ir::{ dfg::DataFlowGraph, types::{CompositeType, Type}, - value::ValueId, + value::{RawValueId, ResolvedValueId, ValueId}, }, }; @@ -21,13 +21,13 @@ use super::brillig_fn::FunctionContext; #[derive(Debug, Default)] pub(crate) struct BlockVariables { - available_variables: HashSet, + available_variables: HashSet, } impl BlockVariables { /// Creates a BlockVariables instance. 
It uses the variables that are live in to the block and the global available variables (block parameters) - pub(crate) fn new(live_in: HashSet) -> Self { - BlockVariables { available_variables: live_in } + pub(crate) fn new(live_in: HashSet) -> Self { + BlockVariables { available_variables: live_in.iter().map(|v| v.raw()).collect() } } /// Returns all variables that have not been removed at this point. @@ -57,6 +57,7 @@ impl BlockVariables { ) -> BrilligVariable { let value_id = dfg.resolve(value_id); let variable = allocate_value(value_id, brillig_context, dfg); + let value_id = value_id.raw(); if function_context.ssa_value_allocations.insert(value_id, variable).is_some() { unreachable!("ICE: ValueId {value_id:?} was already in cache"); @@ -82,32 +83,33 @@ impl BlockVariables { /// Removes a variable so it's not used anymore within this block. pub(crate) fn remove_variable( &mut self, - value_id: &ValueId, + value_id: ValueId, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, ) { - assert!(self.available_variables.remove(value_id), "ICE: Variable is not available"); + assert!( + self.available_variables.remove(value_id.as_ref()), + "ICE: Variable is not available" + ); let variable = function_context .ssa_value_allocations - .get(value_id) + .get(value_id.as_ref()) .expect("ICE: Variable allocation not found"); brillig_context.deallocate_register(variable.extract_register()); } /// Checks if a variable is allocated. - pub(crate) fn is_allocated(&self, value_id: &ValueId) -> bool { - self.available_variables.contains(value_id) + pub(crate) fn is_allocated(&self, value_id: ResolvedValueId) -> bool { + self.available_variables.contains(&value_id.raw()) } /// For a given SSA value id, return the corresponding cached allocation. pub(crate) fn get_allocation( &mut self, function_context: &FunctionContext, - value_id: ValueId, - dfg: &DataFlowGraph, + value_id: ResolvedValueId, ) -> BrilligVariable { - let value_id = dfg.resolve(value_id); - + let value_id = value_id.raw(); assert!( self.available_variables.contains(&value_id), "ICE: ValueId {value_id:?} is not available" @@ -127,7 +129,7 @@ pub(crate) fn compute_array_length(item_typ: &CompositeType, elem_count: usize) /// For a given value_id, allocates the necessary registers to hold it. pub(crate) fn allocate_value( - value_id: ValueId, + value_id: ResolvedValueId, brillig_context: &mut BrilligContext, dfg: &DataFlowGraph, ) -> BrilligVariable { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index 2779be103cd..e16e3787fee 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -10,7 +10,7 @@ use crate::{ function::{Function, FunctionId}, post_order::PostOrder, types::Type, - value::ValueId, + value::RawValueId, }, }; use fxhash::FxHashMap as HashMap; @@ -20,7 +20,7 @@ use super::{constant_allocation::ConstantAllocation, variable_liveness::Variable pub(crate) struct FunctionContext { pub(crate) function_id: FunctionId, /// Map from SSA values its allocation. Since values can be only defined once in SSA form, we insert them here on when we allocate them at their definition. - pub(crate) ssa_value_allocations: HashMap, + pub(crate) ssa_value_allocations: HashMap, /// The block ids of the function in reverse post order. pub(crate) blocks: Vec, /// Liveness information for each variable in the function. 
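Note on the pattern the hunks above and below rely on: map keys move to raw or resolved IDs because equality and hashing are intentionally only available once an ID has been resolved. A minimal standalone sketch of that type-state, with a plain usize standing in for the compiler's Id<Value> and all names simplified (not the exact code in this diff), looks like this:

use std::collections::HashMap;
use std::marker::PhantomData;

// Zero-sized markers: an id is either freshly produced (possibly stale)
// or has been passed through the DFG's resolve step.
enum Unresolved {}
enum Resolved {}

struct ValueId<R> {
    index: usize,
    _marker: PhantomData<R>,
}

impl<R> ValueId<R> {
    fn new(index: usize) -> Self {
        Self { index, _marker: PhantomData }
    }
    // Strip the marker when a data structure only needs a stable key.
    fn raw(&self) -> usize {
        self.index
    }
    // Explicit, easy-to-grep comparison that ignores resolution.
    fn unresolved_eq(&self, other: &Self) -> bool {
        self.index == other.index
    }
}

// Equality and hashing exist only for resolved ids, so an unresolved id
// cannot silently end up as a map key or inside an assert_eq.
impl PartialEq for ValueId<Resolved> {
    fn eq(&self, other: &Self) -> bool {
        self.index == other.index
    }
}
impl Eq for ValueId<Resolved> {}
impl std::hash::Hash for ValueId<Resolved> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.index.hash(state);
    }
}

fn main() {
    let a = ValueId::<Unresolved>::new(3);
    let b = ValueId::<Unresolved>::new(3);
    assert!(a.unresolved_eq(&b)); // allowed, but deliberately verbose

    // Raw ids as keys, as the mem2reg and array_set maps do.
    let mut last_use: HashMap<usize, &str> = HashMap::new();
    last_use.insert(a.raw(), "store v3");
    assert_eq!(last_use.get(&b.raw()), Some(&"store v3"));

    // In the real code resolved ids come from dfg.resolve(..).
    let ra = ValueId::<Resolved>::new(3);
    let rb = ValueId::<Resolved>::new(3);
    assert!(ra == rb); // == is only available after resolution
}

The point of the split is that forgetting to resolve before comparing or inserting into a map becomes a compile error instead of a silent mismatch, while raw() and unresolved_eq() remain as explicit, searchable escape hatches.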
diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index bb2156085ce..15535b71769 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -347,18 +347,18 @@ impl DataFlowGraph { } /// Look up a value by ID. - fn get_value(&self, value: ValueId) -> Value { - self.values[value.raw()] + fn get_value(&self, value: ValueId) -> &Value { + &self.values[value.raw()] } /// Resolve and get a value by ID - fn resolve_value(&self, original_value_id: ValueId) -> Value { + fn resolve_value(&self, original_value_id: ValueId) -> &Value { let id = if R::is_resolved() { original_value_id.raw() } else { self.resolve(original_value_id.unresolved()).raw() }; - self.values[id] + &self.values[id] } /// Returns the type of a given value @@ -373,7 +373,7 @@ impl DataFlowGraph { pub(crate) fn get_value_max_num_bits(&self, value: ValueId) -> u32 { match self.get_value(value) { Value::Instruction { instruction, .. } => { - if let Instruction::Cast(original_value, _) = self[instruction] { + if let Instruction::Cast(original_value, _) = self[*instruction] { self.type_of_value(original_value).bit_size() } else { self.type_of_value(value).bit_size() diff --git a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index 3e507097901..d81878c4ceb 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -39,8 +39,9 @@ impl<'f> FunctionInserter<'f> { None => match &self.function.dfg[value] { super::value::Value::Array { array, typ } => { let typ = typ.clone(); + let array = array.clone(); let new_array: im::Vector = - array.into_iter().map(|id| self.resolve(*id)).collect(); + array.iter().map(|id| self.resolve(*id)).collect(); let array_and_typ = (new_array, typ); if let Some(fetched_value) = self.const_arrays.get(&array_and_typ) { diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index 32f8e99a8ed..d169656ed21 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -52,17 +52,17 @@ pub(crate) struct ValueId { } impl ValueId { - pub fn new(id: Id>) -> Self { + pub(crate) fn new(id: Id>) -> Self { Self { id, _marker: PhantomData } } /// Access the underlying raw ID for indexing into data structures. - pub fn raw(&self) -> RawValueId { + pub(crate) fn raw(&self) -> RawValueId { Id::new(self.id.to_usize()) } /// Demote an ID into an unresolved one. - pub fn unresolved(self) -> ValueId { + pub(crate) fn unresolved(self) -> ValueId { ValueId::new(Id::new(self.id.to_usize())) } } @@ -70,11 +70,11 @@ impl ValueId { impl ValueId { /// Be careful when using this comparison. /// Sure the IDs don't have to be resolved first? - pub fn unresolved_eq(&self, other: &Self) -> bool { + pub(crate) fn unresolved_eq(&self, other: &Self) -> bool { self.id == other.id } /// Promote an unresolved ID into a resolved one. 
- pub fn resolved(self) -> ValueId { + pub(crate) fn resolved(self) -> ValueId { ValueId::new(Id::new(self.id.to_usize())) } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 4fd480504f7..f1765bb72a8 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -455,7 +455,7 @@ impl<'f> PerFunctionContext<'f> { // Register the new reference let result = self.inserter.function.dfg.instruction_results(instruction)[0]; let expr = Expression::Other(result.resolved()); - references.expressions.insert(result.raw(), expr); + references.expressions.insert(result.raw(), expr.clone()); references.aliases.insert(expr, AliasSet::known(result)); } Instruction::ArrayGet { array, .. } => { @@ -628,9 +628,14 @@ mod tests { instruction::{BinaryOp, Instruction, Intrinsic, TerminatorInstruction}, map::Id, types::Type, + value::{Resolved, Value}, }, }; + fn resolved_value(value: &Value) -> Value { + value.clone().map_values(|v| v.resolved()) + } + #[test] fn test_simple() { // fn func() { @@ -669,10 +674,7 @@ mod tests { TerminatorInstruction::Return { return_values, .. } => return_values.first().unwrap(), _ => unreachable!(), }; - assert_eq!( - func.dfg[*ret_val_id].map_values(|v| v.resolved()), - func.dfg[two].map_values(|v| v.resolved()) - ); + assert_eq!(resolved_value(&func.dfg[*ret_val_id]), resolved_value(&func.dfg[two])); } #[test] @@ -708,10 +710,7 @@ mod tests { TerminatorInstruction::Return { return_values, .. } => return_values.first().unwrap(), _ => unreachable!(), }; - assert_eq!( - func.dfg[*ret_val_id].map_values(|v| v.resolved()), - func.dfg[one].map_values(|v| v.resolved()) - ); + assert_eq!(resolved_value(&func.dfg[*ret_val_id]), resolved_value(&func.dfg[one])); } #[test] diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index cbee2cce7ad..d1c29204cc1 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -272,7 +272,7 @@ mod test { assert_eq!(instructions.len(), 4); let expected = Instruction::binary(BinaryOp::Mul, v0.resolved(), two.resolved()); for instruction in instructions.iter().take(4) { - let instruction = main.dfg[*instruction]; + let instruction = &main.dfg[*instruction]; let instruction = instruction.map_values(|v| v.resolved()); assert_eq!(instruction, expected); } From 66944475b55217eecfe58006a9c4e154e78e11bd Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Sat, 9 Nov 2024 01:28:23 +0000 Subject: [PATCH 07/19] Fix clippy --- .../src/ssa/function_builder/data_bus.rs | 6 +++--- compiler/noirc_evaluator/src/ssa/ir/value.rs | 14 +++++++------- compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs | 4 ++-- .../noirc_evaluator/src/ssa/opt/mem2reg/block.rs | 2 +- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index e7887f8501a..8026b6695d6 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -84,17 +84,17 @@ impl DataBus { call_data_map.insert(f(k.into()).raw(), *v); } CallData { - array_id: f(cd.array_id).into(), + array_id: f(cd.array_id), index_map: call_data_map, call_data_id: cd.call_data_id, } }) .collect(); - DataBus { call_data, 
return_data: self.return_data.map(|rd| f(rd)) } + DataBus { call_data, return_data: self.return_data.map(f) } } pub(crate) fn call_data_array(&self) -> Vec<(u32, ValueId)> { - self.call_data.iter().map(|cd| (cd.call_data_id, cd.array_id.into())).collect() + self.call_data.iter().map(|cd| (cd.call_data_id, cd.array_id)).collect() } /// Construct a databus from call_data and return_data data bus builders pub(crate) fn get_data_bus( diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index d169656ed21..cac9f103db9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -109,16 +109,16 @@ impl AsRef>> for ValueId { } /// Demote a resolved ID into an unresolved one. -impl Into> for ValueId { - fn into(self) -> ValueId { - ValueId::new(self.raw()) +impl From> for ValueId { + fn from(value: ValueId) -> Self { + value.unresolved() } } /// Demote any ID into an unresolved one. -impl Into> for &ValueId { - fn into(self) -> ValueId { - ValueId::new(self.raw()) +impl From<&ValueId> for ValueId { + fn from(value: &ValueId) -> Self { + value.unresolved() } } @@ -200,7 +200,7 @@ impl Value { Value::Param { block, position, typ } => Value::Param { block, position, typ }, Value::NumericConstant { constant, typ } => Value::NumericConstant { constant, typ }, Value::Array { array, typ } => { - Value::Array { array: array.into_iter().map(|v| f(v)).collect(), typ } + Value::Array { array: array.into_iter().map(f).collect(), typ } } Value::Function(id) => Value::Function(id), Value::Intrinsic(intrinsic) => Value::Intrinsic(intrinsic), diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index f1765bb72a8..a36f0718bda 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -418,7 +418,7 @@ impl<'f> PerFunctionContext<'f> { self.inserter.map_value(result, value.into()); self.instructions_to_remove.insert(instruction); } else { - references.mark_value_used(address.into(), self.inserter.function); + references.mark_value_used(address, self.inserter.function); self.last_loads.insert(address.raw(), (instruction, block_id)); } @@ -557,7 +557,7 @@ impl<'f> PerFunctionContext<'f> { if self.inserter.function.dfg.value_is_reference(*value) { let value = self.inserter.function.dfg.resolve(*value); references.set_unknown(value); - references.mark_value_used(value.into(), self.inserter.function); + references.mark_value_used(value, self.inserter.function); } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs index 46ac59beb00..65adce9da19 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs @@ -186,7 +186,7 @@ impl Block { fn keep_last_stores_for(&mut self, address: ResolvedValueId, function: &Function) { self.keep_last_store(address, function); self.for_each_alias_of(address, |t, alias| { - t.keep_last_store(function.dfg.resolve(alias), function) + t.keep_last_store(function.dfg.resolve(alias), function); }); } From b40d43e863dc95bb780c3ba916c9baaceee6805e Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Sat, 9 Nov 2024 20:32:08 +0000 Subject: [PATCH 08/19] Fix set_value_from_id --- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs 
b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 15535b71769..65157b4e46f 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -222,7 +222,7 @@ impl DataFlowGraph { /// values since other instructions referring to the same ValueId need /// not be modified to refer to a new ValueId. pub(crate) fn set_value_from_id(&mut self, value_to_replace: ValueId, new_value: ValueId) { - if value_to_replace.unresolved_eq(&new_value) { + if !value_to_replace.unresolved_eq(&new_value) { self.replaced_value_ids.insert(value_to_replace.raw(), self.resolve(new_value).into()); let new_value = self.values[new_value.raw()].clone(); self.values[value_to_replace.raw()] = new_value; From c370239212ea96fc32f1d487698e3105a570a2f7 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Sat, 9 Nov 2024 21:02:19 +0000 Subject: [PATCH 09/19] Use if let Some --- compiler/noirc_evaluator/src/ssa/opt/die.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/opt/die.rs b/compiler/noirc_evaluator/src/ssa/opt/die.rs index 201fac48c83..0dbb7b43bc9 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -177,11 +177,10 @@ impl Context { results.iter().all(|result| !self.used_values.contains(&result.raw())) } else if let Instruction::Call { func, arguments } = instruction { // TODO: make this more general for instructions which don't have results but have side effects "sometimes" like `Intrinsic::AsWitness` - match function.dfg.get_intrinsic(Intrinsic::AsWitness) { - Some(as_witness_id) if as_witness_id.unresolved_eq(func) => { - !self.used_values.contains(&arguments[0].raw()) - } - _ => false, + if let Some(as_witness_id) = function.dfg.get_intrinsic(Intrinsic::AsWitness) { + as_witness_id.unresolved_eq(func) && !self.used_values.contains(&arguments[0].raw()) + } else { + false } } else { // If the instruction has side effects we should never remove it. From e4b3580fd0e20bde2ac2b91780a21062894f078a Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Tue, 12 Nov 2024 11:45:53 +0000 Subject: [PATCH 10/19] Add back resolve in acir_gen --- compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 88eddcba98a..f3baa120249 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -1045,6 +1045,7 @@ impl<'a> Context<'a> { } }; // Ensure that array id is fully resolved. + let array = dfg.resolve(array); let array_typ = dfg.type_of_value(array); // Compiler sanity checks assert!(!array_typ.is_nested_slice(), "ICE: Nested slice type has reached ACIR generation"); @@ -1052,7 +1053,7 @@ impl<'a> Context<'a> { unreachable!("ICE: expected array or slice type"); }; - if self.handle_constant_index_wrapper(instruction, dfg, array, index, store_value)? { + if self.handle_constant_index_wrapper(instruction, dfg, array.into(), index, store_value)? 
{ return Ok(()); } @@ -1076,7 +1077,7 @@ impl<'a> Context<'a> { None }; let (new_index, new_value) = self.convert_array_operation_inputs( - array, + array.into(), dfg, index, store_value, @@ -1086,7 +1087,7 @@ impl<'a> Context<'a> { if let Some(new_value) = new_value { self.array_set(instruction, new_index, new_value, dfg, mutable_array_set)?; } else { - self.array_get(instruction, array, new_index, dfg, offset.is_none())?; + self.array_get(instruction, array.into(), new_index, dfg, offset.is_none())?; } Ok(()) @@ -1100,6 +1101,7 @@ impl<'a> Context<'a> { index: ValueId, store_value: Option, ) -> Result { + let array = dfg.resolve(array); let array_typ = dfg.type_of_value(array); // Compiler sanity checks assert!(!array_typ.is_nested_slice(), "ICE: Nested slice type has reached ACIR generation"); @@ -1107,7 +1109,7 @@ impl<'a> Context<'a> { unreachable!("ICE: expected array or slice type"); }; - match self.convert_value(array, dfg) { + match self.convert_value(array.into(), dfg) { AcirValue::Var(acir_var, _) => { Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), From d63f74d1c343e98fe99d576ba0ea1c582f7b9d2c Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Tue, 12 Nov 2024 11:51:31 +0000 Subject: [PATCH 11/19] Use ResolvedValueId in Brillig gen --- .../src/brillig/brillig_gen/brillig_block.rs | 2 +- .../brillig_gen/brillig_block_variables.rs | 21 ++++++++----------- .../src/brillig/brillig_gen/brillig_fn.rs | 4 ++-- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 1f4609fc9c3..6fc753e027d 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -773,7 +773,7 @@ impl<'block> BrilligBlock<'block> { for dead_variable in dead_variables { self.variables.remove_variable( - dead_variable.into(), + *dead_variable, self.function_context, self.brillig_context, ); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index 46425ad429b..4c102ae82ef 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -13,7 +13,7 @@ use crate::{ ssa::ir::{ dfg::DataFlowGraph, types::{CompositeType, Type}, - value::{RawValueId, ResolvedValueId, ValueId}, + value::{ResolvedValueId, ValueId}, }, }; @@ -21,13 +21,15 @@ use super::brillig_fn::FunctionContext; #[derive(Debug, Default)] pub(crate) struct BlockVariables { - available_variables: HashSet, + // Since we're generating Brillig bytecode here, there shouldn't be any more value ID replacements + // and we can use the final resolved ID. + available_variables: HashSet, } impl BlockVariables { /// Creates a BlockVariables instance. It uses the variables that are live in to the block and the global available variables (block parameters) pub(crate) fn new(live_in: HashSet) -> Self { - BlockVariables { available_variables: live_in.iter().map(|v| v.raw()).collect() } + BlockVariables { available_variables: live_in } } /// Returns all variables that have not been removed at this point. 
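The brillig_block_variables.rs hunks below keep a per-block set of available variables and a function-wide allocation map, both now keyed by ResolvedValueId. A rough sketch of how those two structures interact, using stand-in types (the bare usize register and the unit-struct ResolvedValueId are simplifications of this sketch, not the real BrilligVariable), is:

use std::collections::{HashMap, HashSet};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct ResolvedValueId(usize);

#[derive(Debug, Clone, Copy)]
struct BrilligVariable {
    register: usize,
}

#[derive(Default)]
struct FunctionContext {
    // Function-wide: each SSA value is allocated exactly once.
    ssa_value_allocations: HashMap<ResolvedValueId, BrilligVariable>,
}

#[derive(Default)]
struct BlockVariables {
    // Per-block: which allocations are still live at this point.
    available_variables: HashSet<ResolvedValueId>,
}

impl BlockVariables {
    fn define_variable(
        &mut self,
        ctx: &mut FunctionContext,
        id: ResolvedValueId,
        register: usize,
    ) {
        let previous = ctx.ssa_value_allocations.insert(id, BrilligVariable { register });
        assert!(previous.is_none(), "value was already in the cache");
        self.available_variables.insert(id);
    }

    fn is_allocated(&self, id: ResolvedValueId) -> bool {
        self.available_variables.contains(&id)
    }

    fn remove_variable(&mut self, ctx: &FunctionContext, id: ResolvedValueId) -> BrilligVariable {
        assert!(self.available_variables.remove(&id), "variable is not available");
        *ctx.ssa_value_allocations.get(&id).expect("allocation not found")
    }
}

fn main() {
    let mut ctx = FunctionContext::default();
    let mut block = BlockVariables::default();
    let v1 = ResolvedValueId(1);

    block.define_variable(&mut ctx, v1, 0);
    assert!(block.is_allocated(v1));

    let freed = block.remove_variable(&ctx, v1);
    assert!(!block.is_allocated(v1));
    assert_eq!(freed.register, 0); // this register could now be deallocated
}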
@@ -57,7 +59,6 @@ impl BlockVariables { ) -> BrilligVariable { let value_id = dfg.resolve(value_id); let variable = allocate_value(value_id, brillig_context, dfg); - let value_id = value_id.raw(); if function_context.ssa_value_allocations.insert(value_id, variable).is_some() { unreachable!("ICE: ValueId {value_id:?} was already in cache"); @@ -83,24 +84,21 @@ impl BlockVariables { /// Removes a variable so it's not used anymore within this block. pub(crate) fn remove_variable( &mut self, - value_id: ValueId, + value_id: ResolvedValueId, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, ) { - assert!( - self.available_variables.remove(value_id.as_ref()), - "ICE: Variable is not available" - ); + assert!(self.available_variables.remove(&value_id), "ICE: Variable is not available"); let variable = function_context .ssa_value_allocations - .get(value_id.as_ref()) + .get(&value_id) .expect("ICE: Variable allocation not found"); brillig_context.deallocate_register(variable.extract_register()); } /// Checks if a variable is allocated. pub(crate) fn is_allocated(&self, value_id: ResolvedValueId) -> bool { - self.available_variables.contains(&value_id.raw()) + self.available_variables.contains(&value_id) } /// For a given SSA value id, return the corresponding cached allocation. @@ -109,7 +107,6 @@ impl BlockVariables { function_context: &FunctionContext, value_id: ResolvedValueId, ) -> BrilligVariable { - let value_id = value_id.raw(); assert!( self.available_variables.contains(&value_id), "ICE: ValueId {value_id:?} is not available" diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index e16e3787fee..2f1a641d4bf 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -10,7 +10,7 @@ use crate::{ function::{Function, FunctionId}, post_order::PostOrder, types::Type, - value::RawValueId, + value::ResolvedValueId, }, }; use fxhash::FxHashMap as HashMap; @@ -20,7 +20,7 @@ use super::{constant_allocation::ConstantAllocation, variable_liveness::Variable pub(crate) struct FunctionContext { pub(crate) function_id: FunctionId, /// Map from SSA values its allocation. Since values can be only defined once in SSA form, we insert them here on when we allocate them at their definition. - pub(crate) ssa_value_allocations: HashMap, + pub(crate) ssa_value_allocations: HashMap, /// The block ids of the function in reverse post order. pub(crate) blocks: Vec, /// Liveness information for each variable in the function. 
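The next two patches move more per-pass bookkeeping onto ResolvedValueId keys. The array_set change below (patch 13) is easiest to read as a "last use wins" rule: any later use of an array evicts the earlier ArraySet from the set of candidates that may be mutated in place. A simplified sketch, with bare usize ids standing in for ResolvedValueId and InstructionId rather than the pass's exact code:

use std::collections::{HashMap, HashSet};

#[derive(Default)]
struct LastUses {
    // For each (resolved) array id, the most recent instruction that used it.
    array_to_last_use: HashMap<usize, usize>,
    // ArraySet instructions whose array operand has no later use.
    instructions_that_can_be_made_mutable: HashSet<usize>,
}

impl LastUses {
    // Any use of `array` means an earlier ArraySet on it is no longer a candidate.
    fn record_use(&mut self, array: usize, instruction: usize) {
        if let Some(existing) = self.array_to_last_use.insert(array, instruction) {
            self.instructions_that_can_be_made_mutable.remove(&existing);
        }
    }

    // An ArraySet stays a candidate only until a later use of the same array appears.
    fn record_array_set(&mut self, array: usize, instruction: usize) {
        self.record_use(array, instruction);
        self.instructions_that_can_be_made_mutable.insert(instruction);
    }
}

fn main() {
    let mut uses = LastUses::default();
    uses.record_array_set(7, 0); // instruction 0: v8 = array_set v7, ...
    assert!(uses.instructions_that_can_be_made_mutable.contains(&0));

    uses.record_use(7, 1); // instruction 1: a later array_get on v7
    assert!(!uses.instructions_that_can_be_made_mutable.contains(&0));
}

The pass resolves the array id before inserting it precisely so that two ids naming the same underlying value land in the same slot; keying the map by ResolvedValueId instead of the raw id simply makes that requirement explicit in the types.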
From 61a7b60838e2d5fa9be0e1634eb6692e35582002 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Tue, 12 Nov 2024 11:58:47 +0000 Subject: [PATCH 12/19] Use ResolvedValueId in ConstantAllocation --- .../src/brillig/brillig_gen/constant_allocation.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs index d68675d98cf..96af9daa9f8 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs @@ -11,7 +11,7 @@ use crate::ssa::ir::{ function::Function, instruction::InstructionId, post_order::PostOrder, - value::{RawValueId, ResolvedValueId, Value, ValueId}, + value::{ResolvedValueId, Value, ValueId}, }; use super::variable_liveness::{collect_variables_of_value, variables_used_in_instruction}; @@ -23,7 +23,7 @@ pub(crate) enum InstructionLocation { } pub(crate) struct ConstantAllocation { - constant_usage: HashMap>>, + constant_usage: HashMap>>, allocation_points: HashMap>>, dominator_tree: DominatorTree, blocks_within_loops: HashSet, @@ -68,7 +68,7 @@ impl ConstantAllocation { |block_id: BasicBlockId, value_id: ResolvedValueId, location: InstructionLocation| { if is_constant_value(value_id, &func.dfg) { self.constant_usage - .entry(value_id.raw()) + .entry(value_id) .or_default() .entry(block_id) .or_default() From fa70f953fae1ad0f833d280cec79c92e1da2adc7 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Tue, 12 Nov 2024 12:18:09 +0000 Subject: [PATCH 13/19] Use ResolvedValueId in array_set last_use --- compiler/noirc_evaluator/src/ssa/opt/array_set.rs | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs index 360029f3d32..9dabc7d3c5a 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs @@ -7,7 +7,7 @@ use crate::ssa::{ function::{Function, RuntimeType}, instruction::{Instruction, InstructionId, TerminatorInstruction}, types::Type::{Array, Slice}, - value::{RawValueId, ValueId}, + value::{RawValueId, ResolvedValueId, ValueId}, }, ssa_gen::Ssa, }; @@ -56,7 +56,7 @@ struct Context<'f> { dfg: &'f DataFlowGraph, function_parameters: Vec, is_brillig_runtime: bool, - array_to_last_use: HashMap, + array_to_last_use: HashMap, instructions_that_can_be_made_mutable: HashSet, // Mapping of an array that comes from a load and whether the address // it was loaded from is a reference parameter passed to the block. @@ -91,18 +91,14 @@ impl<'f> Context<'f> { Instruction::ArrayGet { array, .. } => { let array = self.dfg.resolve(*array); - if let Some(existing) = - self.array_to_last_use.insert(array.raw(), *instruction_id) - { + if let Some(existing) = self.array_to_last_use.insert(array, *instruction_id) { self.instructions_that_can_be_made_mutable.remove(&existing); } } Instruction::ArraySet { array, value, .. 
} => { let array = self.dfg.resolve(*array); - if let Some(existing) = - self.array_to_last_use.insert(array.raw(), *instruction_id) - { + if let Some(existing) = self.array_to_last_use.insert(array, *instruction_id) { self.instructions_that_can_be_made_mutable.remove(&existing); } if self.is_brillig_runtime { @@ -157,7 +153,7 @@ impl<'f> Context<'f> { let argument = self.dfg.resolve(*argument); if let Some(existing) = - self.array_to_last_use.insert(argument.raw(), *instruction_id) + self.array_to_last_use.insert(argument, *instruction_id) { self.instructions_that_can_be_made_mutable.remove(&existing); } From ed4117a569bef04d31516679c89f43dd066bfc9a Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Mon, 18 Nov 2024 20:42:04 +0000 Subject: [PATCH 14/19] More ResolvedValueId in brillig_gen --- .../src/brillig/brillig_gen/brillig_block.rs | 10 +++++++--- .../brillig/brillig_gen/constant_allocation.rs | 15 +++++++-------- .../src/brillig/brillig_gen/variable_liveness.rs | 2 +- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 15 +++++++-------- compiler/noirc_evaluator/src/ssa/ir/value.rs | 7 ------- 5 files changed, 22 insertions(+), 27 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 33164768a8b..7d4a1189c0a 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -9,7 +9,7 @@ use crate::brillig::brillig_ir::{ }; use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::instruction::ConstrainError; -use crate::ssa::ir::value::ResolvedValueId; +use crate::ssa::ir::value::{Resolution, ResolvedValueId}; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -1498,14 +1498,18 @@ impl<'block> BrilligBlock<'block> { } } - fn initialize_constants(&mut self, constants: &[ValueId], dfg: &DataFlowGraph) { + fn initialize_constants(&mut self, constants: &[ResolvedValueId], dfg: &DataFlowGraph) { for &constant_id in constants { self.convert_ssa_value(constant_id, dfg); } } /// Converts an SSA `ValueId` into a `RegisterOrMemory`. Initializes if necessary. 
- fn convert_ssa_value(&mut self, value_id: ValueId, dfg: &DataFlowGraph) -> BrilligVariable { + fn convert_ssa_value( + &mut self, + value_id: ValueId, + dfg: &DataFlowGraph, + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); let value = &dfg[value_id]; diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs index 33bc4993b5d..028fa8a6890 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs @@ -11,7 +11,7 @@ use crate::ssa::ir::{ function::Function, instruction::InstructionId, post_order::PostOrder, - value::{ResolvedValueId, Value, ValueId}, + value::{ResolvedValueId, Value}, }; use super::variable_liveness::{collect_variables_of_value, variables_used_in_instruction}; @@ -24,7 +24,7 @@ pub(crate) enum InstructionLocation { pub(crate) struct ConstantAllocation { constant_usage: HashMap>>, - allocation_points: HashMap>>, + allocation_points: HashMap>>, dominator_tree: DominatorTree, blocks_within_loops: HashSet, } @@ -47,7 +47,7 @@ impl ConstantAllocation { instance } - pub(crate) fn allocated_in_block(&self, block_id: BasicBlockId) -> Vec { + pub(crate) fn allocated_in_block(&self, block_id: BasicBlockId) -> Vec { self.allocation_points.get(&block_id).map_or(Vec::default(), |allocations| { allocations.iter().flat_map(|(_, constants)| constants.iter()).copied().collect() }) @@ -57,7 +57,7 @@ impl ConstantAllocation { &self, block_id: BasicBlockId, location: InstructionLocation, - ) -> Vec { + ) -> Vec { self.allocation_points.get(&block_id).map_or(Vec::default(), |allocations| { allocations.get(&location).map_or(Vec::default(), |constants| constants.clone()) }) @@ -100,9 +100,8 @@ impl ConstantAllocation { fn decide_allocation_points(&mut self, func: &Function) { for (constant_id, usage_in_blocks) in self.constant_usage.iter() { let block_ids: Vec<_> = usage_in_blocks.iter().map(|(block_id, _)| *block_id).collect(); - let constant_id = constant_id.into(); - let allocation_point = self.decide_allocation_point(constant_id, &block_ids, func); + let allocation_point = self.decide_allocation_point(*constant_id, &block_ids, func); // If the allocation point is one of the places where it's used, we take the first usage in the allocation point. // Otherwise, we allocate it at the terminator of the allocation point. 
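The dfg.rs hunk a little further below makes resolve generic over the resolution marker so that an already-resolved id skips the substitution-map walk entirely. In isolation the idea is roughly the following; this is a simplified standalone sketch (usize instead of Id<Value>, an iterative walk instead of the recursion in the diff), not the compiler's exact code:

use std::collections::HashMap;
use std::marker::PhantomData;

trait Resolution {
    fn is_resolved() -> bool;
}
enum Unresolved {}
enum Resolved {}
impl Resolution for Unresolved {
    fn is_resolved() -> bool { false }
}
impl Resolution for Resolved {
    fn is_resolved() -> bool { true }
}

struct ValueId<R> {
    index: usize,
    _marker: PhantomData<R>,
}
impl<R> ValueId<R> {
    fn new(index: usize) -> Self {
        Self { index, _marker: PhantomData }
    }
}

struct Dfg {
    // id -> id that replaced it
    replaced_value_ids: HashMap<usize, usize>,
}

impl Dfg {
    fn resolve<R: Resolution>(&self, id: ValueId<R>) -> ValueId<Resolved> {
        if R::is_resolved() {
            // Statically known to be resolved already; no lookup needed.
            return ValueId::new(id.index);
        }
        let mut index = id.index;
        while let Some(next) = self.replaced_value_ids.get(&index) {
            index = *next;
        }
        ValueId::new(index)
    }
}

fn main() {
    let dfg = Dfg { replaced_value_ids: HashMap::from([(0, 4), (4, 9)]) };
    let r = dfg.resolve(ValueId::<Unresolved>::new(0));
    assert_eq!(r.index, 9);
    // Resolving an already-resolved id goes through the fast path.
    assert_eq!(dfg.resolve(r).index, 9);
}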
@@ -121,13 +120,13 @@ impl ConstantAllocation { .or_default() .entry(location) .or_default() - .push(constant_id); + .push(*constant_id); } } fn decide_allocation_point( &self, - constant_id: ValueId, + constant_id: ResolvedValueId, blocks_where_is_used: &[BasicBlockId], func: &Function, ) -> BasicBlockId { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index 35a44cc4493..478679087cb 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -213,7 +213,7 @@ impl VariableLiveness { ) { let mut defined = self.compute_defined_variables(block_id, &func.dfg); - defined.extend(constants.allocated_in_block(block_id).iter().map(|c| c.resolved())); + defined.extend(constants.allocated_in_block(block_id)); let block: &BasicBlock = &func.dfg[block_id]; diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index eb0ca38e35d..0767b4aaf33 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -248,10 +248,13 @@ impl DataFlowGraph { /// `ValueId`, this function will return the `ValueId` from which the substitution was taken. /// If `original_value_id`'s underlying `Value` has not been substituted, the same `ValueId` /// is returned. - pub(crate) fn resolve(&self, original_value_id: ValueId) -> ResolvedValueId { + pub(crate) fn resolve(&self, original_value_id: ValueId) -> ResolvedValueId { + if R::is_resolved() { + return ResolvedValueId::new(original_value_id.raw()); + } match self.replaced_value_ids.get(original_value_id.as_ref()) { Some(id) => self.resolve(*id), - None => original_value_id.resolved(), + None => ResolvedValueId::new(original_value_id.raw()), } } @@ -347,11 +350,7 @@ impl DataFlowGraph { /// Resolve and get a value by ID fn resolve_value(&self, original_value_id: ValueId) -> &Value { - let id = if R::is_resolved() { - original_value_id.raw() - } else { - self.resolve(original_value_id.unresolved()).raw() - }; + let id = self.resolve(original_value_id.unresolved()).raw(); &self.values[id] } @@ -544,7 +543,7 @@ impl DataFlowGraph { } /// True if the given ValueId refers to a (recursively) constant value - pub(crate) fn is_constant(&self, argument: ValueId) -> bool { + pub(crate) fn is_constant(&self, argument: ValueId) -> bool { match &self.resolve_value(argument) { Value::Param { .. } => false, Value::Instruction { instruction, .. } => match &self[*instruction] { diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index a4c1e5a89a9..a2f757b58d2 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -115,13 +115,6 @@ impl From> for ValueId { } } -/// Demote any ID into an unresolved one. 
-impl From<&ValueId> for ValueId { - fn from(value: &ValueId) -> Self { - value.unresolved() - } -} - impl From> for ValueId { fn from(value: Id) -> Self { ValueId::new(value) From 72ad1db158397e87077da32748d051c32775710c Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Mon, 18 Nov 2024 23:02:06 +0000 Subject: [PATCH 15/19] Remove Hash from Resolved, add module specific variants --- .../src/brillig/brillig_gen.rs | 19 ++++++- .../brillig/brillig_gen/brillig_black_box.rs | 2 +- .../src/brillig/brillig_gen/brillig_block.rs | 25 ++++----- .../brillig_gen/brillig_block_variables.rs | 34 ++++++------- .../src/brillig/brillig_gen/brillig_fn.rs | 17 ++++--- .../brillig_gen/constant_allocation.rs | 37 +++++++------- .../brillig/brillig_gen/variable_liveness.rs | 40 +++++++-------- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 15 +++--- compiler/noirc_evaluator/src/ssa/ir/value.rs | 21 +++++--- .../noirc_evaluator/src/ssa/opt/array_set.rs | 26 ++++++++-- .../src/ssa/opt/constant_folding.rs | 24 +++++---- .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 28 ++++++---- .../src/ssa/opt/mem2reg/block.rs | 51 +++++++++++++------ 13 files changed, 205 insertions(+), 134 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs index 786a03031d6..30920fd300d 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs @@ -13,7 +13,24 @@ use super::brillig_ir::{ artifact::{BrilligArtifact, Label}, BrilligContext, }; -use crate::ssa::ir::function::Function; +use crate::ssa::ir::{ + function::Function, + value::{IsResolved, ResolvedValueId, ValueId}, +}; + +/// Private resolution type, to limit the scope of storing them in data structures. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct FinalResolved {} + +impl IsResolved for FinalResolved {} + +type FinalValueId = ValueId; + +impl From for FinalValueId { + fn from(value: ResolvedValueId) -> Self { + ValueId::new(value.raw()) + } +} /// Converting an SSA function into Brillig bytecode. 
pub(crate) fn convert_ssa_function( diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 3685c9540f3..ba6c75bc38b 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -14,7 +14,7 @@ use crate::brillig::brillig_ir::{ /// Transforms SSA's black box function calls into the corresponding brillig instructions /// Extracting arguments and results from the SSA function call /// And making any necessary type conversions to adapt noir's blackbox calls to brillig's -pub(crate) fn convert_black_box_call( +pub(super) fn convert_black_box_call( brillig_context: &mut BrilligContext, bb_func: &BlackBoxFunc, function_arguments: &[BrilligVariable], diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 7d4a1189c0a..076a3bbca87 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -31,23 +31,24 @@ use super::brillig_black_box::convert_black_box_call; use super::brillig_block_variables::BlockVariables; use super::brillig_fn::FunctionContext; use super::constant_allocation::InstructionLocation; +use super::FinalValueId; /// Generate the compilation artifacts for compiling a function into brillig bytecode. -pub(crate) struct BrilligBlock<'block> { - pub(crate) function_context: &'block mut FunctionContext, +pub(super) struct BrilligBlock<'block> { + pub(super) function_context: &'block mut FunctionContext, /// The basic block that is being converted - pub(crate) block_id: BasicBlockId, + pub(super) block_id: BasicBlockId, /// Context for creating brillig opcodes - pub(crate) brillig_context: &'block mut BrilligContext, + pub(super) brillig_context: &'block mut BrilligContext, /// Tracks the available variable during the codegen of the block - pub(crate) variables: BlockVariables, + pub(super) variables: BlockVariables, /// For each instruction, the set of values that are not used anymore after it. - pub(crate) last_uses: HashMap>, + pub(super) last_uses: HashMap>, } impl<'block> BrilligBlock<'block> { /// Converts an SSA Basic block into a sequence of Brillig opcodes - pub(crate) fn compile( + pub(super) fn compile( function_context: &'block mut FunctionContext, brillig_context: &'block mut BrilligContext, block_id: BasicBlockId, @@ -150,7 +151,7 @@ impl<'block> BrilligBlock<'block> { let target_block = &dfg[*destination_block]; for (src, dest) in arguments.iter().zip(target_block.parameters()) { // Destinations are block parameters so they should have been allocated previously. 
- let dest = dfg.resolve(*dest); + let dest = dfg.resolve(*dest).into(); let destination = self.variables.get_allocation(self.function_context, dest); let source = self.convert_ssa_value(*src, dfg); self.brillig_context @@ -762,7 +763,7 @@ impl<'block> BrilligBlock<'block> { } Instruction::MakeArray { elements: array, typ } => { let value_id = dfg.instruction_results(instruction_id)[0]; - if !self.variables.is_allocated(dfg.resolve(value_id)) { + if !self.variables.is_allocated(dfg.resolve(value_id).into()) { let new_variable = self.variables.define_variable( self.function_context, self.brillig_context, @@ -1498,7 +1499,7 @@ impl<'block> BrilligBlock<'block> { } } - fn initialize_constants(&mut self, constants: &[ResolvedValueId], dfg: &DataFlowGraph) { + fn initialize_constants(&mut self, constants: &[FinalValueId], dfg: &DataFlowGraph) { for &constant_id in constants { self.convert_ssa_value(constant_id, dfg); } @@ -1510,7 +1511,7 @@ impl<'block> BrilligBlock<'block> { value_id: ValueId, dfg: &DataFlowGraph, ) -> BrilligVariable { - let value_id = dfg.resolve(value_id); + let value_id = dfg.resolve(value_id).into(); let value = &dfg[value_id]; match value { @@ -1849,7 +1850,7 @@ impl<'block> BrilligBlock<'block> { } /// Returns the type of the operation considering the types of the operands -pub(crate) fn type_of_binary_operation(lhs_type: &Type, rhs_type: &Type, op: BinaryOp) -> Type { +pub(super) fn type_of_binary_operation(lhs_type: &Type, rhs_type: &Type, op: BinaryOp) -> Type { match (lhs_type, rhs_type) { (_, Type::Function) | (Type::Function, _) => { unreachable!("Functions are invalid in binary operations") diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index 4c102ae82ef..2a449b36527 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -13,27 +13,27 @@ use crate::{ ssa::ir::{ dfg::DataFlowGraph, types::{CompositeType, Type}, - value::{ResolvedValueId, ValueId}, + value::ValueId, }, }; -use super::brillig_fn::FunctionContext; +use super::{brillig_fn::FunctionContext, FinalValueId}; #[derive(Debug, Default)] -pub(crate) struct BlockVariables { +pub(super) struct BlockVariables { // Since we're generating Brillig bytecode here, there shouldn't be any more value ID replacements // and we can use the final resolved ID. - available_variables: HashSet, + available_variables: HashSet, } impl BlockVariables { /// Creates a BlockVariables instance. It uses the variables that are live in to the block and the global available variables (block parameters) - pub(crate) fn new(live_in: HashSet) -> Self { + pub(super) fn new(live_in: HashSet) -> Self { BlockVariables { available_variables: live_in } } /// Returns all variables that have not been removed at this point. - pub(crate) fn get_available_variables( + pub(super) fn get_available_variables( &self, function_context: &FunctionContext, ) -> Vec { @@ -50,14 +50,14 @@ impl BlockVariables { } /// For a given SSA value id, define the variable and return the corresponding cached allocation. 
- pub(crate) fn define_variable( + pub(super) fn define_variable( &mut self, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, value_id: ValueId, dfg: &DataFlowGraph, ) -> BrilligVariable { - let value_id = dfg.resolve(value_id); + let value_id = dfg.resolve(value_id).into(); let variable = allocate_value(value_id, brillig_context, dfg); if function_context.ssa_value_allocations.insert(value_id, variable).is_some() { @@ -70,7 +70,7 @@ impl BlockVariables { } /// Defines a variable that fits in a single register and returns the allocated register. - pub(crate) fn define_single_addr_variable( + pub(super) fn define_single_addr_variable( &mut self, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, @@ -82,9 +82,9 @@ impl BlockVariables { } /// Removes a variable so it's not used anymore within this block. - pub(crate) fn remove_variable( + pub(super) fn remove_variable( &mut self, - value_id: ResolvedValueId, + value_id: FinalValueId, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, ) { @@ -97,15 +97,15 @@ impl BlockVariables { } /// Checks if a variable is allocated. - pub(crate) fn is_allocated(&self, value_id: ResolvedValueId) -> bool { + pub(super) fn is_allocated(&self, value_id: FinalValueId) -> bool { self.available_variables.contains(&value_id) } /// For a given SSA value id, return the corresponding cached allocation. - pub(crate) fn get_allocation( + pub(super) fn get_allocation( &mut self, function_context: &FunctionContext, - value_id: ResolvedValueId, + value_id: FinalValueId, ) -> BrilligVariable { assert!( self.available_variables.contains(&value_id), @@ -120,13 +120,13 @@ impl BlockVariables { } /// Computes the length of an array. This will match with the indexes that SSA will issue -pub(crate) fn compute_array_length(item_typ: &CompositeType, elem_count: usize) -> usize { +pub(super) fn compute_array_length(item_typ: &CompositeType, elem_count: usize) -> usize { item_typ.len() * elem_count } /// For a given value_id, allocates the necessary registers to hold it. -pub(crate) fn allocate_value( - value_id: ResolvedValueId, +pub(super) fn allocate_value( + value_id: FinalValueId, brillig_context: &mut BrilligContext, dfg: &DataFlowGraph, ) -> BrilligVariable { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index 2f1a641d4bf..ec5aa5b0783 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -10,28 +10,29 @@ use crate::{ function::{Function, FunctionId}, post_order::PostOrder, types::Type, - value::ResolvedValueId, }, }; use fxhash::FxHashMap as HashMap; -use super::{constant_allocation::ConstantAllocation, variable_liveness::VariableLiveness}; +use super::{ + constant_allocation::ConstantAllocation, variable_liveness::VariableLiveness, FinalValueId, +}; pub(crate) struct FunctionContext { - pub(crate) function_id: FunctionId, + pub(super) function_id: FunctionId, /// Map from SSA values its allocation. Since values can be only defined once in SSA form, we insert them here on when we allocate them at their definition. - pub(crate) ssa_value_allocations: HashMap, + pub(super) ssa_value_allocations: HashMap, /// The block ids of the function in reverse post order. - pub(crate) blocks: Vec, + pub(super) blocks: Vec, /// Liveness information for each variable in the function. 
- pub(crate) liveness: VariableLiveness, + pub(super) liveness: VariableLiveness, /// Information on where to allocate constants - pub(crate) constant_allocation: ConstantAllocation, + pub(super) constant_allocation: ConstantAllocation, } impl FunctionContext { /// Creates a new function context. It will allocate parameters for all blocks and compute the liveness of every variable. - pub(crate) fn new(function: &Function) -> Self { + pub(super) fn new(function: &Function) -> Self { let id = function.id(); let mut reverse_post_order = Vec::new(); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs index 028fa8a6890..9a7af265bf7 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs @@ -4,33 +4,30 @@ use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::ssa::ir::{ - basic_block::BasicBlockId, - cfg::ControlFlowGraph, - dfg::DataFlowGraph, - dom::DominatorTree, - function::Function, - instruction::InstructionId, - post_order::PostOrder, - value::{ResolvedValueId, Value}, + basic_block::BasicBlockId, cfg::ControlFlowGraph, dfg::DataFlowGraph, dom::DominatorTree, + function::Function, instruction::InstructionId, post_order::PostOrder, value::Value, }; -use super::variable_liveness::{collect_variables_of_value, variables_used_in_instruction}; +use super::{ + variable_liveness::{collect_variables_of_value, variables_used_in_instruction}, + FinalValueId, +}; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub(crate) enum InstructionLocation { +pub(super) enum InstructionLocation { Instruction(InstructionId), Terminator, } -pub(crate) struct ConstantAllocation { - constant_usage: HashMap>>, - allocation_points: HashMap>>, +pub(super) struct ConstantAllocation { + constant_usage: HashMap>>, + allocation_points: HashMap>>, dominator_tree: DominatorTree, blocks_within_loops: HashSet, } impl ConstantAllocation { - pub(crate) fn from_function(func: &Function) -> Self { + pub(super) fn from_function(func: &Function) -> Self { let cfg = ControlFlowGraph::with_function(func); let post_order = PostOrder::with_function(func); let mut dominator_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order); @@ -47,17 +44,17 @@ impl ConstantAllocation { instance } - pub(crate) fn allocated_in_block(&self, block_id: BasicBlockId) -> Vec { + pub(super) fn allocated_in_block(&self, block_id: BasicBlockId) -> Vec { self.allocation_points.get(&block_id).map_or(Vec::default(), |allocations| { allocations.iter().flat_map(|(_, constants)| constants.iter()).copied().collect() }) } - pub(crate) fn allocated_at_location( + pub(super) fn allocated_at_location( &self, block_id: BasicBlockId, location: InstructionLocation, - ) -> Vec { + ) -> Vec { self.allocation_points.get(&block_id).map_or(Vec::default(), |allocations| { allocations.get(&location).map_or(Vec::default(), |constants| constants.clone()) }) @@ -65,7 +62,7 @@ impl ConstantAllocation { fn collect_constant_usage(&mut self, func: &Function) { let mut record_if_constant = - |block_id: BasicBlockId, value_id: ResolvedValueId, location: InstructionLocation| { + |block_id: BasicBlockId, value_id: FinalValueId, location: InstructionLocation| { if is_constant_value(value_id, &func.dfg) { self.constant_usage .entry(value_id) @@ -126,7 +123,7 @@ impl ConstantAllocation { fn decide_allocation_point( &self, - constant_id: ResolvedValueId, + 
constant_id: FinalValueId, blocks_where_is_used: &[BasicBlockId], func: &Function, ) -> BasicBlockId { @@ -164,7 +161,7 @@ impl ConstantAllocation { } } -pub(crate) fn is_constant_value(id: ResolvedValueId, dfg: &DataFlowGraph) -> bool { +pub(super) fn is_constant_value(id: FinalValueId, dfg: &DataFlowGraph) -> bool { matches!(&dfg[id], Value::NumericConstant { .. }) } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index 478679087cb..1562404734b 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -9,12 +9,12 @@ use crate::ssa::ir::{ function::Function, instruction::{Instruction, InstructionId}, post_order::PostOrder, - value::{ResolvedValueId, Value, ValueId}, + value::{Value, ValueId}, }; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; -use super::constant_allocation::ConstantAllocation; +use super::{constant_allocation::ConstantAllocation, FinalValueId}; /// A back edge is an edge from a node to one of its ancestors. It denotes a loop in the CFG. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -45,11 +45,11 @@ fn find_back_edges( } /// Collects the underlying variables inside a value id. It might be more than one, for example in constant arrays that are constructed with multiple vars. -pub(crate) fn collect_variables_of_value( +pub(super) fn collect_variables_of_value( value_id: ValueId, dfg: &DataFlowGraph, -) -> Option { - let value_id = dfg.resolve(value_id); +) -> Option { + let value_id = dfg.resolve(value_id).into(); let value = &dfg[value_id]; match value { @@ -61,7 +61,7 @@ pub(crate) fn collect_variables_of_value( } } -pub(crate) fn variables_used_in_instruction( +pub(super) fn variables_used_in_instruction( instruction: &Instruction, dfg: &DataFlowGraph, ) -> Variables { @@ -86,7 +86,7 @@ fn variables_used_in_block(block: &BasicBlock, dfg: &DataFlowGraph) -> Variables .collect(); // We consider block parameters used, so they live up to the block that owns them. - used.extend(block.parameters().iter().map(|p| p.resolved())); + used.extend(block.parameters().iter().map(|p| FinalValueId::from(p.resolved()))); if let Some(terminator) = block.terminator() { terminator.for_each_value(|value_id| { @@ -97,7 +97,7 @@ fn variables_used_in_block(block: &BasicBlock, dfg: &DataFlowGraph) -> Variables used } -type Variables = HashSet; +type Variables = HashSet; fn compute_used_before_def( block: &BasicBlock, @@ -113,7 +113,7 @@ fn compute_used_before_def( type LastUses = HashMap; /// A struct representing the liveness of variables throughout a function. -pub(crate) struct VariableLiveness { +pub(super) struct VariableLiveness { cfg: ControlFlowGraph, post_order: PostOrder, dominator_tree: DominatorTree, @@ -127,7 +127,7 @@ pub(crate) struct VariableLiveness { impl VariableLiveness { /// Computes the liveness of variables throughout a function. 
- pub(crate) fn from_function(func: &Function, constants: &ConstantAllocation) -> Self { + pub(super) fn from_function(func: &Function, constants: &ConstantAllocation) -> Self { let cfg = ControlFlowGraph::with_function(func); let post_order = PostOrder::with_function(func); let dominator_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order); @@ -151,12 +151,12 @@ impl VariableLiveness { } /// The set of values that are alive before the block starts executing - pub(crate) fn get_live_in(&self, block_id: &BasicBlockId) -> &Variables { + pub(super) fn get_live_in(&self, block_id: &BasicBlockId) -> &Variables { self.live_in.get(block_id).expect("Live ins should have been calculated") } /// The set of values that are alive after the block has finished executed - pub(crate) fn get_live_out(&self, block_id: &BasicBlockId) -> Variables { + pub(super) fn get_live_out(&self, block_id: &BasicBlockId) -> Variables { let mut live_out = HashSet::default(); for successor_id in self.cfg.successors(*block_id) { live_out.extend(self.get_live_in(&successor_id)); @@ -165,14 +165,14 @@ impl VariableLiveness { } /// A map of instruction id to the set of values that die after the instruction has executed - pub(crate) fn get_last_uses(&self, block_id: &BasicBlockId) -> &LastUses { + pub(super) fn get_last_uses(&self, block_id: &BasicBlockId) -> &LastUses { self.last_uses.get(block_id).expect("Last uses should have been calculated") } /// Retrieves the list of block params the given block is defining. /// Block params are defined before the block that owns them (since they are used by the predecessor blocks). They must be defined in the immediate dominator. /// This is the last point where the block param can be allocated without it being allocated in different places in different branches. 
- pub(crate) fn defined_block_params(&self, block_id: &BasicBlockId) -> Vec { + pub(super) fn defined_block_params(&self, block_id: &BasicBlockId) -> Vec { self.param_definitions.get(block_id).cloned().unwrap_or_default() } @@ -244,13 +244,13 @@ impl VariableLiveness { let mut defined_vars = HashSet::default(); for parameter in self.defined_block_params(&block_id) { - defined_vars.insert(dfg.resolve(parameter)); + defined_vars.insert(dfg.resolve(parameter).into()); } for instruction_id in block.instructions() { let result_values = dfg.instruction_results(*instruction_id); for result_value in result_values { - defined_vars.insert(dfg.resolve(*result_value)); + defined_vars.insert(dfg.resolve(*result_value).into()); } } @@ -338,10 +338,10 @@ mod test { use crate::ssa::ir::map::Id; use crate::ssa::ir::types::Type; - use super::{ResolvedValueId, ValueId}; + use super::{FinalValueId, ValueId}; - fn resolved_set(it: &[ValueId]) -> FxHashSet { - FxHashSet::from_iter(it.iter().map(|v| v.resolved())) + fn resolved_set(it: &[ValueId]) -> FxHashSet { + FxHashSet::from_iter(it.iter().map(|v| v.resolved().into())) } #[test] @@ -637,7 +637,7 @@ mod test { liveness .defined_block_params(&block_id) .iter() - .map(|p| p.resolved()) + .map(|p| p.resolved().into()) .collect::() }; diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 0767b4aaf33..09862c10920 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -10,7 +10,7 @@ use super::{ }, map::DenseMap, types::Type, - value::{RawValueId, Resolution, ResolvedValueId, Value, ValueId}, + value::{IsResolved, RawValueId, Resolution, ResolvedValueId, Unresolved, Value, ValueId}, }; use acvm::{acir::AcirField, FieldElement}; @@ -581,16 +581,19 @@ impl std::ops::IndexMut for DataFlowGraph { /// Indexing the DFG by unresolved value IDs is all over the codebase, /// but it's not obvious whether we should apply resolution. -impl std::ops::Index for DataFlowGraph { +impl std::ops::Index> for DataFlowGraph { type Output = Value; - fn index(&self, id: ValueId) -> &Self::Output { + fn index(&self, id: ValueId) -> &Self::Output { &self.values[id.raw()] } } -impl std::ops::Index for DataFlowGraph { - type Output = Value; // The value can still contain unresolved IDs. - fn index(&self, id: ResolvedValueId) -> &Self::Output { +impl std::ops::Index> for DataFlowGraph +where + R: IsResolved, +{ + type Output = Value; + fn index(&self, id: ValueId) -> &Self::Output { &self.values[id.raw()] } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index a2f757b58d2..3654d7d122d 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -15,22 +15,27 @@ use super::{ #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) enum Unresolved {} -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +/// Resolved marker; doesn't implement `Hash` so it can't be stored in maps. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize /* PartialOrd, Ord, Hash */)] pub(crate) enum Resolved {} +pub(crate) trait IsResolved {} + +impl IsResolved for Resolved {} + pub(crate) trait Resolution { fn is_resolved() -> bool; } -impl Resolution for Resolved { +impl Resolution for Unresolved { fn is_resolved() -> bool { - true + false } } -impl Resolution for Unresolved { +impl Resolution for R { fn is_resolved() -> bool { - false + true } } @@ -63,7 +68,7 @@ impl ValueId { /// Demote an ID into an unresolved one. pub(crate) fn unresolved(self) -> ValueId { - ValueId::new(Id::new(self.id.to_usize())) + ValueId::new(self.id) } } @@ -109,8 +114,8 @@ impl AsRef> for ValueId { } /// Demote a resolved ID into an unresolved one. -impl From> for ValueId { - fn from(value: ValueId) -> Self { +impl From> for ValueId { + fn from(value: ValueId) -> Self { value.unresolved() } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs index 74381d634e4..0a023b31e58 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs @@ -7,7 +7,7 @@ use crate::ssa::{ function::{Function, RuntimeType}, instruction::{Instruction, InstructionId, TerminatorInstruction}, types::Type::{Array, Slice}, - value::{RawValueId, ResolvedValueId}, + value::{RawValueId, ResolvedValueId, ValueId}, }, ssa_gen::Ssa, }; @@ -55,9 +55,21 @@ impl Function { } } +/// Private resolution type, to limit the scope of storing them in data structures. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub(super) enum ContextResolved {} + +//impl IsResolved for ContextResolved {} + +impl From for ValueId { + fn from(value: ResolvedValueId) -> Self { + ValueId::new(value.raw()) + } +} + struct Context<'f> { dfg: &'f DataFlowGraph, - array_to_last_use: HashMap, + array_to_last_use: HashMap, InstructionId>, instructions_that_can_be_made_mutable: HashSet, // Mapping of an array that comes from a load and whether the address // it was loaded from is a reference parameter passed to the block. @@ -86,14 +98,18 @@ impl<'f> Context<'f> { Instruction::ArrayGet { array, .. } => { let array = self.dfg.resolve(*array); - if let Some(existing) = self.array_to_last_use.insert(array, *instruction_id) { + if let Some(existing) = + self.array_to_last_use.insert(array.into(), *instruction_id) + { self.instructions_that_can_be_made_mutable.remove(&existing); } } Instruction::ArraySet { array, .. 
} => { let array = self.dfg.resolve(*array); - if let Some(existing) = self.array_to_last_use.insert(array, *instruction_id) { + if let Some(existing) = + self.array_to_last_use.insert(array.into(), *instruction_id) + { self.instructions_that_can_be_made_mutable.remove(&existing); } @@ -135,7 +151,7 @@ impl<'f> Context<'f> { let argument = self.dfg.resolve(*argument); if let Some(existing) = - self.array_to_last_use.insert(argument, *instruction_id) + self.array_to_last_use.insert(argument.into(), *instruction_id) { self.instructions_that_can_be_made_mutable.remove(&existing); } diff --git a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 7b7357cfce7..ff08ce6fa7b 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -32,7 +32,7 @@ use crate::ssa::{ function::Function, instruction::{Instruction, InstructionId}, types::Type, - value::{RawValueId, Resolved, ResolvedValueId, Value, ValueId}, + value::{IsResolved, RawValueId, Value, ValueId}, }, ssa_gen::Ssa, }; @@ -103,13 +103,19 @@ struct Context { dom: DominatorTree, } +/// Private resolution type, to limit the scope of storing them in data structures. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +enum ContextResolved {} + +impl IsResolved for ContextResolved {} + /// HashMap from (Instruction, side_effects_enabled_var) to the results of the instruction. /// Stored as a two-level map to avoid cloning Instructions during the `.get` call. /// /// In addition to each result, the original BasicBlockId is stored as well. This allows us /// to deduplicate instructions across blocks as long as the new block dominates the original. type InstructionResultCache = - HashMap, HashMap, ResultCache>>; + HashMap, HashMap, ResultCache>>; /// Records the results of all duplicate [`Instruction`]s along with the blocks in which they sit. 
/// @@ -209,7 +215,7 @@ impl Context { instruction_id: InstructionId, dfg: &DataFlowGraph, constraint_simplification_mapping: &HashMap, - ) -> Instruction { + ) -> Instruction { let instruction = dfg[instruction_id].clone(); // Alternate between resolving `value_id` in the `dfg` and checking to see if the resolved value @@ -221,11 +227,11 @@ impl Context { dfg: &DataFlowGraph, cache: &HashMap, value_id: ValueId, - ) -> ResolvedValueId { - let resolved_id = dfg.resolve(value_id); - match cache.get(&resolved_id.raw()) { + ) -> ValueId { + let resolved_id = dfg.resolve(value_id).raw(); + match cache.get(&resolved_id) { Some(cached_value) => resolve_cache(dfg, cache, *cached_value), - None => resolved_id, + None => ValueId::new(resolved_id), } } @@ -266,7 +272,7 @@ impl Context { fn cache_instruction( &mut self, - instruction: Instruction, + instruction: Instruction, instruction_results: Vec, dfg: &DataFlowGraph, side_effects_enabled_var: ValueId, @@ -342,7 +348,7 @@ impl Context { fn get_cached( &mut self, dfg: &DataFlowGraph, - instruction: &Instruction, + instruction: &Instruction, side_effects_enabled_var: ValueId, block: BasicBlockId, ) -> Option { diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 9167070619b..e082b726d0c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -66,6 +66,7 @@ mod block; use std::collections::{BTreeMap, BTreeSet}; +use block::ContextResolved; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::ssa::{ @@ -77,7 +78,7 @@ use crate::ssa::{ instruction::{Instruction, InstructionId, TerminatorInstruction}, post_order::PostOrder, types::Type, - value::{RawValueId, ResolvedValueId, ValueId}, + value::{RawValueId, ValueId}, }, ssa_gen::Ssa, }; @@ -348,7 +349,7 @@ impl<'f> PerFunctionContext<'f> { let first = aliases.first(); let first = first.expect("All parameters alias at least themselves or we early return"); - let expression = Expression::Other(first.resolved()); + let expression = Expression::Other(first.resolved().into()); let previous = references.aliases.insert(expression.clone(), aliases.clone()); assert!(previous.is_none()); @@ -396,7 +397,7 @@ impl<'f> PerFunctionContext<'f> { match &self.inserter.function.dfg[instruction] { Instruction::Load { address } => { - let address = self.inserter.function.dfg.resolve(*address); + let address = ValueId::from(self.inserter.function.dfg.resolve(*address)); let result = self.inserter.function.dfg.instruction_results(instruction)[0]; references.remember_dereference(self.inserter.function, address, result); @@ -413,8 +414,8 @@ impl<'f> PerFunctionContext<'f> { } } Instruction::Store { address, value } => { - let address = self.inserter.function.dfg.resolve(*address); - let value = self.inserter.function.dfg.resolve(*value); + let address = self.inserter.function.dfg.resolve(*address).into(); + let value = self.inserter.function.dfg.resolve(*value).into(); // FIXME: This causes errors in the sha256 tests // @@ -443,12 +444,12 @@ impl<'f> PerFunctionContext<'f> { Instruction::Allocate => { // Register the new reference let result = self.inserter.function.dfg.instruction_results(instruction)[0]; - let expr = Expression::Other(result.resolved()); + let expr = Expression::Other(result.resolved().into()); references.expressions.insert(result.raw(), expr.clone()); references.aliases.insert(expr, AliasSet::known(result)); } Instruction::ArrayGet { array, .. 
} => { - let array = self.inserter.function.dfg.resolve(*array); + let array = self.inserter.function.dfg.resolve(*array).into(); let result = self.inserter.function.dfg.instruction_results(instruction)[0]; references.mark_value_used(array, self.inserter.function); @@ -460,7 +461,7 @@ impl<'f> PerFunctionContext<'f> { } } Instruction::ArraySet { array, value, .. } => { - let array = self.inserter.function.dfg.resolve(*array); + let array = self.inserter.function.dfg.resolve(*array).into(); references.mark_value_used(array, self.inserter.function); let element_type = self.inserter.function.dfg.type_of_value(*value); @@ -507,7 +508,7 @@ impl<'f> PerFunctionContext<'f> { // as a potential alias to the array itself. if Self::contains_references(typ) { let array = self.inserter.function.dfg.instruction_results(instruction)[0]; - let array = self.inserter.function.dfg.resolve(array); + let array = self.inserter.function.dfg.resolve(array).into(); let expr = Expression::ArrayElement(Box::new(Expression::Other(array))); references.expressions.insert(array.raw(), expr.clone()); @@ -533,7 +534,12 @@ impl<'f> PerFunctionContext<'f> { } } - fn set_aliases(&self, references: &mut Block, address: ResolvedValueId, new_aliases: AliasSet) { + fn set_aliases( + &self, + references: &mut Block, + address: ValueId, + new_aliases: AliasSet, + ) { let expression = references.expressions.entry(address.raw()).or_insert(Expression::Other(address)); let aliases = references.aliases.entry(expression.clone()).or_default(); @@ -543,7 +549,7 @@ impl<'f> PerFunctionContext<'f> { fn mark_all_unknown(&self, values: &[ValueId], references: &mut Block) { for value in values { if self.inserter.function.dfg.value_is_reference(*value) { - let value = self.inserter.function.dfg.resolve(*value); + let value = self.inserter.function.dfg.resolve(*value).into(); references.set_unknown(value); references.mark_value_used(value, self.inserter.function); } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs index 65adce9da19..191a803bb9d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs @@ -3,11 +3,23 @@ use std::borrow::Cow; use crate::ssa::ir::{ function::Function, instruction::{Instruction, InstructionId}, - value::{RawValueId, Resolved, ResolvedValueId, ValueId}, + value::{IsResolved, RawValueId, ResolvedValueId, ValueId}, }; use super::alias_set::AliasSet; +/// Private resolution type, to limit the scope of storing them in data structures. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub(super) enum ContextResolved {} + +impl IsResolved for ContextResolved {} + +impl From for ValueId { + fn from(value: ResolvedValueId) -> Self { + ValueId::new(value.raw()) + } +} + /// A `Block` acts as a per-block context for the mem2reg pass. 
/// Most notably, it contains the current alias set thought to track each /// reference value if known, and it contains the expected ReferenceValue @@ -33,7 +45,7 @@ pub(super) struct Block { pub(super) references: im::OrdMap, /// The last instance of a `Store` instruction to each address in this block - pub(super) last_stores: im::OrdMap, + pub(super) last_stores: im::OrdMap, InstructionId>, } /// An `Expression` here is used to represent a canonical key @@ -43,14 +55,14 @@ pub(super) struct Block { pub(super) enum Expression { Dereference(Box), ArrayElement(Box), - Other(ValueId), + Other(ValueId), } /// Every reference's value is either Known and can be optimized away, or Unknown. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub(super) enum ReferenceValue { Unknown, - Known(ResolvedValueId), + Known(ValueId), } impl ReferenceValue { @@ -65,7 +77,10 @@ impl ReferenceValue { impl Block { /// If the given reference id points to a known value, return the value - pub(super) fn get_known_value(&self, address: ResolvedValueId) -> Option { + pub(super) fn get_known_value( + &self, + address: ValueId, + ) -> Option> { if let Some(expression) = self.expressions.get(&address.raw()) { if let Some(aliases) = self.aliases.get(expression) { // We could allow multiple aliases if we check that the reference @@ -82,15 +97,19 @@ impl Block { } /// If the given address is known, set its value to `ReferenceValue::Known(value)`. - pub(super) fn set_known_value(&mut self, address: ResolvedValueId, value: ResolvedValueId) { + pub(super) fn set_known_value( + &mut self, + address: ValueId, + value: ValueId, + ) { self.set_value(address, ReferenceValue::Known(value)); } - pub(super) fn set_unknown(&mut self, address: ResolvedValueId) { + pub(super) fn set_unknown(&mut self, address: ValueId) { self.set_value(address, ReferenceValue::Unknown); } - fn set_value(&mut self, address: ResolvedValueId, value: ReferenceValue) { + fn set_value(&mut self, address: ValueId, value: ReferenceValue) { let expression = self.expressions.entry(address.raw()).or_insert(Expression::Other(address)); let aliases = self.aliases.entry(expression.clone()).or_default(); @@ -152,7 +171,7 @@ impl Block { pub(super) fn remember_dereference( &mut self, function: &Function, - address: ResolvedValueId, + address: ValueId, result: ValueId, ) { if function.dfg.value_is_reference(result) { @@ -171,7 +190,7 @@ impl Block { /// Iterate through each known alias of the given address and apply the function `f` to each. fn for_each_alias_of( &mut self, - address: ResolvedValueId, + address: ValueId, mut f: impl FnMut(&mut Self, ValueId) -> T, ) { if let Some(expr) = self.expressions.get(&address.raw()) { @@ -183,20 +202,20 @@ impl Block { } } - fn keep_last_stores_for(&mut self, address: ResolvedValueId, function: &Function) { + fn keep_last_stores_for(&mut self, address: ValueId, function: &Function) { self.keep_last_store(address, function); self.for_each_alias_of(address, |t, alias| { - t.keep_last_store(function.dfg.resolve(alias), function); + t.keep_last_store(function.dfg.resolve(alias).into(), function); }); } - fn keep_last_store(&mut self, address: ResolvedValueId, function: &Function) { + fn keep_last_store(&mut self, address: ValueId, function: &Function) { if let Some(instruction) = self.last_stores.remove(&address) { // Whenever we decide we want to keep a store instruction, we also need // to go through its stored value and mark that used as well. match &function.dfg[instruction] { Instruction::Store { value, .. 
} => { - self.mark_value_used(function.dfg.resolve(*value), function); + self.mark_value_used(function.dfg.resolve(*value).into(), function); } other => { unreachable!("last_store held an id of a non-store instruction: {other:?}") @@ -205,14 +224,14 @@ impl Block { } } - pub(super) fn mark_value_used(&mut self, value: ResolvedValueId, function: &Function) { + pub(super) fn mark_value_used(&mut self, value: ValueId, function: &Function) { self.keep_last_stores_for(value, function); // We must do a recursive check for arrays since they're the only Values which may contain // other ValueIds. if let Some((array, _)) = function.dfg.get_array_constant(value) { for value in array { - self.mark_value_used(function.dfg.resolve(value), function); + self.mark_value_used(function.dfg.resolve(value).into(), function); } } } From efc9a618d564f00c4fd874f055c43961ac2f4fd9 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Tue, 19 Nov 2024 00:10:15 +0000 Subject: [PATCH 16/19] Add lifetime to Resolved so it is non-trivial to persist it --- .../src/brillig/brillig_gen.rs | 4 +-- compiler/noirc_evaluator/src/ssa/ir/dfg.rs | 6 ++-- .../src/ssa/ir/function_inserter.rs | 8 ++--- compiler/noirc_evaluator/src/ssa/ir/value.rs | 32 +++++++++++++------ .../noirc_evaluator/src/ssa/opt/array_set.rs | 4 +-- .../src/ssa/opt/mem2reg/block.rs | 2 +- 6 files changed, 34 insertions(+), 22 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs index 30920fd300d..8e52d9b31db 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs @@ -26,8 +26,8 @@ impl IsResolved for FinalResolved {} type FinalValueId = ValueId; -impl From for FinalValueId { - fn from(value: ResolvedValueId) -> Self { +impl From> for FinalValueId { + fn from(value: ResolvedValueId<'_>) -> Self { ValueId::new(value.raw()) } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 09862c10920..db319514d96 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -250,11 +250,11 @@ impl DataFlowGraph { /// is returned. pub(crate) fn resolve(&self, original_value_id: ValueId) -> ResolvedValueId { if R::is_resolved() { - return ResolvedValueId::new(original_value_id.raw()); + return ValueId::new(original_value_id.raw()); } match self.replaced_value_ids.get(original_value_id.as_ref()) { Some(id) => self.resolve(*id), - None => ResolvedValueId::new(original_value_id.raw()), + None => ValueId::new(original_value_id.raw()), } } @@ -350,7 +350,7 @@ impl DataFlowGraph { /// Resolve and get a value by ID fn resolve_value(&self, original_value_id: ValueId) -> &Value { - let id = self.resolve(original_value_id.unresolved()).raw(); + let id = self.resolve(original_value_id).raw(); &self.values[id] } diff --git a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index ae9681b5c06..431d06a97e4 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -43,11 +43,11 @@ impl<'f> FunctionInserter<'f> { /// Resolves a ValueId to its new, updated value. /// If there is no updated value for this id, this returns the same /// ValueId that was passed in. 
- pub(crate) fn resolve(&mut self, value: ValueId) -> ResolvedValueId { + pub(crate) fn resolve(&mut self, value: ValueId) -> ResolvedValueId<'f> { let value = self.function.dfg.resolve(value); match self.values.get(&value.raw()) { Some(value) => self.resolve(*value), - None => value, + None => ValueId::new(value.raw()), } } @@ -119,7 +119,7 @@ impl<'f> FunctionInserter<'f> { call_stack: CallStack, ) -> InsertInstructionResult { let results = self.function.dfg.instruction_results(id); - let results = vecmap(results, |id| self.function.dfg.resolve(*id)); + let results = vecmap(results, |id| self.function.dfg.resolve(*id).detach()); let ctrl_typevars = instruction .requires_ctrl_typevars() @@ -209,7 +209,7 @@ impl<'f> FunctionInserter<'f> { /// ValueId (from the source_function) and its new ValueId in the destination function. pub(crate) fn insert_new_instruction_results( values: &mut HashMap, - old_results: &[ResolvedValueId], + old_results: &[ResolvedValueId<'f>], new_results: &InsertInstructionResult, ) { assert_eq!(old_results.len(), new_results.len()); diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index 3654d7d122d..d81daaab05f 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -15,13 +15,24 @@ use super::{ #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) enum Unresolved {} -/// Resolved marker; doesn't implement `Hash` so it can't be stored in maps. +/// Marker for resolved status. +/// +/// Doesn't implement `Hash` so it can't be stored in maps. +/// It has a lifetime so it's not easy to store it in data structures, +/// where it could become stale. Instead we can implement module specific +/// variants when we can prove that persisting them is safe because the +/// IDs are not going to be changed between use. +/// #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize /* PartialOrd, Ord, Hash */)] -pub(crate) enum Resolved {} +pub(crate) struct Resolved<'a> { + _marker: PhantomData<&'a ()>, +} + +pub(crate) type ResolvedValueId<'a> = ValueId>; pub(crate) trait IsResolved {} -impl IsResolved for Resolved {} +impl<'a> IsResolved for Resolved<'a> {} pub(crate) trait Resolution { fn is_resolved() -> bool; @@ -39,9 +50,6 @@ impl Resolution for R { } } -/// A resolved value ID is something we can directly compare. -pub(crate) type ResolvedValueId = ValueId; - /// A raw value ID that can be used as a key in maps. pub(crate) type RawValueId = Id; @@ -79,8 +87,15 @@ impl ValueId { self.id == other.id } /// Promote an unresolved ID into a resolved one. - pub(crate) fn resolved(self) -> ValueId { - ValueId::new(Id::new(self.id.to_usize())) + pub(crate) fn resolved(self) -> ValueId> { + ValueId::new(self.id) + } +} + +impl<'a> ValueId> { + /// Change the lifetime of a resolution. 
+ pub(crate) fn detach<'b>(self) -> ValueId> { + ValueId::new(self.id) } } @@ -119,7 +134,6 @@ impl From> for ValueId { value.unresolved() } } - impl From> for ValueId { fn from(value: Id) -> Self { ValueId::new(value) diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs index 0a023b31e58..117f6147ba8 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/array_set.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/array_set.rs @@ -59,9 +59,7 @@ impl Function { #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub(super) enum ContextResolved {} -//impl IsResolved for ContextResolved {} - -impl From for ValueId { +impl From> for ValueId { fn from(value: ResolvedValueId) -> Self { ValueId::new(value.raw()) } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs index 191a803bb9d..724cac91527 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs @@ -14,7 +14,7 @@ pub(super) enum ContextResolved {} impl IsResolved for ContextResolved {} -impl From for ValueId { +impl From> for ValueId { fn from(value: ResolvedValueId) -> Self { ValueId::new(value.raw()) } From cc4b64de149cc471dc876f2d2b4f049237e11306 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Tue, 19 Nov 2024 00:29:19 +0000 Subject: [PATCH 17/19] Remove use of .resolved() except in tests --- .../src/brillig/brillig_gen/brillig_block.rs | 9 ++++----- .../src/brillig/brillig_gen/variable_liveness.rs | 6 +++--- .../noirc_evaluator/src/ssa/function_builder/mod.rs | 12 ++++++------ compiler/noirc_evaluator/src/ssa/ir/instruction.rs | 2 +- compiler/noirc_evaluator/src/ssa/ir/value.rs | 2 +- compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs | 6 +++--- 6 files changed, 18 insertions(+), 19 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 076a3bbca87..0277de413ba 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -9,7 +9,7 @@ use crate::brillig::brillig_ir::{ }; use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::instruction::ConstrainError; -use crate::ssa::ir::value::{Resolution, ResolvedValueId}; +use crate::ssa::ir::value::{RawValueId, Resolution}; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -1575,13 +1575,12 @@ impl<'block> BrilligBlock<'block> { let item_types = typ.clone().element_types(); // Find out if we are repeating the same item over and over - let first_item: Vec = - data.iter().take(item_types.len()).map(|v| v.resolved()).collect(); + let first_item: Vec = + data.iter().take(item_types.len()).map(|v| v.raw()).collect(); let mut is_repeating = true; for item_index in (item_types.len()..data.len()).step_by(item_types.len()) { - let item: Vec<_> = - (0..item_types.len()).map(|i| data[item_index + i].resolved()).collect(); + let item: Vec<_> = (0..item_types.len()).map(|i| data[item_index + i].raw()).collect(); if first_item != item { is_repeating = false; break; diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index 1562404734b..1229b37d661 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ 
b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -86,7 +86,7 @@ fn variables_used_in_block(block: &BasicBlock, dfg: &DataFlowGraph) -> Variables .collect(); // We consider block parameters used, so they live up to the block that owns them. - used.extend(block.parameters().iter().map(|p| FinalValueId::from(p.resolved()))); + used.extend(block.parameters().iter().map(|p| FinalValueId::new(p.raw()))); if let Some(terminator) = block.terminator() { terminator.for_each_value(|value_id| { @@ -341,7 +341,7 @@ mod test { use super::{FinalValueId, ValueId}; fn resolved_set(it: &[ValueId]) -> FxHashSet { - FxHashSet::from_iter(it.iter().map(|v| v.resolved().into())) + FxHashSet::from_iter(it.iter().map(|v| FinalValueId::new(v.raw()))) } #[test] @@ -637,7 +637,7 @@ mod test { liveness .defined_block_params(&block_id) .iter() - .map(|p| p.resolved().into()) + .map(|p| FinalValueId::new(p.raw())) .collect::() }; diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 14589c12ef3..1bd4cfa54fe 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -521,8 +521,8 @@ mod tests { // let bits: [u1; 8] = x.to_le_bits(); let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id); - let one = builder.numeric_constant(FieldElement::one(), Type::bool()).resolved(); - let zero = builder.numeric_constant(FieldElement::zero(), Type::bool()).resolved(); + let one = builder.numeric_constant(FieldElement::one(), Type::bool()).raw(); + let zero = builder.numeric_constant(FieldElement::zero(), Type::bool()).raw(); let to_bits_id = builder.import_intrinsic_id(Intrinsic::ToBits(Endian::Little)); let input = builder.numeric_constant(FieldElement::from(7_u128), Type::field()); @@ -532,9 +532,9 @@ mod tests { builder.insert_call(to_bits_id, vec![input, length], result_types).into_owned(); let slice = builder.current_function.dfg.get_array_constant(call_results[0]).unwrap().0; - assert_eq!(slice[0].resolved(), one); - assert_eq!(slice[1].resolved(), one); - assert_eq!(slice[2].resolved(), one); - assert_eq!(slice[3].resolved(), zero); + assert_eq!(slice[0].raw(), one); + assert_eq!(slice[1].raw(), one); + assert_eq!(slice[2].raw(), one); + assert_eq!(slice[3].raw(), zero); } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 2dbf8ba0bb2..c2cdd89d745 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -814,7 +814,7 @@ impl Instruction { /// Pretend the value IDs have been resolved. #[cfg(test)] pub(crate) fn resolved(&self) -> Instruction { - self.map_values(|v| v.resolved()) + self.map_values(|v| ValueId::new(v.raw())) } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index d81daaab05f..36e5c0587a3 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -86,7 +86,7 @@ impl ValueId { pub(crate) fn unresolved_eq(&self, other: &Self) -> bool { self.id == other.id } - /// Promote an unresolved ID into a resolved one. 
+ #[cfg(test)] pub(crate) fn resolved(self) -> ValueId> { ValueId::new(self.id) } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index e082b726d0c..049d6664fe1 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -349,7 +349,7 @@ impl<'f> PerFunctionContext<'f> { let first = aliases.first(); let first = first.expect("All parameters alias at least themselves or we early return"); - let expression = Expression::Other(first.resolved().into()); + let expression = Expression::Other(ValueId::new(first.raw())); let previous = references.aliases.insert(expression.clone(), aliases.clone()); assert!(previous.is_none()); @@ -444,7 +444,7 @@ impl<'f> PerFunctionContext<'f> { Instruction::Allocate => { // Register the new reference let result = self.inserter.function.dfg.instruction_results(instruction)[0]; - let expr = Expression::Other(result.resolved().into()); + let expr = Expression::Other(ValueId::new(result.raw())); references.expressions.insert(result.raw(), expr.clone()); references.aliases.insert(expr, AliasSet::known(result)); } @@ -737,7 +737,7 @@ mod tests { // Since the mem2reg pass simplifies as it goes, the id of the allocate instruction result // is most likely no longer v0. We have to retrieve the new id here. let allocate_id = func.dfg.instruction_results(instructions[0])[0]; - assert_eq!(ret_val_id.resolved(), allocate_id.resolved()); + assert_eq!(ret_val_id.raw(), allocate_id.raw()); } fn count_stores(block: BasicBlockId, dfg: &DataFlowGraph) -> usize { From 94843cd63dd8d11bf907283410a351d1d4cb49a9 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Tue, 19 Nov 2024 00:42:25 +0000 Subject: [PATCH 18/19] Try to set stack size to 8MB --- .github/workflows/test-rust-workspace-msrv.yml | 17 +++++++++-------- .github/workflows/test-rust-workspace.yml | 11 ++++++----- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/.github/workflows/test-rust-workspace-msrv.yml b/.github/workflows/test-rust-workspace-msrv.yml index ae016169830..22f0e42b9c4 100644 --- a/.github/workflows/test-rust-workspace-msrv.yml +++ b/.github/workflows/test-rust-workspace-msrv.yml @@ -3,7 +3,7 @@ name: Test (MSRV check) # TL;DR https://github.com/noir-lang/noir/issues/4384 # # This workflow acts to ensure that we can publish to crates.io, we need this extra check as libraries don't respect the Cargo.lock file committed in this repository. -# We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. +# We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. on: schedule: @@ -34,7 +34,7 @@ jobs: targets: x86_64-unknown-linux-gnu # We force the ACVM crate and all of its dependencies to update their dependencies - # This ensures that we'll be able to build the crates when they're being published. + # This ensures that we'll be able to build the crates when they're being published. 
- name: Update Cargo.lock run: | cargo update --package acvm --aggressive @@ -53,7 +53,7 @@ jobs: - name: Build and archive tests run: cargo nextest archive --workspace --release --archive-file nextest-archive.tar.zst - + - name: Upload archive to workflow uses: actions/upload-artifact@v4 with: @@ -70,7 +70,7 @@ jobs: partition: [1, 2, 3, 4] steps: - uses: actions/checkout@v4 - + - name: Setup toolchain uses: dtolnay/rust-toolchain@1.74.1 with: @@ -80,13 +80,14 @@ jobs: uses: taiki-e/install-action@v2 with: tool: nextest@0.9.67 - + - name: Download archive uses: actions/download-artifact@v4 with: name: nextest-archive - name: Run tests run: | + RUST_MIN_STACK=8388608 \ cargo nextest run --archive-file nextest-archive.tar.zst \ --partition count:${{ matrix.partition }}/4 \ --no-fail-fast @@ -98,9 +99,9 @@ jobs: runs-on: ubuntu-latest # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. if: ${{ always() }} - needs: + needs: - run-tests - + steps: - name: Report overall success run: | @@ -112,7 +113,7 @@ jobs: env: # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }} - + - name: Checkout if: ${{ failure() }} uses: actions/checkout@v4 diff --git a/.github/workflows/test-rust-workspace.yml b/.github/workflows/test-rust-workspace.yml index 1f3ee5e2268..ead1d3002c9 100644 --- a/.github/workflows/test-rust-workspace.yml +++ b/.github/workflows/test-rust-workspace.yml @@ -40,7 +40,7 @@ jobs: - name: Build and archive tests run: cargo nextest archive --workspace --release --archive-file nextest-archive.tar.zst - + - name: Upload archive to workflow uses: actions/upload-artifact@v4 with: @@ -57,7 +57,7 @@ jobs: partition: [1, 2, 3, 4] steps: - uses: actions/checkout@v4 - + - name: Setup toolchain uses: dtolnay/rust-toolchain@1.74.1 with: @@ -67,13 +67,14 @@ jobs: uses: taiki-e/install-action@v2 with: tool: nextest@0.9.67 - + - name: Download archive uses: actions/download-artifact@v4 with: name: nextest-archive - name: Run tests run: | + RUST_MIN_STACK=8388608 \ cargo nextest run --archive-file nextest-archive.tar.zst \ --partition count:${{ matrix.partition }}/4 \ --no-fail-fast @@ -85,9 +86,9 @@ jobs: runs-on: ubuntu-latest # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. if: ${{ always() }} - needs: + needs: - run-tests - + steps: - name: Report overall success run: | From cfe03b08574767bffe2fddaf6c16ae65e8a547fc Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Tue, 19 Nov 2024 10:30:29 +0000 Subject: [PATCH 19/19] Remove ContextResolved and FinalResolved where it's easy. Use just the lifetime. 
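Editor's note on this commit (illustrative, not part of the patch): the sketch below uses simplified stand-in names to show why tying the `Resolved` marker to a lifetime keeps freshly resolved IDs from being stashed in long-lived state, while a module-local marker can still opt back in where that is known to be safe.

use std::marker::PhantomData;

struct Dfg; // replacement table elided

// The lifetime ties a resolved ID to the borrow of the DFG that produced it.
#[derive(Clone, Copy)]
struct ResolvedId<'dfg> {
    raw: usize,
    _dfg: PhantomData<&'dfg Dfg>,
}

impl Dfg {
    fn resolve(&self, raw: usize) -> ResolvedId<'_> {
        // The real code follows `replaced_value_ids` to a fixed point first.
        ResolvedId { raw, _dfg: PhantomData }
    }
}

// A pass can hold resolved IDs only while the shared borrow of the DFG is alive;
// while any such ID derived from that borrow is still in use, taking `&mut`
// access to the DFG is a compile error, which is the staleness guard the
// lifetime buys.
struct Pass<'dfg> {
    dfg: &'dfg Dfg,
    cache: Vec<ResolvedId<'dfg>>,
}

fn main() {
    let dfg = Dfg;
    let id = dfg.resolve(7);
    let mut pass = Pass { dfg: &dfg, cache: Vec::new() };
    pass.cache.push(id);
    println!("cached raw id {}", pass.cache[0].raw);
}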
--- .../src/brillig/brillig_gen.rs | 19 +----- .../brillig/brillig_gen/brillig_black_box.rs | 2 +- .../src/brillig/brillig_gen/brillig_block.rs | 25 ++++--- .../brillig_gen/brillig_block_variables.rs | 26 +++---- .../src/brillig/brillig_gen/brillig_fn.rs | 17 +++-- .../brillig_gen/constant_allocation.rs | 27 ++++---- .../brillig/brillig_gen/variable_liveness.rs | 26 +++---- compiler/noirc_evaluator/src/ssa/ir/value.rs | 15 ++-- .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 29 ++++---- .../src/ssa/opt/mem2reg/block.rs | 68 ++++++++----------- 10 files changed, 113 insertions(+), 141 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs index 8e52d9b31db..786a03031d6 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen.rs @@ -13,24 +13,7 @@ use super::brillig_ir::{ artifact::{BrilligArtifact, Label}, BrilligContext, }; -use crate::ssa::ir::{ - function::Function, - value::{IsResolved, ResolvedValueId, ValueId}, -}; - -/// Private resolution type, to limit the scope of storing them in data structures. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -struct FinalResolved {} - -impl IsResolved for FinalResolved {} - -type FinalValueId = ValueId; - -impl From> for FinalValueId { - fn from(value: ResolvedValueId<'_>) -> Self { - ValueId::new(value.raw()) - } -} +use crate::ssa::ir::function::Function; /// Converting an SSA function into Brillig bytecode. pub(crate) fn convert_ssa_function( diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index ba6c75bc38b..3685c9540f3 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -14,7 +14,7 @@ use crate::brillig::brillig_ir::{ /// Transforms SSA's black box function calls into the corresponding brillig instructions /// Extracting arguments and results from the SSA function call /// And making any necessary type conversions to adapt noir's blackbox calls to brillig's -pub(super) fn convert_black_box_call( +pub(crate) fn convert_black_box_call( brillig_context: &mut BrilligContext, bb_func: &BlackBoxFunc, function_arguments: &[BrilligVariable], diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 0277de413ba..bbd9c6abaa4 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -9,7 +9,7 @@ use crate::brillig::brillig_ir::{ }; use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::instruction::ConstrainError; -use crate::ssa::ir::value::{RawValueId, Resolution}; +use crate::ssa::ir::value::{FinalValueId, RawValueId, Resolution}; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -31,24 +31,23 @@ use super::brillig_black_box::convert_black_box_call; use super::brillig_block_variables::BlockVariables; use super::brillig_fn::FunctionContext; use super::constant_allocation::InstructionLocation; -use super::FinalValueId; /// Generate the compilation artifacts for compiling a function into brillig bytecode. 
-pub(super) struct BrilligBlock<'block> { - pub(super) function_context: &'block mut FunctionContext, +pub(crate) struct BrilligBlock<'block> { + pub(crate) function_context: &'block mut FunctionContext, /// The basic block that is being converted - pub(super) block_id: BasicBlockId, + pub(crate) block_id: BasicBlockId, /// Context for creating brillig opcodes - pub(super) brillig_context: &'block mut BrilligContext, + pub(crate) brillig_context: &'block mut BrilligContext, /// Tracks the available variable during the codegen of the block - pub(super) variables: BlockVariables, + pub(crate) variables: BlockVariables, /// For each instruction, the set of values that are not used anymore after it. - pub(super) last_uses: HashMap>, + pub(crate) last_uses: HashMap>, } impl<'block> BrilligBlock<'block> { /// Converts an SSA Basic block into a sequence of Brillig opcodes - pub(super) fn compile( + pub(crate) fn compile( function_context: &'block mut FunctionContext, brillig_context: &'block mut BrilligContext, block_id: BasicBlockId, @@ -151,7 +150,7 @@ impl<'block> BrilligBlock<'block> { let target_block = &dfg[*destination_block]; for (src, dest) in arguments.iter().zip(target_block.parameters()) { // Destinations are block parameters so they should have been allocated previously. - let dest = dfg.resolve(*dest).into(); + let dest = dfg.resolve(*dest).detach(); let destination = self.variables.get_allocation(self.function_context, dest); let source = self.convert_ssa_value(*src, dfg); self.brillig_context @@ -763,7 +762,7 @@ impl<'block> BrilligBlock<'block> { } Instruction::MakeArray { elements: array, typ } => { let value_id = dfg.instruction_results(instruction_id)[0]; - if !self.variables.is_allocated(dfg.resolve(value_id).into()) { + if !self.variables.is_allocated(dfg.resolve(value_id).detach()) { let new_variable = self.variables.define_variable( self.function_context, self.brillig_context, @@ -1511,7 +1510,7 @@ impl<'block> BrilligBlock<'block> { value_id: ValueId, dfg: &DataFlowGraph, ) -> BrilligVariable { - let value_id = dfg.resolve(value_id).into(); + let value_id = dfg.resolve(value_id).detach(); let value = &dfg[value_id]; match value { @@ -1849,7 +1848,7 @@ impl<'block> BrilligBlock<'block> { } /// Returns the type of the operation considering the types of the operands -pub(super) fn type_of_binary_operation(lhs_type: &Type, rhs_type: &Type, op: BinaryOp) -> Type { +pub(crate) fn type_of_binary_operation(lhs_type: &Type, rhs_type: &Type, op: BinaryOp) -> Type { match (lhs_type, rhs_type) { (_, Type::Function) | (Type::Function, _) => { unreachable!("Functions are invalid in binary operations") diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index 2a449b36527..ddf7d223b43 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -13,14 +13,14 @@ use crate::{ ssa::ir::{ dfg::DataFlowGraph, types::{CompositeType, Type}, - value::ValueId, + value::{FinalValueId, ValueId}, }, }; -use super::{brillig_fn::FunctionContext, FinalValueId}; +use super::brillig_fn::FunctionContext; #[derive(Debug, Default)] -pub(super) struct BlockVariables { +pub(crate) struct BlockVariables { // Since we're generating Brillig bytecode here, there shouldn't be any more value ID replacements // and we can use the final resolved ID. 
available_variables: HashSet, @@ -28,12 +28,12 @@ pub(super) struct BlockVariables { impl BlockVariables { /// Creates a BlockVariables instance. It uses the variables that are live in to the block and the global available variables (block parameters) - pub(super) fn new(live_in: HashSet) -> Self { + pub(crate) fn new(live_in: HashSet) -> Self { BlockVariables { available_variables: live_in } } /// Returns all variables that have not been removed at this point. - pub(super) fn get_available_variables( + pub(crate) fn get_available_variables( &self, function_context: &FunctionContext, ) -> Vec { @@ -50,14 +50,14 @@ impl BlockVariables { } /// For a given SSA value id, define the variable and return the corresponding cached allocation. - pub(super) fn define_variable( + pub(crate) fn define_variable( &mut self, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, value_id: ValueId, dfg: &DataFlowGraph, ) -> BrilligVariable { - let value_id = dfg.resolve(value_id).into(); + let value_id = dfg.resolve(value_id).detach(); let variable = allocate_value(value_id, brillig_context, dfg); if function_context.ssa_value_allocations.insert(value_id, variable).is_some() { @@ -70,7 +70,7 @@ impl BlockVariables { } /// Defines a variable that fits in a single register and returns the allocated register. - pub(super) fn define_single_addr_variable( + pub(crate) fn define_single_addr_variable( &mut self, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, @@ -82,7 +82,7 @@ impl BlockVariables { } /// Removes a variable so it's not used anymore within this block. - pub(super) fn remove_variable( + pub(crate) fn remove_variable( &mut self, value_id: FinalValueId, function_context: &mut FunctionContext, @@ -97,12 +97,12 @@ impl BlockVariables { } /// Checks if a variable is allocated. - pub(super) fn is_allocated(&self, value_id: FinalValueId) -> bool { + pub(crate) fn is_allocated(&self, value_id: FinalValueId) -> bool { self.available_variables.contains(&value_id) } /// For a given SSA value id, return the corresponding cached allocation. - pub(super) fn get_allocation( + pub(crate) fn get_allocation( &mut self, function_context: &FunctionContext, value_id: FinalValueId, @@ -120,12 +120,12 @@ impl BlockVariables { } /// Computes the length of an array. This will match with the indexes that SSA will issue -pub(super) fn compute_array_length(item_typ: &CompositeType, elem_count: usize) -> usize { +pub(crate) fn compute_array_length(item_typ: &CompositeType, elem_count: usize) -> usize { item_typ.len() * elem_count } /// For a given value_id, allocates the necessary registers to hold it. 
-pub(super) fn allocate_value( +pub(crate) fn allocate_value( value_id: FinalValueId, brillig_context: &mut BrilligContext, dfg: &DataFlowGraph, diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index ec5aa5b0783..d7a0883f054 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -10,29 +10,28 @@ use crate::{ function::{Function, FunctionId}, post_order::PostOrder, types::Type, + value::FinalValueId, }, }; use fxhash::FxHashMap as HashMap; -use super::{ - constant_allocation::ConstantAllocation, variable_liveness::VariableLiveness, FinalValueId, -}; +use super::{constant_allocation::ConstantAllocation, variable_liveness::VariableLiveness}; pub(crate) struct FunctionContext { - pub(super) function_id: FunctionId, + pub(crate) function_id: FunctionId, /// Map from SSA values its allocation. Since values can be only defined once in SSA form, we insert them here on when we allocate them at their definition. - pub(super) ssa_value_allocations: HashMap, + pub(crate) ssa_value_allocations: HashMap, /// The block ids of the function in reverse post order. - pub(super) blocks: Vec, + pub(crate) blocks: Vec, /// Liveness information for each variable in the function. - pub(super) liveness: VariableLiveness, + pub(crate) liveness: VariableLiveness, /// Information on where to allocate constants - pub(super) constant_allocation: ConstantAllocation, + pub(crate) constant_allocation: ConstantAllocation, } impl FunctionContext { /// Creates a new function context. It will allocate parameters for all blocks and compute the liveness of every variable. - pub(super) fn new(function: &Function) -> Self { + pub(crate) fn new(function: &Function) -> Self { let id = function.id(); let mut reverse_post_order = Vec::new(); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs index 9a7af265bf7..655d292d847 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/constant_allocation.rs @@ -4,22 +4,25 @@ use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::ssa::ir::{ - basic_block::BasicBlockId, cfg::ControlFlowGraph, dfg::DataFlowGraph, dom::DominatorTree, - function::Function, instruction::InstructionId, post_order::PostOrder, value::Value, + basic_block::BasicBlockId, + cfg::ControlFlowGraph, + dfg::DataFlowGraph, + dom::DominatorTree, + function::Function, + instruction::InstructionId, + post_order::PostOrder, + value::{FinalValueId, Value}, }; -use super::{ - variable_liveness::{collect_variables_of_value, variables_used_in_instruction}, - FinalValueId, -}; +use super::variable_liveness::{collect_variables_of_value, variables_used_in_instruction}; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub(super) enum InstructionLocation { +pub(crate) enum InstructionLocation { Instruction(InstructionId), Terminator, } -pub(super) struct ConstantAllocation { +pub(crate) struct ConstantAllocation { constant_usage: HashMap>>, allocation_points: HashMap>>, dominator_tree: DominatorTree, @@ -27,7 +30,7 @@ pub(super) struct ConstantAllocation { } impl ConstantAllocation { - pub(super) fn from_function(func: &Function) -> Self { + pub(crate) fn from_function(func: &Function) -> Self { let cfg = ControlFlowGraph::with_function(func); let 
post_order = PostOrder::with_function(func); let mut dominator_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order); @@ -44,13 +47,13 @@ impl ConstantAllocation { instance } - pub(super) fn allocated_in_block(&self, block_id: BasicBlockId) -> Vec { + pub(crate) fn allocated_in_block(&self, block_id: BasicBlockId) -> Vec { self.allocation_points.get(&block_id).map_or(Vec::default(), |allocations| { allocations.iter().flat_map(|(_, constants)| constants.iter()).copied().collect() }) } - pub(super) fn allocated_at_location( + pub(crate) fn allocated_at_location( &self, block_id: BasicBlockId, location: InstructionLocation, @@ -161,7 +164,7 @@ impl ConstantAllocation { } } -pub(super) fn is_constant_value(id: FinalValueId, dfg: &DataFlowGraph) -> bool { +pub(crate) fn is_constant_value(id: FinalValueId, dfg: &DataFlowGraph) -> bool { matches!(&dfg[id], Value::NumericConstant { .. }) } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index 1229b37d661..45a804c941e 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -9,12 +9,12 @@ use crate::ssa::ir::{ function::Function, instruction::{Instruction, InstructionId}, post_order::PostOrder, - value::{Value, ValueId}, + value::{FinalValueId, Value, ValueId}, }; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; -use super::{constant_allocation::ConstantAllocation, FinalValueId}; +use super::constant_allocation::ConstantAllocation; /// A back edge is an edge from a node to one of its ancestors. It denotes a loop in the CFG. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -45,11 +45,11 @@ fn find_back_edges( } /// Collects the underlying variables inside a value id. It might be more than one, for example in constant arrays that are constructed with multiple vars. -pub(super) fn collect_variables_of_value( +pub(crate) fn collect_variables_of_value( value_id: ValueId, dfg: &DataFlowGraph, ) -> Option { - let value_id = dfg.resolve(value_id).into(); + let value_id = dfg.resolve(value_id).detach(); let value = &dfg[value_id]; match value { @@ -61,7 +61,7 @@ pub(super) fn collect_variables_of_value( } } -pub(super) fn variables_used_in_instruction( +pub(crate) fn variables_used_in_instruction( instruction: &Instruction, dfg: &DataFlowGraph, ) -> Variables { @@ -113,7 +113,7 @@ fn compute_used_before_def( type LastUses = HashMap; /// A struct representing the liveness of variables throughout a function. -pub(super) struct VariableLiveness { +pub(crate) struct VariableLiveness { cfg: ControlFlowGraph, post_order: PostOrder, dominator_tree: DominatorTree, @@ -127,7 +127,7 @@ pub(super) struct VariableLiveness { impl VariableLiveness { /// Computes the liveness of variables throughout a function. 
- pub(super) fn from_function(func: &Function, constants: &ConstantAllocation) -> Self { + pub(crate) fn from_function(func: &Function, constants: &ConstantAllocation) -> Self { let cfg = ControlFlowGraph::with_function(func); let post_order = PostOrder::with_function(func); let dominator_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order); @@ -151,12 +151,12 @@ impl VariableLiveness { } /// The set of values that are alive before the block starts executing - pub(super) fn get_live_in(&self, block_id: &BasicBlockId) -> &Variables { + pub(crate) fn get_live_in(&self, block_id: &BasicBlockId) -> &Variables { self.live_in.get(block_id).expect("Live ins should have been calculated") } /// The set of values that are alive after the block has finished executed - pub(super) fn get_live_out(&self, block_id: &BasicBlockId) -> Variables { + pub(crate) fn get_live_out(&self, block_id: &BasicBlockId) -> Variables { let mut live_out = HashSet::default(); for successor_id in self.cfg.successors(*block_id) { live_out.extend(self.get_live_in(&successor_id)); @@ -165,14 +165,14 @@ impl VariableLiveness { } /// A map of instruction id to the set of values that die after the instruction has executed - pub(super) fn get_last_uses(&self, block_id: &BasicBlockId) -> &LastUses { + pub(crate) fn get_last_uses(&self, block_id: &BasicBlockId) -> &LastUses { self.last_uses.get(block_id).expect("Last uses should have been calculated") } /// Retrieves the list of block params the given block is defining. /// Block params are defined before the block that owns them (since they are used by the predecessor blocks). They must be defined in the immediate dominator. /// This is the last point where the block param can be allocated without it being allocated in different places in different branches. - pub(super) fn defined_block_params(&self, block_id: &BasicBlockId) -> Vec { + pub(crate) fn defined_block_params(&self, block_id: &BasicBlockId) -> Vec { self.param_definitions.get(block_id).cloned().unwrap_or_default() } @@ -244,13 +244,13 @@ impl VariableLiveness { let mut defined_vars = HashSet::default(); for parameter in self.defined_block_params(&block_id) { - defined_vars.insert(dfg.resolve(parameter).into()); + defined_vars.insert(dfg.resolve(parameter).detach()); } for instruction_id in block.instructions() { let result_values = dfg.instruction_results(*instruction_id); for result_value in result_values { - defined_vars.insert(dfg.resolve(*result_value).into()); + defined_vars.insert(dfg.resolve(*result_value).detach()); } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index 36e5c0587a3..b89585503dd 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -17,18 +17,15 @@ pub(crate) enum Unresolved {} /// Marker for resolved status. /// -/// Doesn't implement `Hash` so it can't be stored in maps. -/// It has a lifetime so it's not easy to store it in data structures, -/// where it could become stale. Instead we can implement module specific -/// variants when we can prove that persisting them is safe because the -/// IDs are not going to be changed between use. -/// -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize /* PartialOrd, Ord, Hash */)] +/// It has a lifetime so it's not easy to store it in data structures forever, +/// where it could become stale. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, PartialOrd, Ord, Hash)]
 pub(crate) struct Resolved<'a> {
     _marker: PhantomData<&'a ()>,
 }
 
 pub(crate) type ResolvedValueId<'a> = ValueId<Resolved<'a>>;
+pub(crate) type FinalValueId = ValueId<Resolved<'static>>;
 
 pub(crate) trait IsResolved {}
 
@@ -94,6 +91,10 @@ impl ValueId {
 
 impl<'a> ValueId<Resolved<'a>> {
     /// Change the lifetime of a resolution.
+    ///
+    /// This is typically used to detach the lifetime of a resolved value ID
+    /// from the `DataFlowGraph` which was used to resolve it, so that it
+    /// can live in a different context.
     pub(crate) fn detach<'b>(self) -> ValueId<Resolved<'b>> {
         ValueId::new(self.id)
     }
diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs
index 049d6664fe1..85aadf64f94 100644
--- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs
+++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs
@@ -66,7 +66,6 @@ mod block;
 
 use std::collections::{BTreeMap, BTreeSet};
 
-use block::ContextResolved;
 use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet};
 
 use crate::ssa::{
@@ -78,7 +77,7 @@ use crate::ssa::{
         instruction::{Instruction, InstructionId, TerminatorInstruction},
         post_order::PostOrder,
         types::Type,
-        value::{RawValueId, ValueId},
+        value::{RawValueId, ResolvedValueId, ValueId},
     },
     ssa_gen::Ssa,
 };
@@ -111,7 +110,7 @@ struct PerFunctionContext<'f> {
     cfg: ControlFlowGraph,
     post_order: PostOrder,
 
-    blocks: BTreeMap<BasicBlockId, Block>,
+    blocks: BTreeMap<BasicBlockId, Block<'f>>,
 
     inserter: FunctionInserter<'f>,
 
@@ -270,7 +269,7 @@ impl<'f> PerFunctionContext<'f> {
 
     /// The value of each reference at the start of the given block is the unification
     /// of the value of the same reference at the end of its predecessor blocks.
-    fn find_starting_references(&mut self, block: BasicBlockId) -> Block {
+    fn find_starting_references(&mut self, block: BasicBlockId) -> Block<'f> {
        let mut predecessors = self.cfg.predecessors(block);
 
         if let Some(first_predecessor) = predecessors.next() {
@@ -294,7 +293,7 @@ impl<'f> PerFunctionContext<'f> {
     /// This will remove any known loads in the block and track the value of references
     /// as they are stored to. When this function is finished, the value of each reference
     /// at the end of this block will be remembered in `self.blocks`.
- fn analyze_block(&mut self, block: BasicBlockId, mut references: Block) { + fn analyze_block(&mut self, block: BasicBlockId, mut references: Block<'f>) { let instructions = self.inserter.function.dfg[block].take_instructions(); // If this is the entry block, take all the block parameters and assume they may @@ -383,7 +382,7 @@ impl<'f> PerFunctionContext<'f> { fn analyze_instruction( &mut self, block_id: BasicBlockId, - references: &mut Block, + references: &mut Block<'f>, mut instruction: InstructionId, ) { // If the instruction was simplified and optimized out of the program we shouldn't analyze @@ -397,7 +396,7 @@ impl<'f> PerFunctionContext<'f> { match &self.inserter.function.dfg[instruction] { Instruction::Load { address } => { - let address = ValueId::from(self.inserter.function.dfg.resolve(*address)); + let address = self.inserter.function.dfg.resolve(*address).detach(); let result = self.inserter.function.dfg.instruction_results(instruction)[0]; references.remember_dereference(self.inserter.function, address, result); @@ -414,8 +413,8 @@ impl<'f> PerFunctionContext<'f> { } } Instruction::Store { address, value } => { - let address = self.inserter.function.dfg.resolve(*address).into(); - let value = self.inserter.function.dfg.resolve(*value).into(); + let address = self.inserter.function.dfg.resolve(*address).detach(); + let value = self.inserter.function.dfg.resolve(*value).detach(); // FIXME: This causes errors in the sha256 tests // @@ -449,7 +448,7 @@ impl<'f> PerFunctionContext<'f> { references.aliases.insert(expr, AliasSet::known(result)); } Instruction::ArrayGet { array, .. } => { - let array = self.inserter.function.dfg.resolve(*array).into(); + let array = self.inserter.function.dfg.resolve(*array).detach(); let result = self.inserter.function.dfg.instruction_results(instruction)[0]; references.mark_value_used(array, self.inserter.function); @@ -461,7 +460,7 @@ impl<'f> PerFunctionContext<'f> { } } Instruction::ArraySet { array, value, .. } => { - let array = self.inserter.function.dfg.resolve(*array).into(); + let array = self.inserter.function.dfg.resolve(*array).detach(); references.mark_value_used(array, self.inserter.function); let element_type = self.inserter.function.dfg.type_of_value(*value); @@ -508,7 +507,7 @@ impl<'f> PerFunctionContext<'f> { // as a potential alias to the array itself. 
if Self::contains_references(typ) { let array = self.inserter.function.dfg.instruction_results(instruction)[0]; - let array = self.inserter.function.dfg.resolve(array).into(); + let array = self.inserter.function.dfg.resolve(array).detach(); let expr = Expression::ArrayElement(Box::new(Expression::Other(array))); references.expressions.insert(array.raw(), expr.clone()); @@ -536,8 +535,8 @@ impl<'f> PerFunctionContext<'f> { fn set_aliases( &self, - references: &mut Block, - address: ValueId, + references: &mut Block<'f>, + address: ResolvedValueId<'f>, new_aliases: AliasSet, ) { let expression = @@ -549,7 +548,7 @@ impl<'f> PerFunctionContext<'f> { fn mark_all_unknown(&self, values: &[ValueId], references: &mut Block) { for value in values { if self.inserter.function.dfg.value_is_reference(*value) { - let value = self.inserter.function.dfg.resolve(*value).into(); + let value = self.inserter.function.dfg.resolve(*value).detach(); references.set_unknown(value); references.mark_value_used(value, self.inserter.function); } diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs index 724cac91527..87fd8056a26 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs @@ -3,23 +3,11 @@ use std::borrow::Cow; use crate::ssa::ir::{ function::Function, instruction::{Instruction, InstructionId}, - value::{IsResolved, RawValueId, ResolvedValueId, ValueId}, + value::{RawValueId, ResolvedValueId, ValueId}, }; use super::alias_set::AliasSet; -/// Private resolution type, to limit the scope of storing them in data structures. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub(super) enum ContextResolved {} - -impl IsResolved for ContextResolved {} - -impl From> for ValueId { - fn from(value: ResolvedValueId) -> Self { - ValueId::new(value.raw()) - } -} - /// A `Block` acts as a per-block context for the mem2reg pass. /// Most notably, it contains the current alias set thought to track each /// reference value if known, and it contains the expected ReferenceValue @@ -27,45 +15,45 @@ impl From> for ValueId { /// are expected to match the values held by each ValueId at the very end /// of a block. #[derive(Debug, Default, Clone)] -pub(super) struct Block { +pub(super) struct Block<'a> { /// Maps a ValueId to the Expression it represents. /// Multiple ValueIds can map to the same Expression, e.g. /// dereferences to the same allocation. - pub(super) expressions: im::OrdMap, + pub(super) expressions: im::OrdMap>, /// Each expression is tracked as to how many aliases it /// may have. If there is only 1, we can attempt to optimize /// out any known loads to that alias. Note that "alias" here /// includes the original reference as well. - pub(super) aliases: im::OrdMap, + pub(super) aliases: im::OrdMap, AliasSet>, /// Each allocate instruction result (and some reference block parameters) /// will map to a Reference value which tracks whether the last value stored /// to the reference is known. - pub(super) references: im::OrdMap, + pub(super) references: im::OrdMap>, /// The last instance of a `Store` instruction to each address in this block - pub(super) last_stores: im::OrdMap, InstructionId>, + pub(super) last_stores: im::OrdMap, InstructionId>, } /// An `Expression` here is used to represent a canonical key /// into the aliases map since otherwise two dereferences of the /// same address will be given different ValueIds. 
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)] -pub(super) enum Expression { - Dereference(Box), - ArrayElement(Box), - Other(ValueId), +pub(super) enum Expression<'a> { + Dereference(Box>), + ArrayElement(Box>), + Other(ResolvedValueId<'a>), } /// Every reference's value is either Known and can be optimized away, or Unknown. #[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub(super) enum ReferenceValue { +pub(super) enum ReferenceValue<'a> { Unknown, - Known(ValueId), + Known(ResolvedValueId<'a>), } -impl ReferenceValue { +impl<'a> ReferenceValue<'a> { fn unify(self, other: Self) -> Self { if self == other { self @@ -75,12 +63,12 @@ impl ReferenceValue { } } -impl Block { +impl<'a> Block<'a> { /// If the given reference id points to a known value, return the value pub(super) fn get_known_value( &self, - address: ValueId, - ) -> Option> { + address: ResolvedValueId<'a>, + ) -> Option> { if let Some(expression) = self.expressions.get(&address.raw()) { if let Some(aliases) = self.aliases.get(expression) { // We could allow multiple aliases if we check that the reference @@ -99,17 +87,17 @@ impl Block { /// If the given address is known, set its value to `ReferenceValue::Known(value)`. pub(super) fn set_known_value( &mut self, - address: ValueId, - value: ValueId, + address: ResolvedValueId<'a>, + value: ResolvedValueId<'a>, ) { self.set_value(address, ReferenceValue::Known(value)); } - pub(super) fn set_unknown(&mut self, address: ValueId) { + pub(super) fn set_unknown(&mut self, address: ResolvedValueId<'a>) { self.set_value(address, ReferenceValue::Unknown); } - fn set_value(&mut self, address: ValueId, value: ReferenceValue) { + fn set_value(&mut self, address: ResolvedValueId<'a>, value: ReferenceValue<'a>) { let expression = self.expressions.entry(address.raw()).or_insert(Expression::Other(address)); let aliases = self.aliases.entry(expression.clone()).or_default(); @@ -171,7 +159,7 @@ impl Block { pub(super) fn remember_dereference( &mut self, function: &Function, - address: ValueId, + address: ResolvedValueId<'a>, result: ValueId, ) { if function.dfg.value_is_reference(result) { @@ -190,7 +178,7 @@ impl Block { /// Iterate through each known alias of the given address and apply the function `f` to each. fn for_each_alias_of( &mut self, - address: ValueId, + address: ResolvedValueId<'a>, mut f: impl FnMut(&mut Self, ValueId) -> T, ) { if let Some(expr) = self.expressions.get(&address.raw()) { @@ -202,20 +190,20 @@ impl Block { } } - fn keep_last_stores_for(&mut self, address: ValueId, function: &Function) { + fn keep_last_stores_for(&mut self, address: ResolvedValueId<'a>, function: &Function) { self.keep_last_store(address, function); self.for_each_alias_of(address, |t, alias| { - t.keep_last_store(function.dfg.resolve(alias).into(), function); + t.keep_last_store(function.dfg.resolve(alias).detach(), function); }); } - fn keep_last_store(&mut self, address: ValueId, function: &Function) { + fn keep_last_store(&mut self, address: ResolvedValueId<'a>, function: &Function) { if let Some(instruction) = self.last_stores.remove(&address) { // Whenever we decide we want to keep a store instruction, we also need // to go through its stored value and mark that used as well. match &function.dfg[instruction] { Instruction::Store { value, .. 
} => { - self.mark_value_used(function.dfg.resolve(*value).into(), function); + self.mark_value_used(function.dfg.resolve(*value).detach(), function); } other => { unreachable!("last_store held an id of a non-store instruction: {other:?}") @@ -224,14 +212,14 @@ impl Block { } } - pub(super) fn mark_value_used(&mut self, value: ValueId, function: &Function) { + pub(super) fn mark_value_used(&mut self, value: ResolvedValueId<'a>, function: &Function) { self.keep_last_stores_for(value, function); // We must do a recursive check for arrays since they're the only Values which may contain // other ValueIds. if let Some((array, _)) = function.dfg.get_array_constant(value) { for value in array { - self.mark_value_used(function.dfg.resolve(value).into(), function); + self.mark_value_used(function.dfg.resolve(value).detach(), function); } } }
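As a closing note for readers following the mem2reg hunks above: the per-block `Block` context tracks which expressions alias which references and which references still hold a known last-stored value, and a load can only be replaced when its address resolves to a single alias whose last store is known. The sketch below is a heavily reduced model of that bookkeeping, assuming plain `u32` ids in place of `ResolvedValueId<'a>`; the `single_alias` and `set_unknown` helpers and the `main` driver are invented for the example and are not the real `block.rs` API.

```rust
use std::collections::{BTreeMap, BTreeSet};

type RawId = u32;

/// Canonical key for "the same address", so two dereferences of one
/// allocation share an entry in the alias map.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum Expression {
    Dereference(Box<Expression>),
    Other(RawId),
}

/// The set of references that may point at the same memory.
#[derive(Debug, Clone, Default)]
struct AliasSet(BTreeSet<RawId>);

impl AliasSet {
    fn known(id: RawId) -> Self {
        AliasSet(BTreeSet::from([id]))
    }
    fn single_alias(&self) -> Option<RawId> {
        (self.0.len() == 1).then(|| *self.0.iter().next().unwrap())
    }
}

/// Last stored value for a reference, if we still know it.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ReferenceValue {
    Unknown,
    Known(RawId),
}

#[derive(Debug, Default)]
struct Block {
    expressions: BTreeMap<RawId, Expression>,
    aliases: BTreeMap<Expression, AliasSet>,
    references: BTreeMap<RawId, ReferenceValue>,
}

impl Block {
    /// Store: remember the value written to `address`.
    fn set_known_value(&mut self, address: RawId, value: RawId) {
        let expr = self
            .expressions
            .entry(address)
            .or_insert(Expression::Other(address))
            .clone();
        let aliases = self.aliases.entry(expr).or_insert_with(|| AliasSet::known(address));
        if let Some(alias) = aliases.single_alias() {
            // Only safe to track a known value when the address has a single alias.
            self.references.insert(alias, ReferenceValue::Known(value));
        }
    }

    /// A store through an ambiguous alias invalidates what we know.
    fn set_unknown(&mut self, address: RawId) {
        self.references.insert(address, ReferenceValue::Unknown);
    }

    /// Load: return the stored value if the address is unambiguous.
    fn get_known_value(&self, address: RawId) -> Option<RawId> {
        let expr = self.expressions.get(&address)?;
        let alias = self.aliases.get(expr)?.single_alias()?;
        match self.references.get(&alias)? {
            ReferenceValue::Known(value) => Some(*value),
            ReferenceValue::Unknown => None,
        }
    }
}

fn main() {
    let mut block = Block::default();
    block.set_known_value(1, 42); // store 42 to reference v1
    assert_eq!(block.get_known_value(1), Some(42)); // this load could be removed
    block.set_unknown(1); // e.g. v1 escapes into a call
    assert_eq!(block.get_known_value(1), None); // the load must stay
}
```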