1 change: 1 addition & 0 deletions cspell.json
@@ -91,6 +91,7 @@
"dbanks",
"DCMAKE",
"decrementation",
"dedup",
"deduped",
"defi",
"deinit",
@@ -162,20 +162,24 @@ impl ResetOutputComposer {
unconstrained fn get_propagated_validation_requests<let KeyValidationAmount: u32>(
self,
) -> PrivateValidationRequests {
let validation_requests = self.previous_kernel.validation_requests;
// Propagation works with arrays that have already been deduplicated.
// Each reset circuit iteration performs costly processing, so we eliminate duplicates up front;
// deduplication itself is comparatively cheap and prevents redundant work on the same logical request.
// All corresponding hints and validation amounts are calculated after deduplication, ensuring that expensive
// operations are performed only on unique requests.

let kept_note_hash_read_requests = get_propagated_read_requests(
validation_requests.note_hash_read_requests,
self.squashed_output_array_hints.deduped_note_hash_read_requests,
self.note_hash_read_request_actions_hints,
);

let kept_nullifier_read_requests = get_propagated_read_requests(
validation_requests.nullifier_read_requests,
self.squashed_output_array_hints.deduped_nullifier_read_requests,
self.nullifier_read_request_actions_hints,
);

let scoped_key_validation_requests_and_generators = get_propagated_key_validation_requests(
validation_requests.scoped_key_validation_requests_and_generators,
self.squashed_output_array_hints.deduped_key_validation_requests,
KeyValidationAmount,
);

@@ -1,10 +1,20 @@
use crate::reset::{squash_transient_data, TransientDataSquashingHint};
use crate::{
components::reset_output_validator::check_duplicates::{
are_duplicate_key_validation_requests, are_duplicate_read_requests,
},
reset::{dedup_array, squash_transient_data, TransientDataSquashingHint},
};
use types::{
abis::{
kernel_circuit_public_inputs::PrivateKernelCircuitPublicInputs, note_hash::NoteHash,
nullifier::Nullifier, private_log::PrivateLogData,
validation_requests::KeyValidationRequestAndGenerator,
},
constants::{
MAX_KEY_VALIDATION_REQUESTS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX,
MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIERS_PER_TX,
MAX_PRIVATE_LOGS_PER_TX,
},
constants::{MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_PRIVATE_LOGS_PER_TX},
side_effect::{Counted, Scoped},
utils::arrays::ClaimedLengthArray,
};
@@ -16,6 +26,15 @@ pub struct SquashedOutputArrayHints {
pub kept_note_hashes: ClaimedLengthArray<Scoped<Counted<NoteHash>>, MAX_NOTE_HASHES_PER_TX>,
pub kept_nullifiers: ClaimedLengthArray<Scoped<Counted<Nullifier>>, MAX_NULLIFIERS_PER_TX>,
pub kept_private_logs: ClaimedLengthArray<Scoped<Counted<PrivateLogData>>, MAX_PRIVATE_LOGS_PER_TX>,
// "deduped" requests are the result after removing duplicates.
// For read requests, duplicates are identified by matching value + contract_address (ignoring counter).
// For key validation, duplicates are exact matches of all fields.
pub deduped_note_hash_read_requests: ClaimedLengthArray<Scoped<Counted<Field>>, MAX_NOTE_HASH_READ_REQUESTS_PER_TX>,
pub note_hash_read_request_dedup_hints: [u32; MAX_NOTE_HASH_READ_REQUESTS_PER_TX],
pub deduped_nullifier_read_requests: ClaimedLengthArray<Scoped<Counted<Field>>, MAX_NULLIFIER_READ_REQUESTS_PER_TX>,
pub nullifier_read_request_dedup_hints: [u32; MAX_NULLIFIER_READ_REQUESTS_PER_TX],
pub deduped_key_validation_requests: ClaimedLengthArray<Scoped<KeyValidationRequestAndGenerator>, MAX_KEY_VALIDATION_REQUESTS_PER_TX>,
pub key_validation_request_dedup_hints: [u32; MAX_KEY_VALIDATION_REQUESTS_PER_TX],
}

// Splice-removes some transient notes, nullifiers, and private logs from the previous kernel's
@@ -37,5 +56,32 @@ pub unconstrained fn generate_squashed_output_array_hints<let TransientDataSquas
transient_data_squashing_hints,
);

SquashedOutputArrayHints { kept_note_hashes, kept_nullifiers, kept_private_logs }
// Deduplicate validation requests to reduce the number of expensive validations
// (merkle membership checks for reads, EC operations for key validation).
let (deduped_note_hash_read_requests, note_hash_read_request_dedup_hints) = dedup_array(
previous_kernel.validation_requests.note_hash_read_requests,
are_duplicate_read_requests,
);

let (deduped_nullifier_read_requests, nullifier_read_request_dedup_hints) = dedup_array(
previous_kernel.validation_requests.nullifier_read_requests,
are_duplicate_read_requests,
);

let (deduped_key_validation_requests, key_validation_request_dedup_hints) = dedup_array(
previous_kernel.validation_requests.scoped_key_validation_requests_and_generators,
are_duplicate_key_validation_requests,
);

SquashedOutputArrayHints {
kept_note_hashes,
kept_nullifiers,
kept_private_logs,
deduped_note_hash_read_requests,
note_hash_read_request_dedup_hints,
deduped_nullifier_read_requests,
nullifier_read_request_dedup_hints,
deduped_key_validation_requests,
key_validation_request_dedup_hints,
}
}
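
For intuition on the contract these hints encode: every original item maps, via its hint, to an equal item in the deduped array. A minimal Noir sketch of that invariant, which would have to run inside an unconstrained function since dedup_array is unconstrained; the array literal and field-equality closure here are illustrative, not code from this PR:

    // With original = [11, 22, 11] (claimed length 3), dedup_array yields
    // deduped = [11, 22] (length 2) and hints = [0, 1, 0].
    let original = ClaimedLengthArray { array: [11, 22, 11, 0], length: 3 };
    let (deduped, hints) = dedup_array(original, |a: Field, b: Field| a == b);

    // Defining invariant: each original item equals the deduped item its hint points at.
    for i in 0..original.length {
        assert(deduped.array[hints[i]] == original.array[i]);
    }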
@@ -1,13 +1,19 @@
pub mod check_duplicates;
mod validate_note_logs_linked_to_note_hashes;

use check_duplicates::{are_duplicate_key_validation_requests, are_duplicate_read_requests};

use crate::{
abis::{PaddedSideEffects, PrivateKernelResetHints},
accumulated_data::{
assert_sorted_padded_transformed_array_capped_size,
assert_sorted_padded_transformed_i_array_capped_size,
},
components::reset_output_composer::SquashedOutputArrayHints,
reset::{KeyValidationRequestsValidator, ReadRequestValidator, TransientDataValidator},
reset::{
KeyValidationRequestsValidator, ReadRequestValidator, TransientDataValidator,
validate_deduped_array,
},
};
use types::{
abis::{
@@ -17,7 +23,10 @@ use types::{
private_log::{PrivateLog, PrivateLogData},
},
address::AztecAddress,
constants::{MAX_U32_VALUE, SIDE_EFFECT_MASKING_ADDRESS},
constants::{
MAX_KEY_VALIDATION_REQUESTS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX,
MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_U32_VALUE, SIDE_EFFECT_MASKING_ADDRESS,
},
hash::{
compute_note_nonce_and_unique_note_hash, compute_siloed_note_hash, compute_siloed_nullifier,
compute_siloed_private_log_first_field,
@@ -242,10 +251,37 @@ impl<let NoteHashPendingReadHintsLen: u32, let NoteHashSettledReadHintsLen: u32,
let validation_requests = self.previous_kernel.validation_requests;
let tree_snapshots = self.previous_kernel.constants.anchor_block_header.state.partial;
let output = self.output.validation_requests;
let hints = self.squashed_output_array_hints;

// In this function, if we're not performing a full reset, we first check that the deduped array correctly
// represents the original requests, i.e., each original item maps to a matching deduped item.
// We then perform the expensive reset validations (Merkle membership proofs and elliptic curve operations) only
// on the deduped array.
// Deduplication is only beneficial when this reset variant processes a subset of the requests (i.e., the Amount
// generics are less than MAX). In these cases, eliminating duplicates means more unique requests actually get
// validated, and fewer requests remain to be propagated to the next kernel circuit.
// If the variant is already processing all requests, the validation loops run for MAX iterations regardless of
// the actual array length, so deduplication simply adds unnecessary overhead.

// note_hash_read_requests
let note_hash_read_requests = if (
NoteHashPendingReadAmount == MAX_NOTE_HASH_READ_REQUESTS_PER_TX
)
& (NoteHashSettledReadAmount == MAX_NOTE_HASH_READ_REQUESTS_PER_TX) {
// Skip deduplication if the variant is processing all requests.
validation_requests.note_hash_read_requests
} else {
validate_deduped_array(
validation_requests.note_hash_read_requests,
hints.deduped_note_hash_read_requests,
hints.note_hash_read_request_dedup_hints,
are_duplicate_read_requests,
);
hints.deduped_note_hash_read_requests
};

ReadRequestValidator {
read_requests: validation_requests.note_hash_read_requests,
read_requests: note_hash_read_requests,
pending_values: self.previous_kernel.end.note_hashes,
tree_root: tree_snapshots.note_hash_tree.root,
propagated_read_requests: output.note_hash_read_requests,
@@ -254,8 +290,24 @@ impl<let NoteHashPendingReadHintsLen: u32, let NoteHashSettledReadHintsLen: u32,
.validate::<NoteHashPendingReadAmount, NoteHashSettledReadAmount>();

// nullifier_read_requests
let nullifier_read_requests = if (
NullifierPendingReadAmount == MAX_NULLIFIER_READ_REQUESTS_PER_TX
)
& (NullifierSettledReadAmount == MAX_NULLIFIER_READ_REQUESTS_PER_TX) {
// Skip deduplication if the variant is processing all requests.
validation_requests.nullifier_read_requests
} else {
validate_deduped_array(
validation_requests.nullifier_read_requests,
hints.deduped_nullifier_read_requests,
hints.nullifier_read_request_dedup_hints,
are_duplicate_read_requests,
);
hints.deduped_nullifier_read_requests
};

ReadRequestValidator {
read_requests: validation_requests.nullifier_read_requests,
read_requests: nullifier_read_requests,
pending_values: self.previous_kernel.end.nullifiers,
tree_root: tree_snapshots.nullifier_tree.root,
propagated_read_requests: output.nullifier_read_requests,
@@ -264,9 +316,21 @@ impl<let NoteHashPendingReadHintsLen: u32, let NoteHashSettledReadHintsLen: u32,
.validate::<NullifierPendingReadAmount, NullifierSettledReadAmount>();

// key_validation_requests
let key_validation_requests = if KeyValidationAmount == MAX_KEY_VALIDATION_REQUESTS_PER_TX {
// Skip deduplication if the variant is processing all requests.
validation_requests.scoped_key_validation_requests_and_generators
} else {
validate_deduped_array(
validation_requests.scoped_key_validation_requests_and_generators,
hints.deduped_key_validation_requests,
hints.key_validation_request_dedup_hints,
are_duplicate_key_validation_requests,
);
hints.deduped_key_validation_requests
};

KeyValidationRequestsValidator {
key_validation_requests: validation_requests
.scoped_key_validation_requests_and_generators,
key_validation_requests,
propagated_key_validation_requests: output
.scoped_key_validation_requests_and_generators,
hints: self.reset_hints.key_validation_hints,
@@ -0,0 +1,18 @@
use types::{
abis::validation_requests::KeyValidationRequestAndGenerator,
side_effect::{Counted, Scoped},
};

/// Two read requests are considered duplicates if they read the same value from the same contract.
/// The counter (when the read happened) is irrelevant for dedup purposes.
pub fn are_duplicate_read_requests(a: Scoped<Counted<Field>>, b: Scoped<Counted<Field>>) -> bool {
(a.inner.inner == b.inner.inner) & (a.contract_address == b.contract_address)
}

Contributor commented on the line above:

I'm still amazed that Noir makes this the same constraint cost as:

assert_eq(a.inner.inner, b.inner.inner);
assert_eq(a.contract_address, b.contract_address);

/// Two key validation requests are considered duplicates if they are exactly the same.
pub fn are_duplicate_key_validation_requests(
a: Scoped<KeyValidationRequestAndGenerator>,
b: Scoped<KeyValidationRequestAndGenerator>,
) -> bool {
a == b
}
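
Picking up the reviewer's point above: when the boolean predicate is immediately asserted, the AND-of-equalities form and the two separate assertions constrain the same relation. A hypothetical pair of call sites contrasting the two styles (neither helper is part of this PR):

    // Style 1: assert the boolean predicate defined in this file.
    fn constrain_same_read(a: Scoped<Counted<Field>>, b: Scoped<Counted<Field>>) {
        assert(are_duplicate_read_requests(a, b));
    }

    // Style 2: two direct equality assertions, as in the reviewer's snippet.
    fn constrain_same_read_alt(a: Scoped<Counted<Field>>, b: Scoped<Counted<Field>>) {
        assert_eq(a.inner.inner, b.inner.inner);
        assert_eq(a.contract_address, b.contract_address);
    }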
@@ -0,0 +1,110 @@
use types::{traits::Empty, utils::arrays::ClaimedLengthArray};

/// Unconstrained function to generate a deduplicated array and index hints.
///
/// For each item in `original`, checks if an equal item (under `eq_fn`) already exists in the
/// deduped output. If so, records the existing index in `dedup_hints`. If not, appends the item
/// to the deduped output and records the new index.
///
/// Returns: (deduped_array, dedup_hints) where dedup_hints[i] is the index in deduped_array
/// that original[i] maps to.
pub unconstrained fn dedup_array<T, let N: u32, Env>(
original: ClaimedLengthArray<T, N>,
eq_fn: fn[Env](T, T) -> bool,
) -> (ClaimedLengthArray<T, N>, [u32; N])
where
T: Empty,
{
let mut deduped: ClaimedLengthArray<T, N> = ClaimedLengthArray::empty();
let mut hints = [0 as u32; N];

for i in 0..original.length {
let item = original.array[i];

// Search for an existing match in the deduped array.
let mut found = false;
let mut found_index = 0;
for j in 0..deduped.length {
if eq_fn(item, deduped.array[j]) {
found = true;
found_index = j;
break;
}
}

if found {
hints[i] = found_index;
} else {
hints[i] = deduped.length;
deduped.push(item);
}
}

(deduped, hints)
}

mod tests {
use super::dedup_array;
use types::utils::arrays::ClaimedLengthArray;

fn field_eq(a: Field, b: Field) -> bool {
a == b
}

#[test]
unconstrained fn all_unique() {
let original = ClaimedLengthArray { array: [11, 22, 33, 0], length: 3 };
let (deduped, hints) = dedup_array(original, field_eq);
assert_eq(deduped.length, 3);
assert_eq(deduped.array[0], 11);
assert_eq(deduped.array[1], 22);
assert_eq(deduped.array[2], 33);
assert_eq(hints[0], 0);
assert_eq(hints[1], 1);
assert_eq(hints[2], 2);
}

#[test]
unconstrained fn all_duplicates() {
let original = ClaimedLengthArray { array: [11, 11, 11, 0], length: 3 };
let (deduped, hints) = dedup_array(original, field_eq);
assert_eq(deduped.length, 1);
assert_eq(deduped.array[0], 11);
assert_eq(hints[0], 0);
assert_eq(hints[1], 0);
assert_eq(hints[2], 0);
}

#[test]
unconstrained fn mix_of_unique_and_duplicate() {
let original = ClaimedLengthArray { array: [11, 22, 11, 33, 22, 0, 0], length: 5 };
let (deduped, hints) = dedup_array(original, field_eq);
assert_eq(deduped.length, 3);
assert_eq(deduped.array[0], 11);
assert_eq(deduped.array[1], 22);
assert_eq(deduped.array[2], 33);
assert_eq(hints[0], 0);
assert_eq(hints[1], 1);
assert_eq(hints[2], 0);
assert_eq(hints[3], 2);
assert_eq(hints[4], 1);
assert_eq(hints[5], 0);
assert_eq(hints[6], 0);
}

#[test]
unconstrained fn empty_array() {
let original = ClaimedLengthArray { array: [0, 0, 0, 0], length: 0 };
let (deduped, _hints) = dedup_array(original, field_eq);
assert_eq(deduped.length, 0);
}

#[test]
unconstrained fn single_item() {
let original = ClaimedLengthArray { array: [42, 0, 0, 0], length: 1 };
let (deduped, hints) = dedup_array(original, field_eq);
assert_eq(deduped.length, 1);
assert_eq(deduped.array[0], 42);
assert_eq(hints[0], 0);
}
}
@@ -0,0 +1,2 @@
pub mod dedup_array;
pub mod validate_deduped_array;
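
The mod.nr above also exports validate_deduped_array, whose contents aren't shown in this view. A minimal sketch of what the constrained counterpart to dedup_array could look like, given the (deduped, hints) contract documented above; the body is a guess at the shape, not the PR's actual implementation:

    use types::utils::arrays::ClaimedLengthArray;

    // Hypothetical: constrain that `deduped` + `dedup_hints` faithfully represent `original`.
    // Every original item must equal the deduped item its hint points at, first occurrences
    // must claim consecutive deduped slots, and the deduped length must match exactly.
    pub fn validate_deduped_array<T, let N: u32, Env>(
        original: ClaimedLengthArray<T, N>,
        deduped: ClaimedLengthArray<T, N>,
        dedup_hints: [u32; N],
        eq_fn: fn[Env](T, T) -> bool,
    ) {
        let mut num_deduped: u32 = 0;
        for i in 0..N {
            if i < original.length {
                let hint = dedup_hints[i];
                // Every original item must map to an equal item in the deduped array.
                assert(eq_fn(original.array[i], deduped.array[hint]), "invalid dedup hint");
                if hint == num_deduped {
                    // First occurrence: claims the next deduped slot, in order.
                    num_deduped += 1;
                } else {
                    // Duplicate: must point at a slot claimed by an earlier item.
                    assert(hint < num_deduped, "dedup hint out of range");
                }
            }
        }
        // The deduped array contains exactly the first occurrences, nothing more.
        // (Emptiness checks on trailing padding slots are omitted in this sketch.)
        assert_eq(deduped.length, num_deduped, "incorrect deduped array length");
    }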