Skip to content

Commit af08193

Browse files
committed
Merge branch 'staging' into feat/aggregation-mode-sdk
2 parents 0bd12c6 + 9e70085 commit af08193

47 files changed

Lines changed: 1385 additions & 257 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

Makefile

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -143,14 +143,10 @@ lint_contracts:
143143
@cd contracts && npm run lint:sol
144144

145145
anvil_start:
146-
@echo "Starting Anvil..."
147-
anvil --load-state contracts/scripts/anvil/state/alignedlayer-deployed-anvil-state.json
148-
149-
anvil_start_with_block_time:
150146
@echo "Starting Anvil..."
151147
anvil --load-state contracts/scripts/anvil/state/alignedlayer-deployed-anvil-state.json --block-time 7
152148

153-
anvil_start_with_block_time_with_more_prefunded_accounts:
149+
anvil_start_with_more_prefunded_accounts:
154150
@echo "Starting Anvil..."
155151
anvil --load-state contracts/scripts/anvil/state/alignedlayer-deployed-anvil-state.json --block-time 7 -a 2000
156152

@@ -1168,7 +1164,7 @@ setup_local_aligned_all:
11681164
tmux new-session -d -s aligned_layer
11691165

11701166
tmux new-window -t aligned_layer -n anvil
1171-
tmux send-keys -t aligned_layer 'make anvil_start_with_block_time' C-m
1167+
tmux send-keys -t aligned_layer 'make anvil_start' C-m
11721168

11731169
tmux new-window -t aligned_layer -n aggregator
11741170
tmux send-keys -t aligned_layer:aggregator 'make aggregator_start' C-m

aggregation_mode/aggregation_programs/sp1/src/lib.rs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,37 +2,37 @@ use serde::{Deserialize, Serialize};
22
use sha3::{Digest, Keccak256};
33

44
#[derive(Serialize, Deserialize)]
5-
pub struct SP1ProofInput {
5+
pub struct SP1VkAndPubInputs {
66
pub vk: [u32; 8],
77
pub public_inputs: Vec<u8>,
88
}
99

10-
impl SP1ProofInput {
10+
impl SP1VkAndPubInputs {
1111
pub fn hash(&self) -> [u8; 32] {
1212
let mut hasher = Keccak256::new();
1313
for &word in &self.vk {
14-
hasher.update(word.to_le_bytes());
14+
hasher.update(word.to_be_bytes());
1515
}
1616
hasher.update(&self.public_inputs);
1717
hasher.finalize().into()
1818
}
1919
}
2020

2121
#[derive(Serialize, Deserialize)]
22-
pub enum ProofInput {
23-
SP1Compressed(SP1ProofInput),
22+
pub enum ProofVkAndPubInputs {
23+
SP1Compressed(SP1VkAndPubInputs),
2424
}
2525

26-
impl ProofInput {
26+
impl ProofVkAndPubInputs {
2727
pub fn hash(&self) -> [u8; 32] {
2828
match self {
29-
ProofInput::SP1Compressed(proof) => proof.hash(),
29+
ProofVkAndPubInputs::SP1Compressed(proof_data) => proof_data.hash(),
3030
}
3131
}
3232
}
3333

3434
#[derive(Serialize, Deserialize)]
3535
pub struct Input {
36-
pub proofs: Vec<ProofInput>,
36+
pub proofs_vk_and_pub_inputs: Vec<ProofVkAndPubInputs>,
3737
pub merkle_root: [u8; 32],
3838
}

aggregation_mode/aggregation_programs/sp1/src/main.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ sp1_zkvm::entrypoint!(main);
33

44
use sha2::{Digest, Sha256};
55
use sha3::Keccak256;
6-
use sp1_aggregation_program::{Input, ProofInput};
6+
use sp1_aggregation_program::{Input, ProofVkAndPubInputs};
77

88
fn combine_hashes(hash_a: &[u8; 32], hash_b: &[u8; 32]) -> [u8; 32] {
99
let mut hasher = Keccak256::new();
@@ -13,7 +13,7 @@ fn combine_hashes(hash_a: &[u8; 32], hash_b: &[u8; 32]) -> [u8; 32] {
1313
}
1414

1515
/// Computes the merkle root for the given proofs using the vk
16-
fn compute_merkle_root(proofs: &[ProofInput]) -> [u8; 32] {
16+
fn compute_merkle_root(proofs: &[ProofVkAndPubInputs]) -> [u8; 32] {
1717
let mut leaves: Vec<[u8; 32]> = proofs
1818
.chunks(2)
1919
.map(|chunk| match chunk {
@@ -41,9 +41,9 @@ pub fn main() {
4141
let input = sp1_zkvm::io::read::<Input>();
4242

4343
// Verify the proofs.
44-
for proof in input.proofs.iter() {
44+
for proof in input.proofs_vk_and_pub_inputs.iter() {
4545
match proof {
46-
ProofInput::SP1Compressed(proof) => {
46+
ProofVkAndPubInputs::SP1Compressed(proof) => {
4747
let vkey = proof.vk;
4848
let public_values = &proof.public_inputs;
4949
let public_values_digest = Sha256::digest(public_values);
@@ -52,7 +52,7 @@ pub fn main() {
5252
}
5353
}
5454

55-
let merkle_root = compute_merkle_root(&input.proofs);
55+
let merkle_root = compute_merkle_root(&input.proofs_vk_and_pub_inputs);
5656

5757
assert_eq!(merkle_root, input.merkle_root);
5858

aggregation_mode/src/aggregators/sp1_aggregator.rs

Lines changed: 15 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,19 @@
1+
use std::sync::LazyLock;
2+
13
use alloy::primitives::Keccak256;
2-
use sp1_aggregation_program::{ProofInput, SP1ProofInput};
4+
use sp1_aggregation_program::{ProofVkAndPubInputs, SP1VkAndPubInputs};
35
use sp1_sdk::{
4-
HashableKey, Prover, ProverClient, SP1ProofWithPublicValues, SP1Stdin, SP1VerifyingKey,
6+
EnvProver, HashableKey, Prover, ProverClient, SP1ProofWithPublicValues, SP1Stdin,
7+
SP1VerifyingKey,
58
};
69

710
use super::lib::{AggregatedProof, ProgramOutput, ProofAggregationError};
811

912
const PROGRAM_ELF: &[u8] =
1013
include_bytes!("../../aggregation_programs/sp1/elf/sp1_aggregator_program");
1114

15+
static SP1_PROVER_CLIENT: LazyLock<EnvProver> = LazyLock::new(ProverClient::from_env);
16+
1217
pub struct SP1ProofWithPubValuesAndElf {
1318
pub proof_with_pub_values: SP1ProofWithPublicValues,
1419
pub elf: Vec<u8>,
@@ -17,9 +22,8 @@ pub struct SP1ProofWithPubValuesAndElf {
1722
impl SP1ProofWithPubValuesAndElf {
1823
pub fn hash_vk_and_pub_inputs(&self) -> [u8; 32] {
1924
let mut hasher = Keccak256::new();
20-
for &word in &self.vk().hash_u32() {
21-
hasher.update(word.to_le_bytes());
22-
}
25+
let vk_bytes = &self.vk().hash_bytes();
26+
hasher.update(vk_bytes);
2327
hasher.update(self.proof_with_pub_values.public_values.as_slice());
2428
hasher.finalize().into()
2529
}
@@ -40,15 +44,15 @@ pub(crate) fn aggregate_proofs(
4044
let mut stdin = SP1Stdin::new();
4145

4246
let mut program_input = sp1_aggregation_program::Input {
43-
proofs: vec![],
47+
proofs_vk_and_pub_inputs: vec![],
4448
merkle_root: input.merkle_root,
4549
};
4650

4751
// write vk + public inputs
4852
for proof in input.proofs.iter() {
4953
program_input
50-
.proofs
51-
.push(ProofInput::SP1Compressed(SP1ProofInput {
54+
.proofs_vk_and_pub_inputs
55+
.push(ProofVkAndPubInputs::SP1Compressed(SP1VkAndPubInputs {
5256
public_inputs: proof.proof_with_pub_values.public_values.to_vec(),
5357
vk: proof.vk().hash_u32(),
5458
}));
@@ -66,7 +70,7 @@ pub(crate) fn aggregate_proofs(
6670
}
6771

6872
#[cfg(feature = "prove")]
69-
let client = ProverClient::from_env();
73+
let client = &*SP1_PROVER_CLIENT;
7074
// If not in prove mode, create a mock proof via mock client
7175
#[cfg(not(feature = "prove"))]
7276
let client = ProverClient::builder().mock().build();
@@ -102,7 +106,7 @@ pub enum AlignedSP1VerificationError {
102106
pub(crate) fn verify(
103107
sp1_proof_with_pub_values_and_elf: &SP1ProofWithPubValuesAndElf,
104108
) -> Result<(), AlignedSP1VerificationError> {
105-
let client = ProverClient::from_env();
109+
let client = &*SP1_PROVER_CLIENT;
106110

107111
let (_pk, vk) = client.setup(&sp1_proof_with_pub_values_and_elf.elf);
108112

@@ -122,7 +126,7 @@ pub(crate) fn verify(
122126
}
123127

124128
pub fn vk_from_elf(elf: &[u8]) -> SP1VerifyingKey {
125-
let prover = ProverClient::builder().cpu().build();
129+
let prover = &*SP1_PROVER_CLIENT;
126130
let (_, vk) = prover.setup(elf);
127131
vk
128132
}

aggregation_mode/src/backend/fetcher.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@ use tracing::{error, info};
1717

1818
#[derive(Debug)]
1919
pub enum ProofsFetcherError {
20-
QueryingLogs,
21-
BlockNumber,
20+
GetLogs(String),
21+
GetBlockNumber(String),
2222
}
2323

2424
pub struct ProofsFetcher {
@@ -59,7 +59,7 @@ impl ProofsFetcher {
5959
.from_block(from_block)
6060
.query()
6161
.await
62-
.map_err(|_| ProofsFetcherError::QueryingLogs)?;
62+
.map_err(|e| ProofsFetcherError::GetLogs(e.to_string()))?;
6363

6464
info!("Logs collected {}", logs.len());
6565

@@ -124,7 +124,7 @@ impl ProofsFetcher {
124124
.rpc_provider
125125
.get_block_number()
126126
.await
127-
.map_err(|_| ProofsFetcherError::BlockNumber)?;
127+
.map_err(|e| ProofsFetcherError::GetBlockNumber(e.to_string()))?;
128128

129129
let number_of_blocks_in_the_past = self.fetch_from_secs_ago / self.block_time_secs;
130130

aggregation_mode/src/backend/s3.rs

Lines changed: 19 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,13 @@
11
use aligned_sdk::core::types::VerificationData;
22

33
#[derive(Debug)]
4+
#[allow(dead_code)]
45
pub enum GetBatchProofsError {
5-
Fetching,
6-
Deserialization,
7-
EmptyBody,
8-
StatusFailed,
9-
ReqwestClientFailed,
6+
FetchingS3Batch(String),
7+
Deserialization(String),
8+
EmptyBody(String),
9+
StatusFailed((u16, String)),
10+
ReqwestClientFailed(String),
1011
}
1112

1213
// needed to make S3 bucket work
@@ -18,25 +19,32 @@ pub async fn get_aligned_batch_from_s3(
1819
let client = reqwest::Client::builder()
1920
.user_agent(DEFAULT_USER_AGENT)
2021
.build()
21-
.map_err(|_| GetBatchProofsError::ReqwestClientFailed)?;
22+
.map_err(|e| GetBatchProofsError::ReqwestClientFailed(e.to_string()))?;
2223

2324
let response = client
2425
.get(url)
2526
.send()
2627
.await
27-
.map_err(|_| GetBatchProofsError::Fetching)?;
28+
.map_err(|e| GetBatchProofsError::FetchingS3Batch(e.to_string()))?;
2829
if !response.status().is_success() {
29-
return Err(GetBatchProofsError::StatusFailed);
30+
return Err(GetBatchProofsError::StatusFailed((
31+
response.status().as_u16(),
32+
response
33+
.status()
34+
.canonical_reason()
35+
.unwrap_or("")
36+
.to_string(),
37+
)));
3038
}
3139

3240
let bytes = response
3341
.bytes()
3442
.await
35-
.map_err(|_| GetBatchProofsError::EmptyBody)?;
43+
.map_err(|e| GetBatchProofsError::EmptyBody(e.to_string()))?;
3644
let bytes: &[u8] = bytes.iter().as_slice();
3745

38-
let data: Vec<VerificationData> =
39-
ciborium::from_reader(bytes).map_err(|_| GetBatchProofsError::Deserialization)?;
46+
let data: Vec<VerificationData> = ciborium::from_reader(bytes)
47+
.map_err(|e| GetBatchProofsError::Deserialization(e.to_string()))?;
4048

4149
Ok(data)
4250
}

alerts/sender_with_alert.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,11 +111,11 @@ do
111111
--proof "./scripts/test_files/gnark_groth16_bn254_infinite_script/infinite_proofs/ineq_${x}_groth16.proof" \
112112
--public_input "./scripts/test_files/gnark_groth16_bn254_infinite_script/infinite_proofs/ineq_${x}_groth16.pub" \
113113
--vk "./scripts/test_files/gnark_groth16_bn254_infinite_script/infinite_proofs/ineq_${x}_groth16.vk" \
114-
--proof_generator_addr $SENDER_ADDRESS \
115114
--private_key $PRIVATE_KEY \
116115
--rpc_url $RPC_URL \
117116
--network $NETWORK \
118117
--max_fee 0.004ether \
118+
--random_address \
119119
2>&1)
120120

121121
echo "$submit"

batcher/aligned-batcher/src/connection.rs

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -57,17 +57,16 @@ pub(crate) async fn send_batch_inclusion_data_responses(
5757
}
5858

5959
pub(crate) async fn send_message<T: Serialize>(ws_conn_sink: WsMessageSink, message: T) {
60-
match cbor_serialize(&message) {
61-
Ok(serialized_response) => {
62-
if let Err(err) = ws_conn_sink
63-
.write()
64-
.await
65-
.send(Message::binary(serialized_response))
66-
.await
67-
{
68-
error!("Error while sending message: {}", err)
69-
}
60+
if let Ok(serialized_response) =
61+
cbor_serialize(&message).inspect_err(|e| error!("Error while serializing message: {}", e))
62+
{
63+
if let Err(err) = ws_conn_sink
64+
.write()
65+
.await
66+
.send(Message::binary(serialized_response))
67+
.await
68+
{
69+
error!("Error while sending message: {}", err)
7070
}
71-
Err(e) => error!("Error while serializing message: {}", e),
7271
}
7372
}

0 commit comments

Comments (0)