//! espresso_types/v0/sparse_mt.rs

1use std::fmt;
2
3use alloy::{hex, primitives::B256};
4use anyhow::Result;
5use ark_serialize::{
6    CanonicalDeserialize, CanonicalSerialize, Compress, Read, SerializationError, Valid, Validate,
7};
8use jf_merkle_tree_compat::DigestAlgorithm;
9use sha3::{Digest as _, Keccak256};
10
11use crate::{v0_3::RewardAmount, v0_4::RewardAccountV2};
12
13/// Custom Keccak256 node for our merkle tree
14#[derive(Default, Eq, PartialEq, Clone, Copy, Ord, PartialOrd, Hash)]
15pub struct KeccakNode(pub [u8; 32]);
16
17impl fmt::Debug for KeccakNode {
18    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
19        f.debug_tuple("KeccakNode")
20            .field(&hex::encode(self.0))
21            .finish()
22    }
23}
24
25impl From<KeccakNode> for B256 {
26    fn from(val: KeccakNode) -> Self {
27        val.0.into()
28    }
29}
30
31impl AsRef<[u8]> for KeccakNode {
32    fn as_ref(&self) -> &[u8] {
33        &self.0
34    }
35}
36
37impl CanonicalSerialize for KeccakNode {
38    fn serialize_with_mode<W: ark_serialize::Write>(
39        &self,
40        mut writer: W,
41        _compress: Compress,
42    ) -> Result<(), SerializationError> {
43        writer.write_all(&self.0)?;
44        Ok(())
45    }
46
47    fn serialized_size(&self, _compress: Compress) -> usize {
48        32
49    }
50}
51
52impl CanonicalDeserialize for KeccakNode {
53    fn deserialize_with_mode<R: Read>(
54        mut reader: R,
55        _compress: Compress,
56        _validate: Validate,
57    ) -> Result<Self, SerializationError> {
58        let mut ret = [0u8; 32];
59        reader.read_exact(&mut ret)?;
60        Ok(Self(ret))
61    }
62}
63
impl Valid for KeccakNode {
    /// Every 32-byte value is a valid digest, so validation is a no-op.
    fn check(&self) -> Result<(), SerializationError> {
        Ok(())
    }
}
69
/// Keccak256-based digest algorithm for the merkle tree, chosen to
/// produce the same hashes as our Solidity implementation.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Keccak256Hasher;
73
74impl DigestAlgorithm<RewardAmount, RewardAccountV2, KeccakNode> for Keccak256Hasher {
75    fn digest(data: &[KeccakNode]) -> Result<KeccakNode, jf_merkle_tree_compat::MerkleTreeError> {
76        let mut hasher = Keccak256::new();
77
78        // Hash the concatenated node data directly (no domain separator)
79        for node in data {
80            hasher.update(node.as_ref());
81        }
82
83        let result = hasher.finalize();
84        Ok(KeccakNode(result.into()))
85    }
86
87    fn digest_leaf(
88        _pos: &RewardAccountV2,
89        elem: &RewardAmount,
90    ) -> Result<KeccakNode, jf_merkle_tree_compat::MerkleTreeError> {
91        // First hash of the value
92        let mut hasher = Keccak256::new();
93        hasher.update(elem.0.to_be_bytes::<32>()); // 32-byte value as big-endian
94        let first_hash = hasher.finalize();
95
96        // Second hash (double hashing)
97        let mut hasher = Keccak256::new();
98        hasher.update(first_hash);
99        let result = hasher.finalize();
100
101        Ok(KeccakNode(result.into()))
102    }
103}