// espresso_types/v0/sparse_mt.rs

1use std::fmt;
2
3use alloy::hex;
4use anyhow::Result;
5use ark_serialize::{
6    CanonicalDeserialize, CanonicalSerialize, Compress, Read, SerializationError, Valid, Validate,
7};
8use jf_merkle_tree::DigestAlgorithm;
9use sha3::{Digest as _, Keccak256};
10
11use crate::{v0_3::RewardAmount, v0_4::RewardAccountV2};
12
/// Custom Keccak256 node for our merkle tree.
///
/// Thin newtype over a raw 32-byte digest. `Debug` is implemented
/// manually (hex rendering) rather than derived.
#[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct KeccakNode(pub [u8; 32]);
16
17impl fmt::Debug for KeccakNode {
18    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
19        f.debug_tuple("KeccakNode")
20            .field(&hex::encode(self.0))
21            .finish()
22    }
23}
24
25impl AsRef<[u8]> for KeccakNode {
26    fn as_ref(&self) -> &[u8] {
27        &self.0
28    }
29}
30
31impl CanonicalSerialize for KeccakNode {
32    fn serialize_with_mode<W: ark_serialize::Write>(
33        &self,
34        mut writer: W,
35        _compress: Compress,
36    ) -> Result<(), SerializationError> {
37        writer.write_all(&self.0)?;
38        Ok(())
39    }
40
41    fn serialized_size(&self, _compress: Compress) -> usize {
42        32
43    }
44}
45
46impl CanonicalDeserialize for KeccakNode {
47    fn deserialize_with_mode<R: Read>(
48        mut reader: R,
49        _compress: Compress,
50        _validate: Validate,
51    ) -> Result<Self, SerializationError> {
52        let mut ret = [0u8; 32];
53        reader.read_exact(&mut ret)?;
54        Ok(Self(ret))
55    }
56}
57
impl Valid for KeccakNode {
    /// Every 32-byte value is a valid node, so validation is a no-op.
    fn check(&self) -> Result<(), SerializationError> {
        Ok(())
    }
}
63
/// Keccak256 hasher that matches our Solidity implementation.
///
/// Stateless zero-sized marker type; all hashing happens through its
/// `DigestAlgorithm` implementation. Derives `Copy` and `Default` as
/// well (consistent with `KeccakNode`) since the type carries no data.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq)]
pub struct Keccak256Hasher;
67
68impl DigestAlgorithm<RewardAmount, RewardAccountV2, KeccakNode> for Keccak256Hasher {
69    fn digest(data: &[KeccakNode]) -> Result<KeccakNode, jf_merkle_tree::MerkleTreeError> {
70        let mut hasher = Keccak256::new();
71
72        // Hash the concatenated node data directly (no domain separator)
73        for node in data {
74            hasher.update(node.as_ref());
75        }
76
77        let result = hasher.finalize();
78        Ok(KeccakNode(result.into()))
79    }
80
81    fn digest_leaf(
82        _pos: &RewardAccountV2,
83        elem: &RewardAmount,
84    ) -> Result<KeccakNode, jf_merkle_tree::MerkleTreeError> {
85        // First hash of the value
86        let mut hasher = Keccak256::new();
87        hasher.update(elem.0.to_be_bytes::<32>()); // 32-byte value as big-endian
88        let first_hash = hasher.finalize();
89
90        // Second hash (double hashing)
91        let mut hasher = Keccak256::new();
92        hasher.update(first_hash);
93        let result = hasher.finalize();
94
95        Ok(KeccakNode(result.into()))
96    }
97}