use std::cmp::{max, min};
use std::fmt::Debug;
use std::marker::PhantomData;
use anyhow::ensure;
use filecoin_hashers::{Hasher, PoseidonArity};
use fr32::bytes_into_fr_repr_safe;
use generic_array::typenum::Unsigned;
use merkletree::merkle::get_merkle_tree_row_count;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use sha2::{Digest, Sha256};
use crate::{
api_version::ApiVersion,
crypto::{derive_porep_domain_seed, DRSAMPLE_DST},
error::Result,
parameter_cache::ParameterSetMetadata,
util::{data_at_node_offset, NODE_SIZE},
PoRepID,
};
// Whether merkle-tree construction may run in parallel.
// NOTE(review): not referenced within this file — presumably consumed by tree
// builders elsewhere in the crate; confirm at call sites.
pub const PARALLEL_MERKLE: bool = true;

/// The base degree used for all DRG graphs: the number of DRG parents each
/// node has (see `BucketGraph::parents`).
pub const BASE_DEGREE: usize = 6;
/// A directed graph over `size()` nodes of `NODE_SIZE` bytes each, used to
/// derive per-node encoding keys from the data of each node's parents.
pub trait Graph<H: Hasher>: Debug + Clone + PartialEq + Eq {
    /// The key type produced by [`Graph::create_key`].
    type Key: Debug;

    /// Total size in bytes of the data the graph covers (`size() * NODE_SIZE`).
    fn expected_size(&self) -> usize {
        self.size() * NODE_SIZE
    }

    /// Row count of a merkle tree with arity `U` built over this graph's nodes.
    fn merkle_tree_depth<U: 'static + PoseidonArity>(&self) -> u64 {
        graph_height::<U>(self.size()) as u64
    }

    /// Writes the parent indices of `node` into `parents`.
    ///
    /// The slice is expected to hold `degree()` entries; see the implementor
    /// for the exact layout (e.g. where the immediate predecessor is placed).
    fn parents(&self, node: usize, parents: &mut [u32]) -> Result<()>;

    /// Number of nodes in the graph.
    fn size(&self) -> usize;

    /// Number of parents each node has.
    fn degree(&self) -> usize;

    /// Constructs a new graph.
    ///
    /// `porep_id` and `api_version` participate in (respectively) seed
    /// derivation and parent-layout selection; see `BucketGraph::new`.
    fn new(
        nodes: usize,
        base_degree: usize,
        expansion_degree: usize,
        porep_id: PoRepID,
        api_version: ApiVersion,
    ) -> Result<Self>;

    /// The 28-byte seed used for deterministic parent sampling.
    fn seed(&self) -> [u8; 28];

    /// Derives the encoding key for `node` from the replica `id` and the
    /// parents' data. `exp_parents_data` is only meaningful for graphs with
    /// expansion parents and may be ignored by implementors without them.
    fn create_key(
        &self,
        id: &H::Domain,
        node: usize,
        parents: &[u32],
        parents_data: &[u8],
        exp_parents_data: Option<&[u8]>,
    ) -> Result<Self::Key>;
}
/// Returns the number of rows of a merkle tree with `number_of_leafs` leaves
/// and branching factor `U`.
pub fn graph_height<U: Unsigned>(number_of_leafs: usize) -> usize {
    let branches = U::to_usize();
    get_merkle_tree_row_count(number_of_leafs, branches)
}
/// A DRG built via bucket sampling: every node's parents are drawn from a
/// deterministic, seeded RNG (see `Graph::parents` below).
#[derive(Clone, Debug, PartialEq, Eq, Copy)]
pub struct BucketGraph<H: Hasher> {
    // Total number of nodes in the graph.
    nodes: usize,
    // Number of DRG parents per node.
    base_degree: usize,
    // First 28 bytes of the ChaCha8 RNG seed used for parent sampling; the
    // remaining 4 bytes are the node index.
    seed: [u8; 28],
    // Selects where the immediate-predecessor parent is placed (last slot in
    // V1.0, first slot from V1.1 on).
    api_version: ApiVersion,
    _h: PhantomData<H>,
}
impl<H: Hasher> ParameterSetMetadata for BucketGraph<H> {
    /// Human-readable identifier encoding the graph size, degree and hasher.
    fn identifier(&self) -> String {
        let node_count = self.nodes;
        let degree = self.degree();
        format!(
            "drgraph::BucketGraph{{size: {}; degree: {}; hasher: {}}}",
            node_count,
            degree,
            H::name(),
        )
    }

    /// Sector size in bytes: node count times the per-node size.
    fn sector_size(&self) -> u64 {
        let total_bytes = self.nodes * NODE_SIZE;
        total_bytes as u64
    }
}
impl<H: Hasher> Graph<H> for BucketGraph<H> {
    type Key = H::Domain;

    /// Derives the encoding key for `node`: SHA-256 over the replica `id`
    /// followed by the data of each parent, mapped safely into the field.
    ///
    /// When `node` equals its own first parent (the sentinel layout produced
    /// by `parents` for parentless nodes), only the replica id is hashed.
    fn create_key(
        &self,
        id: &H::Domain,
        node: usize,
        parents: &[u32],
        base_parents_data: &[u8],
        _exp_parents_data: Option<&[u8]>,
    ) -> Result<Self::Key> {
        let mut hasher = Sha256::new();
        hasher.update(AsRef::<[u8]>::as_ref(id));

        // A node that lists itself as its first parent has no real parents;
        // its key depends on the replica id alone.
        if node != parents[0] as usize {
            for parent in parents.iter() {
                let offset = data_at_node_offset(*parent as usize);
                hasher.update(&base_parents_data[offset..offset + NODE_SIZE]);
            }
        }

        let hash = hasher.finalize();
        Ok(bytes_into_fr_repr_safe(hash.as_ref()).into())
    }

    /// Fills `parents` (expected length `degree()`) with the DRG parents of
    /// `node`, sampled deterministically from a ChaCha8 RNG seeded with the
    /// graph seed plus the node index.
    ///
    /// Nodes 0 and 1 have no parents: every slot is set to 0. For all other
    /// nodes, `m - 1` parents are bucket-sampled from a "metagraph" of
    /// `node * (m - 1)` nodes, and the immediate predecessor (`node - 1`) is
    /// stored in the last slot (ApiVersion 1.0) or the first slot (1.1+).
    #[inline]
    fn parents(&self, node: usize, parents: &mut [u32]) -> Result<()> {
        let m = self.degree();

        match node {
            // Special-case parentless nodes.
            0 | 1 => {
                for parent in parents.iter_mut().take(m) {
                    *parent = 0;
                }
                Ok(())
            }
            _ => {
                // Seed = 28-byte graph seed || 4-byte little-endian node index,
                // so every node gets its own deterministic RNG stream.
                let node = node as u32;
                let mut seed = [0u8; 32];
                seed[..28].copy_from_slice(&self.seed);
                seed[28..].copy_from_slice(&node.to_le_bytes());
                let mut rng = ChaCha8Rng::from_seed(seed);

                let m_prime = m - 1;
                // Each graph node corresponds to `m_prime` metagraph nodes.
                let metagraph_node = node as u64 * m_prime as u64;
                let n_buckets = (metagraph_node as f64).log2().ceil() as u64;

                // The predecessor slot moved from last to first in V1.1.
                let (predecessor_index, other_drg_parents) = match self.api_version {
                    ApiVersion::V1_0_0 => (m_prime, &mut parents[..]),
                    ApiVersion::V1_1_0 | ApiVersion::V1_2_0 => (0, &mut parents[1..]),
                };

                for parent in other_drg_parents.iter_mut().take(m_prime) {
                    // Pick a bucket of distances [2^(i-1), 2^i), clamped to the
                    // metagraph node, then a uniform distance within it.
                    let bucket_index = (rng.gen::<u64>() % n_buckets) + 1;
                    let largest_distance_in_bucket = min(metagraph_node, 1 << bucket_index);
                    let smallest_distance_in_bucket = max(2, largest_distance_in_bucket >> 1);

                    // Since bucket_index is positive, the distance is at least 2.
                    let n_distances_in_bucket =
                        largest_distance_in_bucket - smallest_distance_in_bucket + 1;
                    let distance =
                        smallest_distance_in_bucket + (rng.gen::<u64>() % n_distances_in_bucket);

                    let metagraph_parent = metagraph_node - distance;

                    // Map back from the metagraph to the graph.
                    let mapped_parent = (metagraph_parent / m_prime as u64) as u32;

                    // Avoid self references by substituting the predecessor.
                    *parent = if mapped_parent == node {
                        node - 1
                    } else {
                        mapped_parent
                    };
                }

                parents[predecessor_index] = node - 1;
                Ok(())
            }
        }
    }

    /// Number of nodes in the graph.
    #[inline]
    fn size(&self) -> usize {
        self.nodes
    }

    /// Number of DRG parents per node.
    #[inline]
    fn degree(&self) -> usize {
        self.base_degree
    }

    /// The 28-byte parent-sampling seed derived from the porep id.
    fn seed(&self) -> [u8; 28] {
        self.seed
    }

    /// Creates a new `BucketGraph`.
    ///
    /// # Errors
    ///
    /// Fails if `expansion_degree` is non-zero (bucket graphs have no
    /// expansion parents) or if the metagraph node count exceeds 2^54 and is
    /// therefore not exactly representable as an `f64` (required by the
    /// `log2` computation in `parents`).
    fn new(
        nodes: usize,
        base_degree: usize,
        expansion_degree: usize,
        porep_id: PoRepID,
        api_version: ApiVersion,
    ) -> Result<Self> {
        // Fixed typo in the error message ("Expension" -> "Expansion").
        ensure!(expansion_degree == 0, "Expansion degree must be zero.");

        // The number of metagraph nodes must be less than `2u64^54` as to not incur
        // rounding errors when casting to `f64` in `parents`.
        let m_prime = base_degree - 1;
        let n_metagraph_nodes = nodes as u64 * m_prime as u64;
        ensure!(
            n_metagraph_nodes <= 1u64 << 54,
            "The number of metagraph nodes must be precisely castable to `f64`"
        );

        let drg_seed = derive_drg_seed(porep_id);

        Ok(BucketGraph {
            nodes,
            base_degree,
            seed: drg_seed,
            api_version,
            _h: PhantomData,
        })
    }
}
/// Derives the 28-byte DRG sampling seed for the given porep id by
/// truncating the domain-separated porep seed.
pub fn derive_drg_seed(porep_id: PoRepID) -> [u8; 28] {
    let full_seed = derive_porep_domain_seed(DRSAMPLE_DST, porep_id);
    let mut truncated = [0u8; 28];
    truncated.copy_from_slice(&full_seed[..28]);
    truncated
}
#[cfg(test)]
mod tests {
    use super::*;

    use filecoin_hashers::{
        blake2s::Blake2sHasher, poseidon::PoseidonHasher, sha256::Sha256Hasher,
    };
    use generic_array::typenum::{U0, U2, U4, U8};
    use memmap2::{MmapMut, MmapOptions};
    use merkletree::store::StoreConfig;

    use crate::merkle::{
        create_base_merkle_tree, DiskStore, MerkleProofTrait, MerkleTreeTrait, MerkleTreeWrapper,
    };

    // Copies `data` into an anonymous memory map, since the tree builder
    // consumes mmapped input.
    pub fn mmap_from(data: &[u8]) -> MmapMut {
        let mut mm = MmapOptions::new()
            .len(data.len())
            .map_anon()
            .expect("Failed to create memory map");
        mm.copy_from_slice(data);
        mm
    }

    // Exercises parent generation under both a legacy (V1.0) and a current
    // (V1.2) porep id / API version.
    fn graph_bucket<H: Hasher>() {
        let porep_id = |id: u8| {
            let mut porep_id = [0u8; 32];
            porep_id[0] = id;
            porep_id
        };

        let legacy_porep_id = porep_id(0);
        let new_porep_id = porep_id(5);
        graph_bucket_aux::<H>(legacy_porep_id, ApiVersion::V1_0_0);
        graph_bucket_aux::<H>(new_porep_id, ApiVersion::V1_2_0);
    }

    // Checks, for several graph sizes: determinism of `parents`, absence of
    // self references, and version-dependent placement of the predecessor.
    //
    // Cleanup vs. previous version: dropped two redundant extra `parents`
    // calls per node (`p1` duplicated `pa1`, and `p2` was never read).
    fn graph_bucket_aux<H: Hasher>(porep_id: PoRepID, api_version: ApiVersion) {
        let degree = BASE_DEGREE;

        for &size in &[4, 16, 256, 2048] {
            let g = BucketGraph::<H>::new(size, degree, 0, porep_id, api_version)
                .expect("bucket graph new failed");
            assert_eq!(g.size(), size, "wrong nodes count");

            // Nodes 0 and 1 are parentless: all slots must be zero.
            let mut parents = vec![0; degree];
            g.parents(0, &mut parents).expect("parents failed");
            assert_eq!(parents, vec![0; degree]);

            parents = vec![0; degree];
            g.parents(1, &mut parents).expect("parents failed");
            assert_eq!(parents, vec![0; degree]);

            for i in 1..size {
                // Parent selection must be deterministic per node.
                let mut pa1 = vec![0; degree];
                g.parents(i, &mut pa1).expect("parents failed");
                let mut pa2 = vec![0; degree];
                g.parents(i, &mut pa2).expect("parents failed");

                assert_eq!(pa1.len(), degree);
                assert_eq!(pa1, pa2, "different parents on the same node");

                // A node must never be its own parent.
                for parent in &pa1 {
                    assert_ne!(i, *parent as usize, "self reference found");
                }

                // The predecessor slot position depends on the API version.
                match api_version {
                    ApiVersion::V1_0_0 => {
                        assert_eq!(
                            i - 1,
                            pa1[degree - 1] as usize,
                            "immediate predecessor was not last DRG parent"
                        );
                    }
                    ApiVersion::V1_1_0 | ApiVersion::V1_2_0 => {
                        assert_eq!(
                            i - 1,
                            pa1[0] as usize,
                            "immediate predecessor was not first parent"
                        );
                    }
                }
            }
        }
    }

    #[test]
    fn graph_bucket_sha256() {
        graph_bucket::<Sha256Hasher>();
    }

    #[test]
    fn graph_bucket_blake2s() {
        graph_bucket::<Blake2sHasher>();
    }

    // Builds a small base merkle tree over constant data and verifies an
    // inclusion proof for one leaf.
    fn gen_proof<H: 'static + Hasher, U: 'static + PoseidonArity>(config: Option<StoreConfig>) {
        let leafs = 64;
        let porep_id = [1; 32];
        let g = BucketGraph::<H>::new(leafs, BASE_DEGREE, 0, porep_id, ApiVersion::V1_2_0)
            .expect("bucket graph new failed");
        let data = vec![2u8; NODE_SIZE * leafs];

        let mmapped = &mmap_from(&data);
        let tree =
            create_base_merkle_tree::<MerkleTreeWrapper<H, DiskStore<H::Domain>, U, U0, U0>>(
                config,
                g.size(),
                mmapped,
            )
            .expect("failed to build tree");
        let proof = tree.gen_proof(2).expect("failed to gen proof");
        assert!(proof.verify());
    }

    #[test]
    fn gen_proof_poseidon_binary() {
        gen_proof::<PoseidonHasher, U2>(None);
    }

    #[test]
    fn gen_proof_sha256_binary() {
        gen_proof::<Sha256Hasher, U2>(None);
    }

    #[test]
    fn gen_proof_blake2s_binary() {
        gen_proof::<Blake2sHasher, U2>(None);
    }

    #[test]
    fn gen_proof_poseidon_quad() {
        gen_proof::<PoseidonHasher, U4>(None);
    }

    #[test]
    fn gen_proof_sha256_quad() {
        gen_proof::<Sha256Hasher, U4>(None);
    }

    #[test]
    fn gen_proof_blake2s_quad() {
        gen_proof::<Blake2sHasher, U4>(None);
    }

    #[test]
    fn gen_proof_poseidon_oct() {
        gen_proof::<PoseidonHasher, U8>(None);
    }
}