Skip to content

Commit

Permalink
ControlHash keeps only info about parent rounds.
Browse files Browse the repository at this point in the history
  • Loading branch information
Marcin-Radecki committed Jan 7, 2025
1 parent 47a4d8f commit c347236
Show file tree
Hide file tree
Showing 9 changed files with 33 additions and 33 deletions.
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion consensus/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "aleph-bft"
version = "0.40.1"
version = "0.41.0"
edition = "2021"
authors = ["Cardinal Cryptography"]
categories = ["algorithms", "data-structures", "cryptography", "database"]
Expand Down
4 changes: 2 additions & 2 deletions consensus/src/backup/loader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,10 +111,10 @@ impl<H: Hasher, D: Data, S: Signature, R: AsyncRead> BackupLoader<H, D, S, R> {
));
}

let parent_ids = &full_unit.as_pre_unit().control_hash().parents_round_lookup;
let parent_ids = &full_unit.as_pre_unit().control_hash().parents;

// Sanity check: verify that all unit's parents appeared in backup before it.
for (parent_id, (_, round)) in parent_ids.iter() {
for (parent_id, round) in parent_ids.iter() {
let parent = UnitCoord::new(*round, parent_id);
if !already_loaded_coords.contains(&parent) {
return Err(LoaderError::InconsistentData(coord));
Expand Down
2 changes: 1 addition & 1 deletion consensus/src/dag/reconstruction/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ impl<U: Unit> ReconstructedUnit<U> {
/// Returns a reconstructed unit if the parents agree with the hash, errors out otherwise.
pub fn with_parents(unit: U, parents: NodeMap<(HashFor<U>, Round)>) -> Result<Self, U> {
match unit.control_hash().combined_hash
== ControlHash::<U::Hasher>::combine_hashes(&parents)
== ControlHash::<U::Hasher>::create_control_hash(&parents)
{
true => Ok(ReconstructedUnit { unit, parents }),
false => Err(unit),
Expand Down
22 changes: 10 additions & 12 deletions consensus/src/dag/reconstruction/parents.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,7 @@ impl<U: Unit> ReconstructingUnit<U> {
round != 0,
"We should never try to reconstruct parents of a unit of round 0."
);
let coords = unit
.control_hash()
.parents()
.map(|(parent_index, &(_, parent_round))| UnitCoord::new(parent_round, parent_index))
.collect();
let coords = unit.control_hash().parents().collect();
(
ReconstructingUnit::Reconstructing(unit, NodeMap::with_size(n_members)),
coords,
Expand Down Expand Up @@ -78,18 +74,20 @@ impl<U: Unit> ReconstructingUnit<U> {
}
}

fn with_parents_from_network(
fn with_parents(
self,
parents_from_network: HashMap<UnitCoord, HashFor<U>>,
parents: HashMap<UnitCoord, HashFor<U>>,
) -> Result<ReconstructedUnit<U>, Self> {
let control_hash = self.control_hash().clone();
if parents_from_network.len() != control_hash.parents().count() {
if parents.len() != control_hash.parents().count() {
return Err(self);
}
let mut parents_map = NodeMap::with_size(control_hash.n_members());
for (parent_id, &(_, parent_round)) in control_hash.parents() {
match parents_from_network.get(&UnitCoord::new(parent_round, parent_id)) {
Some(parent_hash) => parents_map.insert(parent_id, (*parent_hash, parent_round)),
for parent_coord in control_hash.parents() {
match parents.get(&parent_coord) {
Some(parent_hash) => {
parents_map.insert(parent_coord.creator(), (*parent_hash, parent_coord.round()))
}
// The parents were inconsistent with the control hash.
None => return Err(self),
}
Expand Down Expand Up @@ -206,7 +204,7 @@ impl<U: Unit> Reconstruction<U> {
) -> ReconstructionResult<U> {
// If we don't have the unit, just ignore this response.
match self.reconstructing_units.remove(&unit_hash) {
Some(unit) => match unit.with_parents_from_network(parents) {
Some(unit) => match unit.with_parents(parents) {
Ok(unit) => ReconstructionResult::reconstructed(unit),
Err(unit) => {
self.reconstructing_units.insert(unit_hash, unit);
Expand Down
2 changes: 1 addition & 1 deletion consensus/src/network/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ mod tests {
data: Data,
) -> UncheckedSignedUnit<Hasher64, Data, Signature> {
let control_hash = ControlHash {
parents_round_lookup: NodeMap::with_size(7.into()),
parents: NodeMap::with_size(7.into()),
combined_hash: 0.using_encoded(Hasher64::hash),
};
let pu = PreUnit::new(creator, round, control_hash);
Expand Down
4 changes: 2 additions & 2 deletions consensus/src/testing/crash_recovery.rs
Original file line number Diff line number Diff line change
Expand Up @@ -132,9 +132,9 @@ fn verify_backup(buf: &mut &[u8]) -> HashSet<UnitCoord> {
let unit = <UncheckedSignedUnit<Hasher64, Data, Signature>>::decode(buf).unwrap();
let full_unit = unit.as_signable();
let coord = full_unit.coord();
let parent_ids = &full_unit.as_pre_unit().control_hash().parents_round_lookup;
let parent_ids = &full_unit.as_pre_unit().control_hash().parents;

for (parent_id, &(_, round)) in parent_ids.iter() {
for (parent_id, &round) in parent_ids.iter() {
let parent = UnitCoord::new(round, parent_id);
assert!(already_saved.contains(&parent));
}
Expand Down
20 changes: 13 additions & 7 deletions consensus/src/units/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,35 +56,41 @@ impl Display for UnitCoord {
/// parents
#[derive(Clone, Eq, PartialEq, Hash, Debug, Decode, Encode)]
pub struct ControlHash<H: Hasher> {
pub(crate) parents_round_lookup: NodeMap<(H::Hash, Round)>,
pub(crate) parents: NodeMap<Round>,
pub(crate) combined_hash: H::Hash,
}

impl<H: Hasher> ControlHash<H> {
pub(crate) fn new(parents_round_lookup: &NodeMap<(H::Hash, Round)>) -> Self {
let mut parents_round_map = NodeMap::with_size(parents_round_lookup.size());
for (parent_index, (_, parent_round)) in parents_round_lookup.iter() {
parents_round_map.insert(parent_index, *parent_round);
}
ControlHash {
parents_round_lookup: parents_round_lookup.clone(),
combined_hash: Self::combine_hashes(parents_round_lookup),
parents: parents_round_map,
combined_hash: Self::create_control_hash(parents_round_lookup),
}
}

/// Calculate the parent control hash, which takes all parent hashes into account.
pub(crate) fn combine_hashes(parent_map: &NodeMap<(H::Hash, Round)>) -> H::Hash {
pub(crate) fn create_control_hash(parent_map: &NodeMap<(H::Hash, Round)>) -> H::Hash {
// we include parent rounds when calculating the hash, but this is okay - we cannot
// have two units with the same hash but different rounds
parent_map.using_encoded(H::hash)
}

pub(crate) fn parents(&self) -> impl Iterator<Item = (NodeIndex, &(H::Hash, Round))> + '_ {
self.parents_round_lookup.iter()
pub(crate) fn parents(&self) -> impl Iterator<Item = UnitCoord> + '_ {
self.parents
.iter()
.map(|(node_index, &round)| UnitCoord::new(round, node_index))
}

pub(crate) fn n_parents(&self) -> NodeCount {
NodeCount(self.parents().count())
}

pub(crate) fn n_members(&self) -> NodeCount {
self.parents_round_lookup.size()
self.parents.size()
}
}

Expand Down
8 changes: 2 additions & 6 deletions consensus/src/units/validator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ impl<K: Keychain> Validator<K> {
return Err(ValidationError::RoundZeroWithParents(pre_unit.clone()));
}
if pre_unit.control_hash().combined_hash
!= ControlHash::<H>::combine_hashes(&NodeMap::with_size(n_members))
!= ControlHash::<H>::create_control_hash(&NodeMap::with_size(n_members))
{
return Err(ValidationError::RoundZeroBadControlHash(pre_unit.clone()));
}
Expand All @@ -138,11 +138,7 @@ impl<K: Keychain> Validator<K> {
return Err(ValidationError::NotEnoughParents(pre_unit.clone()));
}
let control_hash = &pre_unit.control_hash();
if control_hash
.parents_round_lookup
.get(pre_unit.creator())
.is_none()
{
if control_hash.parents.get(pre_unit.creator()).is_none() {
return Err(ValidationError::NotDescendantOfPreviousUnit(
pre_unit.clone(),
));
Expand Down

0 comments on commit c347236

Please sign in to comment.