Implement comparable level work
coderofstuff committed Nov 7, 2024
1 parent 092fad5 commit b56b341
Showing 5 changed files with 91 additions and 20 deletions.
4 changes: 2 additions & 2 deletions consensus/src/consensus/services.rs
@@ -11,7 +11,7 @@ use crate::{
},
},
processes::{
block_depth::BlockDepthManager, coinbase::CoinbaseManager, ghostdag::protocol::GhostdagManager,
block_depth::BlockDepthManager, coinbase::CoinbaseManager, difficulty::level_work, ghostdag::protocol::GhostdagManager,
parents_builder::ParentsManager, pruning::PruningPointManager, pruning_proof::PruningProofManager, sync::SyncManager,
transaction_validator::TransactionValidator, traversal_manager::DagTraversalManager, window::DualWindowManager,
},
@@ -118,7 +118,7 @@ impl ConsensusServices {
relations_services[0].clone(),
storage.headers_store.clone(),
reachability_service.clone(),
false,
level_work(0, config.max_block_level),
);

let coinbase_manager = CoinbaseManager::new(
65 changes: 65 additions & 0 deletions consensus/src/processes/difficulty.rs
@@ -19,6 +19,8 @@ use std::{
use super::ghostdag::ordering::SortableBlock;
use itertools::Itertools;

const MAX_WORK_LEVEL: u32 = 128;

trait DifficultyManagerExtension {
fn headers_store(&self) -> &dyn HeaderStoreReader;

@@ -282,6 +284,15 @@ pub fn calc_work(bits: u32) -> BlueWorkType {
res.try_into().expect("Work should not exceed 2**192")
}

pub fn level_work(level: u8, max_block_level: u8) -> BlueWorkType {
// Need to make a special condition for level 0 to ensure true work is always used
if level == 0 {
return 0.into();
}
let exp = (level as u32) + 256 - (max_block_level as u32);
BlueWorkType::from_u64(1) << exp.min(MAX_WORK_LEVEL)
}

#[derive(Eq)]
struct DifficultyBlock {
timestamp: u64,
@@ -307,3 +318,57 @@ impl Ord for DifficultyBlock {
self.timestamp.cmp(&other.timestamp).then_with(|| self.sortable_block.cmp(&other.sortable_block))
}
}

#[cfg(test)]
mod tests {
use std::cmp::max;

use kaspa_consensus_core::{BlockLevel, BlueWorkType};
use kaspa_math::{Uint256, Uint320};

use crate::processes::difficulty::{calc_work, level_work, MAX_WORK_LEVEL};
use kaspa_utils::hex::ToHex;

#[test]
fn test_target_levels() {
let max_block_level: BlockLevel = 225;
for level in 1..=max_block_level {
// required pow for level
let level_target = (Uint320::from_u64(1) << ((max_block_level - level) as u32).max(MAX_WORK_LEVEL)) - Uint320::from_u64(1);
let level_target = Uint256::from_be_bytes(level_target.to_be_bytes()[8..40].try_into().unwrap());
let signed_block_level = max_block_level as i64 - level_target.bits() as i64;
let calculated_level = max(signed_block_level, 0) as BlockLevel;

let true_level_work = calc_work(level_target.compact_target_bits());
let calc_level_work = level_work(level, max_block_level);

// A "good enough" estimate of level work is within 1% diff from work with actual level target
// It's hard to calculate percentages with these large numbers, so to get around using floats
// we multiply the difference by 100. if the result is <= the calc_level_work it means
// difference must have been less than 1%
let (percent_diff, overflowed) = (true_level_work - calc_level_work).overflowing_mul(BlueWorkType::from_u64(100));
let is_good_enough = percent_diff <= calc_level_work;

println!("Level {}:", level);
println!(
" data | {} | {} | {} / {} |",
level_target.compact_target_bits(),
level_target.bits(),
calculated_level,
max_block_level
);
println!(" pow | {}", level_target.to_hex());
println!(" work | 0000000000000000{}", true_level_work.to_hex());
println!(" lvwork | 0000000000000000{}", calc_level_work.to_hex());
println!(" diff<1% | {}", !overflowed && (is_good_enough));

assert!(is_good_enough);
}
}

#[test]
fn test_base_level_work() {
// Expect that at level 0, the level work is always 0
assert_eq!(BlueWorkType::from(0), level_work(0, 255));
}
}
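
Note on the new helper: for level > 0, level_work shifts 1 left by level + 256 - max_block_level and caps the exponent at MAX_WORK_LEVEL (128) bits, so the result is a single power of two that serves as a floor on per-block work at that level, while level 0 is special-cased to zero so true work is always used. The standalone sketch below (not part of the commit; the helper name level_work_exponent and the use of plain integers instead of BlueWorkType are illustrative only) spells out that exponent arithmetic for max_block_level = 225, the value exercised by test_target_levels above.

// Standalone sketch (not committed code): the exponent arithmetic behind level_work.
const MAX_WORK_LEVEL: u32 = 128;

// Returns the bit position of the single set bit in level_work(level, max_block_level),
// i.e. level_work == 2^exp for level > 0; level 0 carries no floor (zero work).
fn level_work_exponent(level: u8, max_block_level: u8) -> Option<u32> {
    if level == 0 {
        return None; // level 0 keeps the block's true work
    }
    let exp = (level as u32) + 256 - (max_block_level as u32);
    Some(exp.min(MAX_WORK_LEVEL))
}

fn main() {
    let max_block_level = 225u8;
    // Level 1: exp = 1 + 256 - 225 = 32, so the floor is 2^32.
    assert_eq!(level_work_exponent(1, max_block_level), Some(32));
    // Level 96: exp = 96 + 256 - 225 = 127, still below the cap.
    assert_eq!(level_work_exponent(96, max_block_level), Some(127));
    // From level 97 upward the exponent saturates at MAX_WORK_LEVEL, i.e. 2^128.
    assert_eq!(level_work_exponent(97, max_block_level), Some(128));
    assert_eq!(level_work_exponent(225, max_block_level), Some(128));
    // Level 0: no floor; the true header work is used.
    assert_eq!(level_work_exponent(0, max_block_level), None);
}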
32 changes: 17 additions & 15 deletions consensus/src/processes/ghostdag/protocol.rs
@@ -29,7 +29,7 @@ pub struct GhostdagManager<T: GhostdagStoreReader, S: RelationsStoreReader, U: R
pub(super) relations_store: S,
pub(super) headers_store: Arc<V>,
pub(super) reachability_service: U,
use_score_as_work: bool,
expected_level_work: BlueWorkType,
}

impl<T: GhostdagStoreReader, S: RelationsStoreReader, U: ReachabilityService, V: HeaderStoreReader> GhostdagManager<T, S, U, V> {
@@ -40,9 +40,9 @@ impl<T: GhostdagStoreReader, S: RelationsStoreReader, U: ReachabilityService, V:
relations_store: S,
headers_store: Arc<V>,
reachability_service: U,
use_score_as_work: bool,
expected_level_work: BlueWorkType,
) -> Self {
Self { genesis_hash, k, ghostdag_store, relations_store, reachability_service, headers_store, use_score_as_work }
Self { genesis_hash, k, ghostdag_store, relations_store, reachability_service, headers_store, expected_level_work }
}

pub fn genesis_ghostdag_data(&self) -> GhostdagData {
@@ -117,18 +117,20 @@ impl<T: GhostdagStoreReader, S: RelationsStoreReader, U: ReachabilityService, V:

let blue_score = self.ghostdag_store.get_blue_score(selected_parent).unwrap() + new_block_data.mergeset_blues.len() as u64;

let blue_work: BlueWorkType = if self.use_score_as_work {
blue_score.into()
} else {
let added_blue_work: BlueWorkType = new_block_data
.mergeset_blues
.iter()
.cloned()
.map(|hash| if hash.is_origin() { 0.into() } else { calc_work(self.headers_store.get_bits(hash).unwrap()) })
.sum();

self.ghostdag_store.get_blue_work(selected_parent).unwrap() + added_blue_work
};
let added_blue_work: BlueWorkType = new_block_data
.mergeset_blues
.iter()
.cloned()
.map(|hash| {
if hash.is_origin() {
0.into()
} else {
let true_work = calc_work(self.headers_store.get_bits(hash).unwrap());
true_work.max(self.expected_level_work)
}
})
.sum();
let blue_work: BlueWorkType = self.ghostdag_store.get_blue_work(selected_parent).unwrap() + added_blue_work;

new_block_data.finalize_score_and_work(blue_score, blue_work);
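
The net effect of the protocol.rs change above: every level's GhostdagManager now accumulates real blue work rather than optionally falling back to blue score, with each merged blue block contributing at least the expected work of its level. A minimal standalone sketch of that clamping (not the committed code; u128 stands in for BlueWorkType, the per-block work values are made up, and the origin special case, which contributes zero, is omitted):

// Sketch: clamp each merged blue block's work from below by the level's expected work.
fn added_blue_work(mergeset_blue_work: &[u128], expected_level_work: u128) -> u128 {
    mergeset_blue_work
        .iter()
        .map(|&true_work| true_work.max(expected_level_work))
        .sum()
}

fn main() {
    // Hypothetical per-block work of three merged blues.
    let blues = [5_000u128, 80_000, 120_000];

    // At level 0 the expected work is 0, so true work is summed unchanged.
    assert_eq!(added_blue_work(&blues, 0), 205_000);

    // At a higher level the floor (here 100_000) lifts the low-work blocks,
    // keeping blue work at that level comparable to level-0 blue work.
    assert_eq!(added_blue_work(&blues, 100_000), 320_000);
}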

3 changes: 2 additions & 1 deletion consensus/src/processes/pruning_proof/build.rs
@@ -21,6 +21,7 @@ use crate::{
},
},
processes::{
difficulty::level_work,
ghostdag::{ordering::SortableBlock, protocol::GhostdagManager},
pruning_proof::PruningProofManagerInternalError,
},
@@ -338,7 +339,7 @@ impl PruningProofManager {
relations_service.clone(),
self.headers_store.clone(),
self.reachability_service.clone(),
level != 0,
level_work(level, self.max_block_level),
);

ghostdag_store.insert(root, Arc::new(gd_manager.genesis_ghostdag_data())).unwrap();
7 changes: 5 additions & 2 deletions consensus/src/processes/pruning_proof/validate.rs
@@ -31,7 +31,10 @@ use crate::{
relations::{DbRelationsStore, RelationsStoreReader},
},
},
processes::{ghostdag::protocol::GhostdagManager, reachability::inquirer as reachability, relations::RelationsStoreExtensions},
processes::{
difficulty::level_work, ghostdag::protocol::GhostdagManager, reachability::inquirer as reachability,
relations::RelationsStoreExtensions,
},
};

use super::{PruningProofManager, TempProofContext};
@@ -194,7 +197,7 @@ impl PruningProofManager {
relations_stores[level].clone(),
headers_store.clone(),
reachability_services[level].clone(),
level != 0,
level_work(level as BlockLevel, self.max_block_level),
)
})
.collect_vec();
