-
Notifications
You must be signed in to change notification settings - Fork 153
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* sighash reused trait * benches are implemented * use cache per iteration per function * fix par versions * fix benches * use upgreadable read * use concurrent cache * use hashcache * dont apply cache * rollback rwlock and indexmap. * remove scc * apply par iter to `check_scripts` * refactor check_scripts fn, fix tests * fix clippy * add bench with custom threadpool * style: fmt * suppress warnings * Merge branch 'master' into bcm-parallel-processing * renames + map err * reuse code * bench: avoid exposing cache map + iter pools in powers of 2 * simplify check_sig_op_counts * use thread pool also if a single input 1. to avoid confusion 2. since tokio blocking threads are not meant to be used for processing anyway * remove todo * clear cache instead of recreate * use and_then (so map_err can be called in a single location) * extend check scripts tests for better coverage of the par_iter case --------- Co-authored-by: Michael Sutton <[email protected]>
- Loading branch information
1 parent
1378e7b
commit 1274e9c
Showing
20 changed files
with
538 additions
and
175 deletions.
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,126 @@ | ||
use criterion::{black_box, criterion_group, criterion_main, Criterion, SamplingMode}; | ||
use kaspa_addresses::{Address, Prefix, Version}; | ||
use kaspa_consensus::processes::transaction_validator::transaction_validator_populated::{ | ||
check_scripts_par_iter, check_scripts_par_iter_pool, check_scripts_sequential, | ||
}; | ||
use kaspa_consensus_core::hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync}; | ||
use kaspa_consensus_core::hashing::sighash_type::SIG_HASH_ALL; | ||
use kaspa_consensus_core::subnets::SubnetworkId; | ||
use kaspa_consensus_core::tx::{MutableTransaction, Transaction, TransactionInput, TransactionOutpoint, UtxoEntry}; | ||
use kaspa_txscript::caches::Cache; | ||
use kaspa_txscript::pay_to_address_script; | ||
use rand::{thread_rng, Rng}; | ||
use secp256k1::Keypair; | ||
use std::thread::available_parallelism; | ||
|
||
// You may need to add more detailed mocks depending on your actual code. | ||
fn mock_tx(inputs_count: usize, non_uniq_signatures: usize) -> (Transaction, Vec<UtxoEntry>) { | ||
let reused_values = SigHashReusedValuesUnsync::new(); | ||
let dummy_prev_out = TransactionOutpoint::new(kaspa_hashes::Hash::from_u64_word(1), 1); | ||
let mut tx = Transaction::new( | ||
0, | ||
vec![], | ||
vec![], | ||
0, | ||
SubnetworkId::from_bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), | ||
0, | ||
vec![], | ||
); | ||
let mut utxos = vec![]; | ||
let mut kps = vec![]; | ||
for _ in 0..inputs_count - non_uniq_signatures { | ||
let kp = Keypair::new(secp256k1::SECP256K1, &mut thread_rng()); | ||
tx.inputs.push(TransactionInput { previous_outpoint: dummy_prev_out, signature_script: vec![], sequence: 0, sig_op_count: 1 }); | ||
let address = Address::new(Prefix::Mainnet, Version::PubKey, &kp.x_only_public_key().0.serialize()); | ||
utxos.push(UtxoEntry { | ||
amount: thread_rng().gen::<u32>() as u64, | ||
script_public_key: pay_to_address_script(&address), | ||
block_daa_score: 333, | ||
is_coinbase: false, | ||
}); | ||
kps.push(kp); | ||
} | ||
for _ in 0..non_uniq_signatures { | ||
let kp = kps.last().unwrap(); | ||
tx.inputs.push(TransactionInput { previous_outpoint: dummy_prev_out, signature_script: vec![], sequence: 0, sig_op_count: 1 }); | ||
let address = Address::new(Prefix::Mainnet, Version::PubKey, &kp.x_only_public_key().0.serialize()); | ||
utxos.push(UtxoEntry { | ||
amount: thread_rng().gen::<u32>() as u64, | ||
script_public_key: pay_to_address_script(&address), | ||
block_daa_score: 444, | ||
is_coinbase: false, | ||
}); | ||
} | ||
for (i, kp) in kps.iter().enumerate().take(inputs_count - non_uniq_signatures) { | ||
let mut_tx = MutableTransaction::with_entries(&tx, utxos.clone()); | ||
let sig_hash = calc_schnorr_signature_hash(&mut_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values); | ||
let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); | ||
let sig: [u8; 64] = *kp.sign_schnorr(msg).as_ref(); | ||
// This represents OP_DATA_65 <SIGNATURE+SIGHASH_TYPE> (since signature length is 64 bytes and SIGHASH_TYPE is one byte) | ||
tx.inputs[i].signature_script = std::iter::once(65u8).chain(sig).chain([SIG_HASH_ALL.to_u8()]).collect(); | ||
} | ||
let length = tx.inputs.len(); | ||
for i in (inputs_count - non_uniq_signatures)..length { | ||
let kp = kps.last().unwrap(); | ||
let mut_tx = MutableTransaction::with_entries(&tx, utxos.clone()); | ||
let sig_hash = calc_schnorr_signature_hash(&mut_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values); | ||
let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); | ||
let sig: [u8; 64] = *kp.sign_schnorr(msg).as_ref(); | ||
// This represents OP_DATA_65 <SIGNATURE+SIGHASH_TYPE> (since signature length is 64 bytes and SIGHASH_TYPE is one byte) | ||
tx.inputs[i].signature_script = std::iter::once(65u8).chain(sig).chain([SIG_HASH_ALL.to_u8()]).collect(); | ||
} | ||
(tx, utxos) | ||
} | ||
|
||
fn benchmark_check_scripts(c: &mut Criterion) { | ||
for inputs_count in [100, 50, 25, 10, 5, 2] { | ||
for non_uniq_signatures in [0, inputs_count / 2] { | ||
let (tx, utxos) = mock_tx(inputs_count, non_uniq_signatures); | ||
let mut group = c.benchmark_group(format!("inputs: {inputs_count}, non uniq: {non_uniq_signatures}")); | ||
group.sampling_mode(SamplingMode::Flat); | ||
|
||
group.bench_function("single_thread", |b| { | ||
let tx = MutableTransaction::with_entries(&tx, utxos.clone()); | ||
let cache = Cache::new(inputs_count as u64); | ||
b.iter(|| { | ||
cache.clear(); | ||
check_scripts_sequential(black_box(&cache), black_box(&tx.as_verifiable())).unwrap(); | ||
}) | ||
}); | ||
|
||
group.bench_function("rayon par iter", |b| { | ||
let tx = MutableTransaction::with_entries(tx.clone(), utxos.clone()); | ||
let cache = Cache::new(inputs_count as u64); | ||
b.iter(|| { | ||
cache.clear(); | ||
check_scripts_par_iter(black_box(&cache), black_box(&tx.as_verifiable())).unwrap(); | ||
}) | ||
}); | ||
|
||
// Iterate powers of two up to available parallelism | ||
for i in (1..=(available_parallelism().unwrap().get() as f64).log2().ceil() as u32).map(|x| 2u32.pow(x) as usize) { | ||
if inputs_count >= i { | ||
group.bench_function(format!("rayon, custom thread pool, thread count {i}"), |b| { | ||
let tx = MutableTransaction::with_entries(tx.clone(), utxos.clone()); | ||
// Create a custom thread pool with the specified number of threads | ||
let pool = rayon::ThreadPoolBuilder::new().num_threads(i).build().unwrap(); | ||
let cache = Cache::new(inputs_count as u64); | ||
b.iter(|| { | ||
cache.clear(); | ||
check_scripts_par_iter_pool(black_box(&cache), black_box(&tx.as_verifiable()), black_box(&pool)).unwrap(); | ||
}) | ||
}); | ||
} | ||
} | ||
} | ||
} | ||
} | ||
|
||
criterion_group! {
    name = benches;
    // Criterion configuration: colored output plus a 20-second measurement window
    // per benchmark (longer than the default, to stabilize multithreaded timings).
    // This can be any expression that returns a `Criterion` object.
    config = Criterion::default().with_output_color(true).measurement_time(std::time::Duration::new(20, 0));
    targets = benchmark_check_scripts
}

// Entry point: expands to `main`, running every target in the `benches` group.
criterion_main!(benches);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.