From 1274e9c1de6778aa4038e354412410a26f7bc6a4 Mon Sep 17 00:00:00 2001 From: Maxim <59533214+biryukovmaxim@users.noreply.github.com> Date: Tue, 8 Oct 2024 15:00:20 +0400 Subject: [PATCH] enhance tx inputs processing (#495) * sighash reused trait * benches are implemented * use cache per iteration per function * fix par versions * fix benches * use upgreadable read * use concurrent cache * use hashcache * dont apply cache * rollback rwlock and indexmap. * remove scc * apply par iter to `check_scripts` * refactor check_scripts fn, fix tests * fix clippy * add bench with custom threadpool * style: fmt * suppress warnings * Merge branch 'master' into bcm-parallel-processing * renames + map err * reuse code * bench: avoid exposing cache map + iter pools in powers of 2 * simplify check_sig_op_counts * use thread pool also if a single input 1. to avoid confusion 2. since tokio blocking threads are not meant to be used for processing anyway * remove todo * clear cache instead of recreate * use and_then (so map_err can be called in a single location) * extend check scripts tests for better coverage of the par_iter case --------- Co-authored-by: Michael Sutton --- Cargo.lock | 2 + consensus/Cargo.toml | 5 + consensus/benches/check_scripts.rs | 126 ++++++++++++ consensus/client/src/sign.rs | 6 +- consensus/client/src/signing.rs | 10 +- consensus/core/Cargo.toml | 1 + consensus/core/src/hashing/sighash.rs | 193 +++++++++++++----- consensus/core/src/sign.rs | 22 +- .../pipeline/virtual_processor/processor.rs | 5 +- .../transaction_validator_populated.rs | 140 +++++++++++-- consensus/wasm/src/utils.rs | 6 +- crypto/txscript/src/caches.rs | 4 + crypto/txscript/src/lib.rs | 59 +++--- crypto/txscript/src/opcodes/macros.rs | 22 +- crypto/txscript/src/opcodes/mod.rs | 70 ++++--- crypto/txscript/src/standard/multisig.rs | 10 +- .../src/mempool/check_transaction_standard.rs | 9 +- wallet/core/src/account/pskb.rs | 8 +- wallet/pskt/examples/multisig.rs | 8 +- wallet/pskt/src/pskt.rs | 7 +- 20 files changed, 538 insertions(+), 175 deletions(-) create mode 100644 consensus/benches/check_scripts.rs diff --git a/Cargo.lock b/Cargo.lock index 3449a43dae..a0db546302 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2414,6 +2414,7 @@ dependencies = [ "futures-util", "indexmap 2.6.0", "itertools 0.13.0", + "kaspa-addresses", "kaspa-consensus-core", "kaspa-consensus-notify", "kaspa-consensusmanager", @@ -2475,6 +2476,7 @@ dependencies = [ name = "kaspa-consensus-core" version = "0.15.2" dependencies = [ + "arc-swap", "async-trait", "bincode", "borsh", diff --git a/consensus/Cargo.toml b/consensus/Cargo.toml index 3f4a1b456d..b9a183ea8c 100644 --- a/consensus/Cargo.toml +++ b/consensus/Cargo.toml @@ -54,11 +54,16 @@ serde_json.workspace = true flate2.workspace = true rand_distr.workspace = true kaspa-txscript-errors.workspace = true +kaspa-addresses.workspace = true [[bench]] name = "hash_benchmarks" harness = false +[[bench]] +name = "check_scripts" +harness = false + [features] html_reports = [] devnet-prealloc = ["kaspa-consensus-core/devnet-prealloc"] diff --git a/consensus/benches/check_scripts.rs b/consensus/benches/check_scripts.rs new file mode 100644 index 0000000000..d65ac63626 --- /dev/null +++ b/consensus/benches/check_scripts.rs @@ -0,0 +1,126 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion, SamplingMode}; +use kaspa_addresses::{Address, Prefix, Version}; +use kaspa_consensus::processes::transaction_validator::transaction_validator_populated::{ + check_scripts_par_iter, 
check_scripts_par_iter_pool, check_scripts_sequential, +}; +use kaspa_consensus_core::hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync}; +use kaspa_consensus_core::hashing::sighash_type::SIG_HASH_ALL; +use kaspa_consensus_core::subnets::SubnetworkId; +use kaspa_consensus_core::tx::{MutableTransaction, Transaction, TransactionInput, TransactionOutpoint, UtxoEntry}; +use kaspa_txscript::caches::Cache; +use kaspa_txscript::pay_to_address_script; +use rand::{thread_rng, Rng}; +use secp256k1::Keypair; +use std::thread::available_parallelism; + +// You may need to add more detailed mocks depending on your actual code. +fn mock_tx(inputs_count: usize, non_uniq_signatures: usize) -> (Transaction, Vec) { + let reused_values = SigHashReusedValuesUnsync::new(); + let dummy_prev_out = TransactionOutpoint::new(kaspa_hashes::Hash::from_u64_word(1), 1); + let mut tx = Transaction::new( + 0, + vec![], + vec![], + 0, + SubnetworkId::from_bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), + 0, + vec![], + ); + let mut utxos = vec![]; + let mut kps = vec![]; + for _ in 0..inputs_count - non_uniq_signatures { + let kp = Keypair::new(secp256k1::SECP256K1, &mut thread_rng()); + tx.inputs.push(TransactionInput { previous_outpoint: dummy_prev_out, signature_script: vec![], sequence: 0, sig_op_count: 1 }); + let address = Address::new(Prefix::Mainnet, Version::PubKey, &kp.x_only_public_key().0.serialize()); + utxos.push(UtxoEntry { + amount: thread_rng().gen::() as u64, + script_public_key: pay_to_address_script(&address), + block_daa_score: 333, + is_coinbase: false, + }); + kps.push(kp); + } + for _ in 0..non_uniq_signatures { + let kp = kps.last().unwrap(); + tx.inputs.push(TransactionInput { previous_outpoint: dummy_prev_out, signature_script: vec![], sequence: 0, sig_op_count: 1 }); + let address = Address::new(Prefix::Mainnet, Version::PubKey, &kp.x_only_public_key().0.serialize()); + utxos.push(UtxoEntry { + amount: thread_rng().gen::() as u64, + script_public_key: pay_to_address_script(&address), + block_daa_score: 444, + is_coinbase: false, + }); + } + for (i, kp) in kps.iter().enumerate().take(inputs_count - non_uniq_signatures) { + let mut_tx = MutableTransaction::with_entries(&tx, utxos.clone()); + let sig_hash = calc_schnorr_signature_hash(&mut_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values); + let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); + let sig: [u8; 64] = *kp.sign_schnorr(msg).as_ref(); + // This represents OP_DATA_65 (since signature length is 64 bytes and SIGHASH_TYPE is one byte) + tx.inputs[i].signature_script = std::iter::once(65u8).chain(sig).chain([SIG_HASH_ALL.to_u8()]).collect(); + } + let length = tx.inputs.len(); + for i in (inputs_count - non_uniq_signatures)..length { + let kp = kps.last().unwrap(); + let mut_tx = MutableTransaction::with_entries(&tx, utxos.clone()); + let sig_hash = calc_schnorr_signature_hash(&mut_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values); + let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); + let sig: [u8; 64] = *kp.sign_schnorr(msg).as_ref(); + // This represents OP_DATA_65 (since signature length is 64 bytes and SIGHASH_TYPE is one byte) + tx.inputs[i].signature_script = std::iter::once(65u8).chain(sig).chain([SIG_HASH_ALL.to_u8()]).collect(); + } + (tx, utxos) +} + +fn benchmark_check_scripts(c: &mut Criterion) { + for inputs_count in [100, 50, 25, 10, 5, 2] { + for non_uniq_signatures in [0, inputs_count / 2] { + let 
(tx, utxos) = mock_tx(inputs_count, non_uniq_signatures); + let mut group = c.benchmark_group(format!("inputs: {inputs_count}, non uniq: {non_uniq_signatures}")); + group.sampling_mode(SamplingMode::Flat); + + group.bench_function("single_thread", |b| { + let tx = MutableTransaction::with_entries(&tx, utxos.clone()); + let cache = Cache::new(inputs_count as u64); + b.iter(|| { + cache.clear(); + check_scripts_sequential(black_box(&cache), black_box(&tx.as_verifiable())).unwrap(); + }) + }); + + group.bench_function("rayon par iter", |b| { + let tx = MutableTransaction::with_entries(tx.clone(), utxos.clone()); + let cache = Cache::new(inputs_count as u64); + b.iter(|| { + cache.clear(); + check_scripts_par_iter(black_box(&cache), black_box(&tx.as_verifiable())).unwrap(); + }) + }); + + // Iterate powers of two up to available parallelism + for i in (1..=(available_parallelism().unwrap().get() as f64).log2().ceil() as u32).map(|x| 2u32.pow(x) as usize) { + if inputs_count >= i { + group.bench_function(format!("rayon, custom thread pool, thread count {i}"), |b| { + let tx = MutableTransaction::with_entries(tx.clone(), utxos.clone()); + // Create a custom thread pool with the specified number of threads + let pool = rayon::ThreadPoolBuilder::new().num_threads(i).build().unwrap(); + let cache = Cache::new(inputs_count as u64); + b.iter(|| { + cache.clear(); + check_scripts_par_iter_pool(black_box(&cache), black_box(&tx.as_verifiable()), black_box(&pool)).unwrap(); + }) + }); + } + } + } + } +} + +criterion_group! { + name = benches; + // This can be any expression that returns a `Criterion` object. + config = Criterion::default().with_output_color(true).measurement_time(std::time::Duration::new(20, 0)); + targets = benchmark_check_scripts +} + +criterion_main!(benches); diff --git a/consensus/client/src/sign.rs b/consensus/client/src/sign.rs index 4044dc5701..18ff3c8491 100644 --- a/consensus/client/src/sign.rs +++ b/consensus/client/src/sign.rs @@ -7,7 +7,7 @@ use core::iter::once; use itertools::Itertools; use kaspa_consensus_core::{ hashing::{ - sighash::{calc_schnorr_signature_hash, SigHashReusedValues}, + sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync}, sighash_type::SIG_HASH_ALL, }, tx::PopulatedTransaction, @@ -44,7 +44,7 @@ pub fn sign_with_multiple_v3<'a>(tx: &'a Transaction, privkeys: &[[u8; 32]]) -> map.insert(script_pub_key_script, schnorr_key); } - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); let mut additional_signatures_required = false; { let input_len = tx.inner().inputs.len(); @@ -59,7 +59,7 @@ pub fn sign_with_multiple_v3<'a>(tx: &'a Transaction, privkeys: &[[u8; 32]]) -> }; let script = script_pub_key.script(); if let Some(schnorr_key) = map.get(script) { - let sig_hash = calc_schnorr_signature_hash(&populated_transaction, i, SIG_HASH_ALL, &mut reused_values); + let sig_hash = calc_schnorr_signature_hash(&populated_transaction, i, SIG_HASH_ALL, &reused_values); let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); let sig: [u8; 64] = *schnorr_key.sign_schnorr(msg).as_ref(); // This represents OP_DATA_65 (since signature length is 64 bytes and SIGHASH_TYPE is one byte) diff --git a/consensus/client/src/signing.rs b/consensus/client/src/signing.rs index ef993d0118..f7fe8cee6a 100644 --- a/consensus/client/src/signing.rs +++ b/consensus/client/src/signing.rs @@ -75,7 +75,7 @@ impl SigHashCache { } } - pub fn sig_op_counts_hash(&mut self, tx: &Transaction, 
hash_type: SigHashType, reused_values: &mut SigHashReusedValues) -> Hash { + pub fn sig_op_counts_hash(&mut self, tx: &Transaction, hash_type: SigHashType, reused_values: &SigHashReusedValues) -> Hash { if hash_type.is_sighash_anyone_can_pay() { return ZERO_HASH; } @@ -185,16 +185,16 @@ pub fn calc_schnorr_signature_hash( let mut hasher = TransactionSigningHash::new(); hasher .write_u16(tx.version) - .update(previous_outputs_hash(&tx, hash_type, &mut reused_values)) - .update(sequences_hash(&tx, hash_type, &mut reused_values)) - .update(sig_op_counts_hash(&tx, hash_type, &mut reused_values)); + .update(previous_outputs_hash(&tx, hash_type, &reused_values)) + .update(sequences_hash(&tx, hash_type, &reused_values)) + .update(sig_op_counts_hash(&tx, hash_type, &reused_values)); hash_outpoint(&mut hasher, input.previous_outpoint); hash_script_public_key(&mut hasher, &utxo.script_public_key); hasher .write_u64(utxo.amount) .write_u64(input.sequence) .write_u8(input.sig_op_count) - .update(outputs_hash(&tx, hash_type, &mut reused_values, input_index)) + .update(outputs_hash(&tx, hash_type, &reused_values, input_index)) .write_u64(tx.lock_time) .update(&tx.subnetwork_id) .write_u64(tx.gas) diff --git a/consensus/core/Cargo.toml b/consensus/core/Cargo.toml index 44dbedd387..228b4ac11d 100644 --- a/consensus/core/Cargo.toml +++ b/consensus/core/Cargo.toml @@ -15,6 +15,7 @@ wasm32-sdk = [] default = [] [dependencies] +arc-swap.workspace = true async-trait.workspace = true borsh.workspace = true cfg-if.workspace = true diff --git a/consensus/core/src/hashing/sighash.rs b/consensus/core/src/hashing/sighash.rs index c1b6133e8a..e6c7ad4dd0 100644 --- a/consensus/core/src/hashing/sighash.rs +++ b/consensus/core/src/hashing/sighash.rs @@ -1,4 +1,7 @@ +use arc_swap::ArcSwapOption; use kaspa_hashes::{Hash, Hasher, HasherBase, TransactionSigningHash, TransactionSigningHashECDSA, ZERO_HASH}; +use std::cell::Cell; +use std::sync::Arc; use crate::{ subnets::SUBNETWORK_ID_NATIVE, @@ -11,72 +14,172 @@ use super::{sighash_type::SigHashType, HasherExtensions}; /// the same for all transaction inputs. /// Reuse of such values prevents the quadratic hashing problem. 
#[derive(Default)] -pub struct SigHashReusedValues { - previous_outputs_hash: Option, - sequences_hash: Option, - sig_op_counts_hash: Option, - outputs_hash: Option, +pub struct SigHashReusedValuesUnsync { + previous_outputs_hash: Cell>, + sequences_hash: Cell>, + sig_op_counts_hash: Cell>, + outputs_hash: Cell>, } -impl SigHashReusedValues { +impl SigHashReusedValuesUnsync { pub fn new() -> Self { - Self { previous_outputs_hash: None, sequences_hash: None, sig_op_counts_hash: None, outputs_hash: None } + Self::default() } } -pub fn previous_outputs_hash(tx: &Transaction, hash_type: SigHashType, reused_values: &mut SigHashReusedValues) -> Hash { +#[derive(Default)] +pub struct SigHashReusedValuesSync { + previous_outputs_hash: ArcSwapOption, + sequences_hash: ArcSwapOption, + sig_op_counts_hash: ArcSwapOption, + outputs_hash: ArcSwapOption, +} + +impl SigHashReusedValuesSync { + pub fn new() -> Self { + Self::default() + } +} + +pub trait SigHashReusedValues { + fn previous_outputs_hash(&self, set: impl Fn() -> Hash) -> Hash; + fn sequences_hash(&self, set: impl Fn() -> Hash) -> Hash; + fn sig_op_counts_hash(&self, set: impl Fn() -> Hash) -> Hash; + fn outputs_hash(&self, set: impl Fn() -> Hash) -> Hash; +} + +impl SigHashReusedValues for Arc { + fn previous_outputs_hash(&self, set: impl Fn() -> Hash) -> Hash { + self.as_ref().previous_outputs_hash(set) + } + + fn sequences_hash(&self, set: impl Fn() -> Hash) -> Hash { + self.as_ref().sequences_hash(set) + } + + fn sig_op_counts_hash(&self, set: impl Fn() -> Hash) -> Hash { + self.as_ref().sig_op_counts_hash(set) + } + + fn outputs_hash(&self, set: impl Fn() -> Hash) -> Hash { + self.as_ref().outputs_hash(set) + } +} + +impl SigHashReusedValues for SigHashReusedValuesUnsync { + fn previous_outputs_hash(&self, set: impl Fn() -> Hash) -> Hash { + self.previous_outputs_hash.get().unwrap_or_else(|| { + let hash = set(); + self.previous_outputs_hash.set(Some(hash)); + hash + }) + } + + fn sequences_hash(&self, set: impl Fn() -> Hash) -> Hash { + self.sequences_hash.get().unwrap_or_else(|| { + let hash = set(); + self.sequences_hash.set(Some(hash)); + hash + }) + } + + fn sig_op_counts_hash(&self, set: impl Fn() -> Hash) -> Hash { + self.sig_op_counts_hash.get().unwrap_or_else(|| { + let hash = set(); + self.sig_op_counts_hash.set(Some(hash)); + hash + }) + } + + fn outputs_hash(&self, set: impl Fn() -> Hash) -> Hash { + self.outputs_hash.get().unwrap_or_else(|| { + let hash = set(); + self.outputs_hash.set(Some(hash)); + hash + }) + } +} + +impl SigHashReusedValues for SigHashReusedValuesSync { + fn previous_outputs_hash(&self, set: impl Fn() -> Hash) -> Hash { + if let Some(value) = self.previous_outputs_hash.load().as_ref() { + return **value; + } + let hash = set(); + self.previous_outputs_hash.rcu(|_| Arc::new(hash)); + hash + } + + fn sequences_hash(&self, set: impl Fn() -> Hash) -> Hash { + if let Some(value) = self.sequences_hash.load().as_ref() { + return **value; + } + let hash = set(); + self.sequences_hash.rcu(|_| Arc::new(hash)); + hash + } + + fn sig_op_counts_hash(&self, set: impl Fn() -> Hash) -> Hash { + if let Some(value) = self.sig_op_counts_hash.load().as_ref() { + return **value; + } + let hash = set(); + self.sig_op_counts_hash.rcu(|_| Arc::new(hash)); + hash + } + + fn outputs_hash(&self, set: impl Fn() -> Hash) -> Hash { + if let Some(value) = self.outputs_hash.load().as_ref() { + return **value; + } + let hash = set(); + self.outputs_hash.rcu(|_| Arc::new(hash)); + hash + } +} + +pub fn previous_outputs_hash(tx: 
&Transaction, hash_type: SigHashType, reused_values: &impl SigHashReusedValues) -> Hash { if hash_type.is_sighash_anyone_can_pay() { return ZERO_HASH; } - - if let Some(previous_outputs_hash) = reused_values.previous_outputs_hash { - previous_outputs_hash - } else { + let hash = || { let mut hasher = TransactionSigningHash::new(); for input in tx.inputs.iter() { hasher.update(input.previous_outpoint.transaction_id.as_bytes()); hasher.write_u32(input.previous_outpoint.index); } - let previous_outputs_hash = hasher.finalize(); - reused_values.previous_outputs_hash = Some(previous_outputs_hash); - previous_outputs_hash - } + hasher.finalize() + }; + reused_values.previous_outputs_hash(hash) } -pub fn sequences_hash(tx: &Transaction, hash_type: SigHashType, reused_values: &mut SigHashReusedValues) -> Hash { +pub fn sequences_hash(tx: &Transaction, hash_type: SigHashType, reused_values: &impl SigHashReusedValues) -> Hash { if hash_type.is_sighash_single() || hash_type.is_sighash_anyone_can_pay() || hash_type.is_sighash_none() { return ZERO_HASH; } - - if let Some(sequences_hash) = reused_values.sequences_hash { - sequences_hash - } else { + let hash = || { let mut hasher = TransactionSigningHash::new(); for input in tx.inputs.iter() { hasher.write_u64(input.sequence); } - let sequence_hash = hasher.finalize(); - reused_values.sequences_hash = Some(sequence_hash); - sequence_hash - } + hasher.finalize() + }; + reused_values.sequences_hash(hash) } -pub fn sig_op_counts_hash(tx: &Transaction, hash_type: SigHashType, reused_values: &mut SigHashReusedValues) -> Hash { +pub fn sig_op_counts_hash(tx: &Transaction, hash_type: SigHashType, reused_values: &impl SigHashReusedValues) -> Hash { if hash_type.is_sighash_anyone_can_pay() { return ZERO_HASH; } - if let Some(sig_op_counts_hash) = reused_values.sig_op_counts_hash { - sig_op_counts_hash - } else { + let hash = || { let mut hasher = TransactionSigningHash::new(); for input in tx.inputs.iter() { hasher.write_u8(input.sig_op_count); } - let sig_op_counts_hash = hasher.finalize(); - reused_values.sig_op_counts_hash = Some(sig_op_counts_hash); - sig_op_counts_hash - } + hasher.finalize() + }; + reused_values.sig_op_counts_hash(hash) } pub fn payload_hash(tx: &Transaction) -> Hash { @@ -92,7 +195,7 @@ pub fn payload_hash(tx: &Transaction) -> Hash { hasher.finalize() } -pub fn outputs_hash(tx: &Transaction, hash_type: SigHashType, reused_values: &mut SigHashReusedValues, input_index: usize) -> Hash { +pub fn outputs_hash(tx: &Transaction, hash_type: SigHashType, reused_values: &impl SigHashReusedValues, input_index: usize) -> Hash { if hash_type.is_sighash_none() { return ZERO_HASH; } @@ -107,19 +210,15 @@ pub fn outputs_hash(tx: &Transaction, hash_type: SigHashType, reused_values: &mu hash_output(&mut hasher, &tx.outputs[input_index]); return hasher.finalize(); } - - // Otherwise, return hash of all outputs. Re-use hash if available. - if let Some(outputs_hash) = reused_values.outputs_hash { - outputs_hash - } else { + let hash = || { let mut hasher = TransactionSigningHash::new(); for output in tx.outputs.iter() { hash_output(&mut hasher, output); } - let outputs_hash = hasher.finalize(); - reused_values.outputs_hash = Some(outputs_hash); - outputs_hash - } + hasher.finalize() + }; + // Otherwise, return hash of all outputs. Re-use hash if available. 
+ reused_values.outputs_hash(hash) } pub fn hash_outpoint(hasher: &mut impl Hasher, outpoint: TransactionOutpoint) { @@ -141,7 +240,7 @@ pub fn calc_schnorr_signature_hash( verifiable_tx: &impl VerifiableTransaction, input_index: usize, hash_type: SigHashType, - reused_values: &mut SigHashReusedValues, + reused_values: &impl SigHashReusedValues, ) -> Hash { let input = verifiable_tx.populated_input(input_index); let tx = verifiable_tx.tx(); @@ -170,7 +269,7 @@ pub fn calc_ecdsa_signature_hash( tx: &impl VerifiableTransaction, input_index: usize, hash_type: SigHashType, - reused_values: &mut SigHashReusedValues, + reused_values: &impl SigHashReusedValues, ) -> Hash { let hash = calc_schnorr_signature_hash(tx, input_index, hash_type, reused_values); let mut hasher = TransactionSigningHashECDSA::new(); @@ -573,9 +672,9 @@ mod tests { } } let populated_tx = PopulatedTransaction::new(&tx, entries); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); assert_eq!( - calc_schnorr_signature_hash(&populated_tx, test.input_index, test.hash_type, &mut reused_values).to_string(), + calc_schnorr_signature_hash(&populated_tx, test.input_index, test.hash_type, &reused_values).to_string(), test.expected_hash, "test {} failed", test.name diff --git a/consensus/core/src/sign.rs b/consensus/core/src/sign.rs index a40b949e35..1a87d03f17 100644 --- a/consensus/core/src/sign.rs +++ b/consensus/core/src/sign.rs @@ -1,6 +1,6 @@ use crate::{ hashing::{ - sighash::{calc_schnorr_signature_hash, SigHashReusedValues}, + sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync}, sighash_type::{SigHashType, SIG_HASH_ALL}, }, tx::{SignableTransaction, VerifiableTransaction}, @@ -84,9 +84,9 @@ pub fn sign(mut signable_tx: SignableTransaction, schnorr_key: secp256k1::Keypai signable_tx.tx.inputs[i].sig_op_count = 1; } - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); for i in 0..signable_tx.tx.inputs.len() { - let sig_hash = calc_schnorr_signature_hash(&signable_tx.as_verifiable(), i, SIG_HASH_ALL, &mut reused_values); + let sig_hash = calc_schnorr_signature_hash(&signable_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values); let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); let sig: [u8; 64] = *schnorr_key.sign_schnorr(msg).as_ref(); // This represents OP_DATA_65 (since signature length is 64 bytes and SIGHASH_TYPE is one byte) @@ -106,11 +106,11 @@ pub fn sign_with_multiple(mut mutable_tx: SignableTransaction, privkeys: Vec<[u8 mutable_tx.tx.inputs[i].sig_op_count = 1; } - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); for i in 0..mutable_tx.tx.inputs.len() { let script = mutable_tx.entries[i].as_ref().unwrap().script_public_key.script(); if let Some(schnorr_key) = map.get(script) { - let sig_hash = calc_schnorr_signature_hash(&mutable_tx.as_verifiable(), i, SIG_HASH_ALL, &mut reused_values); + let sig_hash = calc_schnorr_signature_hash(&mutable_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values); let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); let sig: [u8; 64] = *schnorr_key.sign_schnorr(msg).as_ref(); // This represents OP_DATA_65 (since signature length is 64 bytes and SIGHASH_TYPE is one byte) @@ -132,12 +132,12 @@ pub fn sign_with_multiple_v2(mut mutable_tx: SignableTransaction, privkeys: &[[u map.insert(script_pub_key_script, schnorr_key); } - let mut 
reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); let mut additional_signatures_required = false; for i in 0..mutable_tx.tx.inputs.len() { let script = mutable_tx.entries[i].as_ref().unwrap().script_public_key.script(); if let Some(schnorr_key) = map.get(script) { - let sig_hash = calc_schnorr_signature_hash(&mutable_tx.as_verifiable(), i, SIG_HASH_ALL, &mut reused_values); + let sig_hash = calc_schnorr_signature_hash(&mutable_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values); let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); let sig: [u8; 64] = *schnorr_key.sign_schnorr(msg).as_ref(); // This represents OP_DATA_65 (since signature length is 64 bytes and SIGHASH_TYPE is one byte) @@ -155,9 +155,9 @@ pub fn sign_with_multiple_v2(mut mutable_tx: SignableTransaction, privkeys: &[[u /// Sign a transaction input with a sighash_type using schnorr pub fn sign_input(tx: &impl VerifiableTransaction, input_index: usize, private_key: &[u8; 32], hash_type: SigHashType) -> Vec { - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); - let hash = calc_schnorr_signature_hash(tx, input_index, hash_type, &mut reused_values); + let hash = calc_schnorr_signature_hash(tx, input_index, hash_type, &reused_values); let msg = secp256k1::Message::from_digest_slice(hash.as_bytes().as_slice()).unwrap(); let schnorr_key = secp256k1::Keypair::from_seckey_slice(secp256k1::SECP256K1, private_key).unwrap(); let sig: [u8; 64] = *schnorr_key.sign_schnorr(msg).as_ref(); @@ -167,7 +167,7 @@ pub fn sign_input(tx: &impl VerifiableTransaction, input_index: usize, private_k } pub fn verify(tx: &impl VerifiableTransaction) -> Result<(), Error> { - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); for (i, (input, entry)) in tx.populated_inputs().enumerate() { if input.signature_script.is_empty() { return Err(Error::Message(format!("Signature is empty for input: {i}"))); @@ -175,7 +175,7 @@ pub fn verify(tx: &impl VerifiableTransaction) -> Result<(), Error> { let pk = &entry.script_public_key.script()[1..33]; let pk = secp256k1::XOnlyPublicKey::from_slice(pk)?; let sig = secp256k1::schnorr::Signature::from_slice(&input.signature_script[1..65])?; - let sig_hash = calc_schnorr_signature_hash(tx, i, SIG_HASH_ALL, &mut reused_values); + let sig_hash = calc_schnorr_signature_hash(tx, i, SIG_HASH_ALL, &reused_values); let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice())?; sig.verify(&msg, &pk)?; } diff --git a/consensus/src/pipeline/virtual_processor/processor.rs b/consensus/src/pipeline/virtual_processor/processor.rs index 88fee97bff..9af6879c7b 100644 --- a/consensus/src/pipeline/virtual_processor/processor.rs +++ b/consensus/src/pipeline/virtual_processor/processor.rs @@ -778,7 +778,10 @@ impl VirtualStateProcessor { let virtual_utxo_view = &virtual_read.utxo_set; let virtual_daa_score = virtual_state.daa_score; let virtual_past_median_time = virtual_state.past_median_time; - self.validate_mempool_transaction_impl(mutable_tx, virtual_utxo_view, virtual_daa_score, virtual_past_median_time, args) + // Run within the thread pool since par_iter might be internally applied to inputs + self.thread_pool.install(|| { + self.validate_mempool_transaction_impl(mutable_tx, virtual_utxo_view, virtual_daa_score, virtual_past_median_time, args) + }) } pub fn validate_mempool_transactions_in_parallel( diff --git 
a/consensus/src/processes/transaction_validator/transaction_validator_populated.rs b/consensus/src/processes/transaction_validator/transaction_validator_populated.rs index 4a8733d2be..dbf1aa37ea 100644 --- a/consensus/src/processes/transaction_validator/transaction_validator_populated.rs +++ b/consensus/src/processes/transaction_validator/transaction_validator_populated.rs @@ -1,18 +1,24 @@ use crate::constants::{MAX_SOMPI, SEQUENCE_LOCK_TIME_DISABLED, SEQUENCE_LOCK_TIME_MASK}; use kaspa_consensus_core::{ - hashing::sighash::SigHashReusedValues, + hashing::sighash::{SigHashReusedValuesSync, SigHashReusedValuesUnsync}, mass::Kip9Version, tx::{TransactionInput, VerifiableTransaction}, }; use kaspa_core::warn; -use kaspa_txscript::{get_sig_op_count, TxScriptEngine}; +use kaspa_txscript::{caches::Cache, get_sig_op_count, SigCacheKey, TxScriptEngine}; use kaspa_txscript_errors::TxScriptError; +use rayon::iter::{IntoParallelIterator, ParallelIterator}; +use rayon::ThreadPool; +use std::marker::Sync; use super::{ errors::{TxResult, TxRuleError}, TransactionValidator, }; +/// The threshold above which we apply parallelism to input script processing +const CHECK_SCRIPTS_PARALLELISM_THRESHOLD: usize = 1; + #[derive(Clone, Copy, PartialEq, Eq)] pub enum TxValidationFlags { /// Perform full validation including script verification @@ -29,7 +35,7 @@ pub enum TxValidationFlags { impl TransactionValidator { pub fn validate_populated_transaction_and_get_fee( &self, - tx: &impl VerifiableTransaction, + tx: &(impl VerifiableTransaction + Sync), pov_daa_score: u64, flags: TxValidationFlags, mass_and_feerate_threshold: Option<(u64, f64)>, @@ -48,8 +54,8 @@ impl TransactionValidator { } Self::check_sequence_lock(tx, pov_daa_score)?; - // The following call is not a consensus check (it could not be one in the first place since it uses floating number) - // but rather a mempool Replace by Fee validation rule. It was placed here purposely for avoiding unneeded script checks. + // The following call is not a consensus check (it could not be one in the first place since it uses a floating number) + // but rather a mempool Replace by Fee validation rule. It is placed here purposely for avoiding unneeded script checks. 
Self::check_feerate_threshold(fee, mass_and_feerate_threshold)?; match flags { @@ -158,7 +164,7 @@ impl TransactionValidator { fn check_sig_op_counts(tx: &T) -> TxResult<()> { for (i, (input, entry)) in tx.populated_inputs().enumerate() { - let calculated = get_sig_op_count::(&input.signature_script, &entry.script_public_key); + let calculated = get_sig_op_count::(&input.signature_script, &entry.script_public_key); if calculated != input.sig_op_count as u64 { return Err(TxRuleError::WrongSigOpCount(i, input.sig_op_count as u64, calculated)); } @@ -166,16 +172,45 @@ impl TransactionValidator { Ok(()) } - pub fn check_scripts(&self, tx: &impl VerifiableTransaction) -> TxResult<()> { - let mut reused_values = SigHashReusedValues::new(); - for (i, (input, entry)) in tx.populated_inputs().enumerate() { - let mut engine = TxScriptEngine::from_transaction_input(tx, input, i, entry, &mut reused_values, &self.sig_cache) - .map_err(|err| map_script_err(err, input))?; - engine.execute().map_err(|err| map_script_err(err, input))?; - } + pub fn check_scripts(&self, tx: &(impl VerifiableTransaction + Sync)) -> TxResult<()> { + check_scripts(&self.sig_cache, tx) + } +} - Ok(()) +pub fn check_scripts(sig_cache: &Cache, tx: &(impl VerifiableTransaction + Sync)) -> TxResult<()> { + if tx.inputs().len() > CHECK_SCRIPTS_PARALLELISM_THRESHOLD { + check_scripts_par_iter(sig_cache, tx) + } else { + check_scripts_sequential(sig_cache, tx) + } +} + +pub fn check_scripts_sequential(sig_cache: &Cache, tx: &impl VerifiableTransaction) -> TxResult<()> { + let reused_values = SigHashReusedValuesUnsync::new(); + for (i, (input, entry)) in tx.populated_inputs().enumerate() { + TxScriptEngine::from_transaction_input(tx, input, i, entry, &reused_values, sig_cache) + .and_then(|mut e| e.execute()) + .map_err(|err| map_script_err(err, input))?; } + Ok(()) +} + +pub fn check_scripts_par_iter(sig_cache: &Cache, tx: &(impl VerifiableTransaction + Sync)) -> TxResult<()> { + let reused_values = SigHashReusedValuesSync::new(); + (0..tx.inputs().len()).into_par_iter().try_for_each(|idx| { + let (input, utxo) = tx.populated_input(idx); + TxScriptEngine::from_transaction_input(tx, input, idx, utxo, &reused_values, sig_cache) + .and_then(|mut e| e.execute()) + .map_err(|err| map_script_err(err, input)) + }) +} + +pub fn check_scripts_par_iter_pool( + sig_cache: &Cache, + tx: &(impl VerifiableTransaction + Sync), + pool: &ThreadPool, +) -> TxResult<()> { + pool.install(|| check_scripts_par_iter(sig_cache, tx)) } fn map_script_err(script_err: TxScriptError, input: &TransactionInput) -> TxRuleError { @@ -189,6 +224,7 @@ fn map_script_err(script_err: TxScriptError, input: &TransactionInput) -> TxRule #[cfg(test)] mod tests { use super::super::errors::TxRuleError; + use super::CHECK_SCRIPTS_PARALLELISM_THRESHOLD; use core::str::FromStr; use itertools::Itertools; use kaspa_consensus_core::sign::sign; @@ -202,6 +238,15 @@ mod tests { use crate::{params::MAINNET_PARAMS, processes::transaction_validator::TransactionValidator}; + /// Helper function to duplicate the last input + fn duplicate_input(tx: &Transaction, entries: &[UtxoEntry]) -> (Transaction, Vec) { + let mut tx2 = tx.clone(); + let mut entries2 = entries.to_owned(); + tx2.inputs.push(tx2.inputs.last().unwrap().clone()); + entries2.push(entries2.last().unwrap().clone()); + (tx2, entries2) + } + #[test] fn check_signature_test() { let mut params = MAINNET_PARAMS.clone(); @@ -261,6 +306,14 @@ mod tests { ); tv.check_scripts(&populated_tx).expect("Signature check failed"); + + // Test 
a tx with 2 inputs to cover parallelism split points in inner script checking code + let (tx2, entries2) = duplicate_input(&tx, &populated_tx.entries); + // Duplicated sigs should fail due to wrong sighash + assert_eq!( + tv.check_scripts(&PopulatedTransaction::new(&tx2, entries2)), + Err(TxRuleError::SignatureInvalid(TxScriptError::EvalFalse)) + ); } #[test] @@ -322,7 +375,18 @@ mod tests { }], ); - assert!(tv.check_scripts(&populated_tx).is_err(), "Failing Signature Test Failed"); + assert!(tv.check_scripts(&populated_tx).is_err(), "Expecting signature check to fail"); + + // Test a tx with 2 inputs to cover parallelism split points in inner script checking code + let (tx2, entries2) = duplicate_input(&tx, &populated_tx.entries); + tv.check_scripts(&PopulatedTransaction::new(&tx2, entries2)).expect_err("Expecting signature check to fail"); + + // Verify we are correctly testing the parallelism case (applied here as sanity for all tests) + assert!( + tx2.inputs.len() > CHECK_SCRIPTS_PARALLELISM_THRESHOLD, + "The script tests must cover the case of a tx with inputs.len() > {}", + CHECK_SCRIPTS_PARALLELISM_THRESHOLD + ); } #[test] @@ -385,6 +449,14 @@ mod tests { }], ); tv.check_scripts(&populated_tx).expect("Signature check failed"); + + // Test a tx with 2 inputs to cover parallelism split points in inner script checking code + let (tx2, entries2) = duplicate_input(&tx, &populated_tx.entries); + // Duplicated sigs should fail due to wrong sighash + assert_eq!( + tv.check_scripts(&PopulatedTransaction::new(&tx2, entries2)), + Err(TxRuleError::SignatureInvalid(TxScriptError::NullFail)) + ); } #[test] @@ -447,7 +519,14 @@ mod tests { }], ); - assert!(tv.check_scripts(&populated_tx) == Err(TxRuleError::SignatureInvalid(TxScriptError::NullFail))); + assert_eq!(tv.check_scripts(&populated_tx), Err(TxRuleError::SignatureInvalid(TxScriptError::NullFail))); + + // Test a tx with 2 inputs to cover parallelism split points in inner script checking code + let (tx2, entries2) = duplicate_input(&tx, &populated_tx.entries); + assert_eq!( + tv.check_scripts(&PopulatedTransaction::new(&tx2, entries2)), + Err(TxRuleError::SignatureInvalid(TxScriptError::NullFail)) + ); } #[test] @@ -510,7 +589,14 @@ mod tests { }], ); - assert!(tv.check_scripts(&populated_tx) == Err(TxRuleError::SignatureInvalid(TxScriptError::NullFail))); + assert_eq!(tv.check_scripts(&populated_tx), Err(TxRuleError::SignatureInvalid(TxScriptError::NullFail))); + + // Test a tx with 2 inputs to cover parallelism split points in inner script checking code + let (tx2, entries2) = duplicate_input(&tx, &populated_tx.entries); + assert_eq!( + tv.check_scripts(&PopulatedTransaction::new(&tx2, entries2)), + Err(TxRuleError::SignatureInvalid(TxScriptError::NullFail)) + ); } #[test] @@ -573,8 +659,14 @@ mod tests { }], ); - let result = tv.check_scripts(&populated_tx); - assert!(result == Err(TxRuleError::SignatureInvalid(TxScriptError::EvalFalse))); + assert_eq!(tv.check_scripts(&populated_tx), Err(TxRuleError::SignatureInvalid(TxScriptError::EvalFalse))); + + // Test a tx with 2 inputs to cover parallelism split points in inner script checking code + let (tx2, entries2) = duplicate_input(&tx, &populated_tx.entries); + assert_eq!( + tv.check_scripts(&PopulatedTransaction::new(&tx2, entries2)), + Err(TxRuleError::SignatureInvalid(TxScriptError::EvalFalse)) + ); } #[test] @@ -628,8 +720,14 @@ mod tests { }], ); - let result = tv.check_scripts(&populated_tx); - assert!(result == 
Err(TxRuleError::SignatureInvalid(TxScriptError::SignatureScriptNotPushOnly))); + assert_eq!(tv.check_scripts(&populated_tx), Err(TxRuleError::SignatureInvalid(TxScriptError::SignatureScriptNotPushOnly))); + + // Test a tx with 2 inputs to cover parallelism split points in inner script checking code + let (tx2, entries2) = duplicate_input(&tx, &populated_tx.entries); + assert_eq!( + tv.check_scripts(&PopulatedTransaction::new(&tx2, entries2)), + Err(TxRuleError::SignatureInvalid(TxScriptError::SignatureScriptNotPushOnly)) + ); } #[test] diff --git a/consensus/wasm/src/utils.rs b/consensus/wasm/src/utils.rs index 0139b573f5..b70664e1e6 100644 --- a/consensus/wasm/src/utils.rs +++ b/consensus/wasm/src/utils.rs @@ -1,5 +1,5 @@ use crate::result::Result; -use kaspa_consensus_core::hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValues}; +use kaspa_consensus_core::hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync}; use kaspa_consensus_core::hashing::sighash_type::SIG_HASH_ALL; use kaspa_consensus_core::tx; @@ -9,9 +9,9 @@ pub fn script_hashes(mut mutable_tx: tx::SignableTransaction) -> Result Option { self.map.read().get(key).cloned().inspect(|_data| { self.counters.get_counts.fetch_add(1, Ordering::Relaxed); diff --git a/crypto/txscript/src/lib.rs b/crypto/txscript/src/lib.rs index b145fb90e5..f177962721 100644 --- a/crypto/txscript/src/lib.rs +++ b/crypto/txscript/src/lib.rs @@ -45,6 +45,8 @@ pub const MAX_PUB_KEYS_PER_MUTLTISIG: i32 = 20; // Note that this includes OP_RESERVED which counts as a push operation. pub const NO_COST_OPCODE: u8 = 0x60; +type DynOpcodeImplementation = Box>; + #[derive(Clone, Hash, PartialEq, Eq)] enum Signature { Secp256k1(secp256k1::schnorr::Signature), @@ -70,15 +72,14 @@ enum ScriptSource<'a, T: VerifiableTransaction> { StandAloneScripts(Vec<&'a [u8]>), } -pub struct TxScriptEngine<'a, T: VerifiableTransaction> { +pub struct TxScriptEngine<'a, T: VerifiableTransaction, Reused: SigHashReusedValues> { dstack: Stack, astack: Stack, script_source: ScriptSource<'a, T>, // Outer caches for quicker calculation - // TODO:: make it compatible with threading - reused_values: &'a mut SigHashReusedValues, + reused_values: &'a Reused, sig_cache: &'a Cache, cond_stack: Vec, // Following if stacks, and whether it is running @@ -86,30 +87,35 @@ pub struct TxScriptEngine<'a, T: VerifiableTransaction> { num_ops: i32, } -fn parse_script( +fn parse_script( script: &[u8], -) -> impl Iterator>, TxScriptError>> + '_ { +) -> impl Iterator, TxScriptError>> + '_ { script.iter().batching(|it| deserialize_next_opcode(it)) } -pub fn get_sig_op_count(signature_script: &[u8], prev_script_public_key: &ScriptPublicKey) -> u64 { +pub fn get_sig_op_count( + signature_script: &[u8], + prev_script_public_key: &ScriptPublicKey, +) -> u64 { let is_p2sh = ScriptClass::is_pay_to_script_hash(prev_script_public_key.script()); - let script_pub_key_ops = parse_script::(prev_script_public_key.script()).collect_vec(); + let script_pub_key_ops = parse_script::(prev_script_public_key.script()).collect_vec(); if !is_p2sh { return get_sig_op_count_by_opcodes(&script_pub_key_ops); } - let signature_script_ops = parse_script::(signature_script).collect_vec(); + let signature_script_ops = parse_script::(signature_script).collect_vec(); if signature_script_ops.is_empty() || signature_script_ops.iter().any(|op| op.is_err() || !op.as_ref().unwrap().is_push_opcode()) { return 0; } let p2sh_script = signature_script_ops.last().expect("checked if empty above").as_ref().expect("checked 
if err above").get_data(); - let p2sh_ops = parse_script::(p2sh_script).collect_vec(); + let p2sh_ops = parse_script::(p2sh_script).collect_vec(); get_sig_op_count_by_opcodes(&p2sh_ops) } -fn get_sig_op_count_by_opcodes(opcodes: &[Result>, TxScriptError>]) -> u64 { +fn get_sig_op_count_by_opcodes( + opcodes: &[Result, TxScriptError>], +) -> u64 { // TODO: Check for overflows let mut num_sigs: u64 = 0; for (i, op) in opcodes.iter().enumerate() { @@ -142,12 +148,12 @@ fn get_sig_op_count_by_opcodes(opcodes: &[Result(script: &[u8]) -> bool { - parse_script::(script).enumerate().any(|(index, op)| op.is_err() || (index == 0 && op.unwrap().value() == OpReturn)) +pub fn is_unspendable(script: &[u8]) -> bool { + parse_script::(script).enumerate().any(|(index, op)| op.is_err() || (index == 0 && op.unwrap().value() == OpReturn)) } -impl<'a, T: VerifiableTransaction> TxScriptEngine<'a, T> { - pub fn new(reused_values: &'a mut SigHashReusedValues, sig_cache: &'a Cache) -> Self { +impl<'a, T: VerifiableTransaction, Reused: SigHashReusedValues> TxScriptEngine<'a, T, Reused> { + pub fn new(reused_values: &'a Reused, sig_cache: &'a Cache) -> Self { Self { dstack: vec![], astack: vec![], @@ -164,7 +170,7 @@ impl<'a, T: VerifiableTransaction> TxScriptEngine<'a, T> { input: &'a TransactionInput, input_idx: usize, utxo_entry: &'a UtxoEntry, - reused_values: &'a mut SigHashReusedValues, + reused_values: &'a Reused, sig_cache: &'a Cache, ) -> Result { let script_public_key = utxo_entry.script_public_key.script(); @@ -185,7 +191,7 @@ impl<'a, T: VerifiableTransaction> TxScriptEngine<'a, T> { } } - pub fn from_script(script: &'a [u8], reused_values: &'a mut SigHashReusedValues, sig_cache: &'a Cache) -> Self { + pub fn from_script(script: &'a [u8], reused_values: &'a Reused, sig_cache: &'a Cache) -> Self { Self { dstack: Default::default(), astack: Default::default(), @@ -202,7 +208,7 @@ impl<'a, T: VerifiableTransaction> TxScriptEngine<'a, T> { return self.cond_stack.is_empty() || *self.cond_stack.last().expect("Checked not empty") == OpCond::True; } - fn execute_opcode(&mut self, opcode: Box>) -> Result<(), TxScriptError> { + fn execute_opcode(&mut self, opcode: DynOpcodeImplementation) -> Result<(), TxScriptError> { // Different from kaspad: Illegal and disabled opcode are checked on execute instead // Note that this includes OP_RESERVED which counts as a push operation. 
if !opcode.is_push_opcode() { @@ -512,6 +518,7 @@ mod tests { use crate::opcodes::codes::{OpBlake2b, OpCheckSig, OpData1, OpData2, OpData32, OpDup, OpEqual, OpPushData1, OpTrue}; use super::*; + use kaspa_consensus_core::hashing::sighash::SigHashReusedValuesUnsync; use kaspa_consensus_core::tx::{ PopulatedTransaction, ScriptPublicKey, Transaction, TransactionId, TransactionOutpoint, TransactionOutput, }; @@ -542,7 +549,7 @@ mod tests { fn run_test_script_cases(test_cases: Vec) { let sig_cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); for test in test_cases { // Ensure encapsulation of variables (no leaking between tests) @@ -565,7 +572,7 @@ mod tests { let populated_tx = PopulatedTransaction::new(&tx, vec![utxo_entry.clone()]); - let mut vm = TxScriptEngine::from_transaction_input(&populated_tx, &input, 0, &utxo_entry, &mut reused_values, &sig_cache) + let mut vm = TxScriptEngine::from_transaction_input(&populated_tx, &input, 0, &utxo_entry, &reused_values, &sig_cache) .expect("Script creation failed"); assert_eq!(vm.execute(), test.expected_result); } @@ -783,7 +790,7 @@ mod tests { ]; for test in test_cases { - let check = TxScriptEngine::::check_pub_key_encoding(test.key); + let check = TxScriptEngine::::check_pub_key_encoding(test.key); if test.is_valid { assert_eq!( check, @@ -880,7 +887,10 @@ mod tests { for test in tests { assert_eq!( - get_sig_op_count::(test.signature_script, &test.prev_script_public_key), + get_sig_op_count::( + test.signature_script, + &test.prev_script_public_key + ), test.expected_sig_ops, "failed for '{}'", test.name @@ -909,7 +919,7 @@ mod tests { for test in tests { assert_eq!( - is_unspendable::(test.script_public_key), + is_unspendable::(test.script_public_key), test.expected, "failed for '{}'", test.name @@ -929,6 +939,7 @@ mod bitcoind_tests { use super::*; use crate::script_builder::ScriptBuilderError; use kaspa_consensus_core::constants::MAX_TX_IN_SEQUENCE_NUM; + use kaspa_consensus_core::hashing::sighash::SigHashReusedValuesUnsync; use kaspa_consensus_core::tx::{ PopulatedTransaction, ScriptPublicKey, Transaction, TransactionId, TransactionOutpoint, TransactionOutput, }; @@ -1019,13 +1030,13 @@ mod bitcoind_tests { // Run transaction let sig_cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); let mut vm = TxScriptEngine::from_transaction_input( &populated_tx, &populated_tx.tx().inputs[0], 0, &populated_tx.entries[0], - &mut reused_values, + &reused_values, &sig_cache, ) .map_err(UnifiedError::TxScriptError)?; diff --git a/crypto/txscript/src/opcodes/macros.rs b/crypto/txscript/src/opcodes/macros.rs index b3db98829a..c4d161d400 100644 --- a/crypto/txscript/src/opcodes/macros.rs +++ b/crypto/txscript/src/opcodes/macros.rs @@ -6,9 +6,9 @@ macro_rules! opcode_serde { [[self.value()].as_slice(), length.to_le_bytes().as_slice(), self.data.as_slice()].concat() } - fn deserialize<'i, I: Iterator, T: VerifiableTransaction>( + fn deserialize<'i, I: Iterator, T: VerifiableTransaction, Reused: SigHashReusedValues>( it: &mut I, - ) -> Result>, TxScriptError> { + ) -> Result>, TxScriptError> { match it.take(size_of::<$type>()).copied().collect::>().try_into() { Ok(bytes) => { let length = <$type>::from_le_bytes(bytes) as usize; @@ -32,9 +32,9 @@ macro_rules! 
opcode_serde { [[self.value()].as_slice(), self.data.clone().as_slice()].concat() } - fn deserialize<'i, I: Iterator, T: VerifiableTransaction>( + fn deserialize<'i, I: Iterator, T: VerifiableTransaction, Reused: SigHashReusedValues>( it: &mut I, - ) -> Result>, TxScriptError> { + ) -> Result>, TxScriptError> { // Static length includes the opcode itself let data: Vec = it.take($length - 1).copied().collect(); Self::new(data) @@ -44,7 +44,7 @@ macro_rules! opcode_serde { macro_rules! opcode_init { ($type:ty) => { - fn new(data: Vec) -> Result>, TxScriptError> { + fn new(data: Vec) -> Result>, TxScriptError> { if data.len() > <$type>::MAX as usize { return Err(TxScriptError::MalformedPush(<$type>::MAX as usize, data.len())); } @@ -52,7 +52,7 @@ macro_rules! opcode_init { } }; ($length: literal) => { - fn new(data: Vec) -> Result>, TxScriptError> { + fn new(data: Vec) -> Result>, TxScriptError> { if data.len() != $length - 1 { return Err(TxScriptError::MalformedPush($length - 1, data.len())); } @@ -69,20 +69,20 @@ macro_rules! opcode_impl { opcode_serde!($length); } - impl OpCodeExecution for $name { - fn empty() -> Result>, TxScriptError> { + impl OpCodeExecution for $name { + fn empty() -> Result>, TxScriptError> { Self::new(vec![]) } opcode_init!($length); #[allow(unused_variables)] - fn execute(&$self, $vm: &mut TxScriptEngine) -> OpCodeResult { + fn execute(&$self, $vm: &mut TxScriptEngine) -> OpCodeResult { $code } } - impl OpCodeImplementation for $name {} + impl OpCodeImplementation for $name {} } } @@ -111,7 +111,7 @@ macro_rules! opcode_list { )? )* - pub fn deserialize_next_opcode<'i, I: Iterator, T: VerifiableTransaction>(it: &mut I) -> Option>, TxScriptError>> { + pub fn deserialize_next_opcode<'i, I: Iterator, T: VerifiableTransaction, Reused: SigHashReusedValues>(it: &mut I) -> Option>, TxScriptError>> { match it.next() { Some(opcode_num) => match opcode_num { $( diff --git a/crypto/txscript/src/opcodes/mod.rs b/crypto/txscript/src/opcodes/mod.rs index ad800d2488..f2a92fa0b5 100644 --- a/crypto/txscript/src/opcodes/mod.rs +++ b/crypto/txscript/src/opcodes/mod.rs @@ -8,6 +8,7 @@ use crate::{ }; use blake2b_simd::Params; use core::cmp::{max, min}; +use kaspa_consensus_core::hashing::sighash::SigHashReusedValues; use kaspa_consensus_core::hashing::sighash_type::SigHashType; use kaspa_consensus_core::tx::VerifiableTransaction; use sha2::{Digest, Sha256}; @@ -73,28 +74,31 @@ pub trait OpCodeMetadata: Debug { } } -pub trait OpCodeExecution { - fn empty() -> Result>, TxScriptError> +pub trait OpCodeExecution { + fn empty() -> Result>, TxScriptError> where Self: Sized; #[allow(clippy::new_ret_no_self)] - fn new(data: Vec) -> Result>, TxScriptError> + fn new(data: Vec) -> Result>, TxScriptError> where Self: Sized; - fn execute(&self, vm: &mut TxScriptEngine) -> OpCodeResult; + fn execute(&self, vm: &mut TxScriptEngine) -> OpCodeResult; } pub trait OpcodeSerialization { fn serialize(&self) -> Vec; - fn deserialize<'i, I: Iterator, T: VerifiableTransaction>( + fn deserialize<'i, I: Iterator, T: VerifiableTransaction, Reused: SigHashReusedValues>( it: &mut I, - ) -> Result>, TxScriptError> + ) -> Result>, TxScriptError> where Self: Sized; } -pub trait OpCodeImplementation: OpCodeExecution + OpCodeMetadata + OpcodeSerialization {} +pub trait OpCodeImplementation: + OpCodeExecution + OpCodeMetadata + OpcodeSerialization +{ +} impl OpCodeMetadata for OpCode { fn value(&self) -> u8 { @@ -193,13 +197,19 @@ impl OpCodeMetadata for OpCode { // Helpers for some opcodes with shared data #[inline] 
-fn push_data(data: Vec, vm: &mut TxScriptEngine) -> OpCodeResult { +fn push_data( + data: Vec, + vm: &mut TxScriptEngine, +) -> OpCodeResult { vm.dstack.push(data); Ok(()) } #[inline] -fn push_number(number: i64, vm: &mut TxScriptEngine) -> OpCodeResult { +fn push_number( + number: i64, + vm: &mut TxScriptEngine, +) -> OpCodeResult { vm.dstack.push_item(number); Ok(()) } @@ -958,7 +968,7 @@ opcode_list! { // converts an opcode from the list of Op0 to Op16 to its associated value #[allow(clippy::borrowed_box)] -pub fn to_small_int(opcode: &Box>) -> u8 { +pub fn to_small_int(opcode: &Box>) -> u8 { let value = opcode.value(); if value == codes::OpFalse { return 0; @@ -976,7 +986,7 @@ mod test { use crate::{opcodes, pay_to_address_script, TxScriptEngine, TxScriptError, LOCK_TIME_THRESHOLD}; use kaspa_addresses::{Address, Prefix, Version}; use kaspa_consensus_core::constants::{SOMPI_PER_KASPA, TX_VERSION}; - use kaspa_consensus_core::hashing::sighash::SigHashReusedValues; + use kaspa_consensus_core::hashing::sighash::SigHashReusedValuesUnsync; use kaspa_consensus_core::subnets::SUBNETWORK_ID_NATIVE; use kaspa_consensus_core::tx::{ PopulatedTransaction, ScriptPublicKey, Transaction, TransactionInput, TransactionOutpoint, TransactionOutput, UtxoEntry, @@ -985,21 +995,21 @@ mod test { struct TestCase<'a> { init: Stack, - code: Box>>, + code: Box, SigHashReusedValuesUnsync>>, dstack: Stack, } struct ErrorTestCase<'a> { init: Stack, - code: Box>>, + code: Box, SigHashReusedValuesUnsync>>, error: TxScriptError, } fn run_success_test_cases(tests: Vec) { let cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); for TestCase { init, code, dstack } in tests { - let mut vm = TxScriptEngine::new(&mut reused_values, &cache); + let mut vm = TxScriptEngine::new(&reused_values, &cache); vm.dstack = init; code.execute(&mut vm).unwrap_or_else(|_| panic!("Opcode {} should not fail", code.value())); assert_eq!(*vm.dstack, dstack, "OpCode {} Pushed wrong value", code.value()); @@ -1008,9 +1018,9 @@ mod test { fn run_error_test_cases(tests: Vec) { let cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); for ErrorTestCase { init, code, error } in tests { - let mut vm = TxScriptEngine::new(&mut reused_values, &cache); + let mut vm = TxScriptEngine::new(&reused_values, &cache); vm.dstack.clone_from(&init); assert_eq!( code.execute(&mut vm) @@ -1025,7 +1035,7 @@ mod test { #[test] fn test_opcode_disabled() { - let tests: Vec>> = vec![ + let tests: Vec>> = vec![ opcodes::OpCat::empty().expect("Should accept empty"), opcodes::OpSubStr::empty().expect("Should accept empty"), opcodes::OpLeft::empty().expect("Should accept empty"), @@ -1044,8 +1054,8 @@ mod test { ]; let cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); - let mut vm = TxScriptEngine::new(&mut reused_values, &cache); + let reused_values = SigHashReusedValuesUnsync::new(); + let mut vm = TxScriptEngine::new(&reused_values, &cache); for pop in tests { match pop.execute(&mut vm) { @@ -1057,7 +1067,7 @@ mod test { #[test] fn test_opcode_reserved() { - let tests: Vec>> = vec![ + let tests: Vec>> = vec![ opcodes::OpReserved::empty().expect("Should accept empty"), opcodes::OpVer::empty().expect("Should accept empty"), opcodes::OpVerIf::empty().expect("Should accept empty"), @@ -1067,8 +1077,8 @@ mod test { ]; let cache = Cache::new(10_000); - let mut reused_values = 
SigHashReusedValues::new(); - let mut vm = TxScriptEngine::new(&mut reused_values, &cache); + let reused_values = SigHashReusedValuesUnsync::new(); + let mut vm = TxScriptEngine::new(&reused_values, &cache); for pop in tests { match pop.execute(&mut vm) { @@ -1080,7 +1090,7 @@ mod test { #[test] fn test_opcode_invalid() { - let tests: Vec>> = vec![ + let tests: Vec>> = vec![ opcodes::OpUnknown166::empty().expect("Should accept empty"), opcodes::OpUnknown167::empty().expect("Should accept empty"), opcodes::OpUnknown178::empty().expect("Should accept empty"), @@ -1158,8 +1168,8 @@ mod test { ]; let cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); - let mut vm = TxScriptEngine::new(&mut reused_values, &cache); + let reused_values = SigHashReusedValuesUnsync::new(); + let mut vm = TxScriptEngine::new(&reused_values, &cache); for pop in tests { match pop.execute(&mut vm) { @@ -2739,7 +2749,7 @@ mod test { let (base_tx, input, utxo_entry) = make_mock_transaction(1); let sig_cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); let code = opcodes::OpCheckLockTimeVerify::empty().expect("Should accept empty"); @@ -2751,7 +2761,7 @@ mod test { ] { let mut tx = base_tx.clone(); tx.0.lock_time = tx_lock_time; - let mut vm = TxScriptEngine::from_transaction_input(&tx, &input, 0, &utxo_entry, &mut reused_values, &sig_cache) + let mut vm = TxScriptEngine::from_transaction_input(&tx, &input, 0, &utxo_entry, &reused_values, &sig_cache) .expect("Shouldn't fail"); vm.dstack = vec![lock_time.clone()]; match code.execute(&mut vm) { @@ -2781,7 +2791,7 @@ mod test { let (tx, base_input, utxo_entry) = make_mock_transaction(1); let sig_cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); let code = opcodes::OpCheckSequenceVerify::empty().expect("Should accept empty"); @@ -2794,7 +2804,7 @@ mod test { ] { let mut input = base_input.clone(); input.sequence = tx_sequence; - let mut vm = TxScriptEngine::from_transaction_input(&tx, &input, 0, &utxo_entry, &mut reused_values, &sig_cache) + let mut vm = TxScriptEngine::from_transaction_input(&tx, &input, 0, &utxo_entry, &reused_values, &sig_cache) .expect("Shouldn't fail"); vm.dstack = vec![sequence.clone()]; match code.execute(&mut vm) { diff --git a/crypto/txscript/src/standard/multisig.rs b/crypto/txscript/src/standard/multisig.rs index 79c74c7b37..cbd9dbe6da 100644 --- a/crypto/txscript/src/standard/multisig.rs +++ b/crypto/txscript/src/standard/multisig.rs @@ -74,7 +74,7 @@ mod tests { use core::str::FromStr; use kaspa_consensus_core::{ hashing::{ - sighash::{calc_ecdsa_signature_hash, calc_schnorr_signature_hash, SigHashReusedValues}, + sighash::{calc_ecdsa_signature_hash, calc_schnorr_signature_hash, SigHashReusedValuesUnsync}, sighash_type::SIG_HASH_ALL, }, subnets::SubnetworkId, @@ -154,11 +154,11 @@ mod tests { }]; let mut tx = MutableTransaction::with_entries(tx, entries); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); let sig_hash = if !is_ecdsa { - calc_schnorr_signature_hash(&tx.as_verifiable(), 0, SIG_HASH_ALL, &mut reused_values) + calc_schnorr_signature_hash(&tx.as_verifiable(), 0, SIG_HASH_ALL, &reused_values) } else { - calc_ecdsa_signature_hash(&tx.as_verifiable(), 0, SIG_HASH_ALL, &mut reused_values) + calc_ecdsa_signature_hash(&tx.as_verifiable(), 0, SIG_HASH_ALL, &reused_values) }; let 
msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap(); let signatures: Vec<_> = inputs @@ -184,7 +184,7 @@ mod tests { let (input, entry) = tx.populated_inputs().next().unwrap(); let cache = Cache::new(10_000); - let mut engine = TxScriptEngine::from_transaction_input(&tx, input, 0, entry, &mut reused_values, &cache).unwrap(); + let mut engine = TxScriptEngine::from_transaction_input(&tx, input, 0, entry, &reused_values, &cache).unwrap(); assert_eq!(engine.execute().is_ok(), is_ok); } #[test] diff --git a/mining/src/mempool/check_transaction_standard.rs b/mining/src/mempool/check_transaction_standard.rs index e759a9e50c..060677a1e5 100644 --- a/mining/src/mempool/check_transaction_standard.rs +++ b/mining/src/mempool/check_transaction_standard.rs @@ -2,6 +2,7 @@ use crate::mempool::{ errors::{NonStandardError, NonStandardResult}, Mempool, }; +use kaspa_consensus_core::hashing::sighash::SigHashReusedValuesUnsync; use kaspa_consensus_core::{ constants::{MAX_SCRIPT_PUBLIC_KEY_VERSION, MAX_SOMPI}, mass, @@ -114,7 +115,7 @@ impl Mempool { /// It is exposed by [MiningManager] for use by transaction generators and wallets. pub(crate) fn is_transaction_output_dust(&self, transaction_output: &TransactionOutput) -> bool { // Unspendable outputs are considered dust. - if is_unspendable::(transaction_output.script_public_key.script()) { + if is_unspendable::(transaction_output.script_public_key.script()) { return true; } @@ -175,7 +176,6 @@ impl Mempool { if contextual_mass > MAXIMUM_STANDARD_TRANSACTION_MASS { return Err(NonStandardError::RejectContextualMass(transaction_id, contextual_mass, MAXIMUM_STANDARD_TRANSACTION_MASS)); } - for (i, input) in transaction.tx.inputs.iter().enumerate() { // It is safe to elide existence and index checks here since // they have already been checked prior to calling this @@ -188,7 +188,10 @@ impl Mempool { ScriptClass::PubKey => {} ScriptClass::PubKeyECDSA => {} ScriptClass::ScriptHash => { - get_sig_op_count::(&input.signature_script, &entry.script_public_key); + get_sig_op_count::( + &input.signature_script, + &entry.script_public_key, + ); let num_sig_ops = 1; if num_sig_ops > MAX_STANDARD_P2SH_SIG_OPS { return Err(NonStandardError::RejectSignatureCount(transaction_id, i, num_sig_ops, MAX_STANDARD_P2SH_SIG_OPS)); diff --git a/wallet/core/src/account/pskb.rs b/wallet/core/src/account/pskb.rs index e71d7e4796..fad6bdb4ab 100644 --- a/wallet/core/src/account/pskb.rs +++ b/wallet/core/src/account/pskb.rs @@ -9,7 +9,7 @@ use crate::tx::PaymentOutputs; use futures::stream; use kaspa_bip32::{DerivationPath, KeyFingerprint, PrivateKey}; use kaspa_consensus_client::UtxoEntry as ClientUTXO; -use kaspa_consensus_core::hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValues}; +use kaspa_consensus_core::hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync}; use kaspa_consensus_core::tx::VerifiableTransaction; use kaspa_consensus_core::tx::{TransactionInput, UtxoEntry}; use kaspa_txscript::extract_script_pub_key_address; @@ -160,7 +160,7 @@ pub async fn pskb_signer_for_address( key_fingerprint: KeyFingerprint, ) -> Result { let mut signed_bundle = Bundle::new(); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); // If set, sign-for address is used for signing. // Else, all addresses from inputs are. 
@@ -186,7 +186,7 @@ pub async fn pskb_signer_for_address( for pskt_inner in bundle.iter().cloned() { let pskt: PSKT = PSKT::from(pskt_inner); - let mut sign = |signer_pskt: PSKT| { + let sign = |signer_pskt: PSKT| { signer_pskt .pass_signature_sync(|tx, sighash| -> Result, String> { tx.tx @@ -194,7 +194,7 @@ pub async fn pskb_signer_for_address( .iter() .enumerate() .map(|(idx, _input)| { - let hash = calc_schnorr_signature_hash(&tx.as_verifiable(), idx, sighash[idx], &mut reused_values); + let hash = calc_schnorr_signature_hash(&tx.as_verifiable(), idx, sighash[idx], &reused_values); let msg = secp256k1::Message::from_digest_slice(hash.as_bytes().as_slice()).unwrap(); // When address represents a locked UTXO, no private key is available. diff --git a/wallet/pskt/examples/multisig.rs b/wallet/pskt/examples/multisig.rs index fb011402fb..7a9ca190e5 100644 --- a/wallet/pskt/examples/multisig.rs +++ b/wallet/pskt/examples/multisig.rs @@ -1,5 +1,5 @@ use kaspa_consensus_core::{ - hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValues}, + hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync}, tx::{TransactionId, TransactionOutpoint, UtxoEntry}, }; use kaspa_txscript::{multisig_redeem_script, opcodes::codes::OpData65, pay_to_script_hash_script, script_builder::ScriptBuilder}; @@ -51,8 +51,8 @@ fn main() { println!("Serialized after setting sequence: {}", ser_updated); let signer_pskt: PSKT = serde_json::from_str(&ser_updated).expect("Failed to deserialize"); - let mut reused_values = SigHashReusedValues::new(); - let mut sign = |signer_pskt: PSKT, kp: &Keypair| { + let reused_values = SigHashReusedValuesUnsync::new(); + let sign = |signer_pskt: PSKT, kp: &Keypair| { signer_pskt .pass_signature_sync(|tx, sighash| -> Result, String> { let tx = dbg!(tx); @@ -61,7 +61,7 @@ fn main() { .iter() .enumerate() .map(|(idx, _input)| { - let hash = calc_schnorr_signature_hash(&tx.as_verifiable(), idx, sighash[idx], &mut reused_values); + let hash = calc_schnorr_signature_hash(&tx.as_verifiable(), idx, sighash[idx], &reused_values); let msg = secp256k1::Message::from_digest_slice(hash.as_bytes().as_slice()).unwrap(); Ok(SignInputOk { signature: Signature::Schnorr(kp.sign_schnorr(msg)), diff --git a/wallet/pskt/src/pskt.rs b/wallet/pskt/src/pskt.rs index 73f87a628f..93c16ccc85 100644 --- a/wallet/pskt/src/pskt.rs +++ b/wallet/pskt/src/pskt.rs @@ -3,6 +3,7 @@ //! 
use kaspa_bip32::{secp256k1, DerivationPath, KeyFingerprint}; +use kaspa_consensus_core::hashing::sighash::SigHashReusedValuesUnsync; use serde::{Deserialize, Serialize}; use serde_repr::{Deserialize_repr, Serialize_repr}; use std::{collections::BTreeMap, fmt::Display, fmt::Formatter, future::Future, marker::PhantomData, ops::Deref}; @@ -14,7 +15,7 @@ pub use crate::output::{Output, OutputBuilder}; pub use crate::role::{Combiner, Constructor, Creator, Extractor, Finalizer, Signer, Updater}; use kaspa_consensus_core::tx::UtxoEntry; use kaspa_consensus_core::{ - hashing::{sighash::SigHashReusedValues, sighash_type::SigHashType}, + hashing::sighash_type::SigHashType, subnets::SUBNETWORK_ID_NATIVE, tx::{MutableTransaction, SignableTransaction, Transaction, TransactionId, TransactionInput, TransactionOutput}, }; @@ -432,10 +433,10 @@ impl PSKT { { let tx = tx.as_verifiable(); let cache = Cache::new(10_000); - let mut reused_values = SigHashReusedValues::new(); + let reused_values = SigHashReusedValuesUnsync::new(); tx.populated_inputs().enumerate().try_for_each(|(idx, (input, entry))| { - TxScriptEngine::from_transaction_input(&tx, input, idx, entry, &mut reused_values, &cache)?.execute()?; + TxScriptEngine::from_transaction_input(&tx, input, idx, entry, &reused_values, &cache)?.execute()?; >::Ok(()) })?; }
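
Usage note (not part of the patch itself): a minimal sketch of how the standalone script-checking entry points added in transaction_validator_populated.rs might be driven by a caller, using only functions and types that appear in the diff above. The helper name verify_tx_scripts, the thread count of 4, and the cache size of 10_000 are illustrative assumptions; the call pattern mirrors the check_scripts bench.

use kaspa_consensus::processes::transaction_validator::transaction_validator_populated::{
    check_scripts, check_scripts_par_iter_pool,
};
use kaspa_consensus_core::tx::{MutableTransaction, Transaction, UtxoEntry};
use kaspa_txscript::caches::Cache;

fn verify_tx_scripts(tx: &Transaction, utxos: Vec<UtxoEntry>) {
    // Populate the transaction with its UTXO entries, as the bench does.
    let mtx = MutableTransaction::with_entries(tx, utxos);
    // Signature cache shared across inputs; its key/value types are inferred from the calls below.
    let sig_cache = Cache::new(10_000);

    // `check_scripts` uses the sequential path when the input count is at or below
    // CHECK_SCRIPTS_PARALLELISM_THRESHOLD and the rayon par_iter path otherwise.
    check_scripts(&sig_cache, &mtx.as_verifiable()).expect("script verification failed");

    // Alternatively, run the parallel path inside an explicit rayon thread pool, similar to how
    // `validate_mempool_transaction` now wraps validation in `self.thread_pool.install(..)`.
    // The thread count here is an arbitrary example value.
    let pool = rayon::ThreadPoolBuilder::new().num_threads(4).build().unwrap();
    check_scripts_par_iter_pool(&sig_cache, &mtx.as_verifiable(), &pool).expect("script verification failed");
}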