From 7d1db47188fe499f8942ce8e79694b6200513b28 Mon Sep 17 00:00:00 2001 From: Hugo C <911307+hugocaillard@users.noreply.github.com> Date: Tue, 22 Oct 2024 11:52:39 +0200 Subject: [PATCH] refactor: update codec (#1524) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: update codec * feat: add types to support `StacksSignerMessage` serialization (#1583) * chore: nakamoto block header * chore: nakamotoblock * chore: signer message * chore: remove wsts * feat: implement get_stacks_height_for_tenure_height --------- Co-authored-by: Rafael Cárdenas --- Cargo.lock | 385 +---- components/clarity-repl/src/repl/datastore.rs | 14 + components/stacks-codec/Cargo.toml | 1 - components/stacks-codec/src/codec.rs | 1523 +++++++++++++++-- 4 files changed, 1419 insertions(+), 504 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5c5536625..73ca8dfde 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -26,41 +26,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" -[[package]] -name = "aead" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" -dependencies = [ - "crypto-common", - "generic-array 0.14.6", -] - -[[package]] -name = "aes" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" -dependencies = [ - "cfg-if 1.0.0", - "cipher", - "cpufeatures", -] - -[[package]] -name = "aes-gcm" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" -dependencies = [ - "aead", - "aes", - "cipher", - "ctr", - "ghash", - "subtle 2.6.1", -] - [[package]] name = "ahash" version = "0.8.6" @@ -194,12 +159,6 @@ version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" -[[package]] -name = "arrayvec" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" - [[package]] name = "async-stream" version = "0.3.5" @@ -442,18 +401,6 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - [[package]] name = "block-buffer" version = "0.7.3" @@ -537,33 +484,12 @@ dependencies = [ "serde_with", ] -[[package]] -name = "bs58" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" - -[[package]] -name = "bs58" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5353f36341f7451062466f0b755b96ac3a9547e4d7f6b70d603fc721a7d7896" -dependencies = [ - "tinyvec", -] - [[package]] name = "bumpalo" version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" -[[package]] -name = "byte-slice-cast" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" - [[package]] name = "byte-tools" version = "0.3.1" @@ -693,16 +619,6 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", -] - [[package]] name = "clap" version = "4.5.9" @@ -755,7 +671,7 @@ checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" [[package]] name = "clar2wasm" version = "0.1.0" -source = "git+https://github.com/stacks-network/clarity-wasm.git?branch=main#1a1ab12dba3dfb243363fbf9ba955d7dab060936" +source = "git+https://github.com/stacks-network/clarity-wasm.git?branch=main#e3a69d9ed5511f22305645cce7b6926b0da14216" dependencies = [ "chrono", "clap", @@ -883,7 +799,7 @@ dependencies = [ [[package]] name = "clarity" version = "2.3.0" -source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#dfdf66ac5816d3cabc116f13d5c73914dc5fd374" +source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#38bd7d24ab7b8fe9b3575fc1c8afe6dc5803e8a7" dependencies = [ "getrandom 0.2.8", "hashbrown 0.14.3", @@ -1264,7 +1180,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array 0.14.6", - "rand_core 0.6.4", "typenum", ] @@ -1315,15 +1230,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "ctr" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" -dependencies = [ - "cipher", -] - [[package]] name = "ctrlc" version = "3.4.4" @@ -1738,18 +1644,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "fixed-hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" -dependencies = [ - "byteorder", - "rand 0.8.5", - "rustc-hex", - "static_assertions", -] - [[package]] name = "fnv" version = "1.0.7" @@ -1781,12 +1675,6 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - [[package]] name = "futures" version = "0.3.26" @@ -1970,16 +1858,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "ghash" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d930750de5717d2dd0b8c0d42c076c0e884c81a73e6cab859bbd2339c71e3e40" -dependencies = [ - "opaque-debug 0.3.0", - "polyval", -] - [[package]] name = "gimli" version = "0.26.2" @@ -2450,26 +2328,6 @@ dependencies = [ "unicode-normalization", ] -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - 
"parity-scale-codec", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.107", -] - [[package]] name = "indexmap" version = "1.9.2" @@ -2498,15 +2356,6 @@ version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" -[[package]] -name = "inout" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" -dependencies = [ - "generic-array 0.14.6", -] - [[package]] name = "integer-sqrt" version = "0.1.5" @@ -2794,7 +2643,7 @@ dependencies = [ [[package]] name = "libstackerdb" version = "0.0.1" -source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#8ad0d2a2ecc767c4d1adb8d163bfdd59b4ccc523" +source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#38bd7d24ab7b8fe9b3575fc1c8afe6dc5803e8a7" dependencies = [ "clarity", "secp256k1 0.24.3", @@ -3292,54 +3141,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" -[[package]] -name = "p256k1" -version = "7.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a40a031a559eb38c35a14096f21c366254501a06d41c4b327d2a7515d713a5b7" -dependencies = [ - "bitvec", - "bs58 0.4.0", - "cc", - "hex", - "itertools 0.10.5", - "num-traits", - "primitive-types", - "proc-macro2", - "quote", - "rand_core 0.6.4", - "rustfmt-wrapper", - "serde", - "sha2 0.10.8", - "syn 2.0.50", -] - -[[package]] -name = "parity-scale-codec" -version = "3.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dec8a8073036902368c2cdc0387e85ff9a37054d7e7c98e592145e0c92cd4fb" -dependencies = [ - "arrayvec", - "bitvec", - "byte-slice-cast", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.6.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 1.0.107", -] - [[package]] name = "parking_lot" version = "0.12.1" @@ -3481,28 +3282,6 @@ version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "626dec3cac7cc0e1577a2ec3fc496277ec2baa084bebad95bb6fdbfae235f84c" -[[package]] -name = "polynomial" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27abb6e4638dcecc65a92b50d7f1d87dd6dea987ba71db987b6bf881f4877e9d" -dependencies = [ - "num-traits", - "serde", -] - -[[package]] -name = "polyval" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52cff9d1d4dee5fe6d03729099f4a310a41179e0a10dbf542039873f2e826fb" -dependencies = [ - "cfg-if 1.0.0", - "cpufeatures", - "opaque-debug 0.3.0", - "universal-hash", -] - [[package]] name = "powerfmt" version = "0.2.0" @@ -3512,7 +3291,7 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "pox-locking" version = "2.4.0" -source = 
"git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#8ad0d2a2ecc767c4d1adb8d163bfdd59b4ccc523" +source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#38bd7d24ab7b8fe9b3575fc1c8afe6dc5803e8a7" dependencies = [ "clarity", "slog", @@ -3539,26 +3318,6 @@ dependencies = [ "unicode-width", ] -[[package]] -name = "primitive-types" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" -dependencies = [ - "fixed-hash", - "impl-codec", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" -dependencies = [ - "toml_edit 0.20.7", -] - [[package]] name = "proc-macro-error" version = "1.0.4" @@ -3692,12 +3451,6 @@ dependencies = [ "proc-macro2", ] -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - [[package]] name = "radix_trie" version = "0.2.1" @@ -4151,12 +3904,6 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - [[package]] name = "rustc_version" version = "0.4.0" @@ -4166,19 +3913,6 @@ dependencies = [ "semver 1.0.16", ] -[[package]] -name = "rustfmt-wrapper" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1adc9dfed5cc999077978cc7163b9282c5751c8d39827c4ea8c8c220ca5a440" -dependencies = [ - "serde", - "tempfile", - "thiserror", - "toml 0.8.8", - "toolchain_find", -] - [[package]] name = "rustix" version = "0.38.25" @@ -4267,15 +4001,6 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - [[package]] name = "schemars" version = "0.8.16" @@ -4822,13 +4547,12 @@ version = "2.10.0" dependencies = [ "clarity", "serde", - "wsts", ] [[package]] name = "stacks-common" version = "0.0.2" -source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#8ad0d2a2ecc767c4d1adb8d163bfdd59b4ccc523" +source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#38bd7d24ab7b8fe9b3575fc1c8afe6dc5803e8a7" dependencies = [ "chrono", "curve25519-dalek 2.0.0", @@ -4854,7 +4578,6 @@ dependencies = [ "slog-term", "time", "winapi 0.3.9", - "wsts", ] [[package]] @@ -4927,7 +4650,7 @@ dependencies = [ [[package]] name = "stackslib" version = "0.0.1" -source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#8ad0d2a2ecc767c4d1adb8d163bfdd59b4ccc523" +source = "git+https://github.com/stacks-network/stacks-core.git?branch=feat/clarity-wasm-develop#38bd7d24ab7b8fe9b3575fc1c8afe6dc5803e8a7" dependencies = [ "chrono", "clar2wasm", @@ -4963,7 
+4686,6 @@ dependencies = [ "time", "url", "winapi 0.3.9", - "wsts", ] [[package]] @@ -5077,12 +4799,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - [[package]] name = "target-lexicon" version = "0.12.12" @@ -5359,7 +5075,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.21.0", + "toml_edit", ] [[package]] @@ -5371,17 +5087,6 @@ dependencies = [ "serde", ] -[[package]] -name = "toml_edit" -version = "0.20.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" -dependencies = [ - "indexmap 2.2.3", - "toml_datetime", - "winnow", -] - [[package]] name = "toml_edit" version = "0.21.0" @@ -5395,19 +5100,6 @@ dependencies = [ "winnow", ] -[[package]] -name = "toolchain_find" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc8c9a7f0a2966e1acdaf0461023d0b01471eeead645370cf4c3f5cff153f2a" -dependencies = [ - "home", - "once_cell", - "regex", - "semver 1.0.16", - "walkdir", -] - [[package]] name = "tower" version = "0.4.13" @@ -5563,18 +5255,6 @@ dependencies = [ "serde", ] -[[package]] -name = "uint" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - [[package]] name = "uncased" version = "0.9.9" @@ -5635,16 +5315,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" -[[package]] -name = "universal-hash" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" -dependencies = [ - "crypto-common", - "subtle 2.6.1", -] - [[package]] name = "untrusted" version = "0.9.0" @@ -5693,16 +5363,6 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" -[[package]] -name = "walkdir" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" -dependencies = [ - "same-file", - "winapi-util", -] - [[package]] name = "walrus" version = "0.20.3" @@ -6482,37 +6142,6 @@ dependencies = [ "winapi-build", ] -[[package]] -name = "wsts" -version = "9.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c80d57a61294350ed91e91eb20a6c34da084ec8f15d039bab79ce3efabbd1a4" -dependencies = [ - "aes-gcm", - "bs58 0.5.0", - "hashbrown 0.14.3", - "hex", - "num-traits", - "p256k1", - "polynomial", - "primitive-types", - "rand_core 0.6.4", - "serde", - "sha2 0.10.8", - "thiserror", - "tracing", - "tracing-subscriber", -] - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - [[package]] name = "yaml-rust" version = "0.4.5" diff --git 
a/components/clarity-repl/src/repl/datastore.rs b/components/clarity-repl/src/repl/datastore.rs index 5ff6c44e3..dbb34af36 100644 --- a/components/clarity-repl/src/repl/datastore.rs +++ b/components/clarity-repl/src/repl/datastore.rs @@ -80,6 +80,7 @@ pub struct Datastore { stacks_chain_height: u32, stacks_blocks: HashMap, sortition_lookup: HashMap, + tenure_blocks_height: HashMap, consensus_hash_lookup: HashMap, current_epoch: StacksEpochId, current_epoch_start_height: u32, @@ -363,6 +364,7 @@ impl Datastore { let sortition_lookup = HashMap::from([(sortition_id, id)]); let consensus_hash_lookup = HashMap::from([(genesis_block.consensus_hash, sortition_id)]); + let tenure_blocks_height = HashMap::from([(0, 0)]); let burn_blocks = HashMap::from([(first_burn_block_header_hash, genesis_burn_block)]); let stacks_blocks = HashMap::from([(id, genesis_block)]); @@ -374,6 +376,7 @@ impl Datastore { stacks_blocks, sortition_lookup, consensus_hash_lookup, + tenure_blocks_height, current_epoch: StacksEpochId::Epoch2_05, current_epoch_start_height: 0, constants, @@ -471,6 +474,9 @@ impl Datastore { self.burn_blocks.insert(hash, burn_block_info); self.burn_chain_height = height; self.advance_stacks_chain_tip(clarity_datastore, 1); + + self.tenure_blocks_height + .insert(self.burn_chain_height, self.stacks_chain_height); } self.burn_chain_height @@ -590,6 +596,14 @@ impl HeadersDB for Datastore { .map(|b| b.burn_block_height) } + fn get_stacks_height_for_tenure_height( + &self, + _id_bhh: &StacksBlockId, + tenure_height: u32, + ) -> Option { + self.tenure_blocks_height.get(&tenure_height).copied() + } + fn get_miner_address( &self, id_bhh: &StacksBlockId, diff --git a/components/stacks-codec/Cargo.toml b/components/stacks-codec/Cargo.toml index f711d61c5..0514e358a 100644 --- a/components/stacks-codec/Cargo.toml +++ b/components/stacks-codec/Cargo.toml @@ -9,4 +9,3 @@ description = "Stack wire format implementation" clarity = { workspace = true, features = ["canonical", "developer-mode", "log"] } serde = { version = "1", features = ["derive"] } -wsts = { version = "9.0.0", default-features = false } diff --git a/components/stacks-codec/src/codec.rs b/components/stacks-codec/src/codec.rs index d2f48a3bb..d6f14c730 100644 --- a/components/stacks-codec/src/codec.rs +++ b/components/stacks-codec/src/codec.rs @@ -7,13 +7,13 @@ use clarity::address::{ C32_ADDRESS_VERSION_MAINNET_MULTISIG, C32_ADDRESS_VERSION_MAINNET_SINGLESIG, C32_ADDRESS_VERSION_TESTNET_MULTISIG, C32_ADDRESS_VERSION_TESTNET_SINGLESIG, }; -use clarity::codec::MAX_MESSAGE_LEN; use clarity::codec::{read_next, write_next, Error as CodecError}; +use clarity::codec::{read_next_exact, MAX_MESSAGE_LEN}; use clarity::types::chainstate::{ BlockHeaderHash, BurnchainHeaderHash, ConsensusHash, StacksBlockId, StacksWorkScore, TrieHash, }; use clarity::types::chainstate::{StacksAddress, StacksPublicKey}; -use clarity::types::PrivateKey; +use clarity::types::{PrivateKey, StacksEpochId}; use clarity::util::hash::{Hash160, Sha512Trunc256Sum}; use clarity::util::retry::BoundReader; use clarity::util::secp256k1::{ @@ -37,13 +37,55 @@ use std::io::{Read, Write}; use std::ops::Deref; use std::ops::DerefMut; use std::str::FromStr; -use wsts::common::Signature as Secp256k1Signature; -use wsts::curve::point::{Compressed as Secp256k1Compressed, Point as Secp256k1Point}; -use wsts::curve::scalar::Scalar as Secp256k1Scalar; pub const MAX_BLOCK_LEN: u32 = 2 * 1024 * 1024; pub const MAX_TRANSACTION_LEN: u32 = MAX_BLOCK_LEN; +/// Define a "u8" enum +/// gives you a 
try_from(u8) -> Option function +#[macro_export] +macro_rules! define_u8_enum { + ($(#[$outer:meta])* + $Name:ident { + $( + $(#[$inner:meta])* + $Variant:ident = $Val:literal),+ + }) => + { + #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, Serialize, Deserialize)] + #[repr(u8)] + $(#[$outer])* + pub enum $Name { + $( $(#[$inner])* + $Variant = $Val),*, + } + impl $Name { + /// All members of the enum + pub const ALL: &'static [$Name] = &[$($Name::$Variant),*]; + + /// Return the u8 representation of the variant + pub fn to_u8(&self) -> u8 { + match self { + $( + $Name::$Variant => $Val, + )* + } + } + + /// Returns Some and the variant if `v` is a u8 corresponding to a variant in this enum. + /// Returns None otherwise + pub fn from_u8(v: u8) -> Option { + match v { + $( + v if v == $Name::$Variant as u8 => Some($Name::$Variant), + )* + _ => None + } + } + } + } +} + #[macro_export] macro_rules! impl_byte_array_message_codec { ($thing:ident, $len:expr) => { @@ -270,6 +312,52 @@ pub enum TransactionAuthFlags { AuthSponsored = 0x05, } +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +/// This data structure represents a list of booleans +/// as a bitvector. +/// +/// The generic argument `MAX_SIZE` specifies the maximum number of +/// elements that the bit vector can hold. It is not the _actual_ size +/// of the bitvec: if there are only 8 entries, the bitvector will +/// just have a single byte, even if the MAX_SIZE is u16::MAX. This +/// type parameter ensures that constructors and deserialization routines +/// error if input data is too long. +pub struct BitVec { + data: Vec, + len: u16, +} + +impl StacksMessageCodec for BitVec { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &self.len)?; + write_next(fd, &self.data) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let len = read_next(fd)?; + if len == 0 { + return Err(CodecError::DeserializeError( + "BitVec lengths must be positive".to_string(), + )); + } + if len > MAX_SIZE { + return Err(CodecError::DeserializeError(format!( + "BitVec length exceeded maximum. Max size = {MAX_SIZE}, len = {len}" + ))); + } + + let data = read_next_exact(fd, Self::data_len(len).into())?; + Ok(BitVec { data, len }) + } +} + +impl BitVec { + /// Return the number of bytes needed to store `len` bits. + fn data_len(len: u16) -> u16 { + len / 8 + if len % 8 == 0 { 0 } else { 1 } + } +} + /// Transaction signatures are validated by calculating the public key from the signature, and /// verifying that all public keys hash to the signing account's hash. To do so, we must preserve /// enough information in the auth structure to recover each public key's bytes. 
@@ -370,6 +458,13 @@ pub enum MultisigHashMode { P2WSH = 0x03, } +#[repr(u8)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub enum OrderIndependentMultisigHashMode { + P2SH = 0x05, + P2WSH = 0x07, +} + impl SinglesigHashMode { pub fn to_address_hash_mode(&self) -> AddressHashMode { match *self { @@ -420,6 +515,35 @@ impl MultisigHashMode { } } +impl OrderIndependentMultisigHashMode { + pub fn to_address_hash_mode(&self) -> AddressHashMode { + match *self { + OrderIndependentMultisigHashMode::P2SH => AddressHashMode::SerializeP2SH, + OrderIndependentMultisigHashMode::P2WSH => AddressHashMode::SerializeP2WSH, + } + } + + pub fn from_address_hash_mode(hm: AddressHashMode) -> Option { + match hm { + AddressHashMode::SerializeP2SH => Some(OrderIndependentMultisigHashMode::P2SH), + AddressHashMode::SerializeP2WSH => Some(OrderIndependentMultisigHashMode::P2WSH), + _ => None, + } + } + + pub fn from_u8(n: u8) -> Option { + match n { + x if x == OrderIndependentMultisigHashMode::P2SH as u8 => { + Some(OrderIndependentMultisigHashMode::P2SH) + } + x if x == OrderIndependentMultisigHashMode::P2WSH as u8 => { + Some(OrderIndependentMultisigHashMode::P2WSH) + } + _ => None, + } + } +} + /// A structure that encodes enough state to authenticate /// a transaction's execution against a Stacks address. /// public_keys + signatures_required determines the Principal. @@ -638,14 +762,137 @@ impl SinglesigSpendingCondition { } } +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct OrderIndependentMultisigSpendingCondition { + pub hash_mode: OrderIndependentMultisigHashMode, + pub signer: Hash160, + pub nonce: u64, // nth authorization from this account + pub tx_fee: u64, // microSTX/compute rate offered by this account + pub fields: Vec, + pub signatures_required: u16, +} + +impl OrderIndependentMultisigSpendingCondition { + pub fn push_signature( + &mut self, + key_encoding: TransactionPublicKeyEncoding, + signature: MessageSignature, + ) { + self.fields + .push(TransactionAuthField::Signature(key_encoding, signature)); + } + + pub fn push_public_key(&mut self, public_key: StacksPublicKey) { + self.fields + .push(TransactionAuthField::PublicKey(public_key)); + } + + pub fn pop_auth_field(&mut self) -> Option { + self.fields.pop() + } + + pub fn address_mainnet(&self) -> StacksAddress { + StacksAddress { + version: C32_ADDRESS_VERSION_MAINNET_MULTISIG, + bytes: self.signer, + } + } + + pub fn address_testnet(&self) -> StacksAddress { + StacksAddress { + version: C32_ADDRESS_VERSION_TESTNET_MULTISIG, + bytes: self.signer, + } + } + + /// Authenticate a spending condition against an initial sighash. + /// In doing so, recover all public keys and verify that they hash to the signer + /// via the given hash mode. 
+ pub fn verify( + &self, + initial_sighash: &Txid, + cond_code: &TransactionAuthFlags, + ) -> Result { + let mut pubkeys = vec![]; + let mut num_sigs: u16 = 0; + let mut have_uncompressed = false; + for field in self.fields.iter() { + let pubkey = match field { + TransactionAuthField::PublicKey(ref pubkey) => { + if !pubkey.compressed() { + have_uncompressed = true; + } + *pubkey + } + TransactionAuthField::Signature(ref pubkey_encoding, ref sigbuf) => { + if *pubkey_encoding == TransactionPublicKeyEncoding::Uncompressed { + have_uncompressed = true; + } + + let (pubkey, _next_sighash) = TransactionSpendingCondition::next_verification( + initial_sighash, + cond_code, + self.tx_fee, + self.nonce, + pubkey_encoding, + sigbuf, + )?; + num_sigs = num_sigs + .checked_add(1) + .ok_or(CodecError::SigningError("Too many signatures".to_string()))?; + pubkey + } + }; + pubkeys.push(pubkey); + } + + if num_sigs < self.signatures_required { + return Err(CodecError::SigningError(format!( + "Not enough signatures. Got {num_sigs}, expected at least {req}", + req = self.signatures_required + ))); + } + + if have_uncompressed && self.hash_mode == OrderIndependentMultisigHashMode::P2WSH { + return Err(CodecError::SigningError( + "Uncompressed keys are not allowed in this hash mode".to_string(), + )); + } + + let addr_bytes = match StacksAddress::from_public_keys( + 0, + &self.hash_mode.to_address_hash_mode(), + self.signatures_required as usize, + &pubkeys, + ) { + Some(a) => a.bytes, + None => { + return Err(CodecError::SigningError( + "Failed to generate address from public keys".to_string(), + )); + } + }; + + if addr_bytes != self.signer { + return Err(CodecError::SigningError(format!( + "Signer hash does not equal hash of public key(s): {} != {}", + addr_bytes, self.signer + ))); + } + + Ok(*initial_sighash) + } +} + #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum TransactionSpendingCondition { Singlesig(SinglesigSpendingCondition), Multisig(MultisigSpendingCondition), + OrderIndependentMultisig(OrderIndependentMultisigSpendingCondition), } impl TransactionSpendingCondition { - pub fn new_singlesig_p2pkh(pubkey: Secp256k1PublicKey) -> Option { + pub fn new_singlesig_p2pkh(pubkey: StacksPublicKey) -> Option { let key_encoding = if pubkey.compressed() { TransactionPublicKeyEncoding::Compressed } else { @@ -666,9 +913,7 @@ impl TransactionSpendingCondition { )) } - pub fn new_singlesig_p2wpkh( - pubkey: Secp256k1PublicKey, - ) -> Option { + pub fn new_singlesig_p2wpkh(pubkey: StacksPublicKey) -> Option { let signer_addr = StacksAddress::from_public_keys( 0, &AddressHashMode::SerializeP2WPKH, @@ -690,12 +935,12 @@ impl TransactionSpendingCondition { pub fn new_multisig_p2sh( num_sigs: u16, - pubkeys: Vec, + pubkeys: Vec, ) -> Option { let signer_addr = StacksAddress::from_public_keys( 0, &AddressHashMode::SerializeP2SH, - num_sigs as usize, + usize::from(num_sigs), &pubkeys, )?; @@ -711,14 +956,60 @@ impl TransactionSpendingCondition { )) } + pub fn new_multisig_order_independent_p2sh( + num_sigs: u16, + pubkeys: Vec, + ) -> Option { + let signer_addr = StacksAddress::from_public_keys( + 0, + &AddressHashMode::SerializeP2SH, + usize::from(num_sigs), + &pubkeys, + )?; + + Some(TransactionSpendingCondition::OrderIndependentMultisig( + OrderIndependentMultisigSpendingCondition { + signer: signer_addr.bytes, + nonce: 0, + tx_fee: 0, + hash_mode: OrderIndependentMultisigHashMode::P2SH, + fields: vec![], + signatures_required: num_sigs, + }, + )) + } + + pub fn 
new_multisig_order_independent_p2wsh( + num_sigs: u16, + pubkeys: Vec, + ) -> Option { + let signer_addr = StacksAddress::from_public_keys( + 0, + &AddressHashMode::SerializeP2WSH, + usize::from(num_sigs), + &pubkeys, + )?; + + Some(TransactionSpendingCondition::OrderIndependentMultisig( + OrderIndependentMultisigSpendingCondition { + signer: signer_addr.bytes, + nonce: 0, + tx_fee: 0, + hash_mode: OrderIndependentMultisigHashMode::P2WSH, + fields: vec![], + signatures_required: num_sigs, + }, + )) + } + pub fn new_multisig_p2wsh( num_sigs: u16, - pubkeys: Vec, + pubkeys: Vec, ) -> Option { let signer_addr = StacksAddress::from_public_keys( 0, &AddressHashMode::SerializeP2WSH, - num_sigs as usize, + usize::from(num_sigs), &pubkeys, )?; @@ -768,6 +1059,17 @@ impl TransactionSpendingCondition { } num_sigs } + TransactionSpendingCondition::OrderIndependentMultisig(ref data) => { + let mut num_sigs: u16 = 0; + for field in data.fields.iter() { + if field.is_signature() { + num_sigs = num_sigs + .checked_add(1) + .expect("Unreasonable amount of signatures"); // something is seriously wrong if this fails + } + } + num_sigs + } } } @@ -777,6 +1079,9 @@ impl TransactionSpendingCondition { TransactionSpendingCondition::Multisig(ref multisig_data) => { multisig_data.signatures_required } + TransactionSpendingCondition::OrderIndependentMultisig(ref multisig_data) => { + multisig_data.signatures_required + } } } @@ -784,6 +1089,7 @@ impl TransactionSpendingCondition { match *self { TransactionSpendingCondition::Singlesig(ref data) => data.nonce, TransactionSpendingCondition::Multisig(ref data) => data.nonce, + TransactionSpendingCondition::OrderIndependentMultisig(ref data) => data.nonce, } } @@ -791,6 +1097,7 @@ impl TransactionSpendingCondition { match *self { TransactionSpendingCondition::Singlesig(ref data) => data.tx_fee, TransactionSpendingCondition::Multisig(ref data) => data.tx_fee, + TransactionSpendingCondition::OrderIndependentMultisig(ref data) => data.tx_fee, } } @@ -802,6 +1109,9 @@ impl TransactionSpendingCondition { TransactionSpendingCondition::Multisig(ref mut multisig_data) => { multisig_data.nonce = n; } + TransactionSpendingCondition::OrderIndependentMultisig(ref mut multisig_data) => { + multisig_data.nonce = n; + } } } @@ -813,6 +1123,9 @@ impl TransactionSpendingCondition { TransactionSpendingCondition::Multisig(ref mut multisig_data) => { multisig_data.tx_fee = tx_fee; } + TransactionSpendingCondition::OrderIndependentMultisig(ref mut multisig_data) => { + multisig_data.tx_fee = tx_fee; + } } } @@ -820,6 +1133,9 @@ impl TransactionSpendingCondition { match *self { TransactionSpendingCondition::Singlesig(ref singlesig_data) => singlesig_data.tx_fee, TransactionSpendingCondition::Multisig(ref multisig_data) => multisig_data.tx_fee, + TransactionSpendingCondition::OrderIndependentMultisig(ref multisig_data) => { + multisig_data.tx_fee + } } } @@ -828,6 +1144,9 @@ impl TransactionSpendingCondition { match *self { TransactionSpendingCondition::Singlesig(ref data) => data.address_mainnet(), TransactionSpendingCondition::Multisig(ref data) => data.address_mainnet(), + TransactionSpendingCondition::OrderIndependentMultisig(ref data) => { + data.address_mainnet() + } } } @@ -836,6 +1155,18 @@ impl TransactionSpendingCondition { match *self { TransactionSpendingCondition::Singlesig(ref data) => data.address_testnet(), TransactionSpendingCondition::Multisig(ref data) => data.address_testnet(), + TransactionSpendingCondition::OrderIndependentMultisig(ref data) => { + 
data.address_testnet() + } + } + } + + /// Get the address for an account, given the network flag + pub fn get_address(&self, mainnet: bool) -> StacksAddress { + if mainnet { + self.address_mainnet() + } else { + self.address_testnet() } } @@ -852,6 +1183,11 @@ impl TransactionSpendingCondition { multisig_data.nonce = 0; multisig_data.fields.clear(); } + TransactionSpendingCondition::OrderIndependentMultisig(ref mut multisig_data) => { + multisig_data.tx_fee = 0; + multisig_data.nonce = 0; + multisig_data.fields.clear(); + } } } @@ -882,7 +1218,7 @@ impl TransactionSpendingCondition { pub fn make_sighash_postsign( cur_sighash: &Txid, - pubkey: &Secp256k1PublicKey, + pubkey: &StacksPublicKey, sig: &MessageSignature, ) -> Txid { // new hash combines the previous hash and all the new data this signature will add. This @@ -949,7 +1285,7 @@ impl TransactionSpendingCondition { nonce: u64, key_encoding: &TransactionPublicKeyEncoding, sig: &MessageSignature, - ) -> Result<(Secp256k1PublicKey, Txid), CodecError> { + ) -> Result<(StacksPublicKey, Txid), CodecError> { let sighash_presign = TransactionSpendingCondition::make_sighash_presign( cur_sighash, cond_code, @@ -958,7 +1294,7 @@ impl TransactionSpendingCondition { ); // verify the current signature - let mut pubk = Secp256k1PublicKey::recover_to_pubkey(sighash_presign.as_bytes(), sig) + let mut pubk = StacksPublicKey::recover_to_pubkey(sighash_presign.as_bytes(), sig) .map_err(|ve| CodecError::SigningError(ve.to_string()))?; match key_encoding { @@ -985,6 +1321,9 @@ impl TransactionSpendingCondition { TransactionSpendingCondition::Multisig(ref data) => { data.verify(initial_sighash, cond_code) } + TransactionSpendingCondition::OrderIndependentMultisig(ref data) => { + data.verify(initial_sighash, cond_code) + } } } } @@ -998,29 +1337,49 @@ pub enum TransactionAuth { impl TransactionAuth { pub fn from_p2pkh(privk: &Secp256k1PrivateKey) -> Option { - TransactionSpendingCondition::new_singlesig_p2pkh(Secp256k1PublicKey::from_private(privk)) + TransactionSpendingCondition::new_singlesig_p2pkh(StacksPublicKey::from_private(privk)) .map(TransactionAuth::Standard) } pub fn from_p2sh(privks: &[Secp256k1PrivateKey], num_sigs: u16) -> Option { let mut pubks = vec![]; for privk in privks.iter() { - pubks.push(Secp256k1PublicKey::from_private(privk)); + pubks.push(StacksPublicKey::from_private(privk)); } TransactionSpendingCondition::new_multisig_p2sh(num_sigs, pubks) .map(TransactionAuth::Standard) } + pub fn from_order_independent_p2sh( + privks: &[Secp256k1PrivateKey], + num_sigs: u16, + ) -> Option { + let pubks = privks.iter().map(StacksPublicKey::from_private).collect(); + + TransactionSpendingCondition::new_multisig_order_independent_p2sh(num_sigs, pubks) + .map(TransactionAuth::Standard) + } + + pub fn from_order_independent_p2wsh( + privks: &[Secp256k1PrivateKey], + num_sigs: u16, + ) -> Option { + let pubks = privks.iter().map(StacksPublicKey::from_private).collect(); + + TransactionSpendingCondition::new_multisig_order_independent_p2wsh(num_sigs, pubks) + .map(TransactionAuth::Standard) + } + pub fn from_p2wpkh(privk: &Secp256k1PrivateKey) -> Option { - TransactionSpendingCondition::new_singlesig_p2wpkh(Secp256k1PublicKey::from_private(privk)) + TransactionSpendingCondition::new_singlesig_p2wpkh(StacksPublicKey::from_private(privk)) .map(TransactionAuth::Standard) } pub fn from_p2wsh(privks: &[Secp256k1PrivateKey], num_sigs: u16) -> Option { let mut pubks = vec![]; for privk in privks.iter() { - 
pubks.push(Secp256k1PublicKey::from_private(privk)); + pubks.push(StacksPublicKey::from_private(privk)); } TransactionSpendingCondition::new_multisig_p2wsh(num_sigs, pubks) @@ -1169,6 +1528,34 @@ impl TransactionAuth { } } } + + /// Checks if this TransactionAuth is supported in the passed epoch + /// OrderIndependent multisig is not supported before epoch 3.0 + pub fn is_supported_in_epoch(&self, epoch_id: StacksEpochId) -> bool { + match &self { + TransactionAuth::Sponsored(ref origin, ref sponsor) => { + let origin_supported = match origin { + TransactionSpendingCondition::OrderIndependentMultisig(..) => { + epoch_id >= StacksEpochId::Epoch30 + } + _ => true, + }; + let sponsor_supported = match sponsor { + TransactionSpendingCondition::OrderIndependentMultisig(..) => { + epoch_id >= StacksEpochId::Epoch30 + } + _ => true, + }; + origin_supported && sponsor_supported + } + TransactionAuth::Standard(ref origin) => match origin { + TransactionSpendingCondition::OrderIndependentMultisig(..) => { + epoch_id >= StacksEpochId::Epoch30 + } + _ => true, + }, + } + } } /// A transaction that calls into a smart contract @@ -1312,45 +1699,16 @@ pub struct TransactionSmartContract { pub code_body: StacksString, } -/// Schnorr threshold signature using types from `wsts` -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct ThresholdSignature(pub wsts::common::Signature); - -impl StacksMessageCodec for ThresholdSignature { - fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { - let compressed = self.0.R.compress(); - let bytes = compressed.as_bytes(); - fd.write_all(bytes).map_err(CodecError::WriteError)?; - write_next(fd, &self.0.z.to_bytes())?; - Ok(()) - } - - fn consensus_deserialize(fd: &mut R) -> Result { - // Read curve point - let mut buf = [0u8; 33]; - fd.read_exact(&mut buf).map_err(CodecError::ReadError)?; - let r = Secp256k1Point::try_from(&Secp256k1Compressed::from(buf)) - .map_err(|_| CodecError::DeserializeError("Failed to read curve point".into()))?; - - // Read scalar - let mut buf = [0u8; 32]; - fd.read_exact(&mut buf).map_err(CodecError::ReadError)?; - let z = Secp256k1Scalar::from(buf); - - Ok(Self(Secp256k1Signature { R: r, z })) - } -} - -/// Cause of change in mining tenure -/// Depending on cause, tenure can be ended or extended -#[repr(u8)] -#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] -pub enum TenureChangeCause { - /// A valid winning block-commit - BlockFound = 0, - /// The next burnchain block is taking too long, so extend the runtime budget - Extended = 1, -} +/// Cause of change in mining tenure +/// Depending on cause, tenure can be ended or extended +#[repr(u8)] +#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] +pub enum TenureChangeCause { + /// A valid winning block-commit + BlockFound = 0, + /// The next burnchain block is taking too long, so extend the runtime budget + Extended = 1, +} impl TryFrom for TenureChangeCause { type Error = (); @@ -1402,6 +1760,25 @@ pub struct TenureChangePayload { pub pubkey_hash: Hash160, } +impl TenureChangePayload { + pub fn extend( + &self, + burn_view_consensus_hash: ConsensusHash, + last_tenure_block_id: StacksBlockId, + num_blocks_so_far: u32, + ) -> Self { + TenureChangePayload { + tenure_consensus_hash: self.tenure_consensus_hash, + prev_tenure_consensus_hash: self.tenure_consensus_hash, + burn_view_consensus_hash, + previous_tenure_end: last_tenure_block_id, + previous_tenure_blocks: num_blocks_so_far, + cause: TenureChangeCause::Extended, + pubkey_hash: 
self.pubkey_hash, + } + } +} + impl StacksMessageCodec for TenureChangePayload { fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { write_next(fd, &self.tenure_consensus_hash)?; @@ -1457,17 +1834,30 @@ impl TransactionPayload { match self { TransactionPayload::TokenTransfer(..) => "TokenTransfer", TransactionPayload::ContractCall(..) => "ContractCall", - TransactionPayload::SmartContract(..) => "SmartContract", + TransactionPayload::SmartContract(_, version_opt) => { + if version_opt.is_some() { + "SmartContract(Versioned)" + } else { + "SmartContract" + } + } TransactionPayload::PoisonMicroblock(..) => "PoisonMicroblock", - TransactionPayload::Coinbase(..) => "Coinbase", - TransactionPayload::TenureChange(..) => "TenureChange", + TransactionPayload::Coinbase(_, _, vrf_opt) => { + if vrf_opt.is_some() { + "Coinbase(Nakamoto)" + } else { + "Coinbase" + } + } + TransactionPayload::TenureChange(payload) => match payload.cause { + TenureChangeCause::BlockFound => "TenureChange(BlockFound)", + TenureChangeCause::Extended => "TenureChange(Extension)", + }, } } } -#[repr(u8)] -#[derive(Debug, Clone, PartialEq, Copy, Serialize, Deserialize)] -pub enum TransactionPayloadID { +define_u8_enum!(TransactionPayloadID { TokenTransfer = 0, SmartContract = 1, ContractCall = 2, @@ -1478,8 +1868,8 @@ pub enum TransactionPayloadID { VersionedSmartContract = 6, TenureChange = 7, // has a VRF proof, and may have an alt principal - NakamotoCoinbase = 8, -} + NakamotoCoinbase = 8 +}); /// Encoding of an asset type identifier #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] @@ -1665,7 +2055,11 @@ impl StacksTransaction { auth: TransactionAuth, payload: TransactionPayload, ) -> StacksTransaction { - let anchor_mode = TransactionAnchorMode::Any; + let anchor_mode = match payload { + TransactionPayload::Coinbase(..) 
=> TransactionAnchorMode::OnChainOnly, + TransactionPayload::PoisonMicroblock(_, _) => TransactionAnchorMode::OnChainOnly, + _ => TransactionAnchorMode::Any, + }; StacksTransaction { version, @@ -1776,6 +2170,10 @@ impl StacksTransaction { privk, )?; match condition { + TransactionSpendingCondition::Singlesig(ref mut cond) => { + cond.set_signature(next_sig); + Ok(next_sighash) + } TransactionSpendingCondition::Multisig(ref mut cond) => { cond.push_signature( if privk.compress_public() { @@ -1787,9 +2185,16 @@ impl StacksTransaction { ); Ok(next_sighash) } - TransactionSpendingCondition::Singlesig(ref mut cond) => { - cond.set_signature(next_sig); - Ok(next_sighash) + TransactionSpendingCondition::OrderIndependentMultisig(ref mut cond) => { + cond.push_signature( + if privk.compress_public() { + TransactionPublicKeyEncoding::Compressed + } else { + TransactionPublicKeyEncoding::Uncompressed + }, + next_sig, + ); + Ok(*cur_sighash) } } } @@ -1800,6 +2205,9 @@ impl StacksTransaction { ) -> Option { match condition { TransactionSpendingCondition::Multisig(ref mut cond) => cond.pop_auth_field(), + TransactionSpendingCondition::OrderIndependentMultisig(ref mut cond) => { + cond.pop_auth_field() + } TransactionSpendingCondition::Singlesig(ref mut cond) => cond.pop_signature(), } } @@ -1807,12 +2215,15 @@ impl StacksTransaction { /// Append a public key to a multisig condition fn append_pubkey( condition: &mut TransactionSpendingCondition, - pubkey: &Secp256k1PublicKey, + pubkey: &StacksPublicKey, ) -> Result<(), CodecError> { match condition { TransactionSpendingCondition::Multisig(ref mut cond) => { - #[allow(clippy::clone_on_copy)] - cond.push_public_key(pubkey.clone()); + cond.push_public_key(*pubkey); + Ok(()) + } + TransactionSpendingCondition::OrderIndependentMultisig(ref mut cond) => { + cond.push_public_key(*pubkey); Ok(()) } _ => Err(CodecError::SigningError( @@ -1829,15 +2240,8 @@ impl StacksTransaction { privk: &Secp256k1PrivateKey, ) -> Result { let next_sighash = match self.auth { - TransactionAuth::Standard(ref mut origin_condition) => { - StacksTransaction::sign_and_append( - origin_condition, - cur_sighash, - &TransactionAuthFlags::AuthStandard, - privk, - )? - } - TransactionAuth::Sponsored(ref mut origin_condition, _) => { + TransactionAuth::Standard(ref mut origin_condition) + | TransactionAuth::Sponsored(ref mut origin_condition, _) => { StacksTransaction::sign_and_append( origin_condition, cur_sighash, @@ -1850,7 +2254,7 @@ impl StacksTransaction { } /// Append the next public key to the origin account authorization. - pub fn append_next_origin(&mut self, pubk: &Secp256k1PublicKey) -> Result<(), CodecError> { + pub fn append_next_origin(&mut self, pubk: &StacksPublicKey) -> Result<(), CodecError> { match self.auth { TransactionAuth::Standard(ref mut origin_condition) => { StacksTransaction::append_pubkey(origin_condition, pubk) @@ -1888,7 +2292,7 @@ impl StacksTransaction { } /// Append the next public key to the sponsor account authorization. 
- pub fn append_next_sponsor(&mut self, pubk: &Secp256k1PublicKey) -> Result<(), CodecError> { + pub fn append_next_sponsor(&mut self, pubk: &StacksPublicKey) -> Result<(), CodecError> { match self.auth { TransactionAuth::Standard(_) => Err(CodecError::SigningError( "Cannot appned a public key to the sponsor of a standard auth condition" @@ -1969,7 +2373,7 @@ impl StacksTransaction { let mut tx_bytes = vec![]; self.consensus_serialize(&mut tx_bytes) .expect("BUG: Failed to serialize a transaction object"); - tx_bytes.len() as u64 + u64::try_from(tx_bytes.len()).expect("tx len exceeds 2^64 bytes") } pub fn consensus_deserialize_with_len( @@ -2012,6 +2416,25 @@ impl StacksTransaction { // if the payload is a proof of a poisoned microblock stream, or is a coinbase, then this _must_ be anchored. // Otherwise, if the offending leader is the next leader, they can just orphan their proof // of malfeasance. + match payload { + TransactionPayload::PoisonMicroblock(_, _) => { + if anchor_mode != TransactionAnchorMode::OnChainOnly { + return Err(CodecError::DeserializeError( + "Failed to parse transaction: invalid anchor mode for PoisonMicroblock" + .to_string(), + )); + } + } + TransactionPayload::Coinbase(..) => { + if anchor_mode != TransactionAnchorMode::OnChainOnly { + return Err(CodecError::DeserializeError( + "Failed to parse transaction: invalid anchor mode for Coinbase".to_string(), + )); + } + } + _ => {} + } + let post_condition_mode = match post_condition_mode_u8 { x if x == TransactionPostConditionMode::Allow as u8 => { TransactionPostConditionMode::Allow @@ -2026,19 +2449,37 @@ impl StacksTransaction { ))); } }; + let tx = StacksTransaction { + version, + chain_id, + auth, + anchor_mode, + post_condition_mode, + post_conditions, + payload, + }; - Ok(( - StacksTransaction { - version, - chain_id, - auth, - anchor_mode, - post_condition_mode, - post_conditions, - payload, - }, - fd.num_read(), - )) + Ok((tx, fd.num_read())) + } + + /// Try to convert to a coinbase payload + pub fn try_as_coinbase( + &self, + ) -> Option<(&CoinbasePayload, Option<&PrincipalData>, Option<&VRFProof>)> { + match &self.payload { + TransactionPayload::Coinbase(payload, recipient_opt, vrf_proof_opt) => { + Some((payload, recipient_opt.as_ref(), vrf_proof_opt.as_ref())) + } + _ => None, + } + } + + /// Try to convert to a tenure change payload + pub fn try_as_tenure_change(&self) -> Option<&TenureChangePayload> { + match &self.payload { + TransactionPayload::TenureChange(tc_payload) => Some(tc_payload), + _ => None, + } } } @@ -2251,7 +2692,7 @@ pub struct StacksMicroblock { pub struct StacksBlockHeader { pub version: u8, pub total_work: StacksWorkScore, // NOTE: this is the work done on the chain tip this block builds on (i.e. take this from the parent) - pub proof: String, + pub proof: VRFProof, pub parent_block: BlockHeaderHash, // NOTE: even though this is also present in the burn chain, we need this here for super-light clients that don't even have burn chain headers pub parent_microblock: BlockHeaderHash, pub parent_microblock_sequence: u16, @@ -2314,21 +2755,203 @@ impl StacksMicroblockHeader { } } +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct NakamotoBlockHeader { + pub version: u8, + /// The total number of StacksBlock and NakamotoBlocks preceding + /// this block in this block's history. + pub chain_length: u64, + /// Total amount of BTC spent producing the sortition that + /// selected this block's miner. 
+ pub burn_spent: u64, + /// The consensus hash of the burnchain block that selected this tenure. The consensus hash + /// uniquely identifies this tenure, including across all Bitcoin forks. + pub consensus_hash: ConsensusHash, + /// The index block hash of the immediate parent of this block. + /// This is the hash of the parent block's hash and consensus hash. + pub parent_block_id: StacksBlockId, + /// The root of a SHA512/256 merkle tree over all this block's + /// contained transactions + pub tx_merkle_root: Sha512Trunc256Sum, + /// The MARF trie root hash after this block has been processed + pub state_index_root: TrieHash, + /// A Unix time timestamp of when this block was mined, according to the miner. + /// For the signers to consider a block valid, this timestamp must be: + /// * Greater than the timestamp of its parent block + /// * At most 15 seconds into the future + pub timestamp: u64, + /// Recoverable ECDSA signature from the tenure's miner. + pub miner_signature: MessageSignature, + /// The set of recoverable ECDSA signatures over + /// the block header from the signer set active during the tenure. + /// (ordered by reward set order) + pub signer_signature: Vec, + /// A bitvec which conveys whether reward addresses should be punished (by burning their PoX rewards) + /// or not in this block. + /// + /// The maximum number of entries in the bitvec is 4000. + pub pox_treatment: BitVec<4000>, +} + +impl StacksMessageCodec for NakamotoBlockHeader { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &self.version)?; + write_next(fd, &self.chain_length)?; + write_next(fd, &self.burn_spent)?; + write_next(fd, &self.consensus_hash)?; + write_next(fd, &self.parent_block_id)?; + write_next(fd, &self.tx_merkle_root)?; + write_next(fd, &self.state_index_root)?; + write_next(fd, &self.timestamp)?; + write_next(fd, &self.miner_signature)?; + write_next(fd, &self.signer_signature)?; + write_next(fd, &self.pox_treatment)?; + + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + Ok(NakamotoBlockHeader { + version: read_next(fd)?, + chain_length: read_next(fd)?, + burn_spent: read_next(fd)?, + consensus_hash: read_next(fd)?, + parent_block_id: read_next(fd)?, + tx_merkle_root: read_next(fd)?, + state_index_root: read_next(fd)?, + timestamp: read_next(fd)?, + miner_signature: read_next(fd)?, + signer_signature: read_next(fd)?, + pox_treatment: read_next(fd)?, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct NakamotoBlock { + pub header: NakamotoBlockHeader, + pub txs: Vec, +} + +impl StacksMessageCodec for NakamotoBlock { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &self.header)?; + write_next(fd, &self.txs) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let (header, txs) = { + let mut bound_read = BoundReader::from_reader(fd, u64::from(MAX_MESSAGE_LEN)); + let header: NakamotoBlockHeader = read_next(&mut bound_read)?; + let txs: Vec<_> = read_next(&mut bound_read)?; + (header, txs) + }; + + // // all transactions are unique + // if !StacksBlock::validate_transactions_unique(&txs) { + // warn!("Invalid block: Found duplicate transaction"; + // "consensus_hash" => %header.consensus_hash, + // "stacks_block_hash" => %header.block_hash(), + // "stacks_block_id" => %header.block_id() + // ); + // return Err(CodecError::DeserializeError( + // "Invalid block: found duplicate transaction".to_string(), + // )); + // } + + // // header and transactions must 
be consistent + // let txid_vecs = txs.iter().map(|tx| tx.txid().as_bytes().to_vec()).collect(); + + // let merkle_tree = MerkleTree::new(&txid_vecs); + // let tx_merkle_root: Sha512Trunc256Sum = merkle_tree.root(); + + // if tx_merkle_root != header.tx_merkle_root { + // warn!("Invalid block: Tx Merkle root mismatch"; + // "consensus_hash" => %header.consensus_hash, + // "stacks_block_hash" => %header.block_hash(), + // "stacks_block_id" => %header.block_id() + // ); + // return Err(CodecError::DeserializeError( + // "Invalid block: tx Merkle root mismatch".to_string(), + // )); + // } + + Ok(NakamotoBlock { header, txs }) + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +/// A vote across the signer set for a block +pub struct NakamotoBlockVote { + pub signer_signature_hash: Sha512Trunc256Sum, + pub rejected: bool, +} + +impl StacksMessageCodec for NakamotoBlockVote { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &self.signer_signature_hash)?; + if self.rejected { + write_next(fd, &1u8)?; + } + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let signer_signature_hash = read_next(fd)?; + let rejected_byte: Option = read_next(fd).ok(); + let rejected = rejected_byte.is_some(); + Ok(Self { + signer_signature_hash, + rejected, + }) + } +} + // values a miner uses to produce the next block pub const MINER_BLOCK_CONSENSUS_HASH: ConsensusHash = ConsensusHash([1u8; 20]); pub const MINER_BLOCK_HEADER_HASH: BlockHeaderHash = BlockHeaderHash([1u8; 32]); +#[derive(Debug, Clone, PartialEq)] +pub enum StacksBlockHeaderTypes { + Epoch2(StacksBlockHeader), + Nakamoto(NakamotoBlockHeader), +} + +impl From for StacksBlockHeaderTypes { + fn from(value: StacksBlockHeader) -> Self { + Self::Epoch2(value) + } +} + +impl From for StacksBlockHeaderTypes { + fn from(value: NakamotoBlockHeader) -> Self { + Self::Nakamoto(value) + } +} + #[derive(Debug, Clone, PartialEq)] pub struct StacksHeaderInfo { - pub anchored_header: StacksBlockHeader, + /// Stacks block header + pub anchored_header: StacksBlockHeaderTypes, + /// Last microblock header (Stacks 2.x only; this is None in Stacks 3.x) pub microblock_tail: Option, + /// Height of this Stacks block pub stacks_block_height: u64, + /// MARF root hash of the headers DB (not consensus critical) pub index_root: TrieHash, + /// consensus hash of the burnchain block in which this miner was selected to produce this block pub consensus_hash: ConsensusHash, + /// Hash of the burnchain block in which this miner was selected to produce this block pub burn_header_hash: BurnchainHeaderHash, + /// Height of the burnchain block pub burn_header_height: u32, + /// Timestamp of the burnchain block pub burn_header_timestamp: u64, + /// Size of the block corresponding to `anchored_header` in bytes pub anchored_block_size: u64, + /// The burnchain tip that is passed to Clarity while processing this block. + /// This should always be `Some()` for Nakamoto blocks and `None` for 2.x blocks + pub burn_view: Option, } /// A record of a coin reward for a miner. 
There will be at most two of these for a miner: one for @@ -2507,18 +3130,18 @@ fn clarity_version_consensus_deserialize( impl StacksMessageCodec for TransactionPayload { fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { - match *self { - TransactionPayload::TokenTransfer(ref address, ref amount, ref memo) => { + match self { + TransactionPayload::TokenTransfer(address, amount, memo) => { write_next(fd, &(TransactionPayloadID::TokenTransfer as u8))?; write_next(fd, address)?; write_next(fd, amount)?; write_next(fd, memo)?; } - TransactionPayload::ContractCall(ref cc) => { + TransactionPayload::ContractCall(cc) => { write_next(fd, &(TransactionPayloadID::ContractCall as u8))?; cc.consensus_serialize(fd)?; } - TransactionPayload::SmartContract(ref sc, ref version_opt) => { + TransactionPayload::SmartContract(sc, version_opt) => { if let Some(version) = version_opt { // caller requests a specific Clarity version write_next(fd, &(TransactionPayloadID::VersionedSmartContract as u8))?; @@ -2530,45 +3153,105 @@ impl StacksMessageCodec for TransactionPayload { sc.consensus_serialize(fd)?; } } - _ => { - unreachable!() + TransactionPayload::PoisonMicroblock(h1, h2) => { + write_next(fd, &(TransactionPayloadID::PoisonMicroblock as u8))?; + h1.consensus_serialize(fd)?; + h2.consensus_serialize(fd)?; + } + TransactionPayload::Coinbase(buf, recipient_opt, vrf_opt) => { + match (recipient_opt, vrf_opt) { + (None, None) => { + // stacks 2.05 and earlier only use this path + write_next(fd, &(TransactionPayloadID::Coinbase as u8))?; + write_next(fd, buf)?; + } + (Some(recipient), None) => { + write_next(fd, &(TransactionPayloadID::CoinbaseToAltRecipient as u8))?; + write_next(fd, buf)?; + write_next(fd, &Value::Principal(recipient.clone()))?; + } + (None, Some(vrf_proof)) => { + // nakamoto coinbase + // encode principal as (optional principal) + write_next(fd, &(TransactionPayloadID::NakamotoCoinbase as u8))?; + write_next(fd, buf)?; + write_next(fd, &Value::none())?; + write_next(fd, vrf_proof)?; + } + (Some(recipient), Some(vrf_proof)) => { + write_next(fd, &(TransactionPayloadID::NakamotoCoinbase as u8))?; + write_next(fd, buf)?; + write_next( + fd, + &Value::some(Value::Principal(recipient.clone())).expect( + "FATAL: failed to encode recipient principal as `optional`", + ), + )?; + write_next(fd, vrf_proof)?; + } + } + } + TransactionPayload::TenureChange(tc) => { + write_next(fd, &(TransactionPayloadID::TenureChange as u8))?; + tc.consensus_serialize(fd)?; } } Ok(()) } fn consensus_deserialize(fd: &mut R) -> Result { - let type_id: u8 = read_next(fd)?; + let type_id_u8 = read_next(fd)?; + let type_id = TransactionPayloadID::from_u8(type_id_u8).ok_or_else(|| { + CodecError::DeserializeError(format!( + "Failed to parse transaction -- unknown payload ID {type_id_u8}" + )) + })?; let payload = match type_id { - x if x == TransactionPayloadID::TokenTransfer as u8 => { + TransactionPayloadID::TokenTransfer => { let principal = read_next(fd)?; let amount = read_next(fd)?; let memo = read_next(fd)?; TransactionPayload::TokenTransfer(principal, amount, memo) } - x if x == TransactionPayloadID::ContractCall as u8 => { + TransactionPayloadID::ContractCall => { let payload: TransactionContractCall = read_next(fd)?; TransactionPayload::ContractCall(payload) } - x if x == TransactionPayloadID::SmartContract as u8 => { + TransactionPayloadID::SmartContract => { let payload: TransactionSmartContract = read_next(fd)?; TransactionPayload::SmartContract(payload, None) } - x if x == 
TransactionPayloadID::VersionedSmartContract as u8 => { + TransactionPayloadID::VersionedSmartContract => { let version = clarity_version_consensus_deserialize(fd)?; let payload: TransactionSmartContract = read_next(fd)?; TransactionPayload::SmartContract(payload, Some(version)) } - x if x == TransactionPayloadID::PoisonMicroblock as u8 => { - let micrblock1: StacksMicroblockHeader = read_next(fd)?; - let micrblock2: StacksMicroblockHeader = read_next(fd)?; - TransactionPayload::PoisonMicroblock(micrblock1, micrblock2) + TransactionPayloadID::PoisonMicroblock => { + let h1: StacksMicroblockHeader = read_next(fd)?; + let h2: StacksMicroblockHeader = read_next(fd)?; + + // must differ in some field + if h1 == h2 { + return Err(CodecError::DeserializeError( + "Failed to parse transaction -- microblock headers match".to_string(), + )); + } + + // must have the same sequence number or same block parent + if h1.sequence != h2.sequence && h1.prev_block != h2.prev_block { + return Err(CodecError::DeserializeError( + "Failed to parse transaction -- microblock headers do not identify a fork" + .to_string(), + )); + } + + TransactionPayload::PoisonMicroblock(h1, h2) } - x if x == TransactionPayloadID::Coinbase as u8 => { + TransactionPayloadID::Coinbase => { let payload: CoinbasePayload = read_next(fd)?; TransactionPayload::Coinbase(payload, None, None) } - x if x == TransactionPayloadID::CoinbaseToAltRecipient as u8 => { + TransactionPayloadID::CoinbaseToAltRecipient => { let payload: CoinbasePayload = read_next(fd)?; let principal_value: Value = read_next(fd)?; let recipient = match principal_value { @@ -2580,7 +3263,8 @@ impl StacksMessageCodec for TransactionPayload { TransactionPayload::Coinbase(payload, Some(recipient), None) } - x if x == TransactionPayloadID::NakamotoCoinbase as u8 => { + // TODO: gate this! 
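The `PoisonMicroblock` arm of `consensus_deserialize` only accepts header pairs that actually prove equivocation: the two headers must differ in some field, yet still share either the sequence number or the parent block. A standalone sketch of that predicate, with a simplified header type holding only the fields the check reads:

// Simplified microblock header for illustration: only the fields the fork check uses.
#[derive(PartialEq)]
struct MbHeader {
    sequence: u16,
    prev_block: [u8; 32],
}

// Mirrors the acceptance rule: the headers must differ, but still identify a
// fork by sharing either the sequence number or the parent block.
fn identifies_fork(h1: &MbHeader, h2: &MbHeader) -> bool {
    h1 != h2 && (h1.sequence == h2.sequence || h1.prev_block == h2.prev_block)
}

fn main() {
    let a = MbHeader { sequence: 3, prev_block: [1; 32] };
    let b = MbHeader { sequence: 3, prev_block: [2; 32] };
    assert!(identifies_fork(&a, &b)); // same sequence, different parent: a fork
    assert!(!identifies_fork(&a, &a)); // identical headers prove nothing
}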
+ TransactionPayloadID::NakamotoCoinbase => { let payload: CoinbasePayload = read_next(fd)?; let principal_value_opt: Value = read_next(fd)?; let recipient_opt = if let Value::Optional(optional_data) = principal_value_opt { @@ -2599,16 +3283,10 @@ impl StacksMessageCodec for TransactionPayload { let vrf_proof: VRFProof = read_next(fd)?; TransactionPayload::Coinbase(payload, recipient_opt, Some(vrf_proof)) } - x if x == TransactionPayloadID::TenureChange as u8 => { + TransactionPayloadID::TenureChange => { let payload: TenureChangePayload = read_next(fd)?; TransactionPayload::TenureChange(payload) } - _ => { - return Err(CodecError::DeserializeError(format!( - "Failed to parse transaction -- unknown payload ID {}", - type_id - ))); - } }; Ok(payload) @@ -2821,6 +3499,9 @@ impl StacksMessageCodec for TransactionSpendingCondition { TransactionSpendingCondition::Multisig(ref data) => { data.consensus_serialize(fd)?; } + TransactionSpendingCondition::OrderIndependentMultisig(ref data) => { + data.consensus_serialize(fd)?; + } } Ok(()) } @@ -2839,6 +3520,10 @@ impl StacksMessageCodec for TransactionSpendingCondition { } else if MultisigHashMode::from_u8(hash_mode_u8).is_some() { let cond = MultisigSpendingCondition::consensus_deserialize(&mut rrd)?; TransactionSpendingCondition::Multisig(cond) + } else if OrderIndependentMultisigHashMode::from_u8(hash_mode_u8).is_some() { + let cond = + OrderIndependentMultisigSpendingCondition::consensus_deserialize(&mut rrd)?; + TransactionSpendingCondition::OrderIndependentMultisig(cond) } else { return Err(CodecError::DeserializeError(format!( "Failed to parse spending condition: invalid hash mode {}", @@ -2987,6 +3672,88 @@ impl StacksMessageCodec for MultisigSpendingCondition { } } +impl StacksMessageCodec for OrderIndependentMultisigSpendingCondition { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &(self.hash_mode.clone() as u8))?; + write_next(fd, &self.signer)?; + write_next(fd, &self.nonce)?; + write_next(fd, &self.tx_fee)?; + write_next(fd, &self.fields)?; + write_next(fd, &self.signatures_required)?; + Ok(()) + } + + fn consensus_deserialize( + fd: &mut R, + ) -> Result { + let hash_mode_u8: u8 = read_next(fd)?; + let hash_mode = OrderIndependentMultisigHashMode::from_u8(hash_mode_u8).ok_or( + CodecError::DeserializeError(format!( + "Failed to parse multisig spending condition: unknown hash mode {}", + hash_mode_u8 + )), + )?; + + let signer: Hash160 = read_next(fd)?; + let nonce: u64 = read_next(fd)?; + let tx_fee: u64 = read_next(fd)?; + let fields: Vec = { + let mut bound_read = BoundReader::from_reader(fd, MAX_MESSAGE_LEN as u64); + read_next(&mut bound_read) + }?; + + let signatures_required: u16 = read_next(fd)?; + + // read and decode _exactly_ num_signatures signature buffers + let mut num_sigs_given: u16 = 0; + let mut have_uncompressed = false; + for f in fields.iter() { + match *f { + TransactionAuthField::Signature(ref key_encoding, _) => { + num_sigs_given = + num_sigs_given + .checked_add(1) + .ok_or(CodecError::DeserializeError( + "Failed to parse order independent multisig spending condition: too many signatures" + .to_string(), + ))?; + if *key_encoding == TransactionPublicKeyEncoding::Uncompressed { + have_uncompressed = true; + } + } + TransactionAuthField::PublicKey(ref pubk) => { + if !pubk.compressed() { + have_uncompressed = true; + } + } + }; + } + + // must be given the right number of signatures + if num_sigs_given < signatures_required { + let msg = format!( + "Failed to 
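Spending-condition deserialization dispatches on the leading hash-mode byte, trying the singlesig, multisig, and (new here) order-independent multisig families in turn. A rough illustration of that dispatch; the byte values below are assumptions for illustration only, and the authoritative mapping lives in the `SinglesigHashMode`, `MultisigHashMode`, and `OrderIndependentMultisigHashMode` enums:

#[derive(Debug, PartialEq)]
enum CondKind {
    Singlesig,
    Multisig,
    OrderIndependentMultisig,
}

fn classify(hash_mode: u8) -> Option<CondKind> {
    match hash_mode {
        0x00 | 0x02 => Some(CondKind::Singlesig),                // P2PKH / P2WPKH (assumed values)
        0x01 | 0x03 => Some(CondKind::Multisig),                 // P2SH / P2WSH (assumed values)
        0x05 | 0x07 => Some(CondKind::OrderIndependentMultisig), // P2SH / P2WSH (assumed values)
        // in the real code an unknown hash mode becomes CodecError::DeserializeError
        _ => None,
    }
}

fn main() {
    assert_eq!(classify(0x05), Some(CondKind::OrderIndependentMultisig));
    assert_eq!(classify(0xff), None);
}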
deserialize order independent multisig spending condition: got {num_sigs_given} sigs, expected at least {signatures_required}" + ); + return Err(CodecError::DeserializeError(msg)); + } + + // must all be compressed if we're using P2WSH + if have_uncompressed && hash_mode == OrderIndependentMultisigHashMode::P2WSH { + let msg = "Failed to deserialize order independent multisig spending condition: expected compressed keys only".to_string(); + return Err(CodecError::DeserializeError(msg)); + } + + Ok(OrderIndependentMultisigSpendingCondition { + signer, + nonce, + tx_fee, + hash_mode, + fields, + signatures_required, + }) + } +} + /// A container for public keys (compressed secp256k1 public keys) pub struct StacksPublicKeyBuffer(pub [u8; 33]); impl_array_newtype!(StacksPublicKeyBuffer, u8, 33); @@ -3090,3 +3857,509 @@ impl StacksMessageCodec for StacksTransaction { StacksTransaction::consensus_deserialize_with_len(fd).map(|(result, _)| result) } } + +define_u8_enum!( +/// Enum representing the SignerMessage type prefix +SignerMessageTypePrefix { + /// Block Proposal message from miners + BlockProposal = 0, + /// Block Response message from signers + BlockResponse = 1, + /// Block Pushed message from miners + BlockPushed = 2, + /// Mock block proposal message from Epoch 2.5 miners + MockProposal = 3, + /// Mock block signature message from Epoch 2.5 signers + MockSignature = 4, + /// Mock block message from Epoch 2.5 miners + MockBlock = 5 +}); + +impl TryFrom for SignerMessageTypePrefix { + type Error = CodecError; + fn try_from(value: u8) -> Result { + Self::from_u8(value).ok_or_else(|| { + CodecError::DeserializeError(format!("Unknown signer message type prefix: {value}")) + }) + } +} + +impl From<&SignerMessage> for SignerMessageTypePrefix { + fn from(message: &SignerMessage) -> Self { + match message { + SignerMessage::BlockProposal(_) => SignerMessageTypePrefix::BlockProposal, + SignerMessage::BlockResponse(_) => SignerMessageTypePrefix::BlockResponse, + SignerMessage::BlockPushed(_) => SignerMessageTypePrefix::BlockPushed, + SignerMessage::MockProposal(_) => SignerMessageTypePrefix::MockProposal, + SignerMessage::MockSignature(_) => SignerMessageTypePrefix::MockSignature, + SignerMessage::MockBlock(_) => SignerMessageTypePrefix::MockBlock, + } + } +} + +define_u8_enum!( +/// Enum representing the BlockResponse type prefix +BlockResponseTypePrefix { + /// An accepted block response + Accepted = 0, + /// A rejected block response + Rejected = 1 +}); + +impl TryFrom for BlockResponseTypePrefix { + type Error = CodecError; + fn try_from(value: u8) -> Result { + Self::from_u8(value).ok_or_else(|| { + CodecError::DeserializeError(format!("Unknown block response type prefix: {value}")) + }) + } +} + +impl From<&BlockResponse> for BlockResponseTypePrefix { + fn from(block_response: &BlockResponse) -> Self { + match block_response { + BlockResponse::Accepted(_) => BlockResponseTypePrefix::Accepted, + BlockResponse::Rejected(_) => BlockResponseTypePrefix::Rejected, + } + } +} + +// This enum is used to supply a `reason_code` for validation +// rejection responses. This is serialized as an enum with string +// type (in jsonschema terminology). 
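After reading `fields`, the order-independent multisig deserializer enforces two rules: at least `signatures_required` signature fields must be present (an exact count is not required), and P2WSH conditions may only reference compressed keys. A standalone sketch of those checks over a simplified field type:

// Simplified auth field, keeping only what the two checks look at.
enum AuthField {
    Signature { compressed: bool },
    PublicKey { compressed: bool },
}

fn validate(fields: &[AuthField], signatures_required: u16, is_p2wsh: bool) -> Result<(), String> {
    let mut num_sigs: u16 = 0;
    let mut have_uncompressed = false;
    for f in fields {
        match f {
            AuthField::Signature { compressed } => {
                num_sigs = num_sigs
                    .checked_add(1)
                    .ok_or_else(|| "too many signatures".to_string())?;
                have_uncompressed |= !*compressed;
            }
            AuthField::PublicKey { compressed } => have_uncompressed |= !*compressed,
        }
    }
    // at least `signatures_required` signatures must be present
    if num_sigs < signatures_required {
        return Err(format!(
            "got {num_sigs} sigs, expected at least {signatures_required}"
        ));
    }
    // P2WSH commitments only admit compressed keys
    if is_p2wsh && have_uncompressed {
        return Err("expected compressed keys only".to_string());
    }
    Ok(())
}

fn main() {
    let fields = vec![
        AuthField::Signature { compressed: true },
        AuthField::PublicKey { compressed: true },
    ];
    assert!(validate(&fields, 1, true).is_ok());
    assert!(validate(&fields, 2, true).is_err()); // only one signature present
}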
+define_u8_enum![ValidateRejectCode { + BadBlockHash = 0, + BadTransaction = 1, + InvalidBlock = 2, + ChainstateError = 3, + UnknownParent = 4, + NonCanonicalTenure = 5, + NoSuchTenure = 6 +}]; + +impl TryFrom for ValidateRejectCode { + type Error = CodecError; + fn try_from(value: u8) -> Result { + Self::from_u8(value) + .ok_or_else(|| CodecError::DeserializeError(format!("Unknown type prefix: {value}"))) + } +} + +define_u8_enum!( +/// Enum representing the reject code type prefix +RejectCodeTypePrefix { + /// The block was rejected due to validation issues + ValidationFailed = 0, + /// The block was rejected due to connectivity issues with the signer + ConnectivityIssues = 1, + /// The block was rejected in a prior round + RejectedInPriorRound = 2, + /// The block was rejected due to no sortition view + NoSortitionView = 3, + /// The block was rejected due to a mismatch with expected sortition view + SortitionViewMismatch = 4, + /// The block was rejected due to a testing directive + TestingDirective = 5 +}); + +impl TryFrom for RejectCodeTypePrefix { + type Error = CodecError; + fn try_from(value: u8) -> Result { + Self::from_u8(value).ok_or_else(|| { + CodecError::DeserializeError(format!("Unknown reject code type prefix: {value}")) + }) + } +} + +impl From<&RejectCode> for RejectCodeTypePrefix { + fn from(reject_code: &RejectCode) -> Self { + match reject_code { + RejectCode::ValidationFailed(_) => RejectCodeTypePrefix::ValidationFailed, + RejectCode::ConnectivityIssues => RejectCodeTypePrefix::ConnectivityIssues, + RejectCode::RejectedInPriorRound => RejectCodeTypePrefix::RejectedInPriorRound, + RejectCode::NoSortitionView => RejectCodeTypePrefix::NoSortitionView, + RejectCode::SortitionViewMismatch => RejectCodeTypePrefix::SortitionViewMismatch, + RejectCode::TestingDirective => RejectCodeTypePrefix::TestingDirective, + } + } +} + +/// This enum is used to supply a `reason_code` for block rejections +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub enum RejectCode { + /// RPC endpoint Validation failed + ValidationFailed(ValidateRejectCode), + /// No Sortition View to verify against + NoSortitionView, + /// The block was rejected due to connectivity issues with the signer + ConnectivityIssues, + /// The block was rejected in a prior round + RejectedInPriorRound, + /// The block was rejected due to a mismatch with expected sortition view + SortitionViewMismatch, + /// The block was rejected due to a testing directive + TestingDirective, +} + +impl StacksMessageCodec for RejectCode { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &(RejectCodeTypePrefix::from(self) as u8))?; + // Do not do a single match here as we may add other variants in the future and don't want to miss adding it + match self { + RejectCode::ValidationFailed(code) => write_next(fd, &(*code as u8))?, + RejectCode::ConnectivityIssues + | RejectCode::RejectedInPriorRound + | RejectCode::NoSortitionView + | RejectCode::SortitionViewMismatch + | RejectCode::TestingDirective => { + // No additional data to serialize / deserialize + } + }; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let type_prefix_byte = read_next::(fd)?; + let type_prefix = RejectCodeTypePrefix::try_from(type_prefix_byte)?; + let code = match type_prefix { + RejectCodeTypePrefix::ValidationFailed => RejectCode::ValidationFailed( + ValidateRejectCode::try_from(read_next::(fd)?).map_err(|e| { + CodecError::DeserializeError(format!( + "Failed to decode validation reject code: 
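`RejectCode` serializes as a one-byte `RejectCodeTypePrefix`, and only the `ValidationFailed` variant carries an extra byte (the `ValidateRejectCode`). A hedged round-trip sketch, assuming these types and the `StacksMessageCodec` trait are exported from `stacks_codec::codec` (adjust the path to wherever the crate actually re-exports them):

// Hedged round-trip sketch; the import path is an assumption.
use stacks_codec::codec::{RejectCode, StacksMessageCodec, ValidateRejectCode};

fn main() {
    let code = RejectCode::ValidationFailed(ValidateRejectCode::UnknownParent);
    let mut bytes: Vec<u8> = vec![];
    code.consensus_serialize(&mut bytes).expect("serialize");
    // one byte of RejectCodeTypePrefix plus one byte of ValidateRejectCode
    assert_eq!(bytes.len(), 2);
    let decoded = RejectCode::consensus_deserialize(&mut bytes.as_slice()).expect("deserialize");
    assert_eq!(code, decoded);
}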
{:?}", + &e + )) + })?, + ), + RejectCodeTypePrefix::ConnectivityIssues => RejectCode::ConnectivityIssues, + RejectCodeTypePrefix::RejectedInPriorRound => RejectCode::RejectedInPriorRound, + RejectCodeTypePrefix::NoSortitionView => RejectCode::NoSortitionView, + RejectCodeTypePrefix::SortitionViewMismatch => RejectCode::SortitionViewMismatch, + RejectCodeTypePrefix::TestingDirective => RejectCode::TestingDirective, + }; + Ok(code) + } +} + +/// A rejection response from a signer for a proposed block +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct BlockRejection { + /// The reason for the rejection + pub reason: String, + /// The reason code for the rejection + pub reason_code: RejectCode, + /// The signer signature hash of the block that was rejected + pub signer_signature_hash: Sha512Trunc256Sum, + /// The signer's signature across the rejection + pub signature: MessageSignature, + /// The chain id + pub chain_id: u32, +} + +impl StacksMessageCodec for BlockRejection { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &self.reason.as_bytes().to_vec())?; + write_next(fd, &self.reason_code)?; + write_next(fd, &self.signer_signature_hash)?; + write_next(fd, &self.chain_id)?; + write_next(fd, &self.signature)?; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let reason_bytes = read_next::, _>(fd)?; + let reason = String::from_utf8(reason_bytes).map_err(|e| { + CodecError::DeserializeError(format!("Failed to decode reason string: {:?}", &e)) + })?; + let reason_code = read_next::(fd)?; + let signer_signature_hash = read_next::(fd)?; + let chain_id = read_next::(fd)?; + let signature = read_next::(fd)?; + Ok(Self { + reason, + reason_code, + signer_signature_hash, + chain_id, + signature, + }) + } +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +/// BlockProposal sent to signers +pub struct BlockProposal { + /// The block itself + pub block: NakamotoBlock, + /// The burn height the block is mined during + pub burn_height: u64, + /// The reward cycle the block is mined during + pub reward_cycle: u64, +} + +impl StacksMessageCodec for BlockProposal { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + self.block.consensus_serialize(fd)?; + self.burn_height.consensus_serialize(fd)?; + self.reward_cycle.consensus_serialize(fd)?; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let block = NakamotoBlock::consensus_deserialize(fd)?; + let burn_height = u64::consensus_deserialize(fd)?; + let reward_cycle = u64::consensus_deserialize(fd)?; + Ok(BlockProposal { + block, + burn_height, + reward_cycle, + }) + } +} + +/// The response that a signer sends back to observing miners +/// either accepting or rejecting a Nakamoto block with the corresponding reason +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub enum BlockResponse { + /// The Nakamoto block was accepted and therefore signed + Accepted((Sha512Trunc256Sum, MessageSignature)), + /// The Nakamoto block was rejected and therefore not signed + Rejected(BlockRejection), +} + +impl StacksMessageCodec for BlockResponse { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &(BlockResponseTypePrefix::from(self) as u8))?; + match self { + BlockResponse::Accepted((hash, sig)) => { + write_next(fd, hash)?; + write_next(fd, sig)?; + } + BlockResponse::Rejected(rejection) => { + write_next(fd, rejection)?; + } + }; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) 
-> Result { + let type_prefix_byte = read_next::(fd)?; + let type_prefix = BlockResponseTypePrefix::try_from(type_prefix_byte)?; + let response = match type_prefix { + BlockResponseTypePrefix::Accepted => { + let hash = read_next::(fd)?; + let sig = read_next::(fd)?; + BlockResponse::Accepted((hash, sig)) + } + BlockResponseTypePrefix::Rejected => { + let rejection = read_next::(fd)?; + BlockResponse::Rejected(rejection) + } + }; + Ok(response) + } +} + +/// A mock signature for the stacks node to be used for mock signing. +/// This is only used by Epoch 2.5 signers to simulate the signing of a block for every sortition. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct MockSignature { + /// The signer's signature across the mock proposal + signature: MessageSignature, + /// The mock block proposal that was signed across + pub mock_proposal: MockProposal, +} + +impl StacksMessageCodec for MockSignature { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &self.signature)?; + self.mock_proposal.consensus_serialize(fd)?; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let signature = read_next::(fd)?; + let mock_proposal = MockProposal::consensus_deserialize(fd)?; + Ok(Self { + signature, + mock_proposal, + }) + } +} + +/// The signer relevant peer information from the stacks node +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct PeerInfo { + /// The burn block height + pub burn_block_height: u64, + /// The consensus hash of the stacks tip + pub stacks_tip_consensus_hash: ConsensusHash, + /// The stacks tip + pub stacks_tip: BlockHeaderHash, + /// The stacks tip height + pub stacks_tip_height: u64, + /// The pox consensus + pub pox_consensus: ConsensusHash, + /// The server version + pub server_version: String, + /// The network id + pub network_id: u32, +} + +impl StacksMessageCodec for PeerInfo { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + write_next(fd, &self.burn_block_height)?; + write_next(fd, self.stacks_tip_consensus_hash.as_bytes())?; + write_next(fd, &self.stacks_tip)?; + write_next(fd, &self.stacks_tip_height)?; + write_next(fd, &(self.server_version.as_bytes().len() as u8))?; + fd.write_all(self.server_version.as_bytes()) + .map_err(CodecError::WriteError)?; + write_next(fd, &self.pox_consensus)?; + write_next(fd, &self.network_id)?; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let burn_block_height = read_next::(fd)?; + let stacks_tip_consensus_hash = read_next::(fd)?; + let stacks_tip = read_next::(fd)?; + let stacks_tip_height = read_next::(fd)?; + let len_byte: u8 = read_next(fd)?; + let mut bytes = vec![0u8; len_byte as usize]; + fd.read_exact(&mut bytes).map_err(CodecError::ReadError)?; + // must encode a valid string + let server_version = String::from_utf8(bytes).map_err(|_e| { + CodecError::DeserializeError( + "Failed to parse server version name: could not contruct from utf8".to_string(), + ) + })?; + let pox_consensus = read_next::(fd)?; + let network_id = read_next(fd)?; + Ok(Self { + burn_block_height, + stacks_tip_consensus_hash, + stacks_tip, + stacks_tip_height, + server_version, + pox_consensus, + network_id, + }) + } +} + +/// A mock block proposal for Epoch 2.5 mock signing +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct MockProposal { + /// The view of the stacks node peer information at the time of the mock proposal + pub peer_info: PeerInfo, + /// The miner's signature across the 
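`PeerInfo` encodes `server_version` with its own framing: a single length byte followed by the raw UTF-8 bytes (capping the version string at 255 bytes), rather than the 4-byte vector length used elsewhere. A standalone sketch of that field layout:

use std::io::{Read, Write};

fn write_version<W: Write>(fd: &mut W, version: &str) -> std::io::Result<()> {
    // one length byte, then the raw UTF-8 bytes
    fd.write_all(&[version.len() as u8])?;
    fd.write_all(version.as_bytes())
}

fn read_version<R: Read>(fd: &mut R) -> std::io::Result<String> {
    let mut len = [0u8; 1];
    fd.read_exact(&mut len)?;
    let mut bytes = vec![0u8; len[0] as usize];
    fd.read_exact(&mut bytes)?;
    // must encode a valid string
    String::from_utf8(bytes)
        .map_err(|_| std::io::Error::new(std::io::ErrorKind::InvalidData, "not utf8"))
}

fn main() -> std::io::Result<()> {
    let mut wire = Vec::new();
    write_version(&mut wire, "stacks-node 3.0.0")?; // hypothetical version string
    assert_eq!(read_version(&mut wire.as_slice())?, "stacks-node 3.0.0");
    Ok(())
}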
peer info + signature: MessageSignature, +} + +impl StacksMessageCodec for MockProposal { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + self.peer_info.consensus_serialize(fd)?; + write_next(fd, &self.signature)?; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let peer_info = PeerInfo::consensus_deserialize(fd)?; + let signature = read_next::(fd)?; + Ok(Self { + peer_info, + signature, + }) + } +} + +/// The mock block data for epoch 2.5 miners to broadcast to simulate block signing +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct MockBlock { + /// The mock proposal that was signed across + pub mock_proposal: MockProposal, + /// The mock signatures that the miner received + pub mock_signatures: Vec, +} + +impl StacksMessageCodec for MockBlock { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + self.mock_proposal.consensus_serialize(fd)?; + write_next(fd, &self.mock_signatures)?; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let mock_proposal = MockProposal::consensus_deserialize(fd)?; + let mock_signatures = read_next::, _>(fd)?; + Ok(Self { + mock_proposal, + mock_signatures, + }) + } +} + +/// The messages being sent through the stacker db contracts +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub enum SignerMessage { + /// The block proposal from miners for signers to observe and sign + BlockProposal(BlockProposal), + /// The block response from signers for miners to observe + BlockResponse(BlockResponse), + /// A block pushed from miners to the signers set + BlockPushed(NakamotoBlock), + /// A mock signature from the epoch 2.5 signers + MockSignature(MockSignature), + /// A mock message from the epoch 2.5 miners + MockProposal(MockProposal), + /// A mock block from the epoch 2.5 miners + MockBlock(MockBlock), +} + +impl StacksMessageCodec for SignerMessage { + fn consensus_serialize(&self, fd: &mut W) -> Result<(), CodecError> { + SignerMessageTypePrefix::from(self) + .to_u8() + .consensus_serialize(fd)?; + match self { + SignerMessage::BlockProposal(block_proposal) => block_proposal.consensus_serialize(fd), + SignerMessage::BlockResponse(block_response) => block_response.consensus_serialize(fd), + SignerMessage::BlockPushed(block) => block.consensus_serialize(fd), + SignerMessage::MockSignature(signature) => signature.consensus_serialize(fd), + SignerMessage::MockProposal(message) => message.consensus_serialize(fd), + SignerMessage::MockBlock(block) => block.consensus_serialize(fd), + }?; + Ok(()) + } + + fn consensus_deserialize(fd: &mut R) -> Result { + let type_prefix_byte = u8::consensus_deserialize(fd)?; + let type_prefix = SignerMessageTypePrefix::try_from(type_prefix_byte)?; + let message = match type_prefix { + SignerMessageTypePrefix::BlockProposal => { + let block_proposal = StacksMessageCodec::consensus_deserialize(fd)?; + SignerMessage::BlockProposal(block_proposal) + } + SignerMessageTypePrefix::BlockResponse => { + let block_response = StacksMessageCodec::consensus_deserialize(fd)?; + SignerMessage::BlockResponse(block_response) + } + SignerMessageTypePrefix::BlockPushed => { + let block = StacksMessageCodec::consensus_deserialize(fd)?; + SignerMessage::BlockPushed(block) + } + SignerMessageTypePrefix::MockProposal => { + let message = StacksMessageCodec::consensus_deserialize(fd)?; + SignerMessage::MockProposal(message) + } + SignerMessageTypePrefix::MockSignature => { + let signature = StacksMessageCodec::consensus_deserialize(fd)?; + 
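Every `SignerMessage` on the wire starts with a one-byte `SignerMessageTypePrefix` (0 through 5, as defined above), and the remainder is the variant's own consensus encoding; unknown prefixes are a deserialize error. A standalone sketch of a dispatcher that only inspects the prefix:

// Maps the leading prefix byte to the SignerMessage variant it announces.
fn message_kind(wire: &[u8]) -> Result<&'static str, String> {
    match wire.first() {
        Some(0) => Ok("BlockProposal"),
        Some(1) => Ok("BlockResponse"),
        Some(2) => Ok("BlockPushed"),
        Some(3) => Ok("MockProposal"),
        Some(4) => Ok("MockSignature"),
        Some(5) => Ok("MockBlock"),
        Some(p) => Err(format!("Unknown signer message type prefix: {p}")),
        None => Err("empty message".to_string()),
    }
}

fn main() {
    assert_eq!(message_kind(&[2, 0xde, 0xad]), Ok("BlockPushed"));
    assert!(message_kind(&[9]).is_err());
}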
SignerMessage::MockSignature(signature) + } + SignerMessageTypePrefix::MockBlock => { + let block = StacksMessageCodec::consensus_deserialize(fd)?; + SignerMessage::MockBlock(block) + } + }; + Ok(message) + } +}
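A hedged end-to-end sketch of how a consumer might decode a raw StackerDB chunk with this codec, assuming `SignerMessage` and `StacksMessageCodec` are exported from `stacks_codec::codec` and that `CodecError` implements `Debug`:

// Hedged sketch; the import path is an assumption.
use stacks_codec::codec::{SignerMessage, StacksMessageCodec};

fn decode_chunk(chunk: &[u8]) -> Result<(), String> {
    let message = SignerMessage::consensus_deserialize(&mut &chunk[..])
        .map_err(|e| format!("invalid signer message: {e:?}"))?;
    match message {
        SignerMessage::BlockProposal(proposal) => {
            println!("block proposal at burn height {}", proposal.burn_height);
        }
        SignerMessage::BlockResponse(_) => println!("block response"),
        other => println!("other message: {other:?}"),
    }
    Ok(())
}

fn main() {
    // an empty chunk cannot be a valid signer message
    assert!(decode_chunk(&[]).is_err());
}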