From 30a9acb4431bbc279cdd31b7a00d85631d941beb Mon Sep 17 00:00:00 2001 From: Keith Date: Tue, 27 Aug 2024 10:56:41 -0400 Subject: [PATCH 001/213] Add cargo audit to CI --- .github/workflows/check-rust.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 797ad4df4..fd1705e38 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -208,6 +208,16 @@ jobs: - name: cargo clippy --workspace --all-targets --all-features -- -D warnings run: cargo clippy --workspace --all-targets --all-features -- -D warnings + + cargo-audit: + name: cargo audit + runs-on: SubtensorCI + steps: + - name: Check-out repositoroy under $GITHUB_WORKSPACE + uses: actions/checkout@v4 + + - name: Audit check + uses: rustsec/audit-check@v1.4.1 # runs cargo test --workspace cargo-test: name: cargo test From 80d2073dc6d3a853b1ce63abb6719e21af3f6c35 Mon Sep 17 00:00:00 2001 From: Keith Date: Tue, 27 Aug 2024 11:16:25 -0400 Subject: [PATCH 002/213] Use GH token for audit check --- .github/workflows/check-rust.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index fd1705e38..0f33d75bb 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -208,7 +208,7 @@ jobs: - name: cargo clippy --workspace --all-targets --all-features -- -D warnings run: cargo clippy --workspace --all-targets --all-features -- -D warnings - + # runs cargo audit cargo-audit: name: cargo audit runs-on: SubtensorCI @@ -218,6 +218,8 @@ jobs: - name: Audit check uses: rustsec/audit-check@v1.4.1 + with: + token: ${{ secrets.GITHUB_TOKEN }} # runs cargo test --workspace cargo-test: name: cargo test From 3404a5bcbae21fdf89942f83f521b8adae3c6220 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 27 Aug 2024 12:35:58 -0400 Subject: [PATCH 003/213] fix benchmark_dissolve_network --- 
pallets/subtensor/src/benchmarks.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pallets/subtensor/src/benchmarks.rs b/pallets/subtensor/src/benchmarks.rs index 4af039ad6..160474e79 100644 --- a/pallets/subtensor/src/benchmarks.rs +++ b/pallets/subtensor/src/benchmarks.rs @@ -312,7 +312,8 @@ benchmarks! { let amount_to_be_staked = 100_000_000_000_000u64; Subtensor::::add_balance_to_coldkey_account(&coldkey.clone(), amount_to_be_staked); assert_ok!(Subtensor::::register_network(RawOrigin::Signed(coldkey.clone()).into(), None)); - }: dissolve_network(RawOrigin::Signed(coldkey), 1) + let c1 = coldkey.clone(); + }: dissolve_network(RawOrigin::Signed(c1), coldkey, 1) // swap_hotkey { From 26bb8f6bac9e4e02908dc0ecd139a12466437134 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 27 Aug 2024 12:54:41 -0400 Subject: [PATCH 004/213] fix test_user_ad_network_with_identify_fields_ok() --- pallets/subtensor/tests/root.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pallets/subtensor/tests/root.rs b/pallets/subtensor/tests/root.rs index 0c621739b..e7e948ddc 100644 --- a/pallets/subtensor/tests/root.rs +++ b/pallets/subtensor/tests/root.rs @@ -1052,10 +1052,7 @@ fn test_user_add_network_with_identity_fields_ok() { assert_eq!(stored_identity_2.subnet_contact, subnet_contact_2); // Now remove the first network. - assert_ok!(SubtensorModule::user_remove_network( - RuntimeOrigin::signed(coldkey_1), - 1 - )); + assert_ok!(SubtensorModule::user_remove_network(coldkey_1, 1)); // Verify that the first network and identity have been removed. 
assert!(SubnetIdentities::::get(1).is_none()); From 8176f57f70ffbadd97b0a895cd9c2ecebd5034aa Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 27 Aug 2024 14:31:31 -0400 Subject: [PATCH 005/213] simplify cargo test CI step, always check benchmarks + other feats --- .github/workflows/check-rust.yml | 53 ++------------------------------ 1 file changed, 2 insertions(+), 51 deletions(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 797ad4df4..b088744cb 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -254,57 +254,8 @@ jobs: with: key: ${{ matrix.os }}-${{ env.RUST_BIN_DIR }} - - name: cargo test --workspace - run: cargo test --workspace - - # runs cargo test --workspace --features=runtime-benchmarks - cargo-test-benchmarks: - name: cargo test w/benchmarks - runs-on: SubtensorCI - strategy: - matrix: - rust-branch: - - stable - rust-target: - - x86_64-unknown-linux-gnu - # - x86_64-apple-darwin - os: - - ubuntu-latest - # - macos-latest - include: - - os: ubuntu-latest - # - os: macos-latest - env: - RELEASE_NAME: development - # RUSTFLAGS: -A warnings - RUSTV: ${{ matrix.rust-branch }} - RUST_BACKTRACE: full - RUST_BIN_DIR: target/${{ matrix.rust-target }} - SKIP_WASM_BUILD: 1 - TARGET: ${{ matrix.rust-target }} - steps: - - name: Check-out repository under $GITHUB_WORKSPACE - uses: actions/checkout@v4 - - - name: Install dependencies - run: | - sudo apt-get update && - sudo apt-get install -y clang curl libssl-dev llvm libudev-dev protobuf-compiler - - - name: Install Rust ${{ matrix.rust-branch }} - uses: actions-rs/toolchain@v1.0.6 - with: - toolchain: ${{ matrix.rust-branch }} - components: rustfmt, clippy - profile: minimal - - - name: Utilize Rust shared cached - uses: Swatinem/rust-cache@v2.2.1 - with: - key: ${{ matrix.os }}-${{ env.RUST_BIN_DIR }} - - - name: cargo test --workspace --features=runtime-benchmarks - run: cargo test --workspace --features=runtime-benchmarks + - 
name: cargo test --workspace --all-features + run: cargo test --workspace --all-features # ensures cargo fix has no trivial changes that can be applied cargo-fix: From 8641afc82fee9abfd3bb9355ea0ee46a962cb449 Mon Sep 17 00:00:00 2001 From: Keith Date: Tue, 27 Aug 2024 17:23:12 -0400 Subject: [PATCH 006/213] Do not use GH action for cargo audit --- .github/workflows/check-rust.yml | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 0f33d75bb..fdbcfb46f 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -216,10 +216,29 @@ jobs: - name: Check-out repositoroy under $GITHUB_WORKSPACE uses: actions/checkout@v4 - - name: Audit check - uses: rustsec/audit-check@v1.4.1 + - name: Install dependencies + run: | + sudo apt-get update && + sudo apt-get install -y clang curl libssl-dev llvm libudev-dev protobuf-compiler + + - name: Install Rust ${{ matrix.rust-branch }} + uses: actions-rs/toolchain@v1.0.6 + with: + toolchain: ${{ matrix.rust-branch }} + components: rustfmt, clippy + profile: minimal + + - name: Utilize Shared Rust Cache + uses: Swatinem/rust-cache@v2.2.1 with: - token: ${{ secrets.GITHUB_TOKEN }} + key: ${{ matrix.os }}-${{ env.RUST_BIN_DIR }} + + - name: Install cargo-audit + run: cargo install cargo-audit + + - name: cargo audit + run: cargo audit + # runs cargo test --workspace cargo-test: name: cargo test From 7081d6d4997f5ca12d1db66582cf14d5a208abac Mon Sep 17 00:00:00 2001 From: Keith Date: Tue, 27 Aug 2024 17:35:21 -0400 Subject: [PATCH 007/213] Add data for installing Rust --- .github/workflows/check-rust.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index fdbcfb46f..ebcbecd8c 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -212,6 +212,16 @@ jobs: cargo-audit: name: cargo audit 
runs-on: SubtensorCI + strategy: + matrix: + rust-branch: + - stable + rust-target: + - x86_64-unknown-linux-gnu + # - x86_64-apple-darwin + os: + - ubuntu-latest + # - macos-latest steps: - name: Check-out repositoroy under $GITHUB_WORKSPACE uses: actions/checkout@v4 From ffc762bf93a3bcb3da759073058579a2e3391a3a Mon Sep 17 00:00:00 2001 From: Cameron Fairchild Date: Tue, 27 Aug 2024 17:38:17 -0400 Subject: [PATCH 008/213] add root weights proxy --- runtime/src/lib.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 9ad0624d0..8603ce6eb 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -629,6 +629,7 @@ pub enum ProxyType { Registration, Transfer, SmallTransfer, + RootWeights, } // Transfers below SMALL_TRANSFER_LIMIT are considered small transfers pub const SMALL_TRANSFER_LIMIT: Balance = 500_000_000; // 0.5 TAO @@ -673,6 +674,7 @@ impl InstanceFilter for ProxyType { | RuntimeCall::SubtensorModule(pallet_subtensor::Call::root_register { .. }) | RuntimeCall::SubtensorModule(pallet_subtensor::Call::burned_register { .. }) | RuntimeCall::Triumvirate(..) + | RuntimeCall::RootWeights(..) ), ProxyType::Triumvirate => matches!( c, @@ -695,6 +697,10 @@ impl InstanceFilter for ProxyType { RuntimeCall::SubtensorModule(pallet_subtensor::Call::burned_register { .. }) | RuntimeCall::SubtensorModule(pallet_subtensor::Call::register { .. }) ), + ProxyType::RootWeights => matches!( + c, + RuntimeCall::SubtensorModule(pallet_subtensor::Call::set_root_weights { .. 
}) + ), } } fn is_superset(&self, o: &Self) -> bool { From 78f1d941b86340e90fa4d4cd3f3a6a3ff45967aa Mon Sep 17 00:00:00 2001 From: Cameron Fairchild Date: Tue, 27 Aug 2024 17:38:26 -0400 Subject: [PATCH 009/213] fmt --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 8603ce6eb..1f2c8a4eb 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -674,7 +674,7 @@ impl InstanceFilter for ProxyType { | RuntimeCall::SubtensorModule(pallet_subtensor::Call::root_register { .. }) | RuntimeCall::SubtensorModule(pallet_subtensor::Call::burned_register { .. }) | RuntimeCall::Triumvirate(..) - | RuntimeCall::RootWeights(..) + | RuntimeCall::RootWeights(..) ), ProxyType::Triumvirate => matches!( c, From a23860c87dcfd9be8a33a2089c200bfa74e13bc4 Mon Sep 17 00:00:00 2001 From: Cameron Fairchild Date: Tue, 27 Aug 2024 17:45:11 -0400 Subject: [PATCH 010/213] fix typo --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 1f2c8a4eb..77b9111f8 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -674,7 +674,7 @@ impl InstanceFilter for ProxyType { | RuntimeCall::SubtensorModule(pallet_subtensor::Call::root_register { .. }) | RuntimeCall::SubtensorModule(pallet_subtensor::Call::burned_register { .. }) | RuntimeCall::Triumvirate(..) - | RuntimeCall::RootWeights(..) + | RuntimeCall::SubtensorModule(pallet_subtensor::Call::set_root_weights { .. 
}) ), ProxyType::Triumvirate => matches!( c, From 52d3c46a0e08dfadc38d5c65a42aeb41a9ef4951 Mon Sep 17 00:00:00 2001 From: Keith Date: Wed, 28 Aug 2024 21:23:15 -0400 Subject: [PATCH 011/213] Add token argument to the publish script --- scripts/publish.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/publish.sh b/scripts/publish.sh index 3eb0fc6a5..8b2671787 100644 --- a/scripts/publish.sh +++ b/scripts/publish.sh @@ -1,28 +1,28 @@ #!/bin/bash set -ex cd support/macros -cargo publish +cargo publish --token $1 cd ../.. cd pallets/commitments -cargo publish +cargo publish --token $1 cd .. cd collective -cargo publish +cargo publish --token $1 cd .. cd registry -cargo publish +cargo publish --token $1 cd .. cd subtensor -cargo publish +cargo publish --token $1 cd runtime-api -cargo publish +cargo publish --token $1 cd ../.. cd admin-utils -cargo publish +cargo publish --token $1 cd ../.. cd runtime -cargo publish +cargo publish --token $1 cd .. cd node -cargo publish +cargo publish --token $1 echo "published successfully." 
From 506e78078ee24c9edf9377eaf9ad5542c711ec67 Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Thu, 8 Aug 2024 15:55:34 +0100 Subject: [PATCH 012/213] add neuron certificate handling --- pallets/subtensor/rpc/src/lib.rs | 22 ++++- pallets/subtensor/runtime-api/src/lib.rs | 1 + pallets/subtensor/src/lib.rs | 24 ++++++ pallets/subtensor/src/macros/dispatches.rs | 85 ++++++++++++++++++- pallets/subtensor/src/rpc_info/neuron_info.rs | 17 ++++ pallets/subtensor/src/subnets/serving.rs | 17 ++++ pallets/subtensor/tests/serving.rs | 50 +++++++++++ runtime/src/lib.rs | 46 +++------- 8 files changed, 224 insertions(+), 38 deletions(-) diff --git a/pallets/subtensor/rpc/src/lib.rs b/pallets/subtensor/rpc/src/lib.rs index 2445a5eda..ebf7dcc35 100644 --- a/pallets/subtensor/rpc/src/lib.rs +++ b/pallets/subtensor/rpc/src/lib.rs @@ -41,7 +41,13 @@ pub trait SubtensorCustomApi { fn get_neurons(&self, netuid: u16, at: Option) -> RpcResult>; #[method(name = "neuronInfo_getNeuron")] fn get_neuron(&self, netuid: u16, uid: u16, at: Option) -> RpcResult>; - + #[method(name = "neuronInfo_getNeuronCertificate")] + fn get_neuron_certificate( + &self, + netuid: u16, + uid: u16, + at: Option, + ) -> RpcResult>; #[method(name = "subnetInfo_getSubnetInfo")] fn get_subnet_info(&self, netuid: u16, at: Option) -> RpcResult>; #[method(name = "subnetInfo_getSubnetsInfo")] @@ -187,6 +193,20 @@ where .map_err(|e| Error::RuntimeError(format!("Unable to get neuron info: {:?}", e)).into()) } + fn get_neuron_certificate( + &self, + netuid: u16, + uid: u16, + at: Option<::Hash>, + ) -> RpcResult> { + let api = self.client.runtime_api(); + let at = at.unwrap_or_else(|| self.client.info().best_hash); + + api.get_neuron_certificate(at, netuid, uid).map_err(|e| { + Error::RuntimeError(format!("Unable to get neuron certificate: {:?}", e)).into() + }) + } + fn get_subnet_info( &self, netuid: u16, diff --git a/pallets/subtensor/runtime-api/src/lib.rs b/pallets/subtensor/runtime-api/src/lib.rs index 
ca43384b8..d2b6ca1fd 100644 --- a/pallets/subtensor/runtime-api/src/lib.rs +++ b/pallets/subtensor/runtime-api/src/lib.rs @@ -14,6 +14,7 @@ sp_api::decl_runtime_apis! { pub trait NeuronInfoRuntimeApi { fn get_neurons(netuid: u16) -> Vec; fn get_neuron(netuid: u16, uid: u16) -> Vec; + fn get_neuron_certificate(netuid: u16, uid: u16) -> Vec; fn get_neurons_lite(netuid: u16) -> Vec; fn get_neuron_lite(netuid: u16, uid: u16) -> Vec; } diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 2985736c8..6b6a8d62d 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -129,6 +129,15 @@ pub mod pallet { pub placeholder2: u8, } + /// Struct for NeuronCertificate. + pub type NeuronCertificateOf = NeuronCertificate; + /// Data structure for NeuronCertificate information. + #[derive(Decode, Encode, Default, TypeInfo, PartialEq, Eq, Clone, Debug)] + pub struct NeuronCertificate { + /// The neuron certificate. + pub certificate: Vec, + } + /// Struct for Prometheus. pub type PrometheusInfoOf = PrometheusInfo; @@ -1162,6 +1171,17 @@ pub mod pallet { /// --- MAP ( netuid, hotkey ) --> axon_info pub type Axons = StorageDoubleMap<_, Identity, u16, Blake2_128Concat, T::AccountId, AxonInfoOf, OptionQuery>; + /// --- MAP ( netuid, hotkey ) --> certificate + #[pallet::storage] + pub(super) type NeuronCertificates = StorageDoubleMap< + _, + Identity, + u16, + Blake2_128Concat, + T::AccountId, + NeuronCertificateOf, + OptionQuery, + >; #[pallet::storage] /// --- MAP ( netuid, hotkey ) --> prometheus_info pub type Prometheus = StorageDoubleMap< @@ -1538,6 +1558,10 @@ where let transaction_fee = 0; Ok((CallType::Serve, transaction_fee, who.clone())) } + Some(Call::serve_axon_tls { .. }) => { + let transaction_fee = 0; + Ok((CallType::Serve, transaction_fee, who.clone())) + } Some(Call::register_network { .. 
}) => { let transaction_fee = 0; Ok((CallType::RegisterNetwork, transaction_fee, who.clone())) diff --git a/pallets/subtensor/src/macros/dispatches.rs b/pallets/subtensor/src/macros/dispatches.rs index a97e4494d..378c0c721 100644 --- a/pallets/subtensor/src/macros/dispatches.rs +++ b/pallets/subtensor/src/macros/dispatches.rs @@ -435,7 +435,7 @@ mod dispatches { Self::do_remove_stake(origin, hotkey, amount_unstaked) } - /// Serves or updates axon /promethteus information for the neuron associated with the caller. If the caller is + /// Serves or updates axon /prometheus information for the neuron associated with the caller. If the caller is /// already registered the metadata is updated. If the caller is not registered this call throws NotRegistered. /// /// # Args: @@ -511,6 +511,89 @@ mod dispatches { protocol, placeholder1, placeholder2, + None, + ) + } + + /// Same as `serve_axon` but takes a certificate as an extra optional argument. + /// Serves or updates axon /prometheus information for the neuron associated with the caller. If the caller is + /// already registered the metadata is updated. If the caller is not registered this call throws NotRegistered. + /// + /// # Args: + /// * 'origin': (Origin): + /// - The signature of the caller. + /// + /// * 'netuid' (u16): + /// - The u16 network identifier. + /// + /// * 'version' (u64): + /// - The bittensor version identifier. + /// + /// * 'ip' (u64): + /// - The endpoint ip information as a u128 encoded integer. + /// + /// * 'port' (u16): + /// - The endpoint port information as a u16 encoded integer. + /// + /// * 'ip_type' (u8): + /// - The endpoint ip version as a u8, 4 or 6. + /// + /// * 'protocol' (u8): + /// - UDP:1 or TCP:0 + /// + /// * 'placeholder1' (u8): + /// - Placeholder for further extra params. + /// + /// * 'placeholder2' (u8): + /// - Placeholder for further extra params. + /// + /// # Event: + /// * AxonServed; + /// - On successfully serving the axon info. 
+ /// + /// # Raises: + /// * 'SubNetworkDoesNotExist': + /// - Attempting to set weights on a non-existent network. + /// + /// * 'NotRegistered': + /// - Attempting to set weights from a non registered account. + /// + /// * 'InvalidIpType': + /// - The ip type is not 4 or 6. + /// + /// * 'InvalidIpAddress': + /// - The numerically encoded ip address does not resolve to a proper ip. + /// + /// * 'ServingRateLimitExceeded': + /// - Attempting to set prometheus information withing the rate limit min. + /// + #[pallet::call_index(40)] + #[pallet::weight((Weight::from_parts(46_000_000, 0) + .saturating_add(T::DbWeight::get().reads(4)) + .saturating_add(T::DbWeight::get().writes(1)), DispatchClass::Normal, Pays::No))] + pub fn serve_axon_tls( + origin: OriginFor, + netuid: u16, + version: u32, + ip: u128, + port: u16, + ip_type: u8, + protocol: u8, + placeholder1: u8, + placeholder2: u8, + certificate: Vec, + ) -> DispatchResult { + Self::do_serve_axon( + origin, + netuid, + version, + ip, + port, + ip_type, + protocol, + placeholder1, + placeholder2, + Some(certificate), ) } diff --git a/pallets/subtensor/src/rpc_info/neuron_info.rs b/pallets/subtensor/src/rpc_info/neuron_info.rs index cadd4b6e3..065855b04 100644 --- a/pallets/subtensor/src/rpc_info/neuron_info.rs +++ b/pallets/subtensor/src/rpc_info/neuron_info.rs @@ -147,6 +147,23 @@ impl Pallet { Some(neuron) } + pub fn get_neuron_certificate(netuid: u16, uid: u16) -> Option { + if !Self::if_subnet_exist(netuid) { + return None; + } + + let hotkey = match Self::get_hotkey_for_net_and_uid(netuid, uid) { + Ok(h) => h, + Err(_) => return None, + }; + + if Self::has_neuron_certificate(netuid, &hotkey) { + NeuronCertificates::::get(netuid, hotkey) + } else { + None + } + } + pub fn get_neuron(netuid: u16, uid: u16) -> Option> { if !Self::if_subnet_exist(netuid) { return None; diff --git a/pallets/subtensor/src/subnets/serving.rs b/pallets/subtensor/src/subnets/serving.rs index 1a9240c36..469478dd2 100644 --- 
a/pallets/subtensor/src/subnets/serving.rs +++ b/pallets/subtensor/src/subnets/serving.rs @@ -31,6 +31,9 @@ impl Pallet { /// * 'placeholder2' (u8): /// - Placeholder for further extra params. /// + /// * 'certificate' (Option>): + /// - Certificate for mutual Tls connection between neurons + /// /// # Event: /// * AxonServed; /// - On successfully serving the axon info. @@ -61,6 +64,7 @@ impl Pallet { protocol: u8, placeholder1: u8, placeholder2: u8, + certificate: Option>, ) -> dispatch::DispatchResult { // We check the callers (hotkey) signature. let hotkey_id = ensure_signed(origin)?; @@ -86,6 +90,15 @@ impl Pallet { Error::::ServingRateLimitExceeded ); + // Check certificate + if let Some(certificate) = certificate { + NeuronCertificates::::insert( + netuid, + hotkey_id.clone(), + NeuronCertificate { certificate }, + ) + } + // We insert the axon meta. prev_axon.block = Self::get_current_block_as_u64(); prev_axon.version = version; @@ -239,6 +252,10 @@ impl Pallet { Axons::::contains_key(netuid, hotkey) } + pub fn has_neuron_certificate(netuid: u16, hotkey: &T::AccountId) -> bool { + return NeuronCertificates::::contains_key(netuid, hotkey); + } + pub fn has_prometheus_info(netuid: u16, hotkey: &T::AccountId) -> bool { Prometheus::::contains_key(netuid, hotkey) } diff --git a/pallets/subtensor/tests/serving.rs b/pallets/subtensor/tests/serving.rs index 49a963951..b2d3b4e55 100644 --- a/pallets/subtensor/tests/serving.rs +++ b/pallets/subtensor/tests/serving.rs @@ -99,6 +99,56 @@ fn test_serving_ok() { }); } +#[test] +fn test_serving_tls_ok() { + new_test_ext().execute_with(|| { + let hotkey_account_id = U256::from(1); + let uid: u16 = 0; + let netuid: u16 = 1; + let tempo: u16 = 13; + let version: u32 = 2; + let ip: u128 = 1676056785; + let port: u16 = 128; + let ip_type: u8 = 4; + let modality: u16 = 0; + let protocol: u8 = 0; + let placeholder1: u8 = 0; + let placeholder2: u8 = 0; + let certificate: Vec = "CERT".as_bytes().to_vec(); + add_network(netuid, 
tempo, modality); + register_ok_neuron(netuid, hotkey_account_id, U256::from(66), 0); + assert_ok!(SubtensorModule::serve_axon_tls( + <::RuntimeOrigin>::signed(hotkey_account_id), + netuid, + version, + ip, + port, + ip_type, + protocol, + placeholder1, + placeholder2, + certificate.clone() + )); + let stored_certificate = SubtensorModule::get_neuron_certificate(netuid, uid); + assert_eq!(stored_certificate.unwrap().certificate, certificate); + let new_certificate = "UPDATED_CERT".as_bytes().to_vec(); + assert_ok!(SubtensorModule::serve_axon_tls( + <::RuntimeOrigin>::signed(hotkey_account_id), + netuid, + version, + ip, + port, + ip_type, + protocol, + placeholder1, + placeholder2, + new_certificate.clone() + )); + let stored_certificate = SubtensorModule::get_neuron_certificate(netuid, uid); + assert_eq!(stored_certificate.unwrap().certificate, new_certificate) + }); +} + #[test] fn test_serving_set_metadata_update() { new_test_ext(1).execute_with(|| { diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index ca8f83911..186d95911 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -142,7 +142,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. - spec_version: 195, + spec_version: 196, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, @@ -1363,13 +1363,7 @@ impl_runtime_apis! { } fn get_delegate(delegate_account_vec: Vec) -> Vec { - let _result = SubtensorModule::get_delegate(delegate_account_vec); - if _result.is_some() { - let result = _result.expect("Could not get DelegateInfo"); - result.encode() - } else { - vec![] - } + SubtensorModule::get_delegate(delegate_account_vec).map(|r| r.encode()).unwrap_or(vec![]) } fn get_delegated(delegatee_account_vec: Vec) -> Vec { @@ -1385,13 +1379,7 @@ impl_runtime_apis! 
{ } fn get_neuron_lite(netuid: u16, uid: u16) -> Vec { - let _result = SubtensorModule::get_neuron_lite(netuid, uid); - if _result.is_some() { - let result = _result.expect("Could not get NeuronInfoLite"); - result.encode() - } else { - vec![] - } + SubtensorModule::get_neuron_lite(netuid, uid).map(|r| r.encode()).unwrap_or(vec![]) } fn get_neurons(netuid: u16) -> Vec { @@ -1400,25 +1388,17 @@ impl_runtime_apis! { } fn get_neuron(netuid: u16, uid: u16) -> Vec { - let _result = SubtensorModule::get_neuron(netuid, uid); - if _result.is_some() { - let result = _result.expect("Could not get NeuronInfo"); - result.encode() - } else { - vec![] - } + SubtensorModule::get_neuron(netuid, uid).map(|r| r.encode()).unwrap_or(vec![]) + } + + fn get_neuron_certificate(netuid: u16, uid: u16) -> Vec { + SubtensorModule::get_neuron_certificate(netuid, uid).map(|r| r.encode()).unwrap_or(vec![]) } } impl subtensor_custom_rpc_runtime_api::SubnetInfoRuntimeApi for Runtime { fn get_subnet_info(netuid: u16) -> Vec { - let _result = SubtensorModule::get_subnet_info(netuid); - if _result.is_some() { - let result = _result.expect("Could not get SubnetInfo"); - result.encode() - } else { - vec![] - } + SubtensorModule::get_subnet_info(netuid).map(|r| r.encode()).unwrap_or(vec![]) } fn get_subnets_info() -> Vec { @@ -1442,13 +1422,7 @@ impl_runtime_apis! 
{ } fn get_subnet_hyperparams(netuid: u16) -> Vec { - let _result = SubtensorModule::get_subnet_hyperparams(netuid); - if _result.is_some() { - let result = _result.expect("Could not get SubnetHyperparams"); - result.encode() - } else { - vec![] - } + SubtensorModule::get_subnet_hyperparams(netuid).map(|r| r.encode()).unwrap_or(vec![]) } } From dbbd79af8eeeeb23fd41267fb331c3f73a79838c Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Mon, 26 Aug 2024 22:38:22 +0800 Subject: [PATCH 013/213] add missing argument description --- pallets/subtensor/src/macros/dispatches.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pallets/subtensor/src/macros/dispatches.rs b/pallets/subtensor/src/macros/dispatches.rs index 378c0c721..c4b985a49 100644 --- a/pallets/subtensor/src/macros/dispatches.rs +++ b/pallets/subtensor/src/macros/dispatches.rs @@ -547,6 +547,9 @@ mod dispatches { /// * 'placeholder2' (u8): /// - Placeholder for further extra params. /// + /// * 'certificate' (Vec): + /// - TLS certificate for inter neuron communitation. + /// /// # Event: /// * AxonServed; /// - On successfully serving the axon info. 
From 51df2e410d0fbf1805fe7c71d95ec326fcb6faff Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Mon, 26 Aug 2024 23:28:28 +0800 Subject: [PATCH 014/213] remove rpc call --- pallets/subtensor/rpc/src/lib.rs | 21 --------------------- pallets/subtensor/runtime-api/src/lib.rs | 1 - pallets/subtensor/tests/serving.rs | 2 +- runtime/src/lib.rs | 4 ---- 4 files changed, 1 insertion(+), 27 deletions(-) diff --git a/pallets/subtensor/rpc/src/lib.rs b/pallets/subtensor/rpc/src/lib.rs index ebf7dcc35..d99388193 100644 --- a/pallets/subtensor/rpc/src/lib.rs +++ b/pallets/subtensor/rpc/src/lib.rs @@ -41,13 +41,6 @@ pub trait SubtensorCustomApi { fn get_neurons(&self, netuid: u16, at: Option) -> RpcResult>; #[method(name = "neuronInfo_getNeuron")] fn get_neuron(&self, netuid: u16, uid: u16, at: Option) -> RpcResult>; - #[method(name = "neuronInfo_getNeuronCertificate")] - fn get_neuron_certificate( - &self, - netuid: u16, - uid: u16, - at: Option, - ) -> RpcResult>; #[method(name = "subnetInfo_getSubnetInfo")] fn get_subnet_info(&self, netuid: u16, at: Option) -> RpcResult>; #[method(name = "subnetInfo_getSubnetsInfo")] @@ -193,20 +186,6 @@ where .map_err(|e| Error::RuntimeError(format!("Unable to get neuron info: {:?}", e)).into()) } - fn get_neuron_certificate( - &self, - netuid: u16, - uid: u16, - at: Option<::Hash>, - ) -> RpcResult> { - let api = self.client.runtime_api(); - let at = at.unwrap_or_else(|| self.client.info().best_hash); - - api.get_neuron_certificate(at, netuid, uid).map_err(|e| { - Error::RuntimeError(format!("Unable to get neuron certificate: {:?}", e)).into() - }) - } - fn get_subnet_info( &self, netuid: u16, diff --git a/pallets/subtensor/runtime-api/src/lib.rs b/pallets/subtensor/runtime-api/src/lib.rs index d2b6ca1fd..ca43384b8 100644 --- a/pallets/subtensor/runtime-api/src/lib.rs +++ b/pallets/subtensor/runtime-api/src/lib.rs @@ -14,7 +14,6 @@ sp_api::decl_runtime_apis! 
{ pub trait NeuronInfoRuntimeApi { fn get_neurons(netuid: u16) -> Vec; fn get_neuron(netuid: u16, uid: u16) -> Vec; - fn get_neuron_certificate(netuid: u16, uid: u16) -> Vec; fn get_neurons_lite(netuid: u16) -> Vec; fn get_neuron_lite(netuid: u16, uid: u16) -> Vec; } diff --git a/pallets/subtensor/tests/serving.rs b/pallets/subtensor/tests/serving.rs index b2d3b4e55..a6a26a690 100644 --- a/pallets/subtensor/tests/serving.rs +++ b/pallets/subtensor/tests/serving.rs @@ -101,7 +101,7 @@ fn test_serving_ok() { #[test] fn test_serving_tls_ok() { - new_test_ext().execute_with(|| { + new_test_ext(1).execute_with(|| { let hotkey_account_id = U256::from(1); let uid: u16 = 0; let netuid: u16 = 1; diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 186d95911..d41a82186 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -1390,10 +1390,6 @@ impl_runtime_apis! { fn get_neuron(netuid: u16, uid: u16) -> Vec { SubtensorModule::get_neuron(netuid, uid).map(|r| r.encode()).unwrap_or(vec![]) } - - fn get_neuron_certificate(netuid: u16, uid: u16) -> Vec { - SubtensorModule::get_neuron_certificate(netuid, uid).map(|r| r.encode()).unwrap_or(vec![]) - } } impl subtensor_custom_rpc_runtime_api::SubnetInfoRuntimeApi for Runtime { From 1b7402df35704e24d1aa50a59cfe3bdd39febaae Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Tue, 27 Aug 2024 19:10:50 +0800 Subject: [PATCH 015/213] clear neuron --- pallets/subtensor/src/lib.rs | 2 ++ pallets/subtensor/src/subnets/uids.rs | 3 +++ pallets/subtensor/src/swap/swap_hotkey.rs | 12 ++++++++++++ 3 files changed, 17 insertions(+) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 6b6a8d62d..2c3a618d1 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -73,6 +73,7 @@ pub mod pallet { use sp_runtime::traits::{Dispatchable, TrailingZeroInput}; use sp_std::vec; use sp_std::vec::Vec; + use subtensor_macros::freeze_struct; #[cfg(not(feature = "std"))] use alloc::boxed::Box; @@ -132,6 
+133,7 @@ pub mod pallet { /// Struct for NeuronCertificate. pub type NeuronCertificateOf = NeuronCertificate; /// Data structure for NeuronCertificate information. + #[freeze_struct("e6193a76002d491")] #[derive(Decode, Encode, Default, TypeInfo, PartialEq, Eq, Clone, Debug)] pub struct NeuronCertificate { /// The neuron certificate. diff --git a/pallets/subtensor/src/subnets/uids.rs b/pallets/subtensor/src/subnets/uids.rs index fff358f1c..2a5ceedb4 100644 --- a/pallets/subtensor/src/subnets/uids.rs +++ b/pallets/subtensor/src/subnets/uids.rs @@ -45,6 +45,9 @@ impl Pallet { Uids::::insert(netuid, new_hotkey.clone(), uid_to_replace); // Make uid - hotkey association. BlockAtRegistration::::insert(netuid, uid_to_replace, block_number); // Fill block at registration. IsNetworkMember::::insert(new_hotkey.clone(), netuid, true); // Fill network is member. + + // 4. Clear neuron certificates + NeuronCertificates::::remove(netuid, old_hotkey.clone()); } /// Appends the uid to the network. diff --git a/pallets/subtensor/src/swap/swap_hotkey.rs b/pallets/subtensor/src/swap/swap_hotkey.rs index 793e34bff..ca3d0b5a7 100644 --- a/pallets/subtensor/src/swap/swap_hotkey.rs +++ b/pallets/subtensor/src/swap/swap_hotkey.rs @@ -276,6 +276,18 @@ impl Pallet { weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 2)); } } + + // 9.7. Swap neuron TLS certificates. + // NeuronCertificates( netuid, hotkey ) -> Vec -- the neuron certificate for the hotkey. + if is_network_member { + if let Ok(old_neuron_certificates) = + NeuronCertificates::::try_get(netuid, old_hotkey) + { + NeuronCertificates::::remove(netuid, old_hotkey); + NeuronCertificates::::insert(netuid, new_hotkey, old_neuron_certificates); + weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 2)); + } + } } // 10. Swap Stake. 
From b5d7c0e97ec5ce40f39e481db92779f82e563ab8 Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Wed, 28 Aug 2024 16:28:17 +0800 Subject: [PATCH 016/213] clippy tests --- pallets/subtensor/src/lib.rs | 4 +-- pallets/subtensor/src/subnets/serving.rs | 6 ++--- pallets/subtensor/tests/serving.rs | 10 ++++--- pallets/subtensor/tests/swap_hotkey.rs | 34 ++++++++++++++++++++++++ pallets/subtensor/tests/uids.rs | 11 ++++++++ 5 files changed, 56 insertions(+), 9 deletions(-) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 2c3a618d1..4f553ed7a 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -73,7 +73,7 @@ pub mod pallet { use sp_runtime::traits::{Dispatchable, TrailingZeroInput}; use sp_std::vec; use sp_std::vec::Vec; - use subtensor_macros::freeze_struct; + use subtensor_macros::freeze_struct; #[cfg(not(feature = "std"))] use alloc::boxed::Box; @@ -1175,7 +1175,7 @@ pub mod pallet { StorageDoubleMap<_, Identity, u16, Blake2_128Concat, T::AccountId, AxonInfoOf, OptionQuery>; /// --- MAP ( netuid, hotkey ) --> certificate #[pallet::storage] - pub(super) type NeuronCertificates = StorageDoubleMap< + pub type NeuronCertificates = StorageDoubleMap< _, Identity, u16, diff --git a/pallets/subtensor/src/subnets/serving.rs b/pallets/subtensor/src/subnets/serving.rs index 469478dd2..e6c1a62d7 100644 --- a/pallets/subtensor/src/subnets/serving.rs +++ b/pallets/subtensor/src/subnets/serving.rs @@ -31,8 +31,8 @@ impl Pallet { /// * 'placeholder2' (u8): /// - Placeholder for further extra params. 
/// - /// * 'certificate' (Option>): - /// - Certificate for mutual Tls connection between neurons + /// * 'certificate' (Option>): + /// - Certificate for mutual Tls connection between neurons /// /// # Event: /// * AxonServed; @@ -253,7 +253,7 @@ impl Pallet { } pub fn has_neuron_certificate(netuid: u16, hotkey: &T::AccountId) -> bool { - return NeuronCertificates::::contains_key(netuid, hotkey); + NeuronCertificates::::contains_key(netuid, hotkey) } pub fn has_prometheus_info(netuid: u16, hotkey: &T::AccountId) -> bool { diff --git a/pallets/subtensor/tests/serving.rs b/pallets/subtensor/tests/serving.rs index a6a26a690..a0988fc02 100644 --- a/pallets/subtensor/tests/serving.rs +++ b/pallets/subtensor/tests/serving.rs @@ -129,8 +129,9 @@ fn test_serving_tls_ok() { placeholder2, certificate.clone() )); - let stored_certificate = SubtensorModule::get_neuron_certificate(netuid, uid); - assert_eq!(stored_certificate.unwrap().certificate, certificate); + let stored_certificate = + SubtensorModule::get_neuron_certificate(netuid, uid).expect("Certificate should exist"); + assert_eq!(stored_certificate.certificate, certificate); let new_certificate = "UPDATED_CERT".as_bytes().to_vec(); assert_ok!(SubtensorModule::serve_axon_tls( <::RuntimeOrigin>::signed(hotkey_account_id), @@ -144,8 +145,9 @@ fn test_serving_tls_ok() { placeholder2, new_certificate.clone() )); - let stored_certificate = SubtensorModule::get_neuron_certificate(netuid, uid); - assert_eq!(stored_certificate.unwrap().certificate, new_certificate) + let stored_certificate = + SubtensorModule::get_neuron_certificate(netuid, uid).expect("Certificate should exist"); + assert_eq!(stored_certificate.certificate, new_certificate) }); } diff --git a/pallets/subtensor/tests/swap_hotkey.rs b/pallets/subtensor/tests/swap_hotkey.rs index bff738b86..845338be9 100644 --- a/pallets/subtensor/tests/swap_hotkey.rs +++ b/pallets/subtensor/tests/swap_hotkey.rs @@ -311,6 +311,40 @@ fn test_swap_axons() { }); } +// 
SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --test swap_hotkey -- test_swap_certificates --exact --nocapture +#[test] +fn test_swap_certificates() { + new_test_ext(1).execute_with(|| { + let old_hotkey = U256::from(1); + let new_hotkey = U256::from(2); + let coldkey = U256::from(3); + let netuid = 0u16; + let certificate = NeuronCertificate { + certificate: vec![1, 2, 3], + }; + let mut weight = Weight::zero(); + + add_network(netuid, 0, 1); + IsNetworkMember::::insert(old_hotkey, netuid, true); + NeuronCertificates::::insert(netuid, old_hotkey, certificate.clone()); + + assert_ok!(SubtensorModule::perform_hotkey_swap( + &old_hotkey, + &new_hotkey, + &coldkey, + &mut weight + )); + + assert!(!NeuronCertificates::::contains_key( + netuid, old_hotkey + )); + assert_eq!( + NeuronCertificates::::get(netuid, new_hotkey), + Some(certificate) + ); + }); +} + // SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --test swap_hotkey -- test_swap_weight_commits --exact --nocapture #[test] fn test_swap_weight_commits() { diff --git a/pallets/subtensor/tests/uids.rs b/pallets/subtensor/tests/uids.rs index 82adc6b8a..743471b5d 100644 --- a/pallets/subtensor/tests/uids.rs +++ b/pallets/subtensor/tests/uids.rs @@ -3,6 +3,7 @@ use crate::mock::*; use frame_support::assert_ok; use frame_system::Config; +use pallet_subtensor::*; use sp_core::U256; mod mock; @@ -32,6 +33,9 @@ fn test_replace_neuron() { let new_hotkey_account_id = U256::from(2); let _new_colkey_account_id = U256::from(12345); + let certificate = NeuronCertificate { + certificate: vec![1, 2, 3], + }; //add network add_network(netuid, tempo, 0); @@ -51,6 +55,9 @@ fn test_replace_neuron() { let neuron_uid = SubtensorModule::get_uid_for_net_and_hotkey(netuid, &hotkey_account_id); assert_ok!(neuron_uid); + // Set a neuron certificate for it + NeuronCertificates::::insert(netuid, hotkey_account_id, certificate); + // Replace the neuron. 
SubtensorModule::replace_neuron( netuid, @@ -77,6 +84,10 @@ fn test_replace_neuron() { &new_hotkey_account_id )); assert_eq!(curr_hotkey.unwrap(), new_hotkey_account_id); + + // Check neuron certificate was reset + let certificate = SubtensorModule::get_neuron_certificate(netuid, neuron_uid.unwrap()); + assert_eq!(certificate, None); }); } From 05d0d8de412def161fb22ed051e8607582544182 Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Sat, 31 Aug 2024 15:48:43 +0800 Subject: [PATCH 017/213] rebase clippy --- runtime/src/lib.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index d41a82186..0d952c2bb 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -1363,7 +1363,7 @@ impl_runtime_apis! { } fn get_delegate(delegate_account_vec: Vec) -> Vec { - SubtensorModule::get_delegate(delegate_account_vec).map(|r| r.encode()).unwrap_or(vec![]) + SubtensorModule::get_delegate(delegate_account_vec).map(|r| r.encode()).unwrap_or_default() } fn get_delegated(delegatee_account_vec: Vec) -> Vec { @@ -1379,7 +1379,7 @@ impl_runtime_apis! { } fn get_neuron_lite(netuid: u16, uid: u16) -> Vec { - SubtensorModule::get_neuron_lite(netuid, uid).map(|r| r.encode()).unwrap_or(vec![]) + SubtensorModule::get_neuron_lite(netuid, uid).map(|r| r.encode()).unwrap_or_default() } fn get_neurons(netuid: u16) -> Vec { @@ -1388,13 +1388,13 @@ impl_runtime_apis! { } fn get_neuron(netuid: u16, uid: u16) -> Vec { - SubtensorModule::get_neuron(netuid, uid).map(|r| r.encode()).unwrap_or(vec![]) + SubtensorModule::get_neuron(netuid, uid).map(|r| r.encode()).unwrap_or_default() } } impl subtensor_custom_rpc_runtime_api::SubnetInfoRuntimeApi for Runtime { fn get_subnet_info(netuid: u16) -> Vec { - SubtensorModule::get_subnet_info(netuid).map(|r| r.encode()).unwrap_or(vec![]) + SubtensorModule::get_subnet_info(netuid).map(|r| r.encode()).unwrap_or_default() } fn get_subnets_info() -> Vec { @@ -1418,7 +1418,7 @@ impl_runtime_apis! 
{ } fn get_subnet_hyperparams(netuid: u16) -> Vec { - SubtensorModule::get_subnet_hyperparams(netuid).map(|r| r.encode()).unwrap_or(vec![]) + SubtensorModule::get_subnet_hyperparams(netuid).map(|r| r.encode()).unwrap_or_default() } } From ab0a240530a4ba7396119ddd80896bb3a3ae8c9d Mon Sep 17 00:00:00 2001 From: Samuel Dare Date: Mon, 2 Sep 2024 15:05:59 +0400 Subject: [PATCH 018/213] chore: bump spec version --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 77b9111f8..bc815f57b 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -142,7 +142,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. - spec_version: 194, + spec_version: 196, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, From 2aad6fe60e4c6f77841452bea46717dcb0f9f1e3 Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Mon, 2 Sep 2024 21:20:00 +0800 Subject: [PATCH 019/213] cleanup get_neuron_certificate --- pallets/subtensor/src/rpc_info/neuron_info.rs | 17 ----------------- pallets/subtensor/src/subnets/serving.rs | 4 ---- pallets/subtensor/tests/serving.rs | 10 +++++----- pallets/subtensor/tests/uids.rs | 2 +- 4 files changed, 6 insertions(+), 27 deletions(-) diff --git a/pallets/subtensor/src/rpc_info/neuron_info.rs b/pallets/subtensor/src/rpc_info/neuron_info.rs index 065855b04..cadd4b6e3 100644 --- a/pallets/subtensor/src/rpc_info/neuron_info.rs +++ b/pallets/subtensor/src/rpc_info/neuron_info.rs @@ -147,23 +147,6 @@ impl Pallet { Some(neuron) } - pub fn get_neuron_certificate(netuid: u16, uid: u16) -> Option { - if !Self::if_subnet_exist(netuid) { - return None; - } - - let hotkey = match Self::get_hotkey_for_net_and_uid(netuid, uid) { - Ok(h) => h, - Err(_) => return None, - }; - - 
if Self::has_neuron_certificate(netuid, &hotkey) { - NeuronCertificates::::get(netuid, hotkey) - } else { - None - } - } - pub fn get_neuron(netuid: u16, uid: u16) -> Option> { if !Self::if_subnet_exist(netuid) { return None; diff --git a/pallets/subtensor/src/subnets/serving.rs b/pallets/subtensor/src/subnets/serving.rs index e6c1a62d7..22550fb93 100644 --- a/pallets/subtensor/src/subnets/serving.rs +++ b/pallets/subtensor/src/subnets/serving.rs @@ -252,10 +252,6 @@ impl Pallet { Axons::::contains_key(netuid, hotkey) } - pub fn has_neuron_certificate(netuid: u16, hotkey: &T::AccountId) -> bool { - NeuronCertificates::::contains_key(netuid, hotkey) - } - pub fn has_prometheus_info(netuid: u16, hotkey: &T::AccountId) -> bool { Prometheus::::contains_key(netuid, hotkey) } diff --git a/pallets/subtensor/tests/serving.rs b/pallets/subtensor/tests/serving.rs index a0988fc02..17b8d2144 100644 --- a/pallets/subtensor/tests/serving.rs +++ b/pallets/subtensor/tests/serving.rs @@ -103,7 +103,6 @@ fn test_serving_ok() { fn test_serving_tls_ok() { new_test_ext(1).execute_with(|| { let hotkey_account_id = U256::from(1); - let uid: u16 = 0; let netuid: u16 = 1; let tempo: u16 = 13; let version: u32 = 2; @@ -129,8 +128,9 @@ fn test_serving_tls_ok() { placeholder2, certificate.clone() )); - let stored_certificate = - SubtensorModule::get_neuron_certificate(netuid, uid).expect("Certificate should exist"); + + let stored_certificate = NeuronCertificates::::get(netuid, hotkey_account_id) + .expect("Certificate should exist"); assert_eq!(stored_certificate.certificate, certificate); let new_certificate = "UPDATED_CERT".as_bytes().to_vec(); assert_ok!(SubtensorModule::serve_axon_tls( @@ -145,8 +145,8 @@ fn test_serving_tls_ok() { placeholder2, new_certificate.clone() )); - let stored_certificate = - SubtensorModule::get_neuron_certificate(netuid, uid).expect("Certificate should exist"); + let stored_certificate = NeuronCertificates::::get(netuid, hotkey_account_id) + 
.expect("Certificate should exist"); assert_eq!(stored_certificate.certificate, new_certificate) }); } diff --git a/pallets/subtensor/tests/uids.rs b/pallets/subtensor/tests/uids.rs index 743471b5d..827d4ec1a 100644 --- a/pallets/subtensor/tests/uids.rs +++ b/pallets/subtensor/tests/uids.rs @@ -86,7 +86,7 @@ fn test_replace_neuron() { assert_eq!(curr_hotkey.unwrap(), new_hotkey_account_id); // Check neuron certificate was reset - let certificate = SubtensorModule::get_neuron_certificate(netuid, neuron_uid.unwrap()); + let certificate = NeuronCertificates::::get(netuid, hotkey_account_id); assert_eq!(certificate, None); }); } From eb11185fa2dfae53ef514f8fbde13fd26306aaa8 Mon Sep 17 00:00:00 2001 From: Keith Date: Wed, 4 Sep 2024 09:09:47 -0500 Subject: [PATCH 020/213] Update polkadot dependencies to v1.15.2-rc1 --- Cargo.lock | 2785 ++++++++++++++++++++++++++++++---------------------- Cargo.toml | 142 +-- 2 files changed, 1684 insertions(+), 1243 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ee0933379..bfb7366be 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -77,7 +77,7 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.15", + "getrandom", "once_cell", "version_check", ] @@ -89,7 +89,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom 0.2.15", + "getrandom", "once_cell", "version_check", "zerocopy", @@ -198,20 +198,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "aquamarine" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1da02abba9f9063d786eab1509833ebb2fac0f966862ca59439c76b9c566760" -dependencies = [ - "include_dir", - "itertools 0.10.5", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - 
[[package]] name = "aquamarine" version = "0.5.0" @@ -237,18 +223,6 @@ dependencies = [ "ark-std", ] -[[package]] -name = "ark-bls12-377-ext" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20c7021f180a0cbea0380eba97c2af3c57074cdaffe0eef7e840e1c9f2841e55" -dependencies = [ - "ark-bls12-377", - "ark-ec", - "ark-models-ext", - "ark-std", -] - [[package]] name = "ark-bls12-381" version = "0.4.0" @@ -261,45 +235,6 @@ dependencies = [ "ark-std", ] -[[package]] -name = "ark-bls12-381-ext" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1dc4b3d08f19e8ec06e949712f95b8361e43f1391d94f65e4234df03480631c" -dependencies = [ - "ark-bls12-381", - "ark-ec", - "ark-ff", - "ark-models-ext", - "ark-serialize", - "ark-std", -] - -[[package]] -name = "ark-bw6-761" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e0605daf0cc5aa2034b78d008aaf159f56901d92a52ee4f6ecdfdac4f426700" -dependencies = [ - "ark-bls12-377", - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-bw6-761-ext" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccee5fba47266f460067588ee1bf070a9c760bf2050c1c509982c5719aadb4f2" -dependencies = [ - "ark-bw6-761", - "ark-ec", - "ark-ff", - "ark-models-ext", - "ark-std", -] - [[package]] name = "ark-ec" version = "0.4.2" @@ -314,60 +249,9 @@ dependencies = [ "hashbrown 0.13.2", "itertools 0.10.5", "num-traits", - "rayon", "zeroize", ] -[[package]] -name = "ark-ed-on-bls12-377" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10d901b9ac4b38f9c32beacedfadcdd64e46f8d7f8e88c1ae1060022cf6f6c6" -dependencies = [ - "ark-bls12-377", - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-ed-on-bls12-377-ext" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"524a4fb7540df2e1a8c2e67a83ba1d1e6c3947f4f9342cc2359fc2e789ad731d" -dependencies = [ - "ark-ec", - "ark-ed-on-bls12-377", - "ark-ff", - "ark-models-ext", - "ark-std", -] - -[[package]] -name = "ark-ed-on-bls12-381-bandersnatch" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9cde0f2aa063a2a5c28d39b47761aa102bda7c13c84fc118a61b87c7b2f785c" -dependencies = [ - "ark-bls12-381", - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-ed-on-bls12-381-bandersnatch-ext" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d15185f1acb49a07ff8cbe5f11a1adc5a93b19e211e325d826ae98e98e124346" -dependencies = [ - "ark-ec", - "ark-ed-on-bls12-381-bandersnatch", - "ark-ff", - "ark-models-ext", - "ark-std", -] - [[package]] name = "ark-ff" version = "0.4.2" @@ -411,19 +295,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "ark-models-ext" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9eab5d4b5ff2f228b763d38442adc9b084b0a465409b059fac5c2308835ec2" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-serialize", - "ark-std", - "derivative", -] - [[package]] name = "ark-poly" version = "0.4.2" @@ -437,35 +308,6 @@ dependencies = [ "hashbrown 0.13.2", ] -[[package]] -name = "ark-scale" -version = "0.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f69c00b3b529be29528a6f2fd5fa7b1790f8bed81b9cdca17e326538545a179" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-serialize", - "ark-std", - "parity-scale-codec", - "scale-info", -] - -[[package]] -name = "ark-secret-scalar" -version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-serialize", - "ark-std", - "ark-transcript", - "digest 0.10.7", - "getrandom_or_panic", - "zeroize", -] - [[package]] name = "ark-serialize" version = 
"0.4.2" @@ -497,28 +339,8 @@ checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", "rand", - "rayon", -] - -[[package]] -name = "ark-transcript" -version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" -dependencies = [ - "ark-ff", - "ark-serialize", - "ark-std", - "digest 0.10.7", - "rand_core 0.6.4", - "sha3", ] -[[package]] -name = "array-bytes" -version = "4.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52f63c5c1316a16a4b35eaac8b76a98248961a533f061684cb2a7cb0eafb6c6" - [[package]] name = "array-bytes" version = "6.2.3" @@ -543,8 +365,24 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f6fd5ddaf0351dff5b8da21b2fb4ff8e08ddd02857f0bf69c47639106c0fff0" dependencies = [ - "asn1-rs-derive", - "asn1-rs-impl", + "asn1-rs-derive 0.4.0", + "asn1-rs-impl 0.1.0", + "displaydoc", + "nom", + "num-traits", + "rusticata-macros", + "thiserror", + "time", +] + +[[package]] +name = "asn1-rs" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5493c3bedbacf7fd7382c6346bbd66687d12bbaad3a89a2d2c303ee6cf20b048" +dependencies = [ + "asn1-rs-derive 0.5.1", + "asn1-rs-impl 0.2.0", "displaydoc", "nom", "num-traits", @@ -562,7 +400,19 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "synstructure", + "synstructure 0.12.6", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.71", + "synstructure 0.13.1", ] [[package]] @@ -576,6 +426,17 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "asn1-rs-impl" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.71", +] + [[package]] name = "async-channel" version = "1.9.0" @@ -614,7 +475,7 @@ checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ "event-listener 5.3.1", "event-listener-strategy", - "pin-project-lite 0.2.14", + "pin-project-lite", ] [[package]] @@ -638,7 +499,24 @@ dependencies = [ "futures-sink", "futures-util", "memchr", - "pin-project-lite 0.2.14", + "pin-project-lite", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "attohttpc" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9a9bf8b79a749ee0b911b91b671cc2b6c670bdbc7e3dfd537576ddc94bb2a2" +dependencies = [ + "http 0.2.12", + "log", + "url", ] [[package]] @@ -662,29 +540,6 @@ dependencies = [ "rustc-demangle", ] -[[package]] -name = "bandersnatch_vrfs" -version = "0.0.4" -source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" -dependencies = [ - "ark-bls12-381", - "ark-ec", - "ark-ed-on-bls12-381-bandersnatch", - "ark-ff", - "ark-serialize", - "ark-std", - "dleq_vrf", - "fflonk", - "merlin", - "rand_chacha", - "rand_core 0.6.4", - "ring 0.1.0", - "sha2 0.10.8", - "sp-ark-bls12-381", - "sp-ark-ed-on-bls12-381-bandersnatch", - "zeroize", -] - [[package]] name = "base-x" version = "0.2.11" @@ -709,6 +564,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] 
name = "base64ct" version = "1.6.0" @@ -1091,9 +952,22 @@ checksum = "b9b68e3193982cd54187d71afdb2a271ad4cf8af157858e9cb911b91321de143" dependencies = [ "core2", "multibase", - "multihash", + "multihash 0.17.0", + "serde", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "cid" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd94671561e36e4e7de75f753f577edafb0e7c05d6e4547229fdf7938fbcd2c3" +dependencies = [ + "core2", + "multibase", + "multihash 0.18.1", "serde", - "unsigned-varint", + "unsigned-varint 0.7.2", ] [[package]] @@ -1184,6 +1058,16 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "comfy-table" version = "7.1.1" @@ -1195,22 +1079,6 @@ dependencies = [ "unicode-width", ] -[[package]] -name = "common" -version = "0.1.0" -source = "git+https://github.com/w3f/ring-proof#665f5f51af5734c7b6d90b985dd6861d4c5b4752" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-poly", - "ark-serialize", - "ark-std", - "fflonk", - "getrandom_or_panic", - "merlin", - "rand_chacha", -] - [[package]] name = "common-path" version = "1.0.0" @@ -1260,7 +1128,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.15", + "getrandom", "once_cell", "tiny-keccak", ] @@ -1424,6 +1292,21 @@ dependencies = [ "wasmtime-types", ] +[[package]] +name = "crc" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" 
+dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" version = "1.4.2" @@ -1471,7 +1354,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array 0.14.7", - "rand_core 0.6.4", + "rand_core", "subtle 2.6.0", "zeroize", ] @@ -1483,7 +1366,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array 0.14.7", - "rand_core 0.6.4", + "rand_core", "typenum 1.17.0", ] @@ -1516,19 +1399,6 @@ dependencies = [ "cipher 0.4.4", ] -[[package]] -name = "curve25519-dalek" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "subtle 2.6.0", - "zeroize", -] - [[package]] name = "curve25519-dalek" version = "4.1.3" @@ -1690,7 +1560,7 @@ version = "8.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbd676fbbab537128ef0278adb5576cf363cff6aa22a7b24effe97347cfab61e" dependencies = [ - "asn1-rs", + "asn1-rs 0.5.2", "displaydoc", "nom", "num-bigint", @@ -1699,31 +1569,34 @@ dependencies = [ ] [[package]] -name = "deranged" -version = "0.3.11" +name = "der-parser" +version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "5cd0a5c643689626bec213c4d8bd4d96acc8ffdb4ad4bb6bc16abf27d5f4b553" dependencies = [ - "powerfmt", - "serde", + "asn1-rs 0.6.2", + "displaydoc", + "nom", + "num-bigint", + "num-traits", + 
"rusticata-macros", ] [[package]] -name = "derivative" -version = "2.2.0" +name = "deranged" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", + "powerfmt", + "serde", ] [[package]] -name = "derive-syn-parse" -version = "0.1.5" +name = "derivative" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79116f119dd1dba1abf1f3405f03b9b0e79a27a3883864bfebded8a3dc768cd" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2", "quote", @@ -1843,22 +1716,6 @@ dependencies = [ "syn 2.0.71", ] -[[package]] -name = "dleq_vrf" -version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-scale", - "ark-secret-scalar", - "ark-serialize", - "ark-std", - "ark-transcript", - "arrayvec", - "zeroize", -] - [[package]] name = "docify" version = "0.2.8" @@ -1875,7 +1732,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a081e51fb188742f5a7a1164ad752121abcb22874b21e2c3b0dd040c515fdad" dependencies = [ "common-path", - "derive-syn-parse 0.2.0", + "derive-syn-parse", "once_cell", "proc-macro2", "quote", @@ -1956,9 +1813,9 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" dependencies = [ - "curve25519-dalek 4.1.3", + "curve25519-dalek", "ed25519", - "rand_core 0.6.4", + "rand_core", "serde", "sha2 0.10.8", "subtle 2.6.0", @@ -1967,15 +1824,16 @@ dependencies = [ [[package]] name = "ed25519-zebra" -version = "3.1.0" +version = "4.0.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c24f403d068ad0b359e577a77f92392118be3f3c927538f2bb544a5ecd828c6" +checksum = "7d9ce6874da5d4415896cd45ffbc4d1cfc0c4f9c079427bd870742c30f2f65a9" dependencies = [ - "curve25519-dalek 3.2.0", - "hashbrown 0.12.3", + "curve25519-dalek", + "ed25519", + "hashbrown 0.14.5", "hex", - "rand_core 0.6.4", - "sha2 0.9.9", + "rand_core", + "sha2 0.10.8", "zeroize", ] @@ -1998,7 +1856,7 @@ dependencies = [ "generic-array 0.14.7", "group", "pkcs8", - "rand_core 0.6.4", + "rand_core", "sec1", "serdect", "subtle 2.6.0", @@ -2023,6 +1881,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "enum-as-inner" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.71", +] + [[package]] name = "enumflags2" version = "0.7.10" @@ -2119,7 +1989,7 @@ checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" dependencies = [ "concurrent-queue", "parking", - "pin-project-lite 0.2.14", + "pin-project-lite", ] [[package]] @@ -2129,7 +1999,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ "event-listener 5.3.1", - "pin-project-lite 0.2.14", + "pin-project-lite", ] [[package]] @@ -2190,23 +2060,10 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ - "rand_core 0.6.4", + "rand_core", "subtle 2.6.0", ] -[[package]] -name = "fflonk" -version = "0.1.0" -source = "git+https://github.com/w3f/fflonk#1e854f35e9a65d08b11a86291405cdc95baa0a35" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-poly", - "ark-serialize", - "ark-std", - "merlin", -] - [[package]] name = "fiat-crypto" version = 
"0.2.9" @@ -2279,17 +2136,6 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" -[[package]] -name = "flate2" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" -dependencies = [ - "crc32fast", - "libz-sys", - "miniz_oxide", -] - [[package]] name = "float-cmp" version = "0.9.0" @@ -2305,10 +2151,25 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "fork-tree" -version = "12.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "13.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", ] @@ -2322,6 +2183,16 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "forwarded-header-value" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835f84f38484cc86f110a805655697908257fb9a7af005234060891557198e9" +dependencies = [ + "nonempty", + "thiserror", +] + [[package]] name = "fragile" version = "2.0.0" @@ -2330,8 +2201,8 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = 
"frame-benchmarking" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-support", "frame-support-procedural", @@ -2347,19 +2218,18 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface", + "sp-storage", "static_assertions", ] [[package]] name = "frame-benchmarking-cli" -version = "32.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "42.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "Inflector", - "array-bytes 6.2.3", + "array-bytes", "chrono", "clap", "comfy-table", @@ -2368,7 +2238,7 @@ dependencies = [ "frame-system", "gethostname", "handlebars", - "itertools 0.10.5", + "itertools 0.11.0", "lazy_static", "linked-hash-map", "log", @@ -2376,6 +2246,7 @@ dependencies = [ "rand", "rand_pcg", "sc-block-builder", + "sc-chain-spec", "sc-cli", "sc-client-api", "sc-client-db", @@ -2388,25 +2259,26 @@ dependencies = [ "sp-blockchain", "sp-core", "sp-database", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", + "sp-genesis-builder", "sp-inherents", "sp-io", "sp-keystore", "sp-runtime", "sp-state-machine", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage", "sp-trie", - "sp-wasm-interface 20.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface", "thiserror", "thousands", ] [[package]] name = "frame-executive" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "aquamarine 0.3.3", + "aquamarine", "frame-support", "frame-system", "frame-try-runtime", @@ -2416,8 +2288,7 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing", ] [[package]] @@ -2434,10 +2305,10 @@ dependencies = [ [[package]] name = "frame-metadata-hash-extension" -version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.5.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "docify", "frame-support", "frame-system", @@ -2449,11 +2320,11 @@ dependencies = [ [[package]] name = "frame-support" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "aquamarine 0.5.0", - "array-bytes 6.2.3", + "aquamarine", + "array-bytes", "bitflags 1.3.2", "docify", "environmental", @@ -2473,7 +2344,7 @@ dependencies = [ "sp-arithmetic", "sp-core", "sp-crypto-hashing-proc-macro", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + 
"sp-debug-derive", "sp-genesis-builder", "sp-inherents", "sp-io", @@ -2481,8 +2352,8 @@ dependencies = [ "sp-runtime", "sp-staking", "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", + "sp-tracing", "sp-weights", "static_assertions", "tt-call", @@ -2490,17 +2361,17 @@ dependencies = [ [[package]] name = "frame-support-procedural" -version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "30.0.2" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "Inflector", "cfg-expr", - "derive-syn-parse 0.2.0", + "derive-syn-parse", "expander", "frame-support-procedural-tools", - "itertools 0.10.5", + "itertools 0.11.0", "macro_magic", - "proc-macro-warning", + "proc-macro-warning 1.0.2", "proc-macro2", "quote", "sp-crypto-hashing", @@ -2509,8 +2380,8 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools" -version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "13.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.1.0", @@ -2521,8 +2392,8 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools-derive" -version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "12.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "proc-macro2", "quote", @@ -2531,8 +2402,8 @@ dependencies = [ [[package]] name 
= "frame-system" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "cfg-if", "docify", @@ -2544,15 +2415,15 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", "sp-version", "sp-weights", ] [[package]] name = "frame-system-benchmarking" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-benchmarking", "frame-support", @@ -2561,28 +2432,27 @@ dependencies = [ "scale-info", "sp-core", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "frame-system-rpc-runtime-api" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ + "docify", "parity-scale-codec", "sp-api", ] [[package]] name = "frame-try-runtime" -version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-support", "parity-scale-codec", "sp-api", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] @@ -2625,6 
+2495,16 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-bounded" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b07bbbe7d7e78809544c6f718d875627addc73a7c3582447abc052cd3dc67e0" +dependencies = [ + "futures-timer", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.30" @@ -2666,7 +2546,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" dependencies = [ "futures-core", - "pin-project-lite 0.2.14", + "pin-project-lite", ] [[package]] @@ -2682,13 +2562,12 @@ dependencies = [ [[package]] name = "futures-rustls" -version = "0.22.2" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2411eed028cdf8c8034eaf21f9915f956b6c3abec4d4c7949ee67f0721127bd" +checksum = "35bd3cf68c183738046838e300353e4716c674dc5e56890de4826801a6622a28" dependencies = [ "futures-io", - "rustls 0.20.9", - "webpki", + "rustls 0.21.12", ] [[package]] @@ -2722,7 +2601,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.14", + "pin-project-lite", "pin-utils", "slab", ] @@ -2766,17 +2645,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - [[package]] name = "getrandom" version = "0.2.15" @@ -2785,7 +2653,7 @@ checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", ] [[package]] @@ -2795,7 +2663,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea1015b5a70616b688dc230cfe50c8af89d972cb132d5a622814d29773b10b9" dependencies = [ "rand", - 
"rand_core 0.6.4", + "rand_core", ] [[package]] @@ -2868,7 +2736,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", - "rand_core 0.6.4", + "rand_core", "subtle 2.6.0", ] @@ -2883,7 +2751,26 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http", + "http 0.2.12", + "indexmap 2.2.6", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", "indexmap 2.2.6", "slab", "tokio", @@ -3069,6 +2956,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http-body" version = "0.4.6" @@ -3076,15 +2974,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", - "pin-project-lite 0.2.14", + "http 0.2.12", + "pin-project-lite", ] [[package]] -name = "http-range-header" -version = "0.3.1" +name = "http-body" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "pin-project-lite", +] [[package]] name = "httparse" @@ -3114,13 +3029,13 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", - "http", - "http-body", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", - "pin-project-lite 0.2.14", + "pin-project-lite", "socket2 0.5.7", "tokio", "tower-service", @@ -3128,6 +3043,26 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", +] + [[package]] name = "hyper-rustls" version = "0.24.2" @@ -3135,8 +3070,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", - "http", - "hyper", + "http 0.2.12", + "hyper 0.14.29", "log", "rustls 0.21.12", "rustls-native-certs", @@ -3144,6 +3079,23 @@ dependencies = [ "tokio-rustls", ] +[[package]] +name = "hyper-util" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "hyper 1.4.1", + "pin-project-lite", + "tokio", + "tower", + "tower-service", +] + [[package]] name = "iana-time-zone" version = "0.1.60" @@ -3184,6 +3136,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.5.0" @@ -3223,6 +3185,25 @@ dependencies = [ "windows", ] +[[package]] +name = "igd-next" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "064d90fec10d541084e7b39ead8875a5a80d9114a2b18791565253bae25f49e4" +dependencies = [ + "async-trait", + "attohttpc", + "bytes", + "futures", + "http 0.2.12", + "hyper 0.14.29", + "log", + "rand", + "tokio", + "url", + "xmltree", +] + [[package]] name = "impl-codec" version = "0.6.0" @@ -3389,6 +3370,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.12.1" @@ -3428,10 +3418,24 @@ version = "0.22.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfdb12a2381ea5b2e68c3469ec604a007b367778cdb14d09612c8069ebd616ad" dependencies = [ - "jsonrpsee-core", - "jsonrpsee-proc-macros", - "jsonrpsee-server", - "jsonrpsee-types", + "jsonrpsee-core 0.22.5", + "jsonrpsee-proc-macros 0.22.5", + "jsonrpsee-server 0.22.5", + "jsonrpsee-types 0.22.5", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b089779ad7f80768693755a031cc14a7766aba707cbe886674e3f79e9b7e47" +dependencies = [ + "jsonrpsee-core 0.23.2", + "jsonrpsee-proc-macros 0.23.2", + "jsonrpsee-server 0.23.2", + "jsonrpsee-types 0.23.2", "tokio", "tracing", ] @@ -3446,8 +3450,33 @@ dependencies = [ "async-trait", "beef", "futures-util", - "hyper", - "jsonrpsee-types", + "hyper 0.14.29", + "jsonrpsee-types 0.22.5", + "parking_lot 0.12.3", + "rand", + "rustc-hash", + "serde", + 
"serde_json", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79712302e737d23ca0daa178e752c9334846b08321d439fd89af9a384f8c830b" +dependencies = [ + "anyhow", + "async-trait", + "beef", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "jsonrpsee-types 0.23.2", "parking_lot 0.12.3", "rand", "rustc-hash", @@ -3471,6 +3500,19 @@ dependencies = [ "syn 2.0.71", ] +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7895f186d5921065d96e16bd795e5ca89ac8356ec423fafc6e3d7cf8ec11aee4" +dependencies = [ + "heck 0.5.0", + "proc-macro-crate 3.1.0", + "proc-macro2", + "quote", + "syn 2.0.71", +] + [[package]] name = "jsonrpsee-server" version = "0.22.5" @@ -3478,15 +3520,43 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12d8b6a9674422a8572e0b0abb12feeb3f2aeda86528c80d0350c2bd0923ab41" dependencies = [ "futures-util", - "http", - "hyper", - "jsonrpsee-core", - "jsonrpsee-types", + "http 0.2.12", + "hyper 0.14.29", + "jsonrpsee-core 0.22.5", + "jsonrpsee-types 0.22.5", "pin-project", "route-recognizer", "serde", "serde_json", - "soketto", + "soketto 0.7.1", + "thiserror", + "tokio", + "tokio-stream", + "tokio-util", + "tower", + "tracing", +] + +[[package]] +name = "jsonrpsee-server" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "654afab2e92e5d88ebd8a39d6074483f3f2bfdf91c5ac57fe285e7127cdd4f51" +dependencies = [ + "anyhow", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", + "hyper-util", + "jsonrpsee-core 0.23.2", + "jsonrpsee-types 0.23.2", + "pin-project", + "route-recognizer", + "serde", + "serde_json", + "soketto 0.8.0", "thiserror", "tokio", "tokio-stream", @@ -3508,6 +3578,19 @@ dependencies 
= [ "thiserror", ] +[[package]] +name = "jsonrpsee-types" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c465fbe385238e861fdc4d1c85e04ada6c1fd246161d26385c1b311724d2af" +dependencies = [ + "beef", + "http 1.1.0", + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "k256" version = "0.13.3" @@ -3606,14 +3689,15 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libp2p" -version = "0.51.4" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f35eae38201a993ece6bdc823292d6abd1bffed1c4d0f4a3517d2bd8e1d917fe" +checksum = "e94495eb319a85b70a68b85e2389a95bb3555c71c49025b78c691a854a7e6464" dependencies = [ "bytes", + "either", "futures", "futures-timer", - "getrandom 0.2.15", + "getrandom", "instant", "libp2p-allow-block-list", "libp2p-connection-limits", @@ -3630,18 +3714,21 @@ dependencies = [ "libp2p-request-response", "libp2p-swarm", "libp2p-tcp", + "libp2p-upnp", "libp2p-wasm-ext", "libp2p-websocket", "libp2p-yamux", - "multiaddr", + "multiaddr 0.18.1", "pin-project", + "rw-stream-sink", + "thiserror", ] [[package]] name = "libp2p-allow-block-list" -version = "0.1.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "510daa05efbc25184458db837f6f9a5143888f1caa742426d92e1833ddd38a50" +checksum = "55b46558c5c0bf99d3e2a1a38fd54ff5476ca66dd1737b12466a1824dd219311" dependencies = [ "libp2p-core", "libp2p-identity", @@ -3651,9 +3738,9 @@ dependencies = [ [[package]] name = "libp2p-connection-limits" -version = "0.1.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4caa33f1d26ed664c4fe2cca81a08c8e07d4c1c04f2f4ac7655c2dd85467fda0" +checksum = "2f5107ad45cb20b2f6c3628c7b6014b996fcb13a88053f4569c872c6e30abf58" dependencies = [ "libp2p-core", "libp2p-identity", @@ -3663,9 +3750,9 @@ dependencies = [ [[package]] name = 
"libp2p-core" -version = "0.39.2" +version = "0.40.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c1df63c0b582aa434fb09b2d86897fa2b419ffeccf934b36f87fcedc8e835c2" +checksum = "dd44289ab25e4c9230d9246c475a22241e301b23e8f4061d3bdef304a1a99713" dependencies = [ "either", "fnv", @@ -3674,8 +3761,8 @@ dependencies = [ "instant", "libp2p-identity", "log", - "multiaddr", - "multihash", + "multiaddr 0.18.1", + "multihash 0.19.1", "multistream-select", "once_cell", "parking_lot 0.12.3", @@ -3685,18 +3772,20 @@ dependencies = [ "rw-stream-sink", "smallvec", "thiserror", - "unsigned-varint", + "unsigned-varint 0.7.2", "void", ] [[package]] name = "libp2p-dns" -version = "0.39.0" +version = "0.40.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146ff7034daae62077c415c2376b8057368042df6ab95f5432ad5e88568b1554" +checksum = "e6a18db73084b4da2871438f6239fef35190b05023de7656e877c18a00541a3b" dependencies = [ + "async-trait", "futures", "libp2p-core", + "libp2p-identity", "log", "parking_lot 0.12.3", "smallvec", @@ -3705,19 +3794,20 @@ dependencies = [ [[package]] name = "libp2p-identify" -version = "0.42.2" +version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5455f472243e63b9c497ff320ded0314254a9eb751799a39c283c6f20b793f3c" +checksum = "45a96638a0a176bec0a4bcaebc1afa8cf909b114477209d7456ade52c61cd9cd" dependencies = [ "asynchronous-codec", "either", "futures", + "futures-bounded", "futures-timer", "libp2p-core", "libp2p-identity", "libp2p-swarm", "log", - "lru 0.10.1", + "lru 0.12.4", "quick-protobuf", "quick-protobuf-codec", "smallvec", @@ -3727,27 +3817,27 @@ dependencies = [ [[package]] name = "libp2p-identity" -version = "0.1.3" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "276bb57e7af15d8f100d3c11cbdd32c6752b7eef4ba7a18ecf464972c07abcce" +checksum = "55cca1eb2bc1fd29f099f3daaab7effd01e1a54b7c577d0ed082521034d912e8" 
dependencies = [ - "bs58 0.4.0", + "bs58 0.5.1", "ed25519-dalek", - "log", - "multiaddr", - "multihash", + "hkdf", + "multihash 0.19.1", "quick-protobuf", "rand", "sha2 0.10.8", "thiserror", + "tracing", "zeroize", ] [[package]] name = "libp2p-kad" -version = "0.43.3" +version = "0.44.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39d5ef876a2b2323d63c258e63c2f8e36f205fe5a11f0b3095d59635650790ff" +checksum = "16ea178dabba6dde6ffc260a8e0452ccdc8f79becf544946692fff9d412fc29d" dependencies = [ "arrayvec", "asynchronous-codec", @@ -3762,20 +3852,21 @@ dependencies = [ "libp2p-swarm", "log", "quick-protobuf", + "quick-protobuf-codec", "rand", "sha2 0.10.8", "smallvec", "thiserror", "uint", - "unsigned-varint", + "unsigned-varint 0.7.2", "void", ] [[package]] name = "libp2p-mdns" -version = "0.43.1" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19983e1f949f979a928f2c603de1cf180cc0dc23e4ac93a62651ccb18341460b" +checksum = "42a2567c305232f5ef54185e9604579a894fd0674819402bb0ac0246da82f52a" dependencies = [ "data-encoding", "futures", @@ -3786,38 +3877,43 @@ dependencies = [ "log", "rand", "smallvec", - "socket2 0.4.10", + "socket2 0.5.7", "tokio", - "trust-dns-proto", + "trust-dns-proto 0.22.0", "void", ] [[package]] name = "libp2p-metrics" -version = "0.12.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a42ec91e227d7d0dafa4ce88b333cdf5f277253873ab087555c92798db2ddd46" +checksum = "239ba7d28f8d0b5d77760dc6619c05c7e88e74ec8fbbe97f856f20a56745e620" dependencies = [ + "instant", "libp2p-core", "libp2p-identify", + "libp2p-identity", "libp2p-kad", "libp2p-ping", "libp2p-swarm", + "once_cell", "prometheus-client", ] [[package]] name = "libp2p-noise" -version = "0.42.2" +version = "0.43.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c3673da89d29936bc6435bafc638e2f184180d554ce844db65915113f86ec5e" +checksum = 
"d2eeec39ad3ad0677551907dd304b2f13f17208ccebe333bef194076cd2e8921" dependencies = [ "bytes", - "curve25519-dalek 3.2.0", + "curve25519-dalek", "futures", "libp2p-core", "libp2p-identity", "log", + "multiaddr 0.18.1", + "multihash 0.19.1", "once_cell", "quick-protobuf", "rand", @@ -3825,21 +3921,22 @@ dependencies = [ "snow", "static_assertions", "thiserror", - "x25519-dalek 1.1.1", + "x25519-dalek", "zeroize", ] [[package]] name = "libp2p-ping" -version = "0.42.0" +version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e57759c19c28a73ef1eb3585ca410cefb72c1a709fcf6de1612a378e4219202" +checksum = "e702d75cd0827dfa15f8fd92d15b9932abe38d10d21f47c50438c71dd1b5dae3" dependencies = [ "either", "futures", "futures-timer", "instant", "libp2p-core", + "libp2p-identity", "libp2p-swarm", "log", "rand", @@ -3848,9 +3945,9 @@ dependencies = [ [[package]] name = "libp2p-quic" -version = "0.7.0-alpha.3" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6b26abd81cd2398382a1edfe739b539775be8a90fa6914f39b2ab49571ec735" +checksum = "130d451d83f21b81eb7b35b360bc7972aeafb15177784adc56528db082e6b927" dependencies = [ "bytes", "futures", @@ -3861,18 +3958,20 @@ dependencies = [ "libp2p-tls", "log", "parking_lot 0.12.3", - "quinn-proto", + "quinn 0.10.2", "rand", - "rustls 0.20.9", + "ring 0.16.20", + "rustls 0.21.12", + "socket2 0.5.7", "thiserror", "tokio", ] [[package]] name = "libp2p-request-response" -version = "0.24.1" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffdb374267d42dc5ed5bc53f6e601d4a64ac5964779c6e40bb9e4f14c1e30d5" +checksum = "d8e3b4d67870478db72bac87bfc260ee6641d0734e0e3e275798f089c3fecfd4" dependencies = [ "async-trait", "futures", @@ -3880,15 +3979,17 @@ dependencies = [ "libp2p-core", "libp2p-identity", "libp2p-swarm", + "log", "rand", "smallvec", + "void", ] [[package]] name = "libp2p-swarm" -version = "0.42.2" +version = 
"0.43.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "903b3d592d7694e56204d211f29d31bc004be99386644ba8731fc3e3ef27b296" +checksum = "580189e0074af847df90e75ef54f3f30059aedda37ea5a1659e8b9fca05c0141" dependencies = [ "either", "fnv", @@ -3899,6 +4000,8 @@ dependencies = [ "libp2p-identity", "libp2p-swarm-derive", "log", + "multistream-select", + "once_cell", "rand", "smallvec", "tokio", @@ -3907,36 +4010,39 @@ dependencies = [ [[package]] name = "libp2p-swarm-derive" -version = "0.32.0" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fba456131824ab6acd4c7bf61e9c0f0a3014b5fc9868ccb8e10d344594cdc4f" +checksum = "c4d5ec2a3df00c7836d7696c136274c9c59705bac69133253696a6c932cd1d74" dependencies = [ "heck 0.4.1", + "proc-macro-warning 0.4.2", + "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.71", ] [[package]] name = "libp2p-tcp" -version = "0.39.0" +version = "0.40.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d33698596d7722d85d3ab0c86c2c322254fce1241e91208e3679b4eb3026cf" +checksum = "b558dd40d1bcd1aaaed9de898e9ec6a436019ecc2420dd0016e712fbb61c5508" dependencies = [ "futures", "futures-timer", "if-watch", "libc", "libp2p-core", + "libp2p-identity", "log", - "socket2 0.4.10", + "socket2 0.5.7", "tokio", ] [[package]] name = "libp2p-tls" -version = "0.1.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff08d13d0dc66e5e9ba6279c1de417b84fa0d0adc3b03e5732928c180ec02781" +checksum = "8218d1d5482b122ccae396bbf38abdcb283ecc96fa54760e1dfd251f0546ac61" dependencies = [ "futures", "futures-rustls", @@ -3944,51 +4050,69 @@ dependencies = [ "libp2p-identity", "rcgen", "ring 0.16.20", - "rustls 0.20.9", + "rustls 0.21.12", + "rustls-webpki", "thiserror", - "webpki", - "x509-parser", + "x509-parser 0.15.1", "yasna", ] +[[package]] +name = "libp2p-upnp" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "82775a47b34f10f787ad3e2a22e2c1541e6ebef4fe9f28f3ac553921554c94c1" +dependencies = [ + "futures", + "futures-timer", + "igd-next", + "libp2p-core", + "libp2p-swarm", + "log", + "tokio", + "void", +] + [[package]] name = "libp2p-wasm-ext" -version = "0.39.0" +version = "0.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77dff9d32353a5887adb86c8afc1de1a94d9e8c3bc6df8b2201d7cdf5c848f43" +checksum = "1e5d8e3a9e07da0ef5b55a9f26c009c8fb3c725d492d8bb4b431715786eea79c" dependencies = [ "futures", "js-sys", "libp2p-core", - "parity-send-wrapper", + "send_wrapper", "wasm-bindgen", "wasm-bindgen-futures", ] [[package]] name = "libp2p-websocket" -version = "0.41.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "111273f7b3d3510524c752e8b7a5314b7f7a1fee7e68161c01a7d72cbb06db9f" +checksum = "004ee9c4a4631435169aee6aad2f62e3984dc031c43b6d29731e8e82a016c538" dependencies = [ "either", "futures", "futures-rustls", "libp2p-core", + "libp2p-identity", "log", "parking_lot 0.12.3", - "quicksink", + "pin-project-lite", "rw-stream-sink", - "soketto", + "soketto 0.8.0", + "thiserror", "url", "webpki-roots", ] [[package]] name = "libp2p-yamux" -version = "0.43.1" +version = "0.44.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd21d950662700a385d4c6d68e2f5f54d778e97068cdd718522222ef513bda" +checksum = "8eedcb62824c4300efb9cfd4e2a6edaf3ca097b9e68b36dabe45a44469fd6a85" dependencies = [ "futures", "libp2p-core", @@ -4138,6 +4262,61 @@ dependencies = [ "keystream", ] +[[package]] +name = "litep2p" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f46c51c205264b834ceed95c8b195026e700494bc3991aaba3b4ea9e20626d9" +dependencies = [ + "async-trait", + "bs58 0.4.0", + "bytes", + "cid 0.10.1", + "ed25519-dalek", + "futures", + "futures-timer", + "hex-literal", + 
"indexmap 2.2.6", + "libc", + "mockall 0.12.1", + "multiaddr 0.17.1", + "multihash 0.17.0", + "network-interface", + "nohash-hasher", + "parking_lot 0.12.3", + "pin-project", + "prost 0.12.6", + "prost-build 0.11.9", + "quinn 0.9.4", + "rand", + "rcgen", + "ring 0.16.20", + "rustls 0.20.9", + "serde", + "sha2 0.10.8", + "simple-dns", + "smallvec", + "snow", + "socket2 0.5.7", + "static_assertions", + "str0m", + "thiserror", + "tokio", + "tokio-stream", + "tokio-tungstenite", + "tokio-util", + "tracing", + "trust-dns-resolver", + "uint", + "unsigned-varint 0.8.0", + "url", + "webpki", + "x25519-dalek", + "x509-parser 0.16.0", + "yasna", + "zeroize", +] + [[package]] name = "lock_api" version = "0.4.12" @@ -4165,11 +4344,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.10.1" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718e8fae447df0c7e1ba7f5189829e63fd536945c8988d61444c19039f16b670" +checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904" dependencies = [ - "hashbrown 0.13.2", + "hashbrown 0.14.5", ] [[package]] @@ -4212,9 +4391,9 @@ dependencies = [ [[package]] name = "macro_magic" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e03844fc635e92f3a0067e25fa4bf3e3dbf3f2927bf3aa01bb7bc8f1c428949d" +checksum = "cc33f9f0351468d26fbc53d9ce00a096c8522ecb42f19b50f34f2c422f76d21d" dependencies = [ "macro_magic_core", "macro_magic_macros", @@ -4224,12 +4403,12 @@ dependencies = [ [[package]] name = "macro_magic_core" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "468155613a44cfd825f1fb0ffa532b018253920d404e6fca1e8d43155198a46d" +checksum = "1687dc887e42f352865a393acae7cf79d98fab6351cde1f58e9e057da89bf150" dependencies = [ "const-random", - "derive-syn-parse 0.1.5", + "derive-syn-parse", "macro_magic_core_macros", "proc-macro2", "quote", @@ -4238,9 +4417,9 
@@ dependencies = [ [[package]] name = "macro_magic_core_macros" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ea73aa640dc01d62a590d48c0c3521ed739d53b27f919b25c3551e233481654" +checksum = "b02abfe41815b5bd98dbd4260173db2c116dda171dc0fe7838cb206333b83308" dependencies = [ "proc-macro2", "quote", @@ -4249,9 +4428,9 @@ dependencies = [ [[package]] name = "macro_magic_macros" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef9d79ae96aaba821963320eb2b6e34d17df1e5a83d8a1985c29cc5be59577b3" +checksum = "73ea28ee64b88876bf45277ed9a5817c1817df061a74f2b988971a12570e5869" dependencies = [ "macro_magic_core", "quote", @@ -4264,15 +4443,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" -[[package]] -name = "matchers" -version = "0.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f099785f7595cc4b4553a174ce30dd7589ef93391ff414dbb67f62392b9e0ce1" -dependencies = [ - "regex-automata 0.1.10", -] - [[package]] name = "matchers" version = "0.1.0" @@ -4355,7 +4525,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f313fcff1d2a4bcaa2deeaa00bf7530d77d5f7bd0467a117dde2e29a75a7a17a" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "blake3", "frame-metadata", "parity-scale-codec", @@ -4371,7 +4541,7 @@ checksum = "58c38e2799fc0978b65dfff8023ec7843e2330bb462f19198840b34b6582397d" dependencies = [ "byteorder", "keccak", - "rand_core 0.6.4", + "rand_core", "zeroize", ] @@ -4397,7 +4567,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.48.0", ] @@ -4412,7 +4582,7 @@ 
dependencies = [ "bitflags 1.3.2", "blake2 0.10.6", "c2-chacha", - "curve25519-dalek 4.1.3", + "curve25519-dalek", "either", "hashlink", "lioness", @@ -4436,8 +4606,23 @@ dependencies = [ "downcast", "fragile", "lazy_static", - "mockall_derive", - "predicates", + "mockall_derive 0.11.4", + "predicates 2.1.5", + "predicates-tree", +] + +[[package]] +name = "mockall" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43766c2b5203b10de348ffe19f7e54564b64f3d6018ff7648d1e2d6d3a0f0a48" +dependencies = [ + "cfg-if", + "downcast", + "fragile", + "lazy_static", + "mockall_derive 0.12.1", + "predicates 3.1.2", "predicates-tree", ] @@ -4453,6 +4638,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "mockall_derive" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cbce79ec385a1d4f54baa90a76401eb15d9cab93685f62e7e9f942aa00ae2" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.71", +] + [[package]] name = "multiaddr" version = "0.17.1" @@ -4464,11 +4661,30 @@ dependencies = [ "data-encoding", "log", "multibase", - "multihash", + "multihash 0.17.0", + "percent-encoding", + "serde", + "static_assertions", + "unsigned-varint 0.7.2", + "url", +] + +[[package]] +name = "multiaddr" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b852bc02a2da5feed68cd14fa50d0774b92790a5bdbfa932a813926c8472070" +dependencies = [ + "arrayref", + "byteorder", + "data-encoding", + "libp2p-identity", + "multibase", + "multihash 0.19.1", "percent-encoding", "serde", "static_assertions", - "unsigned-varint", + "unsigned-varint 0.7.2", "url", ] @@ -4497,7 +4713,34 @@ dependencies = [ "multihash-derive", "sha2 0.10.8", "sha3", - "unsigned-varint", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "multihash" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cfd8a792c1694c6da4f68db0a9d707c72bd260994da179e6030a5dcee00bb815" +dependencies = [ + "blake2b_simd", + "blake2s_simd", + "blake3", + "core2", + "digest 0.10.7", + "multihash-derive", + "sha2 0.10.8", + "sha3", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "multihash" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "076d548d76a0e2a0d4ab471d0b1c36c577786dfc4471242035d97a12a735c492" +dependencies = [ + "core2", + "unsigned-varint 0.7.2", ] [[package]] @@ -4511,7 +4754,7 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "synstructure", + "synstructure 0.12.6", ] [[package]] @@ -4522,16 +4765,16 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "multistream-select" -version = "0.12.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8552ab875c1313b97b8d20cb857b9fd63e2d1d6a0a1b53ce9821e575405f27a" +checksum = "ea0df8e5eec2298a62b326ee4f0d7fe1a6b90a09dfcf9df37b38f947a8c42f19" dependencies = [ "bytes", "futures", "log", "pin-project", "smallvec", - "unsigned-varint", + "unsigned-varint 0.7.2", ] [[package]] @@ -4649,6 +4892,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "network-interface" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a43439bf756eed340bdf8feba761e2d50c7d47175d87545cd5cbe4a137c4d1" +dependencies = [ + "cc", + "libc", + "thiserror", + "winapi", +] + [[package]] name = "nix" version = "0.24.3" @@ -4676,7 +4931,7 @@ dependencies = [ "frame-metadata-hash-extension", "frame-system", "futures", - "jsonrpsee", + "jsonrpsee 0.22.5", "memmap2 0.9.4", "node-subtensor-runtime", "pallet-commitments", @@ -4770,9 +5025,9 @@ dependencies = [ "sp-offchain", "sp-runtime", "sp-session", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", + "sp-storage", + "sp-tracing", "sp-transaction-pool", "sp-version", "substrate-wasm-builder", @@ -4796,6 +5051,12 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nonempty" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9e591e719385e6ebaeb5ce5d3887f7d5676fceca6411d1925ccc95745f3d6f7" + [[package]] name = "nonzero_ext" version = "0.3.0" @@ -4928,7 +5189,16 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bedf36ffb6ba96c2eb7144ef6270557b52e54b20c0a8e1eb2ff99a6c6959bff" dependencies = [ - "asn1-rs", + "asn1-rs 0.5.2", +] + +[[package]] +name = "oid-registry" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d8034d9489cdaf79228eb9f6a3b8d7bb32ba00d6645ebd48eef4077ceb5bd9" +dependencies = [ + "asn1-rs 0.6.2", ] [[package]] @@ -4949,12 +5219,60 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "openssl" +version = "0.10.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +dependencies = [ + "bitflags 2.5.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.71", +] + [[package]] name = "openssl-probe" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +[[package]] +name = "openssl-src" +version = "300.3.2+3.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a211a18d945ef7e648cc6e0058f4c548ee46aab922ea203e0d30e966ea23647b" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -4984,8 +5302,8 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", + "sp-tracing", "sp-weights", "substrate-fixed", "subtensor-macros", @@ -4993,8 +5311,8 @@ dependencies = [ [[package]] name = "pallet-aura" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "36.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-support", "frame-system", @@ -5005,13 +5323,12 @@ dependencies = [ "sp-application-crypto", "sp-consensus-aura", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-authorship" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-support", "frame-system", @@ -5019,13 +5336,12 @@ dependencies = [ "parity-scale-codec", 
"scale-info", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-balances" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "38.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "docify", "frame-benchmarking", @@ -5035,7 +5351,6 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] @@ -5051,7 +5366,7 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", "subtensor-macros", ] @@ -5069,14 +5384,14 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", "subtensor-macros", ] [[package]] name = "pallet-grandpa" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-benchmarking", "frame-support", @@ -5093,13 +5408,12 @@ dependencies = [ "sp-runtime", "sp-session", "sp-staking", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-insecure-randomness-collective-flip" -version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "25.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-support", 
"frame-system", @@ -5107,13 +5421,12 @@ dependencies = [ "safe-mix", "scale-info", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-membership" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-benchmarking", "frame-support", @@ -5124,13 +5437,12 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-multisig" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-benchmarking", "frame-support", @@ -5140,13 +5452,12 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-preimage" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-benchmarking", "frame-support", @@ -5157,13 +5468,12 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-proxy" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" 
+source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-benchmarking", "frame-support", @@ -5172,7 +5482,6 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] @@ -5188,14 +5497,14 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", "subtensor-macros", ] [[package]] name = "pallet-safe-mode" -version = "9.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "18.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "docify", "frame-benchmarking", @@ -5208,13 +5517,12 @@ dependencies = [ "scale-info", "sp-arithmetic", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-scheduler" -version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "38.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "docify", "frame-benchmarking", @@ -5225,14 +5533,13 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-weights", ] [[package]] name = "pallet-session" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-support", 
"frame-system", @@ -5247,7 +5554,6 @@ dependencies = [ "sp-session", "sp-staking", "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-trie", ] @@ -5282,8 +5588,8 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", + "sp-tracing", "sp-version", "substrate-fixed", "subtensor-macros", @@ -5291,8 +5597,8 @@ dependencies = [ [[package]] name = "pallet-sudo" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "docify", "frame-benchmarking", @@ -5302,13 +5608,12 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-timestamp" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "36.0.1" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "docify", "frame-benchmarking", @@ -5320,15 +5625,14 @@ dependencies = [ "sp-inherents", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage", "sp-timestamp", ] [[package]] name = "pallet-transaction-payment" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-support", "frame-system", @@ -5338,15 +5642,14 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-transaction-payment-rpc" -version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "40.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "jsonrpsee", + "jsonrpsee 0.23.2", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", "sp-api", @@ -5359,8 +5662,8 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -5371,8 +5674,8 @@ dependencies = [ [[package]] name = "pallet-utility" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-benchmarking", "frame-support", @@ -5382,7 +5685,6 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] @@ -5393,7 +5695,7 @@ checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" dependencies = [ "bitcoin_hashes", "rand", - "rand_core 0.6.4", + 
"rand_core", "serde", "unicode-normalization", ] @@ -5446,12 +5748,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "parity-send-wrapper" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa9777aa91b8ad9dd5aaa04a9b6bcb02c7f1deb952fca5a66034d5e63afc5c6f" - [[package]] name = "parity-util-mem" version = "0.12.0" @@ -5478,7 +5774,7 @@ checksum = "f557c32c6d268a07c921471619c0295f5efad3a0e76d4f97a05c091a51d110b2" dependencies = [ "proc-macro2", "syn 1.0.109", - "synstructure", + "synstructure 0.12.6", ] [[package]] @@ -5554,7 +5850,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" dependencies = [ "base64ct", - "rand_core 0.6.4", + "rand_core", "subtle 2.6.0", ] @@ -5670,12 +5966,6 @@ dependencies = [ "syn 2.0.71", ] -[[package]] -name = "pin-project-lite" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" - [[package]] name = "pin-project-lite" version = "0.2.14" @@ -5796,7 +6086,7 @@ dependencies = [ "cfg-if", "concurrent-queue", "hermit-abi 0.4.0", - "pin-project-lite 0.2.14", + "pin-project-lite", "rustix 0.38.34", "tracing", "windows-sys 0.52.0", @@ -5857,6 +6147,16 @@ dependencies = [ "regex", ] +[[package]] +name = "predicates" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97" +dependencies = [ + "anstyle", + "predicates-core", +] + [[package]] name = "predicates-core" version = "1.0.6" @@ -5950,6 +6250,17 @@ dependencies = [ "version_check", ] +[[package]] +name = "proc-macro-warning" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d1eaa7fa0aa1929ffdf7eeb6eac234dde6268914a14ad44d23521ab6a9b258e" +dependencies = [ + "proc-macro2", + 
"quote", + "syn 2.0.71", +] + [[package]] name = "proc-macro-warning" version = "1.0.2" @@ -5986,9 +6297,9 @@ dependencies = [ [[package]] name = "prometheus-client" -version = "0.19.0" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d6fa99d535dd930d1249e6c79cb3c2915f9172a540fe2b02a4c8f9ca954721e" +checksum = "3c99afa9a01501019ac3a14d71d9f94050346f55ca471ce90c799a15c58f61e2" dependencies = [ "dtoa", "itoa", @@ -6042,13 +6353,34 @@ dependencies = [ "petgraph", "prettyplease 0.1.25", "prost 0.11.9", - "prost-types", + "prost-types 0.11.9", "regex", "syn 1.0.109", "tempfile", "which", ] +[[package]] +name = "prost-build" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" +dependencies = [ + "bytes", + "heck 0.5.0", + "itertools 0.12.1", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease 0.2.20", + "prost 0.12.6", + "prost-types 0.12.6", + "regex", + "syn 2.0.71", + "tempfile", +] + [[package]] name = "prost-derive" version = "0.11.9" @@ -6084,6 +6416,15 @@ dependencies = [ "prost 0.11.9", ] +[[package]] +name = "prost-types" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" +dependencies = [ + "prost 0.12.6", +] + [[package]] name = "psm" version = "0.1.21" @@ -6103,7 +6444,7 @@ dependencies = [ "libc", "once_cell", "raw-cpuid", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "web-sys", "winapi", ] @@ -6125,26 +6466,51 @@ dependencies = [ [[package]] name = "quick-protobuf-codec" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1693116345026436eb2f10b677806169c1a1260c1c60eaaffe3fb5a29ae23d8b" +checksum = "f8ededb1cd78531627244d51dd0c7139fbe736c7d57af0092a76f0ffb2f56e98" dependencies = [ "asynchronous-codec", 
"bytes", "quick-protobuf", "thiserror", - "unsigned-varint", + "unsigned-varint 0.7.2", ] [[package]] -name = "quicksink" -version = "0.1.2" +name = "quinn" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77de3c815e5a160b1539c6592796801df2043ae35e123b46d73380cfa57af858" +checksum = "2e8b432585672228923edbbf64b8b12c14e1112f62e88737655b4a083dbcd78e" dependencies = [ - "futures-core", - "futures-sink", - "pin-project-lite 0.1.12", + "bytes", + "pin-project-lite", + "quinn-proto 0.9.6", + "quinn-udp 0.3.2", + "rustc-hash", + "rustls 0.20.9", + "thiserror", + "tokio", + "tracing", + "webpki", +] + +[[package]] +name = "quinn" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cc2c5017e4b43d5995dcea317bc46c1e09404c0a9664d2908f7f02dfe943d75" +dependencies = [ + "bytes", + "futures-io", + "pin-project-lite", + "quinn-proto 0.10.6", + "quinn-udp 0.4.1", + "rustc-hash", + "rustls 0.21.12", + "thiserror", + "tokio", + "tracing", ] [[package]] @@ -6165,6 +6531,49 @@ dependencies = [ "webpki", ] +[[package]] +name = "quinn-proto" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "141bf7dfde2fbc246bfd3fe12f2455aa24b0fbd9af535d8c86c7bd1381ff2b1a" +dependencies = [ + "bytes", + "rand", + "ring 0.16.20", + "rustc-hash", + "rustls 0.21.12", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "641538578b21f5e5c8ea733b736895576d0fe329bb883b937db6f4d163dbaaf4" +dependencies = [ + "libc", + "quinn-proto 0.9.6", + "socket2 0.4.10", + "tracing", + "windows-sys 0.42.0", +] + +[[package]] +name = "quinn-udp" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "055b4e778e8feb9f93c4e439f71dc2156ef13360b432b799e179a8c4cdf0b1d7" +dependencies = [ + "bytes", + "libc", + 
"socket2 0.5.7", + "tracing", + "windows-sys 0.48.0", +] + [[package]] name = "quote" version = "1.0.36" @@ -6188,26 +6597,17 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", - "rand_core 0.6.4", + "rand_core", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ - "getrandom 0.1.16", + "ppv-lite86", + "rand_core", ] [[package]] @@ -6216,7 +6616,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom", ] [[package]] @@ -6235,7 +6635,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e" dependencies = [ - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -6318,7 +6718,7 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" dependencies = [ - "getrandom 0.2.15", + "getrandom", "libredox", "thiserror", ] @@ -6432,23 +6832,6 @@ dependencies = [ "subtle 2.6.0", ] -[[package]] -name = "ring" -version = "0.1.0" -source = "git+https://github.com/w3f/ring-proof#665f5f51af5734c7b6d90b985dd6861d4c5b4752" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-poly", - "ark-serialize", - "ark-std", - "arrayvec", - "blake2 0.10.6", - "common", - 
"fflonk", - "merlin", -] - [[package]] name = "ring" version = "0.16.20" @@ -6472,7 +6855,7 @@ checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom", "libc", "spin 0.9.8", "untrusted 0.9.0", @@ -6619,7 +7002,6 @@ version = "0.20.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" dependencies = [ - "log", "ring 0.16.20", "sct", "webpki", @@ -6676,9 +7058,9 @@ checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "rw-stream-sink" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26338f5e09bb721b85b135ea05af7767c90b52f6de4f087d4f4a3a9d64e7dc04" +checksum = "d8c9026ff5d2f23da5e45bbc283f156383001bfb09c4e44256d02c1a685fe9a1" dependencies = [ "futures", "pin-project", @@ -6720,19 +7102,19 @@ dependencies = [ [[package]] name = "sc-allocator" -version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "29.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "log", "sp-core", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface", "thiserror", ] [[package]] name = "sc-basic-authorship" -version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.44.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "futures", "futures-timer", @@ -6753,8 +7135,8 @@ dependencies = [ [[package]] name = "sc-block-builder" -version = "0.33.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.42.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "sp-api", @@ -6768,10 +7150,10 @@ dependencies = [ [[package]] name = "sc-chain-spec" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "docify", "log", "memmap2 0.9.4", @@ -6790,12 +7172,13 @@ dependencies = [ "sp-io", "sp-runtime", "sp-state-machine", + "sp-tracing", ] [[package]] name = "sc-chain-spec-derive" -version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "12.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2", @@ -6805,15 +7188,15 @@ dependencies = [ [[package]] name = "sc-cli" -version = "0.36.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.46.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "chrono", "clap", "fdlimit", "futures", - "itertools 0.10.5", + "itertools 0.11.0", "libp2p-identity", "log", "names", @@ -6846,8 +7229,8 @@ dependencies = [ [[package]] name = "sc-client-api" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "fnv", "futures", @@ -6862,19 +7245,19 @@ dependencies = [ "sp-consensus", "sp-core", "sp-database", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "sp-runtime", "sp-state-machine", "sp-statement-store", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage", "sp-trie", "substrate-prometheus-endpoint", ] [[package]] name = "sc-client-db" -version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.44.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "hash-db", "kvdb", @@ -6899,17 +7282,16 @@ dependencies = [ [[package]] name = "sc-consensus" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "futures", - "futures-timer", - "libp2p-identity", "log", - "mockall", + "mockall 0.11.4", "parking_lot 0.12.3", "sc-client-api", + "sc-network-types", "sc-utils", "serde", "sp-api", @@ -6924,8 +7306,8 @@ dependencies = [ [[package]] name = "sc-consensus-aura" -version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.44.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "futures", @@ -6953,11 +7335,11 @@ dependencies = [ [[package]] name = "sc-consensus-grandpa" -version = "0.19.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.29.1" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "ahash 0.8.11", - "array-bytes 6.2.3", + "array-bytes", "async-trait", "dyn-clone", "finality-grandpa", @@ -6976,6 +7358,7 @@ dependencies = [ "sc-network-common", "sc-network-gossip", "sc-network-sync", + "sc-network-types", "sc-telemetry", "sc-transaction-pool-api", "sc-utils", @@ -6996,12 +7379,12 @@ dependencies = [ [[package]] name = "sc-consensus-grandpa-rpc" -version = "0.19.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.29.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "finality-grandpa", "futures", - "jsonrpsee", + "jsonrpsee 0.23.2", "log", "parity-scale-codec", "sc-client-api", @@ -7016,8 +7399,8 @@ dependencies = [ [[package]] name = "sc-consensus-slots" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "futures", @@ -7039,8 +7422,8 @@ dependencies = [ [[package]] name = "sc-executor" -version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.40.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "parking_lot 0.12.3", @@ -7050,44 +7433,44 @@ dependencies = [ "schnellru", "sp-api", "sp-core", - "sp-externalities 0.25.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "sp-io", "sp-panic-handler", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface", "sp-trie", "sp-version", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface", "tracing", ] [[package]] name = "sc-executor-common" -version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.35.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "polkavm", "sc-allocator", "sp-maybe-compressed-blob", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface", "thiserror", "wasm-instrument", ] [[package]] name = "sc-executor-polkavm" -version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.32.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "log", "polkavm", "sc-executor-common", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface", ] [[package]] name = "sc-executor-wasmtime" -version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.35.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "anyhow", "cfg-if", @@ -7097,15 +7480,15 @@ dependencies = [ "rustix 0.36.17", "sc-allocator", "sc-executor-common", - "sp-runtime-interface 24.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface", + "sp-wasm-interface", "wasmtime", ] [[package]] name = "sc-informant" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "ansi_term", "futures", @@ -7121,10 +7504,10 @@ dependencies = [ [[package]] name = "sc-keystore" -version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "33.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "parking_lot 0.12.3", "serde_json", "sp-application-crypto", @@ -7135,23 +7518,23 @@ dependencies = [ [[package]] name = "sc-mixnet" -version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.14.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 4.2.0", + "array-bytes", "arrayvec", "blake2 0.10.6", "bytes", "futures", "futures-timer", - "libp2p-identity", "log", "mixnet", - "multiaddr", + "multiaddr 0.18.1", "parity-scale-codec", "parking_lot 0.12.3", "sc-client-api", "sc-network", + "sc-network-types", "sc-transaction-pool-api", "sp-api", "sp-consensus", @@ -7164,14 +7547,15 @@ dependencies = [ [[package]] name = "sc-network" -version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.44.0" +source 
= "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "async-channel", "async-trait", "asynchronous-codec", "bytes", + "cid 0.9.0", "either", "fnv", "futures", @@ -7179,16 +7563,22 @@ dependencies = [ "ip_network", "libp2p", "linked_hash_set", + "litep2p", "log", - "mockall", + "mockall 0.11.4", + "once_cell", "parity-scale-codec", "parking_lot 0.12.3", "partial_sort", "pin-project", + "prost 0.12.6", + "prost-build 0.12.6", "rand", "sc-client-api", "sc-network-common", + "sc-network-types", "sc-utils", + "schnellru", "serde", "serde_json", "smallvec", @@ -7200,43 +7590,25 @@ dependencies = [ "thiserror", "tokio", "tokio-stream", - "unsigned-varint", + "unsigned-varint 0.7.2", + "void", "wasm-timer", "zeroize", ] -[[package]] -name = "sc-network-bitswap" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" -dependencies = [ - "async-channel", - "cid", - "futures", - "libp2p-identity", - "log", - "prost 0.12.6", - "prost-build", - "sc-client-api", - "sc-network", - "sp-blockchain", - "sp-runtime", - "thiserror", - "unsigned-varint", -] - [[package]] name = "sc-network-common" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "bitflags 1.3.2", "futures", "libp2p-identity", "parity-scale-codec", - "prost-build", + "prost-build 0.12.6", "sc-consensus", + "sc-network-types", "sp-consensus", "sp-consensus-grandpa", "sp-runtime", @@ -7244,17 +7616,17 @@ dependencies = [ [[package]] name = "sc-network-gossip" -version = "0.34.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.44.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "ahash 0.8.11", "futures", "futures-timer", - "libp2p", "log", "sc-network", "sc-network-common", "sc-network-sync", + "sc-network-types", "schnellru", "sp-runtime", "substrate-prometheus-endpoint", @@ -7263,19 +7635,19 @@ dependencies = [ [[package]] name = "sc-network-light" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "async-channel", "futures", - "libp2p-identity", "log", "parity-scale-codec", "prost 0.12.6", - "prost-build", + "prost-build 0.12.6", "sc-client-api", "sc-network", + "sc-network-types", "sp-blockchain", "sp-core", "sp-runtime", @@ -7284,10 +7656,10 @@ dependencies = [ [[package]] name = "sc-network-sync" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "async-channel", "async-trait", "fork-tree", @@ -7295,14 +7667,15 @@ dependencies = [ "futures-timer", "libp2p", "log", - "mockall", + "mockall 0.11.4", "parity-scale-codec", "prost 0.12.6", - "prost-build", + "prost-build 0.12.6", "sc-client-api", "sc-consensus", "sc-network", "sc-network-common", + "sc-network-types", "sc-utils", "schnellru", "smallvec", @@ -7320,36 +7693,52 @@ dependencies = [ [[package]] name = "sc-network-transactions" -version = "0.33.0" -source 
= "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "futures", - "libp2p", "log", "parity-scale-codec", "sc-network", "sc-network-common", "sc-network-sync", + "sc-network-types", "sc-utils", "sp-consensus", "sp-runtime", "substrate-prometheus-endpoint", ] +[[package]] +name = "sc-network-types" +version = "0.12.1" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" +dependencies = [ + "bs58 0.5.1", + "ed25519-dalek", + "libp2p-identity", + "litep2p", + "log", + "multiaddr 0.18.1", + "multihash 0.19.1", + "rand", + "thiserror", + "zeroize", +] + [[package]] name = "sc-offchain" -version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "39.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "bytes", "fnv", "futures", "futures-timer", - "hyper", + "hyper 0.14.29", "hyper-rustls", - "libp2p", "log", "num_cpus", "once_cell", @@ -7359,11 +7748,12 @@ dependencies = [ "sc-client-api", "sc-network", "sc-network-common", + "sc-network-types", "sc-transaction-pool-api", "sc-utils", "sp-api", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "sp-keystore", "sp-offchain", "sp-runtime", @@ -7373,8 +7763,8 @@ dependencies = [ [[package]] name = "sc-proposer-metrics" -version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.18.0" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "log", "substrate-prometheus-endpoint", @@ -7382,11 +7772,11 @@ dependencies = [ [[package]] name = "sc-rpc" -version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "39.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "futures", - "jsonrpsee", + "jsonrpsee 0.23.2", "log", "parity-scale-codec", "parking_lot 0.12.3", @@ -7414,10 +7804,10 @@ dependencies = [ [[package]] name = "sc-rpc-api" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "jsonrpsee", + "jsonrpsee 0.23.2", "parity-scale-codec", "sc-chain-spec", "sc-mixnet", @@ -7434,15 +7824,19 @@ dependencies = [ [[package]] name = "sc-rpc-server" -version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "16.0.2" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ + "forwarded-header-value", "futures", "governor", - "http", - "hyper", - "jsonrpsee", + "http 1.1.0", + "http-body-util", + "hyper 1.4.1", + "ip_network", + "jsonrpsee 0.23.2", "log", + "serde", "serde_json", "substrate-prometheus-endpoint", "tokio", @@ -7452,14 +7846,14 @@ dependencies = [ [[package]] name = "sc-rpc-spec-v2" -version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.44.0" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "futures", "futures-util", "hex", - "jsonrpsee", + "jsonrpsee 0.23.2", "log", "parity-scale-codec", "parking_lot 0.12.3", @@ -7469,6 +7863,7 @@ dependencies = [ "sc-rpc", "sc-transaction-pool-api", "sc-utils", + "schnellru", "serde", "sp-api", "sp-blockchain", @@ -7483,15 +7878,15 @@ dependencies = [ [[package]] name = "sc-service" -version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.45.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "directories", "exit-future", "futures", "futures-timer", - "jsonrpsee", + "jsonrpsee 0.23.2", "log", "parity-scale-codec", "parking_lot 0.12.3", @@ -7505,11 +7900,11 @@ dependencies = [ "sc-informant", "sc-keystore", "sc-network", - "sc-network-bitswap", "sc-network-common", "sc-network-light", "sc-network-sync", "sc-network-transactions", + "sc-network-types", "sc-rpc", "sc-rpc-server", "sc-rpc-spec-v2", @@ -7526,12 +7921,12 @@ dependencies = [ "sp-blockchain", "sp-consensus", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "sp-keystore", "sp-runtime", "sp-session", "sp-state-machine", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage", "sp-transaction-pool", "sp-transaction-storage-proof", "sp-trie", @@ -7547,8 +7942,8 @@ dependencies = [ [[package]] name = "sc-state-db" -version = "0.30.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.36.0" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "log", "parity-scale-codec", @@ -7558,8 +7953,8 @@ dependencies = [ [[package]] name = "sc-sysinfo" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "derive_more", "futures", @@ -7574,13 +7969,13 @@ dependencies = [ "sp-core", "sp-crypto-hashing", "sp-io", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", ] [[package]] name = "sc-telemetry" -version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "24.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "chrono", "futures", @@ -7589,6 +7984,7 @@ dependencies = [ "parking_lot 0.12.3", "pin-project", "rand", + "sc-network", "sc-utils", "serde", "serde_json", @@ -7598,8 +7994,8 @@ dependencies = [ [[package]] name = "sc-tracing" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "ansi_term", "chrono", @@ -7619,17 +8015,17 @@ dependencies = [ "sp-core", "sp-rpc", "sp-runtime", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing", "thiserror", "tracing", - "tracing-log 0.1.4", - "tracing-subscriber 0.2.25", + "tracing-log", + "tracing-subscriber", ] [[package]] name = "sc-tracing-proc-macro" version = "11.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2", @@ -7639,8 +8035,8 @@ dependencies = [ [[package]] name = "sc-transaction-pool" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "futures", @@ -7658,7 +8054,7 @@ dependencies = [ "sp-core", "sp-crypto-hashing", "sp-runtime", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing", "sp-transaction-pool", "substrate-prometheus-endpoint", "thiserror", @@ -7666,8 +8062,8 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "futures", @@ -7682,8 +8078,8 @@ dependencies = [ [[package]] name = "sc-utils" -version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "17.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-channel", "futures", @@ -7779,10 +8175,10 @@ dependencies = [ "aead", "arrayref", "arrayvec", - "curve25519-dalek 4.1.3", + "curve25519-dalek", "getrandom_or_panic", "merlin", - "rand_core 0.6.4", + "rand_core", "serde_bytes", "sha2 0.10.8", "subtle 2.6.0", @@ 
-7811,6 +8207,21 @@ dependencies = [ "untrusted 0.9.0", ] +[[package]] +name = "sctp-proto" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6220f78bb44c15f326b0596113305f6101097a18755d53727a575c97e09fb24" +dependencies = [ + "bytes", + "crc", + "fxhash", + "log", + "rand", + "slab", + "thiserror", +] + [[package]] name = "sec1" version = "0.7.3" @@ -7909,6 +8320,12 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + [[package]] name = "serde" version = "1.0.204" @@ -7949,11 +8366,12 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.127" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] @@ -8018,6 +8436,38 @@ dependencies = [ "opaque-debug 0.3.1", ] +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", + "sha1-asm", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha1-asm" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"286acebaf8b67c1130aedffad26f594eff0c1292389158135327d2e23aed582b" +dependencies = [ + "cc", +] + [[package]] name = "sha2" version = "0.9.9" @@ -8083,7 +8533,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -8099,6 +8549,15 @@ dependencies = [ "wide", ] +[[package]] +name = "simple-dns" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cae9a3fcdadafb6d97f4c0e007e4247b114ee0f119f650c3cbf3a8b3a1479694" +dependencies = [ + "bitflags 2.5.0", +] + [[package]] name = "simple-mermaid" version = "0.1.1" @@ -8147,8 +8606,8 @@ dependencies = [ "aes-gcm", "blake2 0.10.6", "chacha20poly1305", - "curve25519-dalek 4.1.3", - "rand_core 0.6.4", + "curve25519-dalek", + "rand_core", "ring 0.17.8", "rustc_version 0.4.0", "sha2 0.10.8", @@ -8183,32 +8642,47 @@ checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" dependencies = [ "base64 0.13.1", "bytes", - "flate2", "futures", - "http", + "http 0.2.12", + "httparse", + "log", + "rand", + "sha-1 0.9.8", +] + +[[package]] +name = "soketto" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures", + "http 1.1.0", "httparse", "log", "rand", - "sha-1", + "sha1", ] [[package]] name = "sp-api" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ + "docify", "hash-db", "log", "parity-scale-codec", "scale-info", "sp-api-proc-macro", "sp-core", - "sp-externalities 0.25.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "sp-metadata-ir", "sp-runtime", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface", "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-trie", "sp-version", "thiserror", @@ -8216,8 +8690,8 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" -version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "20.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "Inflector", "blake2 0.10.6", @@ -8230,21 +8704,20 @@ dependencies = [ [[package]] name = "sp-application-crypto" -version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "38.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "scale-info", "serde", "sp-core", "sp-io", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "sp-arithmetic" -version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "26.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "docify", "integer-sqrt", @@ -8252,32 +8725,13 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "static_assertions", ] -[[package]] -name = "sp-ark-bls12-381" -version = "0.4.2" -source = 
"git+https://github.com/paritytech/arkworks-substrate#caa2eed74beb885dd07c7db5f916f2281dad818f" -dependencies = [ - "ark-bls12-381-ext", - "sp-crypto-ec-utils", -] - -[[package]] -name = "sp-ark-ed-on-bls12-381-bandersnatch" -version = "0.4.2" -source = "git+https://github.com/paritytech/arkworks-substrate#caa2eed74beb885dd07c7db5f916f2281dad818f" -dependencies = [ - "ark-ed-on-bls12-381-bandersnatch-ext", - "sp-crypto-ec-utils", -] - [[package]] name = "sp-block-builder" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "sp-api", "sp-inherents", @@ -8286,26 +8740,27 @@ dependencies = [ [[package]] name = "sp-blockchain" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.1" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "futures", - "log", "parity-scale-codec", "parking_lot 0.12.3", "schnellru", "sp-api", "sp-consensus", + "sp-core", "sp-database", "sp-runtime", "sp-state-machine", "thiserror", + "tracing", ] [[package]] name = "sp-consensus" -version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.40.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "futures", @@ -8319,8 +8774,8 @@ dependencies = [ [[package]] name = "sp-consensus-aura" -version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.40.0" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "parity-scale-codec", @@ -8335,8 +8790,8 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" -version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "21.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "finality-grandpa", "log", @@ -8352,8 +8807,8 @@ dependencies = [ [[package]] name = "sp-consensus-slots" -version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.40.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "scale-info", @@ -8363,11 +8818,10 @@ dependencies = [ [[package]] name = "sp-core" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", - "bandersnatch_vrfs", + "array-bytes", "bitflags 1.3.2", "blake2 0.10.6", "bounded-collections", @@ -8378,7 +8832,7 @@ dependencies = [ "hash-db", "hash256-std-hasher", "impl-serde", - "itertools 0.10.5", + "itertools 0.11.0", "k256", "libsecp256k1", "log", @@ -8395,11 +8849,11 @@ dependencies = [ "secrecy", "serde", "sp-crypto-hashing", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-runtime-interface 24.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-debug-derive", + "sp-externalities", + "sp-runtime-interface", + "sp-std", + "sp-storage", "ss58-registry", "substrate-bip39", "thiserror", @@ -8408,30 +8862,10 @@ dependencies = [ "zeroize", ] -[[package]] -name = "sp-crypto-ec-utils" -version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" -dependencies = [ - "ark-bls12-377", - "ark-bls12-377-ext", - "ark-bls12-381", - "ark-bls12-381-ext", - "ark-bw6-761", - "ark-bw6-761-ext", - "ark-ec", - "ark-ed-on-bls12-377", - "ark-ed-on-bls12-377-ext", - "ark-ed-on-bls12-381-bandersnatch", - "ark-ed-on-bls12-381-bandersnatch-ext", - "ark-scale", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk)", -] - [[package]] name = "sp-crypto-hashing" -version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "blake2b_simd", "byteorder", @@ -8443,8 +8877,8 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" -version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "quote", "sp-crypto-hashing", @@ -8454,7 +8888,7 @@ dependencies = [ [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "kvdb", "parking_lot 0.12.3", @@ -8463,17 +8897,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.71", -] - -[[package]] -name = "sp-debug-derive" -version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "proc-macro2", "quote", @@ -8482,29 +8906,21 @@ dependencies = [ [[package]] name = "sp-externalities" -version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" -dependencies = [ - "environmental", - "parity-scale-codec", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", -] - -[[package]] -name = "sp-externalities" -version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +version = "0.29.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "environmental", "parity-scale-codec", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk)", + "sp-storage", ] [[package]] name = "sp-genesis-builder" -version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.15.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ + "parity-scale-codec", + "scale-info", "serde_json", "sp-api", "sp-runtime", @@ 
-8512,8 +8928,8 @@ dependencies = [ [[package]] name = "sp-inherents" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -8525,10 +8941,11 @@ dependencies = [ [[package]] name = "sp-io" -version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "38.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "bytes", + "docify", "ed25519-dalek", "libsecp256k1", "log", @@ -8538,12 +8955,11 @@ dependencies = [ "secp256k1", "sp-core", "sp-crypto-hashing", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "sp-keystore", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface", "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing", "sp-trie", "tracing", "tracing-core", @@ -8551,8 +8967,8 @@ dependencies = [ [[package]] name = "sp-keyring" -version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "39.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "sp-core", "sp-runtime", @@ -8561,19 +8977,19 @@ dependencies = [ [[package]] name = "sp-keystore" -version = "0.34.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.40.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "parking_lot 0.12.3", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", ] [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "thiserror", "zstd 0.12.4", @@ -8581,8 +8997,8 @@ dependencies = [ [[package]] name = "sp-metadata-ir" -version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "frame-metadata", "parity-scale-codec", @@ -8591,8 +9007,8 @@ dependencies = [ [[package]] name = "sp-mixnet" -version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.12.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "scale-info", @@ -8602,8 +9018,8 @@ dependencies = [ [[package]] name = "sp-offchain" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "sp-api", "sp-core", @@ 
-8613,7 +9029,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "backtrace", "lazy_static", @@ -8622,8 +9038,8 @@ dependencies = [ [[package]] name = "sp-rpc" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "32.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "rustc-hash", "serde", @@ -8632,83 +9048,53 @@ dependencies = [ [[package]] name = "sp-runtime" -version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "39.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "docify", "either", "hash256-std-hasher", "impl-trait-for-tuples", "log", + "num-traits", "parity-scale-codec", - "paste", - "rand", - "scale-info", - "serde", - "simple-mermaid", - "sp-application-crypto", - "sp-arithmetic", - "sp-core", - "sp-io", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-weights", -] - -[[package]] -name = "sp-runtime-interface" -version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" -dependencies = [ - "bytes", - "impl-trait-for-tuples", - "parity-scale-codec", - "polkavm-derive", - "primitive-types", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-runtime-interface-proc-macro 17.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "static_assertions", + "paste", + "rand", + "scale-info", + "serde", + "simple-mermaid", + "sp-application-crypto", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-std", + "sp-weights", + "tracing", ] [[package]] name = "sp-runtime-interface" -version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +version = "28.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec", "polkavm-derive", "primitive-types", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk)", - "sp-runtime-interface-proc-macro 17.0.0 (git+https://github.com/paritytech/polkadot-sdk)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk)", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk)", + "sp-externalities", + "sp-runtime-interface-proc-macro", + "sp-std", + "sp-storage", + "sp-tracing", + "sp-wasm-interface", "static_assertions", ] [[package]] name = "sp-runtime-interface-proc-macro" -version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" -dependencies = [ - "Inflector", - "expander", - "proc-macro-crate 3.1.0", - "proc-macro2", - "quote", - "syn 2.0.71", -] - -[[package]] 
-name = "sp-runtime-interface-proc-macro" -version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +version = "18.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "Inflector", "expander", @@ -8720,8 +9106,8 @@ dependencies = [ [[package]] name = "sp-session" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "35.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "scale-info", @@ -8734,8 +9120,8 @@ dependencies = [ [[package]] name = "sp-staking" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -8747,8 +9133,8 @@ dependencies = [ [[package]] name = "sp-state-machine" -version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.43.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "hash-db", "log", @@ -8757,7 +9143,7 @@ dependencies = [ "rand", "smallvec", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "sp-panic-handler", "sp-trie", "thiserror", @@ -8767,11 +9153,11 @@ dependencies = [ [[package]] name = "sp-statement-store" -version = "10.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "18.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "aes-gcm", - "curve25519-dalek 4.1.3", + "curve25519-dalek", "ed25519-dalek", "hkdf", "parity-scale-codec", @@ -8782,51 +9168,34 @@ dependencies = [ "sp-application-crypto", "sp-core", "sp-crypto-hashing", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "sp-runtime", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface", "thiserror", - "x25519-dalek 2.0.1", + "x25519-dalek", ] [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" - -[[package]] -name = "sp-std" -version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" - -[[package]] -name = "sp-storage" -version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" -dependencies = [ - "impl-serde", - "parity-scale-codec", - "ref-cast", - "serde", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", -] +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" [[package]] name = "sp-storage" -version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +version = "21.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "impl-serde", "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 14.0.0 
(git+https://github.com/paritytech/polkadot-sdk)", + "sp-debug-derive", ] [[package]] name = "sp-timestamp" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "parity-scale-codec", @@ -8837,30 +9206,19 @@ dependencies = [ [[package]] name = "sp-tracing" -version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" -dependencies = [ - "parity-scale-codec", - "tracing", - "tracing-core", - "tracing-subscriber 0.2.25", -] - -[[package]] -name = "sp-tracing" -version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +version = "17.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "tracing", "tracing-core", - "tracing-subscriber 0.3.18", + "tracing-subscriber", ] [[package]] name = "sp-transaction-pool" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "sp-api", "sp-runtime", @@ -8868,8 +9226,8 @@ dependencies = [ [[package]] name = "sp-transaction-storage-proof" -version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "34.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "async-trait", "parity-scale-codec", @@ -8882,8 +9240,8 @@ 
dependencies = [ [[package]] name = "sp-trie" -version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "ahash 0.8.11", "hash-db", @@ -8896,7 +9254,7 @@ dependencies = [ "scale-info", "schnellru", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities", "thiserror", "tracing", "trie-db", @@ -8905,8 +9263,8 @@ dependencies = [ [[package]] name = "sp-version" -version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "37.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8915,15 +9273,15 @@ dependencies = [ "serde", "sp-crypto-hashing-proc-macro", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std", "sp-version-proc-macro", "thiserror", ] [[package]] name = "sp-version-proc-macro" -version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "14.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "parity-scale-codec", "proc-macro2", @@ -8933,8 +9291,8 @@ dependencies = [ [[package]] name = "sp-wasm-interface" -version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "21.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ 
"anyhow", "impl-trait-for-tuples", @@ -8943,20 +9301,10 @@ dependencies = [ "wasmtime", ] -[[package]] -name = "sp-wasm-interface" -version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" -dependencies = [ - "impl-trait-for-tuples", - "log", - "parity-scale-codec", -] - [[package]] name = "sp-weights" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "31.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "bounded-collections", "parity-scale-codec", @@ -8964,7 +9312,7 @@ dependencies = [ "serde", "smallvec", "sp-arithmetic", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-debug-derive", ] [[package]] @@ -9053,6 +9401,26 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "str0m" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6706347e49b13373f7ddfafad47df7583ed52083d6fc8a594eb2c80497ef959d" +dependencies = [ + "combine", + "crc", + "fastrand", + "hmac 0.12.1", + "once_cell", + "openssl", + "openssl-sys", + "sctp-proto", + "serde", + "sha-1 0.10.1", + "thiserror", + "tracing", +] + [[package]] name = "strsim" version = "0.11.1" @@ -9102,8 +9470,8 @@ dependencies = [ [[package]] name = "substrate-bip39" -version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.6.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ "hmac 0.12.1", "pbkdf2", @@ -9115,7 +9483,7 @@ dependencies = [ [[package]] name = "substrate-build-script-utils" version = "11.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" [[package]] name = "substrate-fixed" @@ -9130,12 +9498,13 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-system" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "38.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ + "docify", "frame-system-rpc-runtime-api", "futures", - "jsonrpsee", + "jsonrpsee 0.23.2", "log", "parity-scale-codec", "sc-rpc-api", @@ -9150,9 +9519,11 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "hyper", + "http-body-util", + "hyper 1.4.1", + "hyper-util", "log", "prometheus", "thiserror", @@ -9161,10 +9532,10 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" -version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "24.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.15.2-rc1#d6f482d5593c3e791d7b3e92e95aa3c734e23794" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "build-helper", "cargo_metadata", "console", @@ -9178,7 +9549,7 @@ dependencies = [ "sp-core", "sp-io", "sp-maybe-compressed-blob", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing", "sp-version", "strum 0.26.2", "tempfile", @@ -9205,7 +9576,7 @@ dependencies = [ 
name = "subtensor-custom-rpc" version = "0.0.2" dependencies = [ - "jsonrpsee", + "jsonrpsee 0.22.5", "pallet-subtensor", "parity-scale-codec", "serde", @@ -9301,6 +9672,17 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.71", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -9493,7 +9875,7 @@ dependencies = [ "mio", "num_cpus", "parking_lot 0.12.3", - "pin-project-lite 0.2.14", + "pin-project-lite", "signal-hook-registry", "socket2 0.5.7", "tokio-macros", @@ -9528,11 +9910,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", - "pin-project-lite 0.2.14", + "pin-project-lite", "tokio", "tokio-util", ] +[[package]] +name = "tokio-tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +dependencies = [ + "futures-util", + "log", + "rustls 0.21.12", + "rustls-native-certs", + "tokio", + "tokio-rustls", + "tungstenite", +] + [[package]] name = "tokio-util" version = "0.7.11" @@ -9543,7 +9940,7 @@ dependencies = [ "futures-core", "futures-io", "futures-sink", - "pin-project-lite 0.2.14", + "pin-project-lite", "tokio", ] @@ -9610,7 +10007,8 @@ dependencies = [ "futures-core", "futures-util", "pin-project", - "pin-project-lite 0.2.14", + "pin-project-lite", + "tokio", "tower-layer", "tower-service", "tracing", @@ -9618,18 +10016,16 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.4" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" 
+checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" dependencies = [ "bitflags 2.5.0", "bytes", - "futures-core", - "futures-util", - "http", - "http-body", - "http-range-header", - "pin-project-lite 0.2.14", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "pin-project-lite", "tower-layer", "tower-service", ] @@ -9653,7 +10049,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ "log", - "pin-project-lite 0.2.14", + "pin-project-lite", "tracing-attributes", "tracing-core", ] @@ -9689,17 +10085,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -9711,65 +10096,32 @@ dependencies = [ "tracing-core", ] -[[package]] -name = "tracing-serde" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" -dependencies = [ - "serde", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" -dependencies = [ - "ansi_term", - "chrono", - "lazy_static", - "matchers 0.0.1", - "parking_lot 0.11.2", - "regex", - "serde", - "serde_json", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log 0.1.4", - "tracing-serde", -] - [[package]] name = "tracing-subscriber" version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" 
dependencies = [ - "matchers 0.1.0", + "matchers", "nu-ansi-term", "once_cell", + "parking_lot 0.12.3", "regex", "sharded-slab", "smallvec", "thread_local", "tracing", "tracing-core", - "tracing-log 0.2.0", + "tracing-log", ] [[package]] name = "trie-db" -version = "0.28.0" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff28e0f815c2fea41ebddf148e008b077d2faddb026c9555b29696114d602642" +checksum = "0c992b4f40c234a074d48a757efeabb1a6be88af84c0c23f7ca158950cb0ae7f" dependencies = [ "hash-db", - "hashbrown 0.13.2", "log", "rustc-hex", "smallvec", @@ -9793,7 +10145,7 @@ dependencies = [ "async-trait", "cfg-if", "data-encoding", - "enum-as-inner", + "enum-as-inner 0.5.1", "futures-channel", "futures-io", "futures-util", @@ -9810,24 +10162,50 @@ dependencies = [ "url", ] +[[package]] +name = "trust-dns-proto" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner 0.6.0", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.4.0", + "ipnet", + "once_cell", + "rand", + "smallvec", + "thiserror", + "tinyvec", + "tokio", + "tracing", + "url", +] + [[package]] name = "trust-dns-resolver" -version = "0.22.0" +version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aff21aa4dcefb0a1afbfac26deb0adc93888c7d295fb63ab273ef276ba2b7cfe" +checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6" dependencies = [ "cfg-if", "futures-util", "ipconfig", - "lazy_static", "lru-cache", + "once_cell", "parking_lot 0.12.3", + "rand", "resolv-conf", "smallvec", "thiserror", "tokio", "tracing", - "trust-dns-proto", + "trust-dns-proto 0.23.2", ] [[package]] @@ -9842,6 +10220,26 @@ version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f4f195fd851901624eee5a58c4bb2b4f06399148fcd0ed336e6f1cb60a9881df" +[[package]] +name = "tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http 0.2.12", + "httparse", + "log", + "rand", + "rustls 0.21.12", + "sha1", + "thiserror", + "url", + "utf-8", +] + [[package]] name = "twox-hash" version = "1.6.3" @@ -9942,6 +10340,16 @@ dependencies = [ "futures-util", ] +[[package]] +name = "unsigned-varint" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" +dependencies = [ + "bytes", + "tokio-util", +] + [[package]] name = "untrusted" version = "0.7.1" @@ -9965,6 +10373,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "utf8parse" version = "0.2.2" @@ -10012,7 +10426,7 @@ dependencies = [ "digest 0.10.7", "rand", "rand_chacha", - "rand_core 0.6.4", + "rand_core", "sha2 0.10.8", "sha3", "thiserror", @@ -10038,12 +10452,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -10407,12 +10815,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.6" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" -dependencies = [ - "webpki", -] +checksum = 
"5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "which" @@ -10501,6 +10906,21 @@ dependencies = [ "windows-targets 0.52.5", ] +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + [[package]] name = "windows-sys" version = "0.45.0" @@ -10745,55 +11165,76 @@ dependencies = [ [[package]] name = "x25519-dalek" -version = "1.1.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a0c105152107e3b96f6a00a65e86ce82d9b125230e1c4302940eca58ff71f4f" +checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" dependencies = [ - "curve25519-dalek 3.2.0", - "rand_core 0.5.1", + "curve25519-dalek", + "rand_core", + "serde", "zeroize", ] [[package]] -name = "x25519-dalek" -version = "2.0.1" +name = "x509-parser" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" +checksum = "7069fba5b66b9193bd2c5d3d4ff12b839118f6bcbef5328efafafb5395cf63da" dependencies = [ - "curve25519-dalek 4.1.3", - "rand_core 0.6.4", - "serde", - "zeroize", + "asn1-rs 0.5.2", + "data-encoding", + "der-parser 8.2.0", + "lazy_static", + "nom", + "oid-registry 0.6.1", + "rusticata-macros", + "thiserror", + "time", ] [[package]] name = "x509-parser" -version = "0.14.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0ecbeb7b67ce215e40e3cc7f2ff902f94a223acf44995934763467e7b1febc8" +checksum = "fcbc162f30700d6f3f82a24bf7cc62ffe7caea42c0b2cba8bf7f3ae50cf51f69" 
dependencies = [ - "asn1-rs", - "base64 0.13.1", + "asn1-rs 0.6.2", "data-encoding", - "der-parser", + "der-parser 9.0.0", "lazy_static", "nom", - "oid-registry", + "oid-registry 0.7.1", "rusticata-macros", "thiserror", "time", ] +[[package]] +name = "xml-rs" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "539a77ee7c0de333dcc6da69b177380a0b81e0dacfa4f7344c465a36871ee601" + +[[package]] +name = "xmltree" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7d8a75eaf6557bb84a65ace8609883db44a29951042ada9b393151532e41fcb" +dependencies = [ + "xml-rs", +] + [[package]] name = "yamux" -version = "0.10.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d9ba232399af1783a58d8eb26f6b5006fbefe2dc9ef36bd283324792d03ea5" +checksum = "9ed0164ae619f2dc144909a9f082187ebb5893693d8c0196e8085283ccd4b776" dependencies = [ "futures", "log", "nohash-hasher", "parking_lot 0.12.3", + "pin-project", "rand", "static_assertions", ] diff --git a/Cargo.toml b/Cargo.toml index f9a7968b9..a185567c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -60,7 +60,7 @@ scale-info = { version = "2.11.2", default-features = false } serde = { version = "1.0.199", default-features = false } serde-tuple-vec-map = { version = "1.0.1", default-features = false } serde_bytes = { version = "0.11.14", default-features = false } -serde_json = { version = "1.0.116", default-features = false } +serde_json = { version = "1.0.121", default-features = false } serde_with = { version = "=2.0.0", default-features = false } smallvec = "1.13.2" litep2p = { git = "https://github.com/paritytech/litep2p", branch = "master" } @@ -77,81 +77,81 @@ walkdir = "2" subtensor-macros = { path = "support/macros" } -frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-benchmarking-cli = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -frame-executive = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-metadata-hash-extension = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-system-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-system-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-try-runtime = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } +frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +frame-benchmarking-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +frame-executive = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +frame-metadata-hash-extension = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +frame-support = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +frame-system = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +frame-system-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +frame-system-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } 
+frame-try-runtime = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } -pallet-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-insecure-randomness-collective-flip = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-membership = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-multisig = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-preimage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-proxy = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-safe-mode = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-scheduler = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-sudo = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-transaction-payment-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -pallet-transaction-payment-rpc-runtime-api = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-utility = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } +pallet-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-insecure-randomness-collective-flip = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-membership = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-multisig = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-preimage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-proxy = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-safe-mode = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-scheduler = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-sudo = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-transaction-payment-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } 
+pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +pallet-utility = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } -sc-basic-authorship = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-client-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus-grandpa-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-chain-spec-derive = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-chain-spec = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus-slots = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-executor = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-keystore = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-network = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-offchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-rpc-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-service = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-telemetry = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-transaction-pool-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } +sc-basic-authorship = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-client-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-consensus = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-consensus-grandpa-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-chain-spec-derive = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-chain-spec = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-consensus-slots = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-executor = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-keystore = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-network = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-offchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-rpc-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-service = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-telemetry = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sc-transaction-pool-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-block-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-blockchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-consensus = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sp-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sp-genesis-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-inherents = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-keyring = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sp-offchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-session = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", 
default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-storage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sp-tracing = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-version = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-weights = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } +sp-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-block-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-blockchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-consensus = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sp-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sp-genesis-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-core = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-inherents = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-io = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", 
default-features = false } +sp-keyring = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sp-offchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-session = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-std = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-storage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +sp-tracing = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-version = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } +sp-weights = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1", default-features = false } -substrate-build-script-utils = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } +substrate-build-script-utils = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } substrate-fixed = { git = "https://github.com/opentensor/substrate-fixed.git", tag = "v0.5.9" } -substrate-frame-rpc-system = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -substrate-wasm-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } +substrate-frame-rpc-system = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } +substrate-wasm-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.15.2-rc1" } frame-metadata = "16" [profile.release] From e773f3f388730842a3db56a538f00adf7ba7232b Mon Sep 17 00:00:00 2001 From: Keith Date: Wed, 4 Sep 2024 09:24:17 -0500 Subject: [PATCH 021/213] Remove unused import --- pallets/commitments/src/tests.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/pallets/commitments/src/tests.rs b/pallets/commitments/src/tests.rs index 7449003f4..058b5faf0 100644 --- a/pallets/commitments/src/tests.rs +++ b/pallets/commitments/src/tests.rs @@ -1,6 +1,5 @@ #![allow(non_camel_case_types)] -use super::*; use crate as pallet_commitments; use frame_support::derive_impl; use frame_support::traits::ConstU64; From cfd4795487bdc2f3d6ac3701272293959765f935 Mon Sep 17 00:00:00 2001 From: Keith Date: Wed, 4 Sep 2024 10:15:57 -0500 Subject: [PATCH 022/213] Ignore rustls vulnerability --- .github/workflows/check-rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 0c1b4a1cf..ac7b8e3ec 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -247,7 +247,7 @@ jobs: run: cargo install cargo-audit - name: cargo audit - run: cargo audit + run: cargo audit --ignore RUSTSEC-2024-0336 # rustls issue; wait for upstream to resolve this # runs cargo test --workspace cargo-test: From 6b89f64c4d14fa52f8e091696c0b00472ae7cafa Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Fri, 6 Sep 2024 22:26:59 +0800 Subject: [PATCH 023/213] bounded vec --- pallets/subtensor/src/lib.rs | 27 +++++++++++++++++++++--- pallets/subtensor/src/subnets/serving.rs | 8 +++---- pallets/subtensor/tests/serving.rs | 4 ++-- pallets/subtensor/tests/swap_hotkey.rs | 4 +--- pallets/subtensor/tests/uids.rs | 27 ++++++++++++++++++++---- 5 files changed, 53 insertions(+), 17 deletions(-) diff 
--git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 4f553ed7a..cc3d7d025 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -67,6 +67,7 @@ pub mod pallet { traits::{ tokens::fungible, OriginTrait, QueryPreimage, StorePreimage, UnfilteredDispatchable, }, + BoundedVec, }; use frame_system::pallet_prelude::*; use sp_core::H256; @@ -133,11 +134,31 @@ pub mod pallet { /// Struct for NeuronCertificate. pub type NeuronCertificateOf = NeuronCertificate; /// Data structure for NeuronCertificate information. - #[freeze_struct("e6193a76002d491")] + #[freeze_struct("1c232be200d9ec6c")] #[derive(Decode, Encode, Default, TypeInfo, PartialEq, Eq, Clone, Debug)] pub struct NeuronCertificate { - /// The neuron certificate. - pub certificate: Vec<u8>, + /// The neuron TLS public key + pub public_key: BoundedVec<u8, ConstU32<64>>, + /// The algorithm used to generate the public key + pub algorithm: u8, + } + + impl TryFrom<Vec<u8>> for NeuronCertificate { + type Error = (); + + fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> { + if value.len() > 65 { + return Err(()); + } + // take the first byte as the algorithm + let algorithm = value.first().ok_or(())?; + // and the rest as the public_key + let certificate = value.get(1..).ok_or(())?.to_vec(); + Ok(Self { + public_key: BoundedVec::try_from(certificate).map_err(|_| ())?, + algorithm: *algorithm, + }) + } } /// Struct for Prometheus. diff --git a/pallets/subtensor/src/subnets/serving.rs b/pallets/subtensor/src/subnets/serving.rs index 22550fb93..7e2b9a0f0 100644 --- a/pallets/subtensor/src/subnets/serving.rs +++ b/pallets/subtensor/src/subnets/serving.rs @@ -92,11 +92,9 @@ impl<T: Config> Pallet<T> { // Check certificate if let Some(certificate) = certificate { - NeuronCertificates::<T>::insert( - netuid, - hotkey_id.clone(), - NeuronCertificate { certificate }, - ) + if let Ok(certificate) = NeuronCertificateOf::try_from(certificate) { + NeuronCertificates::<T>::insert(netuid, hotkey_id.clone(), certificate) + } } // We insert the axon meta.
diff --git a/pallets/subtensor/tests/serving.rs b/pallets/subtensor/tests/serving.rs index 17b8d2144..4516c3b05 100644 --- a/pallets/subtensor/tests/serving.rs +++ b/pallets/subtensor/tests/serving.rs @@ -131,7 +131,7 @@ fn test_serving_tls_ok() { let stored_certificate = NeuronCertificates::::get(netuid, hotkey_account_id) .expect("Certificate should exist"); - assert_eq!(stored_certificate.certificate, certificate); + assert_eq!(stored_certificate.public_key.clone().into_inner(), certificate.get(1..).expect("Certificate should exist")); let new_certificate = "UPDATED_CERT".as_bytes().to_vec(); assert_ok!(SubtensorModule::serve_axon_tls( <::RuntimeOrigin>::signed(hotkey_account_id), @@ -147,7 +147,7 @@ fn test_serving_tls_ok() { )); let stored_certificate = NeuronCertificates::::get(netuid, hotkey_account_id) .expect("Certificate should exist"); - assert_eq!(stored_certificate.certificate, new_certificate) + assert_eq!(stored_certificate.public_key.clone().into_inner(), new_certificate.get(1..).expect("Certificate should exist")); }); } diff --git a/pallets/subtensor/tests/swap_hotkey.rs b/pallets/subtensor/tests/swap_hotkey.rs index 845338be9..89938e3eb 100644 --- a/pallets/subtensor/tests/swap_hotkey.rs +++ b/pallets/subtensor/tests/swap_hotkey.rs @@ -319,9 +319,7 @@ fn test_swap_certificates() { let new_hotkey = U256::from(2); let coldkey = U256::from(3); let netuid = 0u16; - let certificate = NeuronCertificate { - certificate: vec![1, 2, 3], - }; + let certificate = NeuronCertificate::try_from(vec![1, 2, 3]).unwrap(); let mut weight = Weight::zero(); add_network(netuid, 0, 1); diff --git a/pallets/subtensor/tests/uids.rs b/pallets/subtensor/tests/uids.rs index 827d4ec1a..6b4c00328 100644 --- a/pallets/subtensor/tests/uids.rs +++ b/pallets/subtensor/tests/uids.rs @@ -1,7 +1,7 @@ #![allow(clippy::unwrap_used)] use crate::mock::*; -use frame_support::assert_ok; +use frame_support::{assert_err, assert_ok}; use frame_system::Config; use pallet_subtensor::*; use 
sp_core::U256; @@ -33,9 +33,7 @@ fn test_replace_neuron() { let new_hotkey_account_id = U256::from(2); let _new_colkey_account_id = U256::from(12345); - let certificate = NeuronCertificate { - certificate: vec![1, 2, 3], - }; + let certificate = NeuronCertificate::try_from(vec![1, 2, 3]).unwrap(); //add network add_network(netuid, tempo, 0); @@ -382,3 +380,24 @@ fn test_replace_neuron_multiple_subnets_unstake_all() { ); }); } + +#[test] +fn test_neuron_certificate() { + new_test_ext(1).execute_with(|| { + // 512 bits key + let mut data = [0; 65].to_vec(); + assert_ok!(NeuronCertificate::try_from(data)); + + // 256 bits key + data = [1; 33].to_vec(); + assert_ok!(NeuronCertificate::try_from(data)); + + // too much data + data = [8; 88].to_vec(); + assert_err!(NeuronCertificate::try_from(data), ()); + + // no data + data = vec![]; + assert_err!(NeuronCertificate::try_from(data), ()); + }); +} From 162453ecb636dc028a682428b2a8837dc3a5a3a4 Mon Sep 17 00:00:00 2001 From: Andreea Popescu Date: Mon, 9 Sep 2024 14:04:20 +0800 Subject: [PATCH 024/213] spec version and fmt --- pallets/subtensor/tests/serving.rs | 10 ++++++++-- runtime/src/lib.rs | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/pallets/subtensor/tests/serving.rs b/pallets/subtensor/tests/serving.rs index 4516c3b05..6bc30c76f 100644 --- a/pallets/subtensor/tests/serving.rs +++ b/pallets/subtensor/tests/serving.rs @@ -131,7 +131,10 @@ fn test_serving_tls_ok() { let stored_certificate = NeuronCertificates::::get(netuid, hotkey_account_id) .expect("Certificate should exist"); - assert_eq!(stored_certificate.public_key.clone().into_inner(), certificate.get(1..).expect("Certificate should exist")); + assert_eq!( + stored_certificate.public_key.clone().into_inner(), + certificate.get(1..).expect("Certificate should exist") + ); let new_certificate = "UPDATED_CERT".as_bytes().to_vec(); assert_ok!(SubtensorModule::serve_axon_tls( <::RuntimeOrigin>::signed(hotkey_account_id), @@ -147,7 +150,10 @@ fn 
test_serving_tls_ok() { )); let stored_certificate = NeuronCertificates::::get(netuid, hotkey_account_id) .expect("Certificate should exist"); - assert_eq!(stored_certificate.public_key.clone().into_inner(), new_certificate.get(1..).expect("Certificate should exist")); + assert_eq!( + stored_certificate.public_key.clone().into_inner(), + new_certificate.get(1..).expect("Certificate should exist") + ); }); } diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 0d952c2bb..884c9d85c 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -142,7 +142,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. - spec_version: 196, + spec_version: 197, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, From e84da1546f2f83c6d4900264ae5b48517c156f41 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 10:01:26 -0400 Subject: [PATCH 025/213] import original script --- scripts/merged_script.sh | 102 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 scripts/merged_script.sh diff --git a/scripts/merged_script.sh b/scripts/merged_script.sh new file mode 100644 index 000000000..030fd8efa --- /dev/null +++ b/scripts/merged_script.sh @@ -0,0 +1,102 @@ +#!/bin/bash + + +usage() { + echo "Usage: $0 -v " + exit 1 +} + +while getopts ":v:" opt; do + case ${opt} in + v) + version=${OPTARG} + ;; + \?) + usage + ;; + esac +done + + +if [ -z "$version" ]; then + usage +fi + +echo "[INFO] Starting the process for version $version" + + +echo "[INFO] Building the project with wasm-pack" + +# Build command +wasm-pack build --release + +# Wasm Blob file path +artifact_path="pkg/my_wasm_project_bg.wasm" + + +if [ ! 
-f "$artifact_path" ]; then + echo "[ERROR] Artifact not found: $artifact_path" + exit 1 +fi + +echo "[INFO] Artifact found at $artifact_path" + +git checkout main + +git fetch origin --tags + +latest_tag=$(git describe --tags $(git rev-list --tags --max-count=1)) + +if [ -z "$latest_tag" ]; then + echo "No tags found in the repository." + exit 1 +fi + +commits=$(git log ${latest_tag}..main --pretty=format:"%H %ci %s" --reverse) + +# List PRs merged since the last release +pr_list=() +while read -r hash date time timezone message; do + if [[ $message =~ Merge\ pull\ request\ \#([0-9]+) ]]; then + pr_number=${BASH_REMATCH[1]} + pr_list+=("$pr_number") + fi +done <<< "$commits" + + +if [ ${#pr_list[@]} -eq 0 ]; then + echo "[ERROR] No PRs found since the last release" + exit 1 +fi + +echo -e "[INFO] PRs found: ${pr_list[*]}" + + +get_pr_title() { + local pr_number=$1 + gh pr view "$pr_number" --json title --jq '.title' +} + + +formatted_pr_list="PR numbers and their titles merged into main since the last tag ($latest_tag):\n\n" + +for pr_number in "${pr_list[@]}"; do + pr_title=$(get_pr_title "$pr_number") + formatted_pr_list+="* PR #${pr_number} - ${pr_title}\n" +done + + +formatted_pr_list=$(printf "%b" "$formatted_pr_list") + +echo -e "$formatted_pr_list" + + +echo "[INFO] Creating a new release with version $version and uploading the artifact" +if gh release create "$version" "$artifact_path" --title "$version" --notes "$formatted_pr_list"; then + echo "[INFO] Release created successfully" +else + echo "[ERROR] Failed to create the release" + exit 1 +fi + +echo "[INFO] Release $version created and tagged successfully." 
From 5810aa40325ef0f6bfbce2da86241dfdd6b71988 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 10:09:58 -0400 Subject: [PATCH 026/213] scaffold --- scripts/release_notes.rs | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100755 scripts/release_notes.rs diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs new file mode 100755 index 000000000..c9cb6e7b9 --- /dev/null +++ b/scripts/release_notes.rs @@ -0,0 +1,6 @@ +#!/usr/bin/env rust-script +// ^ `cargo install rust-script` to be able to run this script + +fn main() { + println!("hello world"); +} From e8562384af195f7bda880c7bcf4fa1dadc21314b Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 10:19:30 -0400 Subject: [PATCH 027/213] parsing of previous tag --- scripts/release_notes.rs | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index c9cb6e7b9..7277a1cb7 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -1,6 +1,24 @@ #!/usr/bin/env rust-script // ^ `cargo install rust-script` to be able to run this script +use core::fmt::Display; +use std::{env, process::Command}; + +fn eval(cmd: impl Display) -> String { + let output = Command::new("sh") + .arg("-c") + .arg(cmd.to_string()) + .output() + .expect("failed to execute process"); + String::from_utf8(output.stdout).unwrap().trim().to_string() +} + fn main() { - println!("hello world"); + let previous_tag = env::var("PREVIOUS_TAG").unwrap_or_else(|_| { + eval("git describe --abbrev=0 --tags $(git rev-list --tags --skip=1 --max-count=1)") + }); + if previous_tag.is_empty() { + panic!("PREVIOUS_TAG is not specified or invalid"); + } + println!("Previous tag: {}", previous_tag); } From d8b566b27e1515974262f2342c49b73487e65107 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 15:26:02 -0400 Subject: [PATCH 028/213] parse network and all tags --- scripts/release_notes.rs | 38 
++++++++++++++++++++++++++++++-------- 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 7277a1cb7..d3b42665c 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -1,7 +1,7 @@ #!/usr/bin/env rust-script // ^ `cargo install rust-script` to be able to run this script -use core::fmt::Display; +use core::{fmt::Display, str::FromStr}; use std::{env, process::Command}; fn eval(cmd: impl Display) -> String { @@ -13,12 +13,34 @@ fn eval(cmd: impl Display) -> String { String::from_utf8(output.stdout).unwrap().trim().to_string() } -fn main() { - let previous_tag = env::var("PREVIOUS_TAG").unwrap_or_else(|_| { - eval("git describe --abbrev=0 --tags $(git rev-list --tags --skip=1 --max-count=1)") - }); - if previous_tag.is_empty() { - panic!("PREVIOUS_TAG is not specified or invalid"); +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum Network { + Mainnet, + Testnet, +} + +impl FromStr for Network { + type Err = (); + + fn from_str(s: &str) -> Result { + match s { + "mainnet" => Ok(Network::Mainnet), + "testnet" => Ok(Network::Testnet), + _ => Err(()), + } } - println!("Previous tag: {}", previous_tag); +} + +fn main() { + let network = env::var("NETWORK") + .unwrap_or_else(|_| "mainnet".to_string()) + .parse::() + .unwrap_or_else(|_| panic!("Invalid NETWORK value")); + println!("Network: {:?}", network); + + let all_tags = env::var("PREVIOUS_TAG") + .unwrap_or_else(|_| eval("git tag --sort=-creatordate")) + .split("\n") + .map(|s| s.trim().to_string()) + .collect::>(); } From 65b107eb372b3ba09aa072791d612136a0212780 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 16:03:41 -0400 Subject: [PATCH 029/213] resolve previous tag --- scripts/release_notes.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index d3b42665c..28279807e 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -43,4 
+43,17 @@ fn main() { .split("\n") .map(|s| s.trim().to_string()) .collect::>(); + + let previous_tag = match network { + Network::Mainnet => all_tags + .iter() + .find(|tag| tag.starts_with("v") && !tag.ends_with("-pre-release")) + .expect("could not find a valid mainnet tag!"), + Network::Testnet => all_tags + .iter() + .find(|tag| tag.starts_with("v") && tag.ends_with("-pre-release")) + .expect("could not find a valid testnet tag!"), + }; + + println!("Previous Release Tag: {}", previous_tag); } From e4efa4f81f4a5931b50397cf38d997ec13d58172 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 16:50:00 -0400 Subject: [PATCH 030/213] narrow down to filtered merges --- scripts/release_notes.rs | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 28279807e..99f94a7c7 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -54,6 +54,32 @@ fn main() { .find(|tag| tag.starts_with("v") && tag.ends_with("-pre-release")) .expect("could not find a valid testnet tag!"), }; + println!("Previous release tag: {}", previous_tag); - println!("Previous Release Tag: {}", previous_tag); + println!(""); + println!( + "Generating release notes for all merges since {}...", + previous_tag, + ); + + let merges = eval(format!( + "git log --merges --pretty=format:'%s' {}..HEAD", + previous_tag + )) + .split("\n") + .map(|s| s.trim().to_string()) + .filter(|s| { + !s.is_empty() + && s.starts_with("Merge pull request #") + && !s.ends_with("from opentensor/devnet-ready") + && !s.ends_with("from opentensor/testnet-ready") + && !s.ends_with("from opentensor/devnet") + && !s.ends_with("from opentensor/testnet") + }) + .collect::>(); + + println!(""); + println!("Filtered merges:\n{}", merges.join("\n")); + + println!(""); } From 67e50b8980c6ba683d85c5bd9480447fccec6405 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 17:48:42 -0400 Subject: [PATCH 
031/213] branch selection, fallible eval method --- scripts/release_notes.rs | 39 +++++++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 8 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 99f94a7c7..54b5b3dec 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -4,13 +4,23 @@ use core::{fmt::Display, str::FromStr}; use std::{env, process::Command}; -fn eval(cmd: impl Display) -> String { +fn eval(cmd: impl Display, print: bool) -> Result { + if print { + println!("$ {}", cmd); + } let output = Command::new("sh") .arg("-c") .arg(cmd.to_string()) .output() .expect("failed to execute process"); - String::from_utf8(output.stdout).unwrap().trim().to_string() + if print { + println!("{}", String::from_utf8(output.stdout.clone()).unwrap()); + eprintln!("{}", String::from_utf8(output.stderr.clone()).unwrap()); + } + if !output.status.success() { + return Err(String::from_utf8(output.stderr).unwrap()); + } + Ok(String::from_utf8(output.stdout).unwrap().trim().to_string()) } #[derive(Copy, Clone, PartialEq, Eq, Debug)] @@ -39,7 +49,7 @@ fn main() { println!("Network: {:?}", network); let all_tags = env::var("PREVIOUS_TAG") - .unwrap_or_else(|_| eval("git tag --sort=-creatordate")) + .unwrap_or_else(|_| eval("git tag --sort=-creatordate", false).unwrap()) .split("\n") .map(|s| s.trim().to_string()) .collect::>(); @@ -56,16 +66,29 @@ fn main() { }; println!("Previous release tag: {}", previous_tag); + let branch = env::var("BRANCH").unwrap_or( + match network { + Network::Mainnet => "main", + Network::Testnet => "testnet", + } + .to_string(), + ); + println!("Branch: {}", branch); + eval(format!("git checkout {}", branch), true).unwrap(); + println!(""); println!( "Generating release notes for all merges since {}...", previous_tag, ); - - let merges = eval(format!( - "git log --merges --pretty=format:'%s' {}..HEAD", - previous_tag - )) + let merges = eval( + format!( + "git log --merges 
--pretty=format:'%s' {}..HEAD", + previous_tag + ), + false, + ) + .unwrap() .split("\n") .map(|s| s.trim().to_string()) .filter(|s| { From b7a46e62372ff16367bee2f668f8fe6436a8b16c Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 17:51:12 -0400 Subject: [PATCH 032/213] tweak --- scripts/release_notes.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 54b5b3dec..c990c9db4 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -76,7 +76,6 @@ fn main() { println!("Branch: {}", branch); eval(format!("git checkout {}", branch), true).unwrap(); - println!(""); println!( "Generating release notes for all merges since {}...", previous_tag, From 2d746cec73629e47763a1b3d7f8a3c44fc779a7c Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 18:47:09 -0400 Subject: [PATCH 033/213] WIP --- scripts/release_notes.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index c990c9db4..3192a7e27 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -68,13 +68,12 @@ fn main() { let branch = env::var("BRANCH").unwrap_or( match network { - Network::Mainnet => "main", - Network::Testnet => "testnet", + Network::Mainnet => "testnet", + Network::Testnet => "devnet", } .to_string(), ); println!("Branch: {}", branch); - eval(format!("git checkout {}", branch), true).unwrap(); println!( "Generating release notes for all merges since {}...", @@ -82,8 +81,9 @@ fn main() { ); let merges = eval( format!( - "git log --merges --pretty=format:'%s' {}..HEAD", - previous_tag + "git log --merges --pretty=format:'%s' {}..{}", + previous_tag, + branch // Replace HEAD with branch variable ), false, ) From d442ebd6c366316cfa4bf00b8fcfd85caef095e1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 09:52:56 -0400 Subject: [PATCH 034/213] get PR titles --- scripts/release_notes.rs | 17 
+++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 3192a7e27..fec8c88df 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -104,4 +104,21 @@ fn main() { println!("Filtered merges:\n{}", merges.join("\n")); println!(""); + let pr_numbers = merges + .iter() + .map(|s| s.split(" ").collect::>()[3].trim_start_matches("#")) + .collect::>(); + println!("PR numbers:\n{}", pr_numbers.join("\n")); + + println!(""); + let pr_titles = pr_numbers + .iter() + .map(|pr_number| { + eval(format!("gh pr view {} --json title", pr_number), false) + .unwrap() + .trim() + .to_string() + }) + .collect::>(); + println!("PR titles:\n{}", pr_titles.join("\n")); } From 70a433039e127d335609bfd0fdd09522103b0392 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 10:35:15 -0400 Subject: [PATCH 035/213] resolve PR titles properly --- scripts/release_notes.rs | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index fec8c88df..02d391593 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -108,17 +108,28 @@ fn main() { .iter() .map(|s| s.split(" ").collect::>()[3].trim_start_matches("#")) .collect::>(); - println!("PR numbers:\n{}", pr_numbers.join("\n")); + println!("PR numbers:\n{:?}", pr_numbers); println!(""); + println!("Fetching PR titles..."); let pr_titles = pr_numbers .iter() .map(|pr_number| { - eval(format!("gh pr view {} --json title", pr_number), false) + print!("#{}: ", pr_number); + let title = eval(format!("gh pr view {} --json title", pr_number), false) .unwrap() .trim() - .to_string() + .to_string(); + if !title.starts_with("{\"title\":\"") { + panic!("Malformed PR title: {}", title); + } + let title = title + .trim_start_matches("{\"title\":\"") + .trim_end_matches("\"}") + .trim() + .to_string(); + println!("{}", title); + title }) .collect::>(); - println!("PR 
titles:\n{}", pr_titles.join("\n")); } From 66cb0f5150cff8626214d2d297670349876f0332 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 11:25:37 -0400 Subject: [PATCH 036/213] authors working :tada: --- scripts/release_notes.rs | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 02d391593..72ca53ada 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -132,4 +132,38 @@ fn main() { title }) .collect::>(); + + println!(""); + println!("Fetching PR authors..."); + let pr_authors = pr_numbers + .iter() + .map(|pr_number| { + print!("#{}: ", pr_number); + let author = eval( + format!("gh pr view {} --json author | jq .author.login", pr_number), + false, + ) + .unwrap() + .trim() + .trim_start_matches("\"") + .trim_end_matches("\"") + .to_string(); + println!("{}", author); + author + }) + .collect::>(); + + println!(""); + println!("generated release notes:"); + let release_notes = "\n## What's Changed\n".to_string(); + let release_notes = release_notes + + &pr_numbers + .iter() + .zip(pr_titles.iter()) + .zip(pr_authors.iter()) + .map(|((pr_number, pr_title), pr_author)| { + format!("- {} in #{} by @{}\n", pr_title, pr_number, pr_author) + }) + .collect::(); + println!("{}", release_notes); } From ebdd014feebd28233f956549b33a6e6d323fdaa9 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 11:37:19 -0400 Subject: [PATCH 037/213] working --- scripts/release_notes.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 72ca53ada..90ef17868 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -82,8 +82,7 @@ fn main() { let merges = eval( format!( "git log --merges --pretty=format:'%s' {}..{}", - previous_tag, - branch // Replace HEAD with branch variable + branch, previous_tag, ), false, ) From 51bc0a6abd1aabe2499c0371b6ac04b460bb06e1 Mon Sep 17 
00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 11:58:36 -0400 Subject: [PATCH 038/213] write release notes to /tmp/release_notes.md --- scripts/release_notes.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 90ef17868..73cbde4d9 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -154,7 +154,7 @@ fn main() { println!(""); println!("generated release notes:"); - let release_notes = "\n## What's Changed\n".to_string(); + let release_notes = "## What's Changed\n".to_string(); let release_notes = release_notes + &pr_numbers .iter() @@ -165,4 +165,9 @@ fn main() { }) .collect::(); println!("{}", release_notes); + + println!(""); + println!("writing release notes to /tmp/release_notes.md"); + std::fs::write("/tmp/release_notes.md", release_notes).unwrap(); + println!("done!"); } From da11c43e9d59b0580f83e6cd246f58c10e1c15aa Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:13:06 -0400 Subject: [PATCH 039/213] automatically re-run deployment check when labels changed --- .github/workflows/check-devnet.yml | 2 ++ .github/workflows/check-finney.yml | 4 +++- .github/workflows/check-testnet.yml | 2 ++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 3d7f17723..1a7ae4aa3 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -3,6 +3,8 @@ name: Devnet Deploy Check on: pull_request: branches: [devnet, devnet-ready] + pull_request_target: + types: [labeled, unlabeled] env: CARGO_TERM_COLOR: always diff --git a/.github/workflows/check-finney.yml b/.github/workflows/check-finney.yml index 3e9fb5994..52600806d 100644 --- a/.github/workflows/check-finney.yml +++ b/.github/workflows/check-finney.yml @@ -3,6 +3,8 @@ name: Finney Deploy Check on: pull_request: branches: [finney, main] + pull_request_target: + types: [labeled, 
unlabeled] env: CARGO_TERM_COLOR: always @@ -51,4 +53,4 @@ jobs: runtime-package: "node-subtensor-runtime" node-uri: "wss://entrypoint-finney.opentensor.ai:443" checks: "pre-and-post" - extra-args: "--disable-spec-version-check --no-weight-warnings" \ No newline at end of file + extra-args: "--disable-spec-version-check --no-weight-warnings" diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index c18b45ac2..6c3ebcfdf 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -3,6 +3,8 @@ name: Testnet Deploy Check on: pull_request: branches: [testnet, testnet-ready] + pull_request_target: + types: [labeled, unlabeled] env: CARGO_TERM_COLOR: always From 6cfc5c2c513212d11203a0a2aa5883bb80ee84d0 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:20:48 -0400 Subject: [PATCH 040/213] tweak --- .github/workflows/check-testnet.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 6c3ebcfdf..39847767b 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -13,7 +13,7 @@ jobs: check-spec-version: name: Check spec_version bump runs-on: SubtensorCI - if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-spec-version-bump') }} + if: ${{ github.event.pull_request.labels | contains('no-spec-version-bump') == false }} steps: - name: Dependencies run: | From 5b3390a792d55596d3df05a34259ee60e80d56c8 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:33:10 -0400 Subject: [PATCH 041/213] debug --- .github/workflows/check-testnet.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 39847767b..09963b152 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -10,6 +10,13 @@ env: CARGO_TERM_COLOR: always jobs: + 
debug-labels: + name: Debug Labels + runs-on: SubtensorCI + steps: + - name: Debug Labels + run: | + echo "Labels: ${{ toJson(github.event.pull_request.labels) }}" check-spec-version: name: Check spec_version bump runs-on: SubtensorCI From 4ecbdcd74dcdc04ddbfd5e52f5374b7c15938a3e Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:36:15 -0400 Subject: [PATCH 042/213] bump CI From ccfb417dfe6b84f6b7744b573281cb1f8827e1fd Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:37:56 -0400 Subject: [PATCH 043/213] whoops --- .github/workflows/check-devnet.yml | 7 +++++++ .github/workflows/check-testnet.yml | 7 ------- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 1a7ae4aa3..65a4c88dc 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -10,6 +10,13 @@ env: CARGO_TERM_COLOR: always jobs: + debug-labels: + name: Debug Labels + runs-on: SubtensorCI + steps: + - name: Debug Labels + run: | + echo "Labels: ${{ toJson(github.event.pull_request.labels) }}" check-spec-version: name: Check spec_version bump runs-on: SubtensorCI diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 09963b152..39847767b 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -10,13 +10,6 @@ env: CARGO_TERM_COLOR: always jobs: - debug-labels: - name: Debug Labels - runs-on: SubtensorCI - steps: - - name: Debug Labels - run: | - echo "Labels: ${{ toJson(github.event.pull_request.labels) }}" check-spec-version: name: Check spec_version bump runs-on: SubtensorCI From 72454a64b220f51d25e8df3143aaf7ddf01961ff Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:39:27 -0400 Subject: [PATCH 044/213] fix conditional --- .github/workflows/check-testnet.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 39847767b..6c3ebcfdf 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -13,7 +13,7 @@ jobs: check-spec-version: name: Check spec_version bump runs-on: SubtensorCI - if: ${{ github.event.pull_request.labels | contains('no-spec-version-bump') == false }} + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-spec-version-bump') }} steps: - name: Dependencies run: | From 2ca5ce5dd06492e3fe4559b596e35cbe031ff31b Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:39:45 -0400 Subject: [PATCH 045/213] remove label debug step --- .github/workflows/check-devnet.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 65a4c88dc..1a7ae4aa3 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -10,13 +10,6 @@ env: CARGO_TERM_COLOR: always jobs: - debug-labels: - name: Debug Labels - runs-on: SubtensorCI - steps: - - name: Debug Labels - run: | - echo "Labels: ${{ toJson(github.event.pull_request.labels) }}" check-spec-version: name: Check spec_version bump runs-on: SubtensorCI From 6210795d43573ca1c223a097f76956cffa33c038 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:43:28 -0400 Subject: [PATCH 046/213] tweak triggers --- .github/workflows/check-devnet.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 1a7ae4aa3..b27542d94 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -3,8 +3,9 @@ name: Devnet Deploy Check on: pull_request: branches: [devnet, devnet-ready] - pull_request_target: types: [labeled, unlabeled] + push: + branches: [devnet, devnet-ready] env: CARGO_TERM_COLOR: always From b61646fa50d7484f9f0db78dac5068513b5eedba Mon Sep 17 00:00:00 
2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:46:53 -0400 Subject: [PATCH 047/213] check on-push From dfbd6de262f20a923543d46fe88c7c9158d5a407 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:48:45 -0400 Subject: [PATCH 048/213] fix --- .github/workflows/check-devnet.yml | 4 +--- .github/workflows/check-finney.yml | 3 +-- .github/workflows/check-testnet.yml | 3 +-- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index b27542d94..2cb586348 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -3,9 +3,7 @@ name: Devnet Deploy Check on: pull_request: branches: [devnet, devnet-ready] - types: [labeled, unlabeled] - push: - branches: [devnet, devnet-ready] + types: [labeled, unlabeled, synchronize] env: CARGO_TERM_COLOR: always diff --git a/.github/workflows/check-finney.yml b/.github/workflows/check-finney.yml index 52600806d..665c9c8a9 100644 --- a/.github/workflows/check-finney.yml +++ b/.github/workflows/check-finney.yml @@ -3,8 +3,7 @@ name: Finney Deploy Check on: pull_request: branches: [finney, main] - pull_request_target: - types: [labeled, unlabeled] + types: [labeled, unlabeled, synchronize] env: CARGO_TERM_COLOR: always diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 6c3ebcfdf..95277c94a 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -3,8 +3,7 @@ name: Testnet Deploy Check on: pull_request: branches: [testnet, testnet-ready] - pull_request_target: - types: [labeled, unlabeled] + types: [labeled, unlabeled, synchronize] env: CARGO_TERM_COLOR: always From 51935edf63fc589b4e3055ad4d1e4cb179e06909 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:49:43 -0400 Subject: [PATCH 049/213] remove merged_script.sh --- scripts/merged_script.sh | 102 --------------------------------------- 1 file changed, 102 
deletions(-) delete mode 100644 scripts/merged_script.sh diff --git a/scripts/merged_script.sh b/scripts/merged_script.sh deleted file mode 100644 index 030fd8efa..000000000 --- a/scripts/merged_script.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash - - -usage() { - echo "Usage: $0 -v " - exit 1 -} - -while getopts ":v:" opt; do - case ${opt} in - v) - version=${OPTARG} - ;; - \?) - usage - ;; - esac -done - - -if [ -z "$version" ]; then - usage -fi - -echo "[INFO] Starting the process for version $version" - - -echo "[INFO] Building the project with wasm-pack" - -# Build command -wasm-pack build --release - -# Wasm Blob file path -artifact_path="pkg/my_wasm_project_bg.wasm" - - -if [ ! -f "$artifact_path" ]; then - echo "[ERROR] Artifact not found: $artifact_path" - exit 1 -fi - -echo "[INFO] Artifact found at $artifact_path" - -git checkout main - -git fetch origin --tags - -latest_tag=$(git describe --tags $(git rev-list --tags --max-count=1)) - -if [ -z "$latest_tag" ]; then - echo "No tags found in the repository." 
- exit 1 -fi - -commits=$(git log ${latest_tag}..main --pretty=format:"%H %ci %s" --reverse) - -# List PRs merged since the last release -pr_list=() -while read -r hash date time timezone message; do - if [[ $message =~ Merge\ pull\ request\ \#([0-9]+) ]]; then - pr_number=${BASH_REMATCH[1]} - pr_list+=("$pr_number") - fi -done <<< "$commits" - - -if [ ${#pr_list[@]} -eq 0 ]; then - echo "[ERROR] No PRs found since the last release" - exit 1 -fi - -echo -e "[INFO] PRs found: ${pr_list[*]}" - - -get_pr_title() { - local pr_number=$1 - gh pr view "$pr_number" --json title --jq '.title' -} - - -formatted_pr_list="PR numbers and their titles merged into main since the last tag ($latest_tag):\n\n" - -for pr_number in "${pr_list[@]}"; do - pr_title=$(get_pr_title "$pr_number") - formatted_pr_list+="* PR #${pr_number} - ${pr_title}\n" -done - - -formatted_pr_list=$(printf "%b" "$formatted_pr_list") - -echo -e "$formatted_pr_list" - - -echo "[INFO] Creating a new release with version $version and uploading the artifact" -if gh release create "$version" "$artifact_path" --title "$version" --notes "$formatted_pr_list"; then - echo "[INFO] Release created successfully" -else - echo "[ERROR] Failed to create the release" - exit 1 -fi - -echo "[INFO] Release $version created and tagged successfully." 
From 85e8443c7e2c57739d02eca9b77cfaf3d52a8b48 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 17:40:47 -0400 Subject: [PATCH 050/213] ignore manual_inspect clippy error --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index f9a7968b9..4f162ca6b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,6 +41,7 @@ indexing-slicing = "deny" arithmetic-side-effects = "deny" type_complexity = "allow" unwrap-used = "deny" +manual_inspect = "allow" [workspace.dependencies] cargo-husky = { version = "1", default-features = false } From 90ba835222708d5829bb9541e401754678976566 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 17:43:14 -0400 Subject: [PATCH 051/213] fix workspace lint error --- build.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/build.rs b/build.rs index 10cac0ea7..4739a8aca 100644 --- a/build.rs +++ b/build.rs @@ -75,7 +75,9 @@ fn collect_rust_files(dir: &Path) -> Vec { let mut rust_files = Vec::new(); for entry in WalkDir::new(dir) { - let entry = entry.unwrap(); + let Ok(entry) = entry else { + continue; + }; let path = entry.path(); // Skip any path that contains "target" directory From f37d9c1ba89937cdc06754b035c17c7b856924eb Mon Sep 17 00:00:00 2001 From: Liam Date: Wed, 11 Sep 2024 11:30:06 +0200 Subject: [PATCH 052/213] fix node dep on runtime-benchmarks --- Dockerfile | 2 +- justfile | 2 +- node/src/service.rs | 10 +++++----- scripts/build.sh | 3 +-- scripts/localnet.sh | 12 ++++++------ 5 files changed, 14 insertions(+), 15 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2dd2e2370..9edb2749b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,7 +28,7 @@ COPY . 
/build WORKDIR /build # Build the project -RUN cargo build -p node-subtensor --profile production --features="runtime-benchmarks metadata-hash" --locked +RUN cargo build -p node-subtensor --profile production --features="metadata-hash" --locked # Verify the binary was produced RUN test -e /build/target/production/node-subtensor diff --git a/justfile b/justfile index f99f3913a..a753f7bb2 100644 --- a/justfile +++ b/justfile @@ -51,4 +51,4 @@ lint: production: @echo "Running cargo build with metadata-hash generation..." - cargo +{{RUSTV}} build --profile production --features="runtime-benchmarks metadata-hash" + cargo +{{RUSTV}} build --profile production --features="metadata-hash" diff --git a/node/src/service.rs b/node/src/service.rs index 9a19ae354..074530dff 100644 --- a/node/src/service.rs +++ b/node/src/service.rs @@ -36,12 +36,12 @@ impl HostFunctions for ExecutorDispatch { } impl sc_executor::NativeExecutionDispatch for ExecutorDispatch { - // Only enable the benchmarking host functions when we actually want to benchmark. - #[cfg(feature = "runtime-benchmarks")] + // Always enable runtime benchmark host functions, the genesis state + // was built with them so we're stuck with them forever. + // + // They're just a noop, never actually get used if the runtime was not compiled with + // `runtime-benchmarks`. type ExtendHostFunctions = frame_benchmarking::benchmarking::HostFunctions; - // Otherwise we only use the default Substrate host functions. 
- #[cfg(not(feature = "runtime-benchmarks"))] - type ExtendHostFunctions = (); fn dispatch(method: &str, data: &[u8]) -> Option> { node_subtensor_runtime::api::dispatch(method, data) diff --git a/scripts/build.sh b/scripts/build.sh index 3f588a1cc..b64c177d6 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -1,2 +1 @@ -cargo build --profile production --features "runtime-benchmarks metadata-hash" - +cargo build --profile production --features "metadata-hash" diff --git a/scripts/localnet.sh b/scripts/localnet.sh index 850a314d8..582d0478d 100755 --- a/scripts/localnet.sh +++ b/scripts/localnet.sh @@ -3,10 +3,10 @@ # Check if `--no-purge` passed as a parameter NO_PURGE=0 for arg in "$@"; do - if [ "$arg" = "--no-purge" ]; then - NO_PURGE=1 - break - fi + if [ "$arg" = "--no-purge" ]; then + NO_PURGE=1 + break + fi done # Determine the directory this script resides in. This allows invoking it from any location. @@ -25,13 +25,13 @@ if [ "$fast_blocks" == "False" ]; then echo "fast_blocks is Off" : "${CHAIN:=local}" : "${BUILD_BINARY:=1}" - : "${FEATURES:="pow-faucet runtime-benchmarks"}" + : "${FEATURES:="pow-faucet"}" else # Block of code to execute if fast_blocks is not False echo "fast_blocks is On" : "${CHAIN:=local}" : "${BUILD_BINARY:=1}" - : "${FEATURES:="pow-faucet runtime-benchmarks fast-blocks"}" + : "${FEATURES:="pow-faucet fast-blocks"}" fi SPEC_PATH="${SCRIPT_DIR}/specs/" From 13c7adabeacb9540e8556f121a36ba0946e99eea Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 16 Sep 2024 12:24:07 -0400 Subject: [PATCH 053/213] add pallet index lint --- support/linting/src/lib.rs | 2 + support/linting/src/pallet_index.rs | 128 ++++++++++++++++++++++++++++ 2 files changed, 130 insertions(+) create mode 100644 support/linting/src/pallet_index.rs diff --git a/support/linting/src/lib.rs b/support/linting/src/lib.rs index d02a70a2b..e5416c1d5 100644 --- a/support/linting/src/lib.rs +++ b/support/linting/src/lib.rs @@ -1,6 +1,8 @@ pub mod lint; pub use lint::*; 
+mod pallet_index; mod require_freeze_struct; +pub use pallet_index::RequireExplicitPalletIndex; pub use require_freeze_struct::RequireFreezeStruct; diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs new file mode 100644 index 000000000..2b80f7d57 --- /dev/null +++ b/support/linting/src/pallet_index.rs @@ -0,0 +1,128 @@ +use super::*; +use syn::parse::{Parse, ParseStream}; +use syn::punctuated::Punctuated; +use syn::token::Colon; +use syn::visit::Visit; +use syn::{File, ItemMacro, Token}; + +pub struct RequireExplicitPalletIndex; + +impl Lint for RequireExplicitPalletIndex { + fn lint(source: &File) -> Result { + let mut visitor = ConstructRuntimeVisitor::default(); + + visitor.visit_file(source); + + if !visitor.errors.is_empty() { + return Err(visitor.errors); + } + + Ok(()) + } +} + +#[derive(Default)] +struct ConstructRuntimeVisitor { + errors: Vec, +} + +impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { + fn visit_item_macro(&mut self, node: &'ast ItemMacro) { + if node.mac.path.is_ident("construct_runtime") { + let tokens = node.mac.tokens.clone(); + if let Ok(runtime_entries) = syn::parse2::(tokens) { + for entry in runtime_entries.entries { + if entry.index.is_none() { + self.errors.push(syn::Error::new( + entry.pallet_name.span(), + format!( + "Pallet `{}` does not have an explicit index in construct_runtime!", + entry.pallet_name + ), + )); + } + } + } + } + + syn::visit::visit_item_macro(self, node); + } +} + +struct ConstructRuntimeEntries { + entries: Punctuated, +} + +impl Parse for ConstructRuntimeEntries { + fn parse(input: ParseStream) -> syn::Result { + Ok(ConstructRuntimeEntries { + entries: input.parse_terminated(PalletEntry::parse, Token![,])?, + }) + } +} + +struct PalletEntry { + pallet_name: syn::Ident, + index: Option, +} + +impl Parse for PalletEntry { + fn parse(input: ParseStream) -> syn::Result { + let pallet_name: syn::Ident = input.parse()?; + let index = if input.peek(Colon) { + 
input.parse::()?; + Some(input.parse::()?) + } else { + None + }; + Ok(PalletEntry { pallet_name, index }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn lint_macro(input: &str) -> Result { + let item_macro: ItemMacro = syn::parse_str(input).expect("should only use on a macro"); + let mut visitor = ConstructRuntimeVisitor::default(); + visitor.visit_item_macro(&item_macro); + if !visitor.errors.is_empty() { + return Err(visitor.errors); + } + Ok(()) + } + + #[test] + fn test_no_pallet_index() { + let input = r#" + construct_runtime!( + PalletA, + PalletB + ); + "#; + assert!(lint_macro(input).is_err()); + } + + #[test] + fn test_with_pallet_index() { + let input = r#" + construct_runtime!( + PalletA: 0, + PalletB: 1 + ); + "#; + assert!(lint_macro(input).is_ok()); + } + + #[test] + fn test_mixed_pallet_index() { + let input = r#" + construct_runtime!( + PalletA, + PalletB: 1 + ); + "#; + assert!(lint_macro(input).is_err()); + } +} From 3fd76968e99d2c87b6ada99923c70f253fd506d6 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 17 Sep 2024 01:12:22 -0400 Subject: [PATCH 054/213] WIP --- build.rs | 1 + support/linting/src/pallet_index.rs | 116 ++++++++++++++++++++++++---- 2 files changed, 100 insertions(+), 17 deletions(-) diff --git a/build.rs b/build.rs index 4739a8aca..85388fd6c 100644 --- a/build.rs +++ b/build.rs @@ -60,6 +60,7 @@ fn main() { }; track_lint(RequireFreezeStruct::lint(&parsed_file)); + track_lint(RequireExplicitPalletIndex::lint(&parsed_file)); }); // Collect and print all errors after the parallel processing is done diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 2b80f7d57..6e0633d51 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -1,9 +1,12 @@ use super::*; +use quote::ToTokens; +use syn::braced; use syn::parse::{Parse, ParseStream}; use syn::punctuated::Punctuated; +use syn::spanned::Spanned; use syn::token::Colon; use syn::visit::Visit; -use 
syn::{File, ItemMacro, Token}; +use syn::{File, Ident, ItemMacro, Path, Token, Visibility}; pub struct RequireExplicitPalletIndex; @@ -29,22 +32,24 @@ struct ConstructRuntimeVisitor { impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { fn visit_item_macro(&mut self, node: &'ast ItemMacro) { if node.mac.path.is_ident("construct_runtime") { + // Token stream parsing logic let tokens = node.mac.tokens.clone(); - if let Ok(runtime_entries) = syn::parse2::(tokens) { - for entry in runtime_entries.entries { - if entry.index.is_none() { - self.errors.push(syn::Error::new( - entry.pallet_name.span(), - format!( - "Pallet `{}` does not have an explicit index in construct_runtime!", - entry.pallet_name - ), - )); - } + let runtime_entries = syn::parse2::(tokens).unwrap(); + for entry in runtime_entries.entries { + // Check if the entry is missing an explicit index + if entry.index.is_none() { + self.errors.push(syn::Error::new( + entry.pallet_name.span(), + format!( + "Pallet `{}` does not have an explicit index in construct_runtime!", + entry.pallet_name.to_token_stream().to_string().trim() + ), + )); } } } + // Continue visiting the rest of the file syn::visit::visit_item_macro(self, node); } } @@ -62,20 +67,64 @@ impl Parse for ConstructRuntimeEntries { } struct PalletEntry { - pallet_name: syn::Ident, - index: Option, + visibility: Option, + pallet_name: Path, + components: Option, + index: Option, // Now index can be None (i.e., missing) } impl Parse for PalletEntry { fn parse(input: ParseStream) -> syn::Result { - let pallet_name: syn::Ident = input.parse()?; + // Optionally parse visibility (e.g., `pub`) + let visibility: Option = input.parse().ok(); + + // Parse the pallet name (with possible generics and paths like `pallet_collective::::{ Pallet, Call, Storage }`) + let pallet_name = parse_complex_pallet_path(input)?; + + // Optionally parse the index if it's present let index = if input.peek(Colon) { input.parse::()?; Some(input.parse::()?) 
} else { - None + None // Missing index is allowed during parsing }; - Ok(PalletEntry { pallet_name, index }) + + Ok(PalletEntry { + visibility, + pallet_name, + components: None, // Components will be handled directly in `parse_complex_pallet_path` + index, + }) + } +} + +fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { + let mut path = Path::parse_mod_style(input)?; + + // Check if there are generics like `::` + if input.peek(syn::token::Lt) { + let _generics: syn::AngleBracketedGenericArguments = input.parse()?; + } + + // Now check for nested components in `{ Pallet, Call, Storage }` + if input.peek(syn::token::Brace) { + let content; + braced!(content in input); + let _: Punctuated = content.parse_terminated(Ident::parse, Token![,])?; + } + + Ok(path) +} + +struct PalletComponents { + components: Punctuated, +} + +impl Parse for PalletComponents { + fn parse(input: ParseStream) -> syn::Result { + Ok(PalletComponents { + components: input.parse_terminated(Ident::parse, Token![,])?, + }) } } @@ -125,4 +174,37 @@ mod tests { "#; assert!(lint_macro(input).is_err()); } + + #[test] + fn test_with_visibility_and_index() { + let input = r#" + construct_runtime!( + pub PalletA: 0, + PalletB: 1 + ); + "#; + assert!(lint_macro(input).is_ok()); + } + + #[test] + fn test_with_generic_and_index() { + let input = r#" + construct_runtime!( + PalletA, + pallet_collective::::{ Pallet, Call, Storage }: 1 + ); + "#; + assert!(lint_macro(input).is_ok()); + } + + #[test] + fn test_with_nested_and_missing_index() { + let input = r#" + construct_runtime!( + PalletA, + pallet_collective::::{ Pallet, Call, Storage } + ); + "#; + assert!(lint_macro(input).is_err()); + } } From b6c7165538fc2f40895386475e1cf0ac771d4974 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 17 Sep 2024 15:44:12 -0400 Subject: [PATCH 055/213] silence warnings --- support/linting/src/pallet_index.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git 
a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 6e0633d51..9b6723a49 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -67,9 +67,9 @@ impl Parse for ConstructRuntimeEntries { } struct PalletEntry { - visibility: Option, + _visibility: Option, pallet_name: Path, - components: Option, + _components: Option, index: Option, // Now index can be None (i.e., missing) } @@ -90,16 +90,16 @@ impl Parse for PalletEntry { }; Ok(PalletEntry { - visibility, + _visibility: visibility, pallet_name, - components: None, // Components will be handled directly in `parse_complex_pallet_path` + _components: None, // Components will be handled directly in `parse_complex_pallet_path` index, }) } } fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { - let mut path = Path::parse_mod_style(input)?; + let path = Path::parse_mod_style(input)?; // Check if there are generics like `::` if input.peek(syn::token::Lt) { @@ -117,13 +117,13 @@ fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { } struct PalletComponents { - components: Punctuated, + _components: Punctuated, } impl Parse for PalletComponents { fn parse(input: ParseStream) -> syn::Result { Ok(PalletComponents { - components: input.parse_terminated(Ident::parse, Token![,])?, + _components: input.parse_terminated(Ident::parse, Token![,])?, }) } } From 5b24374f697cd958e2a480725308680b39a27483 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 01:13:01 -0400 Subject: [PATCH 056/213] checkpoint --- support/linting/src/pallet_index.rs | 50 +++++++++++++++++++++++++---- 1 file changed, 44 insertions(+), 6 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 9b6723a49..9f77b4582 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -34,6 +34,7 @@ impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { if 
node.mac.path.is_ident("construct_runtime") { // Token stream parsing logic let tokens = node.mac.tokens.clone(); + println!("{}", tokens.to_string()); let runtime_entries = syn::parse2::(tokens).unwrap(); for entry in runtime_entries.entries { // Check if the entry is missing an explicit index @@ -78,7 +79,7 @@ impl Parse for PalletEntry { // Optionally parse visibility (e.g., `pub`) let visibility: Option = input.parse().ok(); - // Parse the pallet name (with possible generics and paths like `pallet_collective::::{ Pallet, Call, Storage }`) + // Parse the pallet name (handling complex paths with generics and nested components) let pallet_name = parse_complex_pallet_path(input)?; // Optionally parse the index if it's present @@ -92,25 +93,29 @@ impl Parse for PalletEntry { Ok(PalletEntry { _visibility: visibility, pallet_name, - _components: None, // Components will be handled directly in `parse_complex_pallet_path` + _components: None, // Components will be handled in `parse_complex_pallet_path` index, }) } } fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { - let path = Path::parse_mod_style(input)?; + // Start by parsing the base path (pallet name) + let mut path = input.parse::()?; - // Check if there are generics like `::` + // If there are generics like `::`, handle them if input.peek(syn::token::Lt) { let _generics: syn::AngleBracketedGenericArguments = input.parse()?; } - // Now check for nested components in `{ Pallet, Call, Storage }` + // Now handle nested components like `{ Pallet, Call, Storage }` if input.peek(syn::token::Brace) { let content; braced!(content in input); - let _: Punctuated = content.parse_terminated(Ident::parse, Token![,])?; + let components: Punctuated = + content.parse_terminated(Ident::parse, Token![,])?; + + // We can attach the components to the path, if necessary, or validate them separately. 
} Ok(path) @@ -207,4 +212,37 @@ mod tests { "#; assert!(lint_macro(input).is_err()); } + + #[test] + fn test_complex_construct_runtime() { + let input = r#" + pub struct Runtime { + System : frame_system = 0, + RandomnessCollectiveFlip : pallet_insecure_randomness_collective_flip = 1, + Timestamp : pallet_timestamp = 2, + Aura : pallet_aura = 3, + Grandpa : pallet_grandpa = 4, + Balances : pallet_balances = 5, + TransactionPayment : pallet_transaction_payment = 6, + SubtensorModule : pallet_subtensor = 7, + Triumvirate : pallet_collective::::{ Pallet, Call, Storage, Origin, Event, Config } = 8, + TriumvirateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 9, + SenateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 10, + Utility : pallet_utility = 11, + Sudo : pallet_sudo = 12, + Multisig : pallet_multisig = 13, + Preimage : pallet_preimage = 14, + Scheduler : pallet_scheduler = 15, + Proxy : pallet_proxy = 16, + Registry : pallet_registry = 17, + Commitments : pallet_commitments = 18, + AdminUtils : pallet_admin_utils = 19, + SafeMode : pallet_safe_mode = 20 + } + "#; + + // Call the lint function on this input to ensure it parses correctly + let result = lint_macro(input); + assert!(result.is_ok()); + } } From ca3c9f4c9be18fadb1342a1d8f2f49212c01299a Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 01:34:21 -0400 Subject: [PATCH 057/213] closer --- support/linting/src/pallet_index.rs | 107 ++++++++++++++++++---------- 1 file changed, 71 insertions(+), 36 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 9f77b4582..b0ae7d177 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -34,19 +34,27 @@ impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { if node.mac.path.is_ident("construct_runtime") { // Token stream parsing logic let tokens = node.mac.tokens.clone(); - println!("{}", tokens.to_string()); - let 
runtime_entries = syn::parse2::(tokens).unwrap(); - for entry in runtime_entries.entries { - // Check if the entry is missing an explicit index - if entry.index.is_none() { - self.errors.push(syn::Error::new( - entry.pallet_name.span(), - format!( - "Pallet `{}` does not have an explicit index in construct_runtime!", - entry.pallet_name.to_token_stream().to_string().trim() - ), - )); + + // Try parsing as runtime entries + if let Ok(runtime_entries) = syn::parse2::(tokens) { + for entry in runtime_entries.entries { + // Check if the entry is missing an explicit index + if entry.index.is_none() { + self.errors.push(syn::Error::new( + entry.pallet_name.span(), + format!( + "Pallet `{}` does not have an explicit index in construct_runtime!", + entry.pallet_name.to_token_stream().to_string().trim() + ), + )); + } } + } else { + // Handle other cases, e.g., enum/struct definitions inside construct_runtime + self.errors.push(syn::Error::new( + node.mac.span(), + "Failed to parse construct_runtime!", + )); } } @@ -214,31 +222,58 @@ mod tests { } #[test] - fn test_complex_construct_runtime() { + fn test_complex_construct_runtime_enum_should_fail() { + // This test should fail because there are no explicit indices for the pallets let input = r#" - pub struct Runtime { - System : frame_system = 0, - RandomnessCollectiveFlip : pallet_insecure_randomness_collective_flip = 1, - Timestamp : pallet_timestamp = 2, - Aura : pallet_aura = 3, - Grandpa : pallet_grandpa = 4, - Balances : pallet_balances = 5, - TransactionPayment : pallet_transaction_payment = 6, - SubtensorModule : pallet_subtensor = 7, - Triumvirate : pallet_collective::::{ Pallet, Call, Storage, Origin, Event, Config } = 8, - TriumvirateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 9, - SenateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 10, - Utility : pallet_utility = 11, - Sudo : pallet_sudo = 12, - Multisig : pallet_multisig = 13, - Preimage : 
pallet_preimage = 14, - Scheduler : pallet_scheduler = 15, - Proxy : pallet_proxy = 16, - Registry : pallet_registry = 17, - Commitments : pallet_commitments = 18, - AdminUtils : pallet_admin_utils = 19, - SafeMode : pallet_safe_mode = 20 - } + construct_runtime! { + pub enum Test { + System: frame_system::{Pallet, Call, Config, Storage, Event}, + Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, + Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, + TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, + Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, + SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event}, + Utility: pallet_utility::{Pallet, Call, Storage, Event}, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, + Preimage: pallet_preimage::{Pallet, Call, Storage, Event}, + } + } + "#; + + // This should fail because there are no explicit indices + let result = lint_macro(input); + assert!(result.is_err()); + } + + #[test] + fn test_complex_construct_runtime_struct() { + let input = r#" + construct_runtime! 
{ + pub struct Runtime { + System : frame_system = 0, + RandomnessCollectiveFlip : pallet_insecure_randomness_collective_flip = 1, + Timestamp : pallet_timestamp = 2, + Aura : pallet_aura = 3, + Grandpa : pallet_grandpa = 4, + Balances : pallet_balances = 5, + TransactionPayment : pallet_transaction_payment = 6, + SubtensorModule : pallet_subtensor = 7, + Triumvirate : pallet_collective::::{ Pallet, Call, Storage, Origin, Event, Config } = 8, + TriumvirateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 9, + SenateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 10, + Utility : pallet_utility = 11, + Sudo : pallet_sudo = 12, + Multisig : pallet_multisig = 13, + Preimage : pallet_preimage = 14, + Scheduler : pallet_scheduler = 15, + Proxy : pallet_proxy = 16, + Registry : pallet_registry = 17, + Commitments : pallet_commitments = 18, + AdminUtils : pallet_admin_utils = 19, + SafeMode : pallet_safe_mode = 20 + } + } "#; // Call the lint function on this input to ensure it parses correctly From d49581250e753c22bb2bf4422efb94db592650c7 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 10:47:01 -0400 Subject: [PATCH 058/213] still 2 test failures --- support/linting/src/pallet_index.rs | 42 +++++++++++++++-------------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index b0ae7d177..57cc4e46f 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -36,7 +36,8 @@ impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { let tokens = node.mac.tokens.clone(); // Try parsing as runtime entries - if let Ok(runtime_entries) = syn::parse2::(tokens) { + let result = syn::parse2::(tokens); + if let Ok(runtime_entries) = result { for entry in runtime_entries.entries { // Check if the entry is missing an explicit index if entry.index.is_none() { @@ -51,10 +52,7 @@ impl<'ast> Visit<'ast> for 
ConstructRuntimeVisitor { } } else { // Handle other cases, e.g., enum/struct definitions inside construct_runtime - self.errors.push(syn::Error::new( - node.mac.span(), - "Failed to parse construct_runtime!", - )); + self.errors.push(result.unwrap_err()); } } @@ -63,6 +61,7 @@ impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { } } +#[derive(Debug)] struct ConstructRuntimeEntries { entries: Punctuated, } @@ -75,6 +74,7 @@ impl Parse for ConstructRuntimeEntries { } } +#[derive(Debug)] struct PalletEntry { _visibility: Option, pallet_name: Path, @@ -87,6 +87,11 @@ impl Parse for PalletEntry { // Optionally parse visibility (e.g., `pub`) let visibility: Option = input.parse().ok(); + // Handle 'struct' keyword if present + if input.peek(Token![struct]) { + let _: Token![struct] = input.parse()?; + } + // Parse the pallet name (handling complex paths with generics and nested components) let pallet_name = parse_complex_pallet_path(input)?; @@ -109,26 +114,25 @@ impl Parse for PalletEntry { fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { // Start by parsing the base path (pallet name) - let mut path = input.parse::()?; + let path = input.parse::()?; // If there are generics like `::`, handle them if input.peek(syn::token::Lt) { - let _generics: syn::AngleBracketedGenericArguments = input.parse()?; + let _: syn::AngleBracketedGenericArguments = input.parse()?; } // Now handle nested components like `{ Pallet, Call, Storage }` if input.peek(syn::token::Brace) { let content; braced!(content in input); - let components: Punctuated = + let _components: Punctuated = content.parse_terminated(Ident::parse, Token![,])?; - - // We can attach the components to the path, if necessary, or validate them separately. 
} Ok(path) } +#[derive(Debug)] struct PalletComponents { _components: Punctuated, } @@ -163,7 +167,7 @@ mod tests { PalletB ); "#; - assert!(lint_macro(input).is_err()); + lint_macro(input).unwrap_err(); } #[test] @@ -174,7 +178,7 @@ mod tests { PalletB: 1 ); "#; - assert!(lint_macro(input).is_ok()); + lint_macro(input).unwrap(); } #[test] @@ -185,7 +189,7 @@ mod tests { PalletB: 1 ); "#; - assert!(lint_macro(input).is_err()); + lint_macro(input).unwrap_err(); } #[test] @@ -196,7 +200,7 @@ mod tests { PalletB: 1 ); "#; - assert!(lint_macro(input).is_ok()); + lint_macro(input).unwrap(); } #[test] @@ -207,7 +211,7 @@ mod tests { pallet_collective::::{ Pallet, Call, Storage }: 1 ); "#; - assert!(lint_macro(input).is_ok()); + lint_macro(input).unwrap(); } #[test] @@ -218,7 +222,7 @@ mod tests { pallet_collective::::{ Pallet, Call, Storage } ); "#; - assert!(lint_macro(input).is_err()); + lint_macro(input).unwrap_err(); } #[test] @@ -242,8 +246,7 @@ mod tests { "#; // This should fail because there are no explicit indices - let result = lint_macro(input); - assert!(result.is_err()); + lint_macro(input).unwrap_err(); } #[test] @@ -277,7 +280,6 @@ mod tests { "#; // Call the lint function on this input to ensure it parses correctly - let result = lint_macro(input); - assert!(result.is_ok()); + lint_macro(input).unwrap(); } } From 11ca35fd8a49528a39878ca0b2babfdd435c7192 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 11:46:30 -0400 Subject: [PATCH 059/213] close --- support/linting/src/pallet_index.rs | 33 +++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 57cc4e46f..60c0b26d7 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -34,11 +34,13 @@ impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { if node.mac.path.is_ident("construct_runtime") { // Token stream parsing logic let tokens = 
node.mac.tokens.clone(); + println!("Parsing construct_runtime! tokens: {}", tokens.to_string()); // Try parsing as runtime entries let result = syn::parse2::(tokens); if let Ok(runtime_entries) = result { for entry in runtime_entries.entries { + println!("Parsed entry: {:?}", entry); // Check if the entry is missing an explicit index if entry.index.is_none() { self.errors.push(syn::Error::new( @@ -51,7 +53,8 @@ impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { } } } else { - // Handle other cases, e.g., enum/struct definitions inside construct_runtime + // Print out the error and where it failed + println!("Failed to parse construct_runtime! block: {:?}", result); self.errors.push(result.unwrap_err()); } } @@ -68,9 +71,10 @@ struct ConstructRuntimeEntries { impl Parse for ConstructRuntimeEntries { fn parse(input: ParseStream) -> syn::Result { - Ok(ConstructRuntimeEntries { - entries: input.parse_terminated(PalletEntry::parse, Token![,])?, - }) + println!("Parsing ConstructRuntimeEntries"); + let entries = input.parse_terminated(PalletEntry::parse, Token![,])?; + println!("Parsed entries: {:?}", entries); + Ok(ConstructRuntimeEntries { entries }) } } @@ -86,20 +90,26 @@ impl Parse for PalletEntry { fn parse(input: ParseStream) -> syn::Result { // Optionally parse visibility (e.g., `pub`) let visibility: Option = input.parse().ok(); + println!("Parsed visibility: {:?}", visibility); // Handle 'struct' keyword if present if input.peek(Token![struct]) { let _: Token![struct] = input.parse()?; + println!("Parsed 'struct' keyword"); } // Parse the pallet name (handling complex paths with generics and nested components) let pallet_name = parse_complex_pallet_path(input)?; + println!("Parsed pallet name: {:?}", pallet_name); // Optionally parse the index if it's present let index = if input.peek(Colon) { input.parse::()?; - Some(input.parse::()?) 
+ let index = input.parse::()?; + println!("Parsed index: {:?}", index); + Some(index) } else { + println!("No index found"); None // Missing index is allowed during parsing }; @@ -114,19 +124,22 @@ impl Parse for PalletEntry { fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { // Start by parsing the base path (pallet name) - let path = input.parse::()?; + let mut path = input.parse::()?; + println!("Parsed base path: {:?}", path); // If there are generics like `::`, handle them if input.peek(syn::token::Lt) { - let _: syn::AngleBracketedGenericArguments = input.parse()?; + let generics: syn::AngleBracketedGenericArguments = input.parse()?; + println!("Parsed generics: {:?}", generics); } // Now handle nested components like `{ Pallet, Call, Storage }` if input.peek(syn::token::Brace) { let content; braced!(content in input); - let _components: Punctuated = + let components: Punctuated = content.parse_terminated(Ident::parse, Token![,])?; + println!("Parsed components: {:?}", components); } Ok(path) @@ -139,8 +152,10 @@ struct PalletComponents { impl Parse for PalletComponents { fn parse(input: ParseStream) -> syn::Result { + let components = input.parse_terminated(Ident::parse, Token![,])?; + println!("Parsed components: {:?}", components); Ok(PalletComponents { - _components: input.parse_terminated(Ident::parse, Token![,])?, + _components: components, }) } } From 57eb7508463efbc0bbb6c5fea833968c85ee6f27 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 12:32:30 -0400 Subject: [PATCH 060/213] WIP --- support/linting/src/pallet_index.rs | 55 ++++++++++++----------------- 1 file changed, 22 insertions(+), 33 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 60c0b26d7..b373a04b2 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -36,11 +36,9 @@ impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { let tokens = node.mac.tokens.clone(); 
println!("Parsing construct_runtime! tokens: {}", tokens.to_string()); - // Try parsing as runtime entries let result = syn::parse2::(tokens); if let Ok(runtime_entries) = result { for entry in runtime_entries.entries { - println!("Parsed entry: {:?}", entry); // Check if the entry is missing an explicit index if entry.index.is_none() { self.errors.push(syn::Error::new( @@ -53,7 +51,7 @@ impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { } } } else { - // Print out the error and where it failed + // Log error println!("Failed to parse construct_runtime! block: {:?}", result); self.errors.push(result.unwrap_err()); } @@ -71,66 +69,60 @@ struct ConstructRuntimeEntries { impl Parse for ConstructRuntimeEntries { fn parse(input: ParseStream) -> syn::Result { - println!("Parsing ConstructRuntimeEntries"); let entries = input.parse_terminated(PalletEntry::parse, Token![,])?; - println!("Parsed entries: {:?}", entries); Ok(ConstructRuntimeEntries { entries }) } } #[derive(Debug)] struct PalletEntry { - _visibility: Option, + visibility: Option, pallet_name: Path, - _components: Option, - index: Option, // Now index can be None (i.e., missing) + components: Option, + index: Option, } impl Parse for PalletEntry { fn parse(input: ParseStream) -> syn::Result { // Optionally parse visibility (e.g., `pub`) let visibility: Option = input.parse().ok(); - println!("Parsed visibility: {:?}", visibility); - - // Handle 'struct' keyword if present - if input.peek(Token![struct]) { - let _: Token![struct] = input.parse()?; - println!("Parsed 'struct' keyword"); - } // Parse the pallet name (handling complex paths with generics and nested components) let pallet_name = parse_complex_pallet_path(input)?; - println!("Parsed pallet name: {:?}", pallet_name); + + // Optionally parse the components in `{ Pallet, Call, Storage }` + let components = if input.peek(syn::token::Brace) { + let content; + braced!(content in input); + Some(content.parse::()?) 
+ } else { + None + }; // Optionally parse the index if it's present let index = if input.peek(Colon) { input.parse::()?; - let index = input.parse::()?; - println!("Parsed index: {:?}", index); - Some(index) + Some(input.parse::()?) } else { - println!("No index found"); - None // Missing index is allowed during parsing + None }; Ok(PalletEntry { - _visibility: visibility, + visibility, pallet_name, - _components: None, // Components will be handled in `parse_complex_pallet_path` + components, index, }) } } fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { - // Start by parsing the base path (pallet name) - let mut path = input.parse::()?; - println!("Parsed base path: {:?}", path); + // Parse the base path (e.g., `pallet_collective`) + let path = input.parse::()?; // If there are generics like `::`, handle them if input.peek(syn::token::Lt) { - let generics: syn::AngleBracketedGenericArguments = input.parse()?; - println!("Parsed generics: {:?}", generics); + let _generics: syn::AngleBracketedGenericArguments = input.parse()?; } // Now handle nested components like `{ Pallet, Call, Storage }` @@ -147,15 +139,13 @@ fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { #[derive(Debug)] struct PalletComponents { - _components: Punctuated, + components: Punctuated, } impl Parse for PalletComponents { fn parse(input: ParseStream) -> syn::Result { - let components = input.parse_terminated(Ident::parse, Token![,])?; - println!("Parsed components: {:?}", components); Ok(PalletComponents { - _components: components, + components: input.parse_terminated(Ident::parse, Token![,])?, }) } } @@ -294,7 +284,6 @@ mod tests { } "#; - // Call the lint function on this input to ensure it parses correctly lint_macro(input).unwrap(); } } From 23b4a68b40497d4474e0fcaffa2820aa31886f59 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 15:04:28 -0400 Subject: [PATCH 061/213] add procedural-fork stub --- Cargo.lock | 5 +++++ Cargo.toml | 2 +- 
support/linting/Cargo.toml | 1 + support/procedural-fork/Cargo.toml | 9 +++++++++ support/procedural-fork/src/lib.rs | 1 + 5 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 support/procedural-fork/Cargo.toml create mode 100644 support/procedural-fork/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index ee0933379..867ed787f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5970,6 +5970,10 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "procedural-fork" +version = "1.10.0-rc3" + [[package]] name = "prometheus" version = "0.13.4" @@ -9231,6 +9235,7 @@ name = "subtensor-linting" version = "0.1.0" dependencies = [ "proc-macro2", + "procedural-fork", "quote", "syn 2.0.71", ] diff --git a/Cargo.toml b/Cargo.toml index 4f162ca6b..a9788a222 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ members = [ "runtime", "support/tools", "support/macros", - "support/linting", + "support/linting", "support/procedural-fork", ] resolver = "2" diff --git a/support/linting/Cargo.toml b/support/linting/Cargo.toml index 1e37d8163..4378ca9dd 100644 --- a/support/linting/Cargo.toml +++ b/support/linting/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" syn.workspace = true quote.workspace = true proc-macro2.workspace = true +procedural-fork = { version = "1.10.0-rc3", path = "../procedural-fork" } [lints] workspace = true diff --git a/support/procedural-fork/Cargo.toml b/support/procedural-fork/Cargo.toml new file mode 100644 index 000000000..070e3e9c2 --- /dev/null +++ b/support/procedural-fork/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "procedural-fork" +version = "1.10.0-rc3" +edition = "2021" + +[dependencies] + +[lints] +workspace = true diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/support/procedural-fork/src/lib.rs @@ -0,0 +1 @@ + From c23485fac00b303a2fc0b0cd35fc970fbeeb448c Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 
15:05:37 -0400 Subject: [PATCH 062/213] add update.sh script --- support/procedural-fork/update.sh | 39 +++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100755 support/procedural-fork/update.sh diff --git a/support/procedural-fork/update.sh b/support/procedural-fork/update.sh new file mode 100755 index 000000000..c8bbd3cc9 --- /dev/null +++ b/support/procedural-fork/update.sh @@ -0,0 +1,39 @@ +#!/bin/sh + +# Enable error handling +set -e + +# Set the repository and tag +REPO_URL="git@github.com:paritytech/polkadot-sdk.git" +POLKADOT_SDK_TAG="v1.10.0-rc3" + +# Create a temporary directory for cloning +TMP_DIR=$(mktemp -d) + +# Define source and destination directories +SRC_DIR="substrate/support/procedural/src" +DEST_DIR="$(pwd)/src" # Absolute path to `src` directory of procedural-fork + +# Check if DEST_DIR exists +if [ ! -d "$DEST_DIR" ]; then + echo "Error: Destination directory $DEST_DIR does not exist." + rm -rf "$TMP_DIR" + exit 1 +fi + +# Clone only the required directory from the repository +echo "Cloning $REPO_URL at tag $POLKADOT_SDK_TAG ..." +git clone --depth 1 --branch "$POLKADOT_SDK_TAG" --filter=blob:none --sparse "$REPO_URL" "$TMP_DIR" + +cd "$TMP_DIR" +git sparse-checkout init --cone +git sparse-checkout set "$SRC_DIR" + +# Copy all files from `src` except `lib.rs` to the destination folder +echo "Copying files to $DEST_DIR ..." +rsync -a --exclude='lib.rs' "$TMP_DIR/$SRC_DIR/" "$DEST_DIR/" + +# Clean up the temporary directory +rm -rf "$TMP_DIR" + +echo "Update completed successfully." 
From fdd33fbeb3ba59740b05294bb49b9458236b41e1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 15:06:58 -0400 Subject: [PATCH 063/213] add debugging --- support/procedural-fork/update.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/support/procedural-fork/update.sh b/support/procedural-fork/update.sh index c8bbd3cc9..f47a782e2 100755 --- a/support/procedural-fork/update.sh +++ b/support/procedural-fork/update.sh @@ -25,10 +25,17 @@ fi echo "Cloning $REPO_URL at tag $POLKADOT_SDK_TAG ..." git clone --depth 1 --branch "$POLKADOT_SDK_TAG" --filter=blob:none --sparse "$REPO_URL" "$TMP_DIR" +# Navigate to the cloned directory cd "$TMP_DIR" + +# Initialize sparse-checkout and set the directory git sparse-checkout init --cone git sparse-checkout set "$SRC_DIR" +# Debugging: List the contents of the sparse-checked-out directory +echo "Contents of $TMP_DIR/$SRC_DIR after sparse-checkout:" +ls -l "$TMP_DIR/$SRC_DIR" || { echo "Error: Sparse checkout failed, $SRC_DIR not found."; rm -rf "$TMP_DIR"; exit 1; } + # Copy all files from `src` except `lib.rs` to the destination folder echo "Copying files to $DEST_DIR ..." 
rsync -a --exclude='lib.rs' "$TMP_DIR/$SRC_DIR/" "$DEST_DIR/" From 69410ace4a8ab6420c5548f42a65ec96765e0cfb Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 15:08:18 -0400 Subject: [PATCH 064/213] update script --- support/procedural-fork/update.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/support/procedural-fork/update.sh b/support/procedural-fork/update.sh index f47a782e2..a8793b261 100755 --- a/support/procedural-fork/update.sh +++ b/support/procedural-fork/update.sh @@ -11,7 +11,7 @@ POLKADOT_SDK_TAG="v1.10.0-rc3" TMP_DIR=$(mktemp -d) # Define source and destination directories -SRC_DIR="substrate/support/procedural/src" +SRC_DIR="substrate/frame/support/procedural/src" DEST_DIR="$(pwd)/src" # Absolute path to `src` directory of procedural-fork # Check if DEST_DIR exists @@ -28,7 +28,7 @@ git clone --depth 1 --branch "$POLKADOT_SDK_TAG" --filter=blob:none --sparse "$R # Navigate to the cloned directory cd "$TMP_DIR" -# Initialize sparse-checkout and set the directory +# Initialize sparse-checkout and set the correct directory git sparse-checkout init --cone git sparse-checkout set "$SRC_DIR" From 021ac06dce3cd8448d5e8e7dded7f2bcecb68f20 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:00:10 -0400 Subject: [PATCH 065/213] local no-proc-macro fork of frame-support-procedural --- Cargo.lock | 75 +- support/procedural-fork/Cargo.toml | 29 +- support/procedural-fork/src/benchmark.rs | 1202 +++++++++++++++++ .../src/construct_runtime/expand/call.rs | 223 +++ .../expand/composite_helper.rs | 101 ++ .../src/construct_runtime/expand/config.rs | 147 ++ .../construct_runtime/expand/freeze_reason.rs | 75 + .../construct_runtime/expand/hold_reason.rs | 75 + .../src/construct_runtime/expand/inherent.rs | 254 ++++ .../src/construct_runtime/expand/lock_id.rs | 64 + .../src/construct_runtime/expand/metadata.rs | 258 ++++ .../src/construct_runtime/expand/mod.rs | 43 + .../src/construct_runtime/expand/origin.rs | 
455 +++++++ .../construct_runtime/expand/outer_enums.rs | 279 ++++ .../construct_runtime/expand/slash_reason.rs | 64 + .../src/construct_runtime/expand/task.rs | 131 ++ .../src/construct_runtime/expand/unsigned.rs | 89 ++ .../src/construct_runtime/mod.rs | 809 +++++++++++ .../src/construct_runtime/parse.rs | 786 +++++++++++ support/procedural-fork/src/crate_version.rs | 54 + support/procedural-fork/src/derive_impl.rs | 303 +++++ .../procedural-fork/src/dummy_part_checker.rs | 79 ++ support/procedural-fork/src/dynamic_params.rs | 563 ++++++++ support/procedural-fork/src/key_prefix.rs | 104 ++ support/procedural-fork/src/lib.rs | 66 + .../procedural-fork/src/match_and_insert.rs | 159 +++ support/procedural-fork/src/no_bound/clone.rs | 107 ++ support/procedural-fork/src/no_bound/debug.rs | 121 ++ .../procedural-fork/src/no_bound/default.rs | 161 +++ support/procedural-fork/src/no_bound/mod.rs | 25 + support/procedural-fork/src/no_bound/ord.rs | 75 + .../src/no_bound/partial_eq.rs | 137 ++ .../src/no_bound/partial_ord.rs | 89 ++ .../procedural-fork/src/pallet/expand/call.rs | 452 +++++++ .../src/pallet/expand/composite.rs | 40 + .../src/pallet/expand/config.rs | 97 ++ .../src/pallet/expand/constants.rs | 108 ++ .../src/pallet/expand/doc_only.rs | 103 ++ .../src/pallet/expand/documentation.rs | 172 +++ .../src/pallet/expand/error.rs | 191 +++ .../src/pallet/expand/event.rs | 174 +++ .../src/pallet/expand/genesis_build.rs | 49 + .../src/pallet/expand/genesis_config.rs | 147 ++ .../src/pallet/expand/hooks.rs | 340 +++++ .../src/pallet/expand/inherent.rs | 55 + .../src/pallet/expand/instances.rs | 43 + .../procedural-fork/src/pallet/expand/mod.rs | 130 ++ .../src/pallet/expand/origin.rs | 55 + .../src/pallet/expand/pallet_struct.rs | 290 ++++ .../src/pallet/expand/storage.rs | 919 +++++++++++++ .../src/pallet/expand/tasks.rs | 267 ++++ .../src/pallet/expand/tt_default_parts.rs | 216 +++ .../src/pallet/expand/type_value.rs | 77 ++ .../src/pallet/expand/validate_unsigned.rs 
| 56 + .../src/pallet/expand/warnings.rs | 98 ++ support/procedural-fork/src/pallet/mod.rs | 61 + .../procedural-fork/src/pallet/parse/call.rs | 467 +++++++ .../src/pallet/parse/composite.rs | 191 +++ .../src/pallet/parse/config.rs | 590 ++++++++ .../procedural-fork/src/pallet/parse/error.rs | 115 ++ .../procedural-fork/src/pallet/parse/event.rs | 141 ++ .../src/pallet/parse/extra_constants.rs | 160 +++ .../src/pallet/parse/genesis_build.rs | 61 + .../src/pallet/parse/genesis_config.rs | 73 + .../src/pallet/parse/helper.rs | 632 +++++++++ .../procedural-fork/src/pallet/parse/hooks.rs | 86 ++ .../src/pallet/parse/inherent.rs | 60 + .../procedural-fork/src/pallet/parse/mod.rs | 749 ++++++++++ .../src/pallet/parse/origin.rs | 72 + .../src/pallet/parse/pallet_struct.rs | 149 ++ .../src/pallet/parse/storage.rs | 947 +++++++++++++ .../procedural-fork/src/pallet/parse/tasks.rs | 968 +++++++++++++ .../src/pallet/parse/tests/mod.rs | 264 ++++ .../src/pallet/parse/tests/tasks.rs | 240 ++++ .../src/pallet/parse/type_value.rs | 123 ++ .../src/pallet/parse/validate_unsigned.rs | 62 + support/procedural-fork/src/pallet_error.rs | 178 +++ .../procedural-fork/src/runtime/expand/mod.rs | 320 +++++ support/procedural-fork/src/runtime/mod.rs | 236 ++++ .../src/runtime/parse/helper.rs | 37 + .../procedural-fork/src/runtime/parse/mod.rs | 266 ++++ .../src/runtime/parse/pallet.rs | 99 ++ .../src/runtime/parse/pallet_decl.rs | 60 + .../src/runtime/parse/runtime_struct.rs | 35 + .../src/runtime/parse/runtime_types.rs | 76 ++ support/procedural-fork/src/storage_alias.rs | 676 +++++++++ support/procedural-fork/src/transactional.rs | 60 + support/procedural-fork/src/tt_macro.rs | 105 ++ 88 files changed, 19627 insertions(+), 13 deletions(-) create mode 100644 support/procedural-fork/src/benchmark.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/call.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/composite_helper.rs create mode 100644 
support/procedural-fork/src/construct_runtime/expand/config.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/hold_reason.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/inherent.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/lock_id.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/metadata.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/mod.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/origin.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/outer_enums.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/slash_reason.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/task.rs create mode 100644 support/procedural-fork/src/construct_runtime/expand/unsigned.rs create mode 100644 support/procedural-fork/src/construct_runtime/mod.rs create mode 100644 support/procedural-fork/src/construct_runtime/parse.rs create mode 100644 support/procedural-fork/src/crate_version.rs create mode 100644 support/procedural-fork/src/derive_impl.rs create mode 100644 support/procedural-fork/src/dummy_part_checker.rs create mode 100644 support/procedural-fork/src/dynamic_params.rs create mode 100644 support/procedural-fork/src/key_prefix.rs create mode 100644 support/procedural-fork/src/match_and_insert.rs create mode 100644 support/procedural-fork/src/no_bound/clone.rs create mode 100644 support/procedural-fork/src/no_bound/debug.rs create mode 100644 support/procedural-fork/src/no_bound/default.rs create mode 100644 support/procedural-fork/src/no_bound/mod.rs create mode 100644 support/procedural-fork/src/no_bound/ord.rs create mode 100644 support/procedural-fork/src/no_bound/partial_eq.rs create mode 100644 support/procedural-fork/src/no_bound/partial_ord.rs create mode 
100644 support/procedural-fork/src/pallet/expand/call.rs create mode 100644 support/procedural-fork/src/pallet/expand/composite.rs create mode 100644 support/procedural-fork/src/pallet/expand/config.rs create mode 100644 support/procedural-fork/src/pallet/expand/constants.rs create mode 100644 support/procedural-fork/src/pallet/expand/doc_only.rs create mode 100644 support/procedural-fork/src/pallet/expand/documentation.rs create mode 100644 support/procedural-fork/src/pallet/expand/error.rs create mode 100644 support/procedural-fork/src/pallet/expand/event.rs create mode 100644 support/procedural-fork/src/pallet/expand/genesis_build.rs create mode 100644 support/procedural-fork/src/pallet/expand/genesis_config.rs create mode 100644 support/procedural-fork/src/pallet/expand/hooks.rs create mode 100644 support/procedural-fork/src/pallet/expand/inherent.rs create mode 100644 support/procedural-fork/src/pallet/expand/instances.rs create mode 100644 support/procedural-fork/src/pallet/expand/mod.rs create mode 100644 support/procedural-fork/src/pallet/expand/origin.rs create mode 100644 support/procedural-fork/src/pallet/expand/pallet_struct.rs create mode 100644 support/procedural-fork/src/pallet/expand/storage.rs create mode 100644 support/procedural-fork/src/pallet/expand/tasks.rs create mode 100644 support/procedural-fork/src/pallet/expand/tt_default_parts.rs create mode 100644 support/procedural-fork/src/pallet/expand/type_value.rs create mode 100644 support/procedural-fork/src/pallet/expand/validate_unsigned.rs create mode 100644 support/procedural-fork/src/pallet/expand/warnings.rs create mode 100644 support/procedural-fork/src/pallet/mod.rs create mode 100644 support/procedural-fork/src/pallet/parse/call.rs create mode 100644 support/procedural-fork/src/pallet/parse/composite.rs create mode 100644 support/procedural-fork/src/pallet/parse/config.rs create mode 100644 support/procedural-fork/src/pallet/parse/error.rs create mode 100644 
support/procedural-fork/src/pallet/parse/event.rs create mode 100644 support/procedural-fork/src/pallet/parse/extra_constants.rs create mode 100644 support/procedural-fork/src/pallet/parse/genesis_build.rs create mode 100644 support/procedural-fork/src/pallet/parse/genesis_config.rs create mode 100644 support/procedural-fork/src/pallet/parse/helper.rs create mode 100644 support/procedural-fork/src/pallet/parse/hooks.rs create mode 100644 support/procedural-fork/src/pallet/parse/inherent.rs create mode 100644 support/procedural-fork/src/pallet/parse/mod.rs create mode 100644 support/procedural-fork/src/pallet/parse/origin.rs create mode 100644 support/procedural-fork/src/pallet/parse/pallet_struct.rs create mode 100644 support/procedural-fork/src/pallet/parse/storage.rs create mode 100644 support/procedural-fork/src/pallet/parse/tasks.rs create mode 100644 support/procedural-fork/src/pallet/parse/tests/mod.rs create mode 100644 support/procedural-fork/src/pallet/parse/tests/tasks.rs create mode 100644 support/procedural-fork/src/pallet/parse/type_value.rs create mode 100644 support/procedural-fork/src/pallet/parse/validate_unsigned.rs create mode 100644 support/procedural-fork/src/pallet_error.rs create mode 100644 support/procedural-fork/src/runtime/expand/mod.rs create mode 100644 support/procedural-fork/src/runtime/mod.rs create mode 100644 support/procedural-fork/src/runtime/parse/helper.rs create mode 100644 support/procedural-fork/src/runtime/parse/mod.rs create mode 100644 support/procedural-fork/src/runtime/parse/pallet.rs create mode 100644 support/procedural-fork/src/runtime/parse/pallet_decl.rs create mode 100644 support/procedural-fork/src/runtime/parse/runtime_struct.rs create mode 100644 support/procedural-fork/src/runtime/parse/runtime_types.rs create mode 100644 support/procedural-fork/src/storage_alias.rs create mode 100644 support/procedural-fork/src/transactional.rs create mode 100644 support/procedural-fork/src/tt_macro.rs diff --git a/Cargo.lock 
b/Cargo.lock index 867ed787f..50f4e50c6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2497,13 +2497,26 @@ dependencies = [ "cfg-expr", "derive-syn-parse 0.2.0", "expander", - "frame-support-procedural-tools", + "frame-support-procedural-tools 10.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "itertools 0.10.5", "macro_magic", "proc-macro-warning", "proc-macro2", "quote", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", + "syn 2.0.71", +] + +[[package]] +name = "frame-support-procedural-tools" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3363df38464c47a73eb521a4f648bfcc7537a82d70347ef8af3f73b6d019e910" +dependencies = [ + "frame-support-procedural-tools-derive 11.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-crate 3.1.0", + "proc-macro2", + "quote", "syn 2.0.71", ] @@ -2512,13 +2525,24 @@ name = "frame-support-procedural-tools" version = "10.0.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" dependencies = [ - "frame-support-procedural-tools-derive", + "frame-support-procedural-tools-derive 11.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "proc-macro-crate 3.1.0", "proc-macro2", "quote", "syn 2.0.71", ] +[[package]] +name = "frame-support-procedural-tools-derive" +version = "11.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68672b9ec6fe72d259d3879dc212c5e42e977588cdac830c76f54d9f492aeb58" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.71", +] + [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" @@ -5973,6 +5997,21 @@ dependencies = [ [[package]] name = "procedural-fork" version = "1.10.0-rc3" +dependencies = [ + "Inflector", + "cfg-expr", + "derive-syn-parse 0.2.0", + "expander", + "frame-support-procedural-tools 10.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + 
"itertools 0.10.5", + "macro_magic", + "proc-macro-warning", + "proc-macro2", + "quote", + "regex", + "sp-crypto-hashing 0.1.0", + "syn 2.0.71", +] [[package]] name = "prometheus" @@ -6789,7 +6828,7 @@ dependencies = [ "serde_json", "sp-blockchain", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", "sp-genesis-builder", "sp-io", "sp-runtime", @@ -6991,7 +7030,7 @@ dependencies = [ "sp-consensus", "sp-consensus-grandpa", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", "sp-keystore", "sp-runtime", "substrate-prometheus-endpoint", @@ -7576,7 +7615,7 @@ dependencies = [ "serde", "serde_json", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", "sp-io", "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] @@ -7660,7 +7699,7 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", "sp-runtime", "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-transaction-pool", @@ -8398,7 +8437,7 @@ dependencies = [ "secp256k1", "secrecy", "serde", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", @@ -8445,13 +8484,27 @@ dependencies = [ "twox-hash", ] +[[package]] +name = "sp-crypto-hashing" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc9927a7f81334ed5b8a98a4a978c81324d12bd9713ec76b5c68fd410174c5eb" +dependencies = [ + "blake2b_simd", + "byteorder", + "digest 0.10.7", + "sha2 0.10.8", + "sha3", + "twox-hash", +] + [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.0.0" source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" dependencies = [ "quote", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", "syn 2.0.71", ] @@ -8541,7 +8594,7 @@ dependencies = [ "rustversion", "secp256k1", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-keystore", "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", @@ -8785,7 +8838,7 @@ dependencies = [ "sp-api", "sp-application-crypto", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.0.0", "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-runtime", "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", diff --git a/support/procedural-fork/Cargo.toml b/support/procedural-fork/Cargo.toml index 070e3e9c2..61221ead8 100644 --- a/support/procedural-fork/Cargo.toml +++ b/support/procedural-fork/Cargo.toml @@ -3,7 +3,32 @@ name = "procedural-fork" version = "1.10.0-rc3" edition = "2021" -[dependencies] - [lints] workspace = true + +[dependencies] +derive-syn-parse = "0.2" +Inflector = "0.11" +cfg-expr = "0.15" +itertools = "0.10" +proc-macro2.workspace = true +quote.workspace = true +syn.workspace = true +macro_magic = { version = "0.5", features = ["proc_support"] } +frame-support-procedural-tools = { version = "10.0.0" } +proc-macro-warning = { version = "1", default-features = false } +expander = "2" +sp-crypto-hashing = { default-features = false, version = "0.1.0" } + +[dev-dependencies] +regex = "1" + +[features] +default = ["std"] +std = ["sp-crypto-hashing/std"] +no-metadata-docs = [] +experimental = [] +# Generate impl-trait for tuples with the given number of tuples. Will be needed as the number of +# pallets in a runtime grows. Does increase the compile time! 
+tuples-96 = [] +tuples-128 = [] diff --git a/support/procedural-fork/src/benchmark.rs b/support/procedural-fork/src/benchmark.rs new file mode 100644 index 000000000..0a62c3f92 --- /dev/null +++ b/support/procedural-fork/src/benchmark.rs @@ -0,0 +1,1202 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Home of the parsing and expansion code for the new pallet benchmarking syntax + +use derive_syn_parse::Parse; +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use proc_macro::TokenStream; +use proc_macro2::{Ident, Span, TokenStream as TokenStream2}; +use quote::{quote, ToTokens}; +use syn::{ + parse::{Nothing, ParseStream}, + parse_quote, + punctuated::Punctuated, + spanned::Spanned, + token::{Comma, Gt, Lt, PathSep}, + Attribute, Error, Expr, ExprBlock, ExprCall, ExprPath, FnArg, Item, ItemFn, ItemMod, Pat, Path, + PathArguments, PathSegment, Result, ReturnType, Signature, Stmt, Token, Type, TypePath, + Visibility, WhereClause, +}; + +mod keywords { + use syn::custom_keyword; + + custom_keyword!(benchmark); + custom_keyword!(benchmarks); + custom_keyword!(block); + custom_keyword!(extra); + custom_keyword!(pov_mode); + custom_keyword!(extrinsic_call); + custom_keyword!(skip_meta); + custom_keyword!(BenchmarkError); + custom_keyword!(Result); + custom_keyword!(MaxEncodedLen); + 
custom_keyword!(Measured); + custom_keyword!(Ignored); + + pub const BENCHMARK_TOKEN: &str = stringify!(benchmark); + pub const BENCHMARKS_TOKEN: &str = stringify!(benchmarks); +} + +/// This represents the raw parsed data for a param definition such as `x: Linear<10, 20>`. +#[derive(Clone)] +struct ParamDef { + name: String, + _typ: Type, + start: syn::GenericArgument, + end: syn::GenericArgument, +} + +/// Allows easy parsing of the `<10, 20>` component of `x: Linear<10, 20>`. +#[derive(Parse)] +struct RangeArgs { + _lt_token: Lt, + start: syn::GenericArgument, + _comma: Comma, + end: syn::GenericArgument, + _trailing_comma: Option, + _gt_token: Gt, +} + +#[derive(Clone, Debug)] +struct BenchmarkAttrs { + skip_meta: bool, + extra: bool, + pov_mode: Option, +} + +/// Represents a single benchmark option +enum BenchmarkAttr { + Extra, + SkipMeta, + /// How the PoV should be measured. + PoV(PovModeAttr), +} + +impl syn::parse::Parse for PovModeAttr { + fn parse(input: ParseStream) -> Result { + let _pov: keywords::pov_mode = input.parse()?; + let _eq: Token![=] = input.parse()?; + let root = PovEstimationMode::parse(input)?; + + let mut maybe_content = None; + let _ = || -> Result<()> { + let content; + syn::braced!(content in input); + maybe_content = Some(content); + Ok(()) + }(); + + let per_key = match maybe_content { + Some(content) => { + let per_key = Punctuated::::parse_terminated(&content)?; + per_key.into_iter().collect() + }, + None => Vec::new(), + }; + + Ok(Self { root, per_key }) + } +} + +impl syn::parse::Parse for BenchmarkAttr { + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::extra) { + let _extra: keywords::extra = input.parse()?; + Ok(BenchmarkAttr::Extra) + } else if lookahead.peek(keywords::skip_meta) { + let _skip_meta: keywords::skip_meta = input.parse()?; + Ok(BenchmarkAttr::SkipMeta) + } else if lookahead.peek(keywords::pov_mode) { + 
PovModeAttr::parse(input).map(BenchmarkAttr::PoV) + } else { + Err(lookahead.error()) + } + } +} + +/// A `#[pov_mode = .. { .. }]` attribute. +#[derive(Debug, Clone)] +struct PovModeAttr { + /// The root mode for this benchmarks. + root: PovEstimationMode, + /// The pov-mode for a specific key. This overwrites `root` for this key. + per_key: Vec, +} + +/// A single key-value pair inside the `{}` of a `#[pov_mode = .. { .. }]` attribute. +#[derive(Debug, Clone, derive_syn_parse::Parse)] +struct PovModeKeyAttr { + /// A specific storage key for which to set the PoV mode. + key: Path, + _underscore: Token![:], + /// The PoV mode for this key. + mode: PovEstimationMode, +} + +/// How the PoV should be estimated. +#[derive(Debug, Eq, PartialEq, Clone, Copy)] +pub enum PovEstimationMode { + /// Use the maximal encoded length as provided by [`codec::MaxEncodedLen`]. + MaxEncodedLen, + /// Measure the accessed value size in the pallet benchmarking and add some trie overhead. + Measured, + /// Do not estimate the PoV size for this storage item or benchmark. 
+ Ignored, +} + +impl syn::parse::Parse for PovEstimationMode { + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::MaxEncodedLen) { + let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; + return Ok(PovEstimationMode::MaxEncodedLen) + } else if lookahead.peek(keywords::Measured) { + let _measured: keywords::Measured = input.parse()?; + return Ok(PovEstimationMode::Measured) + } else if lookahead.peek(keywords::Ignored) { + let _ignored: keywords::Ignored = input.parse()?; + return Ok(PovEstimationMode::Ignored) + } else { + return Err(lookahead.error()) + } + } +} + +impl ToString for PovEstimationMode { + fn to_string(&self) -> String { + match self { + PovEstimationMode::MaxEncodedLen => "MaxEncodedLen".into(), + PovEstimationMode::Measured => "Measured".into(), + PovEstimationMode::Ignored => "Ignored".into(), + } + } +} + +impl quote::ToTokens for PovEstimationMode { + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + PovEstimationMode::MaxEncodedLen => tokens.extend(quote!(MaxEncodedLen)), + PovEstimationMode::Measured => tokens.extend(quote!(Measured)), + PovEstimationMode::Ignored => tokens.extend(quote!(Ignored)), + } + } +} + +impl syn::parse::Parse for BenchmarkAttrs { + fn parse(input: ParseStream) -> syn::Result { + let mut extra = false; + let mut skip_meta = false; + let mut pov_mode = None; + let args = Punctuated::::parse_terminated(&input)?; + + for arg in args.into_iter() { + match arg { + BenchmarkAttr::Extra => { + if extra { + return Err(input.error("`extra` can only be specified once")) + } + extra = true; + }, + BenchmarkAttr::SkipMeta => { + if skip_meta { + return Err(input.error("`skip_meta` can only be specified once")) + } + skip_meta = true; + }, + BenchmarkAttr::PoV(mode) => { + if pov_mode.is_some() { + return Err(input.error("`pov_mode` can only be specified once")) + } + pov_mode = Some(mode); + }, + } + } + Ok(BenchmarkAttrs { extra, skip_meta, 
pov_mode }) + } +} + +/// Represents the parsed extrinsic call for a benchmark +#[derive(Clone)] +enum BenchmarkCallDef { + ExtrinsicCall { origin: Expr, expr_call: ExprCall, attr_span: Span }, // #[extrinsic_call] + Block { block: ExprBlock, attr_span: Span }, // #[block] +} + +impl BenchmarkCallDef { + /// Returns the `span()` for attribute + fn attr_span(&self) -> Span { + match self { + BenchmarkCallDef::ExtrinsicCall { origin: _, expr_call: _, attr_span } => *attr_span, + BenchmarkCallDef::Block { block: _, attr_span } => *attr_span, + } + } +} + +/// Represents a parsed `#[benchmark]` or `#[instance_benchmark]` item. +#[derive(Clone)] +struct BenchmarkDef { + params: Vec, + setup_stmts: Vec, + call_def: BenchmarkCallDef, + verify_stmts: Vec, + last_stmt: Option, + fn_sig: Signature, + fn_vis: Visibility, + fn_attrs: Vec, +} + +/// used to parse something compatible with `Result` +#[derive(Parse)] +struct ResultDef { + _result_kw: keywords::Result, + _lt: Token![<], + unit: Type, + _comma: Comma, + e_type: TypePath, + _gt: Token![>], +} + +/// Ensures that `ReturnType` is a `Result<(), BenchmarkError>`, if specified +fn ensure_valid_return_type(item_fn: &ItemFn) -> Result<()> { + if let ReturnType::Type(_, typ) = &item_fn.sig.output { + let non_unit = |span| return Err(Error::new(span, "expected `()`")); + let Type::Path(TypePath { path, qself: _ }) = &**typ else { + return Err(Error::new( + typ.span(), + "Only `Result<(), BenchmarkError>` or a blank return type is allowed on benchmark function definitions", + )) + }; + let seg = path + .segments + .last() + .expect("to be parsed as a TypePath, it must have at least one segment; qed"); + let res: ResultDef = syn::parse2(seg.to_token_stream())?; + // ensure T in Result is () + let Type::Tuple(tup) = res.unit else { return non_unit(res.unit.span()) }; + if !tup.elems.is_empty() { + return non_unit(tup.span()) + } + let TypePath { path, qself: _ } = res.e_type; + let seg = path + .segments + .last() + .expect("to 
be parsed as a TypePath, it must have at least one segment; qed"); + syn::parse2::(seg.to_token_stream())?; + } + Ok(()) +} + +/// Parses params such as `x: Linear<0, 1>` +fn parse_params(item_fn: &ItemFn) -> Result> { + let mut params: Vec = Vec::new(); + for arg in &item_fn.sig.inputs { + let invalid_param = |span| { + return Err(Error::new( + span, + "Invalid benchmark function param. A valid example would be `x: Linear<5, 10>`.", + )) + }; + + let FnArg::Typed(arg) = arg else { return invalid_param(arg.span()) }; + let Pat::Ident(ident) = &*arg.pat else { return invalid_param(arg.span()) }; + + // check param name + let var_span = ident.span(); + let invalid_param_name = || { + return Err(Error::new( + var_span, + "Benchmark parameter names must consist of a single lowercase letter (a-z) and no other characters.", + )); + }; + let name = ident.ident.to_token_stream().to_string(); + if name.len() > 1 { + return invalid_param_name() + }; + let Some(name_char) = name.chars().next() else { return invalid_param_name() }; + if !name_char.is_alphabetic() || !name_char.is_lowercase() { + return invalid_param_name() + } + + // parse type + let typ = &*arg.ty; + let Type::Path(tpath) = typ else { return invalid_param(typ.span()) }; + let Some(segment) = tpath.path.segments.last() else { return invalid_param(typ.span()) }; + let args = segment.arguments.to_token_stream().into(); + let Ok(args) = syn::parse::(args) else { return invalid_param(typ.span()) }; + + params.push(ParamDef { name, _typ: typ.clone(), start: args.start, end: args.end }); + } + Ok(params) +} + +/// Used in several places where the `#[extrinsic_call]` or `#[body]` annotation is missing +fn missing_call(item_fn: &ItemFn) -> Result { + return Err(Error::new( + item_fn.block.brace_token.span.join(), + "No valid #[extrinsic_call] or #[block] annotation could be found in benchmark function body." 
+ )); +} + +/// Finds the `BenchmarkCallDef` and its index (within the list of stmts for the fn) and +/// returns them. Also handles parsing errors for invalid / extra call defs. AKA this is +/// general handling for `#[extrinsic_call]` and `#[block]` +fn parse_call_def(item_fn: &ItemFn) -> Result<(usize, BenchmarkCallDef)> { + // #[extrinsic_call] / #[block] handling + let call_defs = item_fn.block.stmts.iter().enumerate().filter_map(|(i, child)| { + if let Stmt::Expr(Expr::Call(expr_call), _semi) = child { + // #[extrinsic_call] case + expr_call.attrs.iter().enumerate().find_map(|(k, attr)| { + let segment = attr.path().segments.last()?; + let _: keywords::extrinsic_call = syn::parse(segment.ident.to_token_stream().into()).ok()?; + let mut expr_call = expr_call.clone(); + + // consume #[extrinsic_call] tokens + expr_call.attrs.remove(k); + + // extract origin from expr_call + let Some(origin) = expr_call.args.first().cloned() else { + return Some(Err(Error::new(expr_call.span(), "Single-item extrinsic calls must specify their origin as the first argument."))) + }; + + Some(Ok((i, BenchmarkCallDef::ExtrinsicCall { origin, expr_call, attr_span: attr.span() }))) + }) + } else if let Stmt::Expr(Expr::Block(block), _) = child { + // #[block] case + block.attrs.iter().enumerate().find_map(|(k, attr)| { + let segment = attr.path().segments.last()?; + let _: keywords::block = syn::parse(segment.ident.to_token_stream().into()).ok()?; + let mut block = block.clone(); + + // consume #[block] tokens + block.attrs.remove(k); + + Some(Ok((i, BenchmarkCallDef::Block { block, attr_span: attr.span() }))) + }) + } else { + None + } + }).collect::>>()?; + Ok(match &call_defs[..] 
{ + [(i, call_def)] => (*i, call_def.clone()), // = 1 + [] => return missing_call(item_fn), + _ => + return Err(Error::new( + call_defs[1].1.attr_span(), + "Only one #[extrinsic_call] or #[block] attribute is allowed per benchmark.", + )), + }) +} + +impl BenchmarkDef { + /// Constructs a [`BenchmarkDef`] by traversing an existing [`ItemFn`] node. + pub fn from(item_fn: &ItemFn) -> Result { + let params = parse_params(item_fn)?; + ensure_valid_return_type(item_fn)?; + let (i, call_def) = parse_call_def(&item_fn)?; + + let (verify_stmts, last_stmt) = match item_fn.sig.output { + ReturnType::Default => + // no return type, last_stmt should be None + (Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len()]), None), + ReturnType::Type(_, _) => { + // defined return type, last_stmt should be Result<(), BenchmarkError> + // compatible and should not be included in verify_stmts + if i + 1 >= item_fn.block.stmts.len() { + return Err(Error::new( + item_fn.block.span(), + "Benchmark `#[block]` or `#[extrinsic_call]` item cannot be the \ + last statement of your benchmark function definition if you have \ + defined a return type. You should return something compatible \ + with Result<(), BenchmarkError> (i.e. 
`Ok(())`) as the last statement \ + or change your signature to a blank return type.", + )) + } + let Some(stmt) = item_fn.block.stmts.last() else { return missing_call(item_fn) }; + ( + Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len() - 1]), + Some(stmt.clone()), + ) + }, + }; + + Ok(BenchmarkDef { + params, + setup_stmts: Vec::from(&item_fn.block.stmts[0..i]), + call_def, + verify_stmts, + last_stmt, + fn_sig: item_fn.sig.clone(), + fn_vis: item_fn.vis.clone(), + fn_attrs: item_fn.attrs.clone(), + }) + } +} + +/// Parses and expands a `#[benchmarks]` or `#[instance_benchmarks]` invocation +pub fn benchmarks( + attrs: TokenStream, + tokens: TokenStream, + instance: bool, +) -> syn::Result { + let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; + // gather module info + let module: ItemMod = syn::parse(tokens)?; + let mod_span = module.span(); + let where_clause = match syn::parse::(attrs.clone()) { + Ok(_) => quote!(), + Err(_) => syn::parse::(attrs)?.predicates.to_token_stream(), + }; + let mod_vis = module.vis; + let mod_name = module.ident; + + // consume #[benchmarks] attribute by excluding it from mod_attrs + let mod_attrs: Vec<&Attribute> = module + .attrs + .iter() + .filter(|attr| !attr.path().is_ident(keywords::BENCHMARKS_TOKEN)) + .collect(); + + let mut benchmark_names: Vec = Vec::new(); + let mut extra_benchmark_names: Vec = Vec::new(); + let mut skip_meta_benchmark_names: Vec = Vec::new(); + // Map benchmarks to PoV modes. 
+ let mut pov_modes = Vec::new(); + + let (_brace, mut content) = + module.content.ok_or(syn::Error::new(mod_span, "Module cannot be empty!"))?; + + // find all function defs marked with #[benchmark] + let benchmark_fn_metas = content.iter_mut().filter_map(|stmt| { + // parse as a function def first + let Item::Fn(func) = stmt else { return None }; + + // find #[benchmark] attribute on function def + let benchmark_attr = + func.attrs.iter().find(|attr| attr.path().is_ident(keywords::BENCHMARK_TOKEN))?; + + Some((benchmark_attr.clone(), func.clone(), stmt)) + }); + + // parse individual benchmark defs and args + for (benchmark_attr, func, stmt) in benchmark_fn_metas { + // parse benchmark def + let benchmark_def = BenchmarkDef::from(&func)?; + + // record benchmark name + let name = &func.sig.ident; + benchmark_names.push(name.clone()); + + // Check if we need to parse any args + if benchmark_attr.meta.require_path_only().is_err() { + // parse any args provided to #[benchmark] + let benchmark_attrs: BenchmarkAttrs = benchmark_attr.parse_args()?; + + // record name sets + if benchmark_attrs.extra { + extra_benchmark_names.push(name.clone()); + } else if benchmark_attrs.skip_meta { + skip_meta_benchmark_names.push(name.clone()); + } + + if let Some(mode) = benchmark_attrs.pov_mode { + let mut modes = Vec::new(); + // We cannot expand strings here since it is no-std, but syn does not expand bytes. 
+ let name = name.to_string(); + let m = mode.root.to_string(); + modes.push(quote!(("ALL".as_bytes().to_vec(), #m.as_bytes().to_vec()))); + + for attr in mode.per_key.iter() { + // syn always puts spaces in quoted paths: + let key = attr.key.clone().into_token_stream().to_string().replace(" ", ""); + let mode = attr.mode.to_string(); + modes.push(quote!((#key.as_bytes().to_vec(), #mode.as_bytes().to_vec()))); + } + + pov_modes.push( + quote!((#name.as_bytes().to_vec(), #krate::__private::vec![#(#modes),*])), + ); + } + } + + // expand benchmark + let expanded = expand_benchmark(benchmark_def, name, instance, where_clause.clone()); + + // replace original function def with expanded code + *stmt = Item::Verbatim(expanded); + } + + // generics + let type_use_generics = match instance { + false => quote!(T), + true => quote!(T, I), + }; + let type_impl_generics = match instance { + false => quote!(T: Config), + true => quote!(T: Config, I: 'static), + }; + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + + // benchmark name variables + let benchmark_names_str: Vec = benchmark_names.iter().map(|n| n.to_string()).collect(); + let extra_benchmark_names_str: Vec = + extra_benchmark_names.iter().map(|n| n.to_string()).collect(); + let skip_meta_benchmark_names_str: Vec = + skip_meta_benchmark_names.iter().map(|n| n.to_string()).collect(); + let mut selected_benchmark_mappings: Vec = Vec::new(); + let mut benchmarks_by_name_mappings: Vec = Vec::new(); + let test_idents: Vec = benchmark_names_str + .iter() + .map(|n| Ident::new(format!("test_benchmark_{}", n).as_str(), Span::call_site())) + .collect(); + for i in 0..benchmark_names.len() { + let name_ident = &benchmark_names[i]; + let name_str = &benchmark_names_str[i]; + let test_ident = &test_idents[i]; + selected_benchmark_mappings.push(quote!(#name_str => SelectedBenchmark::#name_ident)); + benchmarks_by_name_mappings.push(quote!(#name_str => Self::#test_ident())) + } + + let 
impl_test_function = content + .iter_mut() + .find_map(|item| { + let Item::Macro(item_macro) = item else { + return None; + }; + + if !item_macro + .mac + .path + .segments + .iter() + .any(|s| s.ident == "impl_benchmark_test_suite") + { + return None; + } + + let tokens = item_macro.mac.tokens.clone(); + *item = Item::Verbatim(quote! {}); + + Some(quote! { + impl_test_function!( + (#( {} #benchmark_names )*) + (#( #extra_benchmark_names )*) + (#( #skip_meta_benchmark_names )*) + #tokens + ); + }) + }) + .unwrap_or(quote! {}); + + // emit final quoted tokens + let res = quote! { + #(#mod_attrs) + * + #mod_vis mod #mod_name { + #(#content) + * + + #[allow(non_camel_case_types)] + enum SelectedBenchmark { + #(#benchmark_names), + * + } + + impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> for SelectedBenchmark where #where_clause { + fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { + match self { + #( + Self::#benchmark_names => { + <#benchmark_names as #krate::BenchmarkingSetup<#type_use_generics>>::components(&#benchmark_names) + } + ) + * + } + } + + fn instance( + &self, + components: &[(#krate::BenchmarkParameter, u32)], + verify: bool, + ) -> Result< + #krate::__private::Box Result<(), #krate::BenchmarkError>>, + #krate::BenchmarkError, + > { + match self { + #( + Self::#benchmark_names => { + <#benchmark_names as #krate::BenchmarkingSetup< + #type_use_generics + >>::instance(&#benchmark_names, components, verify) + } + ) + * + } + } + } + #[cfg(any(feature = "runtime-benchmarks", test))] + impl<#type_impl_generics> #krate::Benchmarking for Pallet<#type_use_generics> + where T: #frame_system::Config, #where_clause + { + fn benchmarks( + extra: bool, + ) -> #krate::__private::Vec<#krate::BenchmarkMetadata> { + let mut all_names = #krate::__private::vec![ + #(#benchmark_names_str), + * + ]; + if !extra { + let extra = [ + #(#extra_benchmark_names_str), + * + ]; + all_names.retain(|x| 
!extra.contains(x)); + } + let pov_modes: + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec + )>, + )> = #krate::__private::vec![ + #( #pov_modes ),* + ]; + all_names.into_iter().map(|benchmark| { + let selected_benchmark = match benchmark { + #(#selected_benchmark_mappings), + *, + _ => panic!("all benchmarks should be selectable") + }; + let components = >::components(&selected_benchmark); + let name = benchmark.as_bytes().to_vec(); + let modes = pov_modes.iter().find(|p| p.0 == name).map(|p| p.1.clone()); + + #krate::BenchmarkMetadata { + name: benchmark.as_bytes().to_vec(), + components, + pov_modes: modes.unwrap_or_default(), + } + }).collect::<#krate::__private::Vec<_>>() + } + + fn run_benchmark( + extrinsic: &[u8], + c: &[(#krate::BenchmarkParameter, u32)], + whitelist: &[#krate::__private::TrackedStorageKey], + verify: bool, + internal_repeats: u32, + ) -> Result<#krate::__private::Vec<#krate::BenchmarkResult>, #krate::BenchmarkError> { + let extrinsic = #krate::__private::str::from_utf8(extrinsic).map_err(|_| "`extrinsic` is not a valid utf-8 string!")?; + let selected_benchmark = match extrinsic { + #(#selected_benchmark_mappings), + *, + _ => return Err("Could not find extrinsic.".into()), + }; + let mut whitelist = whitelist.to_vec(); + let whitelisted_caller_key = <#frame_system::Account< + T, + > as #krate::__private::storage::StorageMap<_, _,>>::hashed_key_for( + #krate::whitelisted_caller::() + ); + whitelist.push(whitelisted_caller_key.into()); + let transactional_layer_key = #krate::__private::TrackedStorageKey::new( + #krate::__private::storage::transactional::TRANSACTION_LEVEL_KEY.into(), + ); + whitelist.push(transactional_layer_key); + // Whitelist the `:extrinsic_index`. 
+ let extrinsic_index = #krate::__private::TrackedStorageKey::new( + #krate::__private::well_known_keys::EXTRINSIC_INDEX.into() + ); + whitelist.push(extrinsic_index); + // Whitelist the `:intrablock_entropy`. + let intrablock_entropy = #krate::__private::TrackedStorageKey::new( + #krate::__private::well_known_keys::INTRABLOCK_ENTROPY.into() + ); + whitelist.push(intrablock_entropy); + + #krate::benchmarking::set_whitelist(whitelist.clone()); + let mut results: #krate::__private::Vec<#krate::BenchmarkResult> = #krate::__private::Vec::new(); + + // Always do at least one internal repeat... + for _ in 0 .. internal_repeats.max(1) { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); + + // Set up the externalities environment for the setup we want to + // benchmark. + let closure_to_benchmark = < + SelectedBenchmark as #krate::BenchmarkingSetup<#type_use_generics> + >::instance(&selected_benchmark, c, verify)?; + + // Set the block number to at least 1 so events are deposited. + if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + + // Commit the externalities to the database, flushing the DB cache. + // This will enable worst case scenario for reading from the database. + #krate::benchmarking::commit_db(); + + // Access all whitelisted keys to get them into the proof recorder since the + // recorder does now have a whitelist. + for key in &whitelist { + #krate::__private::storage::unhashed::get_raw(&key.key); + } + + // Reset the read/write counter so we don't count operations in the setup process. + #krate::benchmarking::reset_read_write_count(); + + // Time the extrinsic logic. 
+ #krate::__private::log::trace!( + target: "benchmark", + "Start Benchmark: {} ({:?})", + extrinsic, + c + ); + + let start_pov = #krate::benchmarking::proof_size(); + let start_extrinsic = #krate::benchmarking::current_time(); + + closure_to_benchmark()?; + + let finish_extrinsic = #krate::benchmarking::current_time(); + let end_pov = #krate::benchmarking::proof_size(); + + // Calculate the diff caused by the benchmark. + let elapsed_extrinsic = finish_extrinsic.saturating_sub(start_extrinsic); + let diff_pov = match (start_pov, end_pov) { + (Some(start), Some(end)) => end.saturating_sub(start), + _ => Default::default(), + }; + + // Commit the changes to get proper write count + #krate::benchmarking::commit_db(); + #krate::__private::log::trace!( + target: "benchmark", + "End Benchmark: {} ns", elapsed_extrinsic + ); + let read_write_count = #krate::benchmarking::read_write_count(); + #krate::__private::log::trace!( + target: "benchmark", + "Read/Write Count {:?}", read_write_count + ); + + // Time the storage root recalculation. 
+ let start_storage_root = #krate::benchmarking::current_time(); + #krate::__private::storage_root(#krate::__private::StateVersion::V1); + let finish_storage_root = #krate::benchmarking::current_time(); + let elapsed_storage_root = finish_storage_root - start_storage_root; + + let skip_meta = [ #(#skip_meta_benchmark_names_str),* ]; + let read_and_written_keys = if skip_meta.contains(&extrinsic) { + #krate::__private::vec![(b"Skipped Metadata".to_vec(), 0, 0, false)] + } else { + #krate::benchmarking::get_read_and_written_keys() + }; + + results.push(#krate::BenchmarkResult { + components: c.to_vec(), + extrinsic_time: elapsed_extrinsic, + storage_root_time: elapsed_storage_root, + reads: read_write_count.0, + repeat_reads: read_write_count.1, + writes: read_write_count.2, + repeat_writes: read_write_count.3, + proof_size: diff_pov, + keys: read_and_written_keys, + }); + } + + return Ok(results); + } + } + + #[cfg(test)] + impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { + /// Test a particular benchmark by name. + /// + /// This isn't called `test_benchmark_by_name` just in case some end-user eventually + /// writes a benchmark, itself called `by_name`; the function would be shadowed in + /// that case. + /// + /// This is generally intended to be used by child test modules such as those created + /// by the `impl_benchmark_test_suite` macro. However, it is not an error if a pallet + /// author chooses not to implement benchmarks. 
+ #[allow(unused)] + fn test_bench_by_name(name: &[u8]) -> Result<(), #krate::BenchmarkError> { + let name = #krate::__private::str::from_utf8(name) + .map_err(|_| -> #krate::BenchmarkError { "`name` is not a valid utf8 string!".into() })?; + match name { + #(#benchmarks_by_name_mappings), + *, + _ => Err("Could not find test for requested benchmark.".into()), + } + } + } + + #impl_test_function + } + #mod_vis use #mod_name::*; + }; + Ok(res.into()) +} + +/// Prepares a [`Vec`] to be interpolated by [`quote!`] by creating easily-iterable +/// arrays formatted in such a way that they can be interpolated directly. +struct UnrolledParams { + param_ranges: Vec, + param_names: Vec, +} + +impl UnrolledParams { + /// Constructs an [`UnrolledParams`] from a [`Vec`] + fn from(params: &Vec) -> UnrolledParams { + let param_ranges: Vec = params + .iter() + .map(|p| { + let name = Ident::new(&p.name, Span::call_site()); + let start = &p.start; + let end = &p.end; + quote!(#name, #start, #end) + }) + .collect(); + let param_names: Vec = params + .iter() + .map(|p| { + let name = Ident::new(&p.name, Span::call_site()); + quote!(#name) + }) + .collect(); + UnrolledParams { param_ranges, param_names } + } +} + +/// Performs expansion of an already-parsed [`BenchmarkDef`]. 
+fn expand_benchmark( + benchmark_def: BenchmarkDef, + name: &Ident, + is_instance: bool, + where_clause: TokenStream2, +) -> TokenStream2 { + // set up variables needed during quoting + let krate = match generate_access_from_frame_or_crate("frame-benchmarking") { + Ok(ident) => ident, + Err(err) => return err.to_compile_error().into(), + }; + let frame_system = match generate_access_from_frame_or_crate("frame-system") { + Ok(path) => path, + Err(err) => return err.to_compile_error().into(), + }; + let codec = quote!(#krate::__private::codec); + let traits = quote!(#krate::__private::traits); + let setup_stmts = benchmark_def.setup_stmts; + let verify_stmts = benchmark_def.verify_stmts; + let last_stmt = benchmark_def.last_stmt; + let test_ident = + Ident::new(format!("test_benchmark_{}", name.to_string()).as_str(), Span::call_site()); + + // unroll params (prepare for quoting) + let unrolled = UnrolledParams::from(&benchmark_def.params); + let param_names = unrolled.param_names; + let param_ranges = unrolled.param_ranges; + + let type_use_generics = match is_instance { + false => quote!(T), + true => quote!(T, I), + }; + + let type_impl_generics = match is_instance { + false => quote!(T: Config), + true => quote!(T: Config, I: 'static), + }; + + // used in the benchmarking impls + let (pre_call, post_call, fn_call_body) = match &benchmark_def.call_def { + BenchmarkCallDef::ExtrinsicCall { origin, expr_call, attr_span: _ } => { + let mut expr_call = expr_call.clone(); + + // remove first arg from expr_call + let mut final_args = Punctuated::::new(); + let args: Vec<&Expr> = expr_call.args.iter().collect(); + for arg in &args[1..] { + final_args.push((*(*arg)).clone()); + } + expr_call.args = final_args; + + let origin = match origin { + Expr::Cast(t) => { + let ty = t.ty.clone(); + quote! { + <::RuntimeOrigin as From<#ty>>::from(#origin); + } + }, + _ => quote! 
{ + #origin.into(); + }, + }; + + // determine call name (handles `_` and normal call syntax) + let expr_span = expr_call.span(); + let call_err = || { + syn::Error::new(expr_span, "Extrinsic call must be a function call or `_`") + .to_compile_error() + }; + let call_name = match *expr_call.func { + Expr::Path(expr_path) => { + // normal function call + let Some(segment) = expr_path.path.segments.last() else { return call_err() }; + segment.ident.to_string() + }, + Expr::Infer(_) => { + // `_` style + // replace `_` with fn name + name.to_string() + }, + _ => return call_err(), + }; + + // modify extrinsic call to be prefixed with "new_call_variant" + let call_name = format!("new_call_variant_{}", call_name); + let mut punct: Punctuated = Punctuated::new(); + punct.push(PathSegment { + arguments: PathArguments::None, + ident: Ident::new(call_name.as_str(), Span::call_site()), + }); + *expr_call.func = Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: Path { leading_colon: None, segments: punct }, + }); + let pre_call = quote! { + let __call = Call::<#type_use_generics>::#expr_call; + let __benchmarked_call_encoded = #codec::Encode::encode(&__call); + }; + let post_call = quote! { + let __call_decoded = as #codec::Decode> + ::decode(&mut &__benchmarked_call_encoded[..]) + .expect("call is encoded above, encoding must be correct"); + let __origin = #origin; + as #traits::UnfilteredDispatchable>::dispatch_bypass_filter( + __call_decoded, + __origin, + ) + }; + ( + // (pre_call, post_call, fn_call_body): + pre_call.clone(), + quote!(#post_call?;), + quote! 
{ + #pre_call + #post_call.unwrap(); + }, + ) + }, + BenchmarkCallDef::Block { block, attr_span: _ } => + (quote!(), quote!(#block), quote!(#block)), + }; + + let vis = benchmark_def.fn_vis; + + // remove #[benchmark] attribute + let fn_attrs = benchmark_def + .fn_attrs + .iter() + .filter(|attr| !attr.path().is_ident(keywords::BENCHMARK_TOKEN)); + + // modify signature generics, ident, and inputs, e.g: + // before: `fn bench(u: Linear<1, 100>) -> Result<(), BenchmarkError>` + // after: `fn _bench , I: 'static>(u: u32, verify: bool) -> Result<(), + // BenchmarkError>` + let mut sig = benchmark_def.fn_sig; + sig.generics = parse_quote!(<#type_impl_generics>); + if !where_clause.is_empty() { + sig.generics.where_clause = parse_quote!(where #where_clause); + } + sig.ident = + Ident::new(format!("_{}", name.to_token_stream().to_string()).as_str(), Span::call_site()); + let mut fn_param_inputs: Vec = + param_names.iter().map(|name| quote!(#name: u32)).collect(); + fn_param_inputs.push(quote!(verify: bool)); + sig.inputs = parse_quote!(#(#fn_param_inputs),*); + + // used in instance() impl + let impl_last_stmt = match &last_stmt { + Some(stmt) => quote!(#stmt), + None => quote!(Ok(())), + }; + let fn_attrs_clone = fn_attrs.clone(); + + let fn_def = quote! { + #( + #fn_attrs_clone + )* + #vis #sig { + #( + #setup_stmts + )* + #fn_call_body + if verify { + #( + #verify_stmts + )* + } + #last_stmt + } + }; + + // generate final quoted tokens + let res = quote! { + // benchmark function definition + #fn_def + + #[allow(non_camel_case_types)] + #( + #fn_attrs + )* + struct #name; + + #[allow(unused_variables)] + impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> + for #name where #where_clause { + fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { + #krate::__private::vec! 
[ + #( + (#krate::BenchmarkParameter::#param_ranges) + ),* + ] + } + + fn instance( + &self, + components: &[(#krate::BenchmarkParameter, u32)], + verify: bool + ) -> Result<#krate::__private::Box Result<(), #krate::BenchmarkError>>, #krate::BenchmarkError> { + #( + // prepare instance #param_names + let #param_names = components.iter() + .find(|&c| c.0 == #krate::BenchmarkParameter::#param_names) + .ok_or("Could not find component during benchmark preparation.")? + .1; + )* + + // benchmark setup code + #( + #setup_stmts + )* + #pre_call + Ok(#krate::__private::Box::new(move || -> Result<(), #krate::BenchmarkError> { + #post_call + if verify { + #( + #verify_stmts + )* + } + #impl_last_stmt + })) + } + } + + #[cfg(test)] + impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { + #[allow(unused)] + fn #test_ident() -> Result<(), #krate::BenchmarkError> { + let selected_benchmark = SelectedBenchmark::#name; + let components = < + SelectedBenchmark as #krate::BenchmarkingSetup + >::components(&selected_benchmark); + let execute_benchmark = | + c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> + | -> Result<(), #krate::BenchmarkError> { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); + + // Set up the benchmark, return execution + verification function. + let closure_to_verify = < + SelectedBenchmark as #krate::BenchmarkingSetup + >::instance(&selected_benchmark, &c, true)?; + + // Set the block number to at least 1 so events are deposited. 
+ if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + + // Run execution + verification + closure_to_verify() + }; + + if components.is_empty() { + execute_benchmark(Default::default())?; + } else { + let num_values: u32 = if let Ok(ev) = std::env::var("VALUES_PER_COMPONENT") { + ev.parse().map_err(|_| { + #krate::BenchmarkError::Stop( + "Could not parse env var `VALUES_PER_COMPONENT` as u32." + ) + })? + } else { + 6 + }; + + if num_values < 2 { + return Err("`VALUES_PER_COMPONENT` must be at least 2".into()); + } + + for (name, low, high) in components.clone().into_iter() { + // Test the lowest, highest (if its different from the lowest) + // and up to num_values-2 more equidistant values in between. + // For 0..10 and num_values=6 this would mean: [0, 2, 4, 6, 8, 10] + if high < low { + return Err("The start of a `ParamRange` must be less than or equal to the end".into()); + } + + let mut values = #krate::__private::vec![low]; + let diff = (high - low).min(num_values - 1); + let slope = (high - low) as f32 / diff as f32; + + for i in 1..=diff { + let value = ((low as f32 + slope * i as f32) as u32) + .clamp(low, high); + values.push(value); + } + + for component_value in values { + // Select the max value for all the other components. + let c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> = components + .iter() + .map(|(n, _, h)| + if *n == name { + (*n, component_value) + } else { + (*n, *h) + } + ) + .collect(); + + execute_benchmark(c)?; + } + } + } + return Ok(()); + } + } + }; + res +} diff --git a/support/procedural-fork/src/construct_runtime/expand/call.rs b/support/procedural-fork/src/construct_runtime/expand/call.rs new file mode 100644 index 000000000..b0041ccc0 --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/call.rs @@ -0,0 +1,223 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::Pallet; +use proc_macro2::TokenStream; +use quote::quote; +use std::str::FromStr; +use syn::Ident; + +pub fn expand_outer_dispatch( + runtime: &Ident, + system_pallet: &Pallet, + pallet_decls: &[Pallet], + scrate: &TokenStream, +) -> TokenStream { + let mut variant_defs = TokenStream::new(); + let mut variant_patterns = Vec::new(); + let mut query_call_part_macros = Vec::new(); + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let system_path = &system_pallet.path; + + let pallets_with_call = pallet_decls.iter().filter(|decl| decl.exists_part("Call")); + + for pallet_declaration in pallets_with_call { + let name = &pallet_declaration.name; + let path = &pallet_declaration.path; + let index = pallet_declaration.index; + let attr = + pallet_declaration.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + variant_defs.extend(quote! { + #attr + #[codec(index = #index)] + #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ), + }); + variant_patterns.push(quote!(RuntimeCall::#name(call))); + pallet_names.push(name); + pallet_attrs.push(attr); + query_call_part_macros.push(quote! 
{ + #path::__substrate_call_check::is_call_part_defined!(#name); + }); + } + + quote! { + #( #query_call_part_macros )* + + #[derive( + Clone, PartialEq, Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeCall { + #variant_defs + } + #[cfg(test)] + impl RuntimeCall { + /// Return a list of the module names together with their size in memory. + pub const fn sizes() -> &'static [( &'static str, usize )] { + use #scrate::dispatch::Callable; + use core::mem::size_of; + &[#( + #pallet_attrs + ( + stringify!(#pallet_names), + size_of::< <#pallet_names as Callable<#runtime>>::RuntimeCall >(), + ), + )*] + } + + /// Panics with diagnostic information if the size is greater than the given `limit`. + pub fn assert_size_under(limit: usize) { + let size = core::mem::size_of::(); + let call_oversize = size > limit; + if call_oversize { + println!("Size of `Call` is {} bytes (provided limit is {} bytes)", size, limit); + let mut sizes = Self::sizes().to_vec(); + sizes.sort_by_key(|x| -(x.1 as isize)); + for (i, &(name, size)) in sizes.iter().enumerate().take(5) { + println!("Offender #{}: {} at {} bytes", i + 1, name, size); + } + if let Some((_, next_size)) = sizes.get(5) { + println!("{} others of size {} bytes or less", sizes.len() - 5, next_size); + } + panic!( + "Size of `Call` is more than limit; use `Box` on complex parameter types to reduce the + size of `Call`. + If the limit is too strong, maybe consider providing a higher limit." 
+ ); + } + } + } + impl #scrate::dispatch::GetDispatchInfo for RuntimeCall { + fn get_dispatch_info(&self) -> #scrate::dispatch::DispatchInfo { + match self { + #( + #pallet_attrs + #variant_patterns => call.get_dispatch_info(), + )* + } + } + } + + impl #scrate::dispatch::CheckIfFeeless for RuntimeCall { + type Origin = #system_path::pallet_prelude::OriginFor<#runtime>; + fn is_feeless(&self, origin: &Self::Origin) -> bool { + match self { + #( + #pallet_attrs + #variant_patterns => call.is_feeless(origin), + )* + } + } + } + + impl #scrate::traits::GetCallMetadata for RuntimeCall { + fn get_call_metadata(&self) -> #scrate::traits::CallMetadata { + use #scrate::traits::GetCallName; + match self { + #( + #pallet_attrs + #variant_patterns => { + let function_name = call.get_call_name(); + let pallet_name = stringify!(#pallet_names); + #scrate::traits::CallMetadata { function_name, pallet_name } + } + )* + } + } + + fn get_module_names() -> &'static [&'static str] { + &[#( + #pallet_attrs + stringify!(#pallet_names), + )*] + } + + fn get_call_names(module: &str) -> &'static [&'static str] { + use #scrate::{dispatch::Callable, traits::GetCallName}; + match module { + #( + #pallet_attrs + stringify!(#pallet_names) => + <<#pallet_names as Callable<#runtime>>::RuntimeCall + as GetCallName>::get_call_names(), + )* + _ => unreachable!(), + } + } + } + impl #scrate::__private::Dispatchable for RuntimeCall { + type RuntimeOrigin = RuntimeOrigin; + type Config = RuntimeCall; + type Info = #scrate::dispatch::DispatchInfo; + type PostInfo = #scrate::dispatch::PostDispatchInfo; + fn dispatch(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { + if !::filter_call(&origin, &self) { + return ::core::result::Result::Err( + #system_path::Error::<#runtime>::CallFiltered.into() + ); + } + + #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(self, origin) + } + } + impl #scrate::traits::UnfilteredDispatchable for RuntimeCall { + type 
RuntimeOrigin = RuntimeOrigin; + fn dispatch_bypass_filter(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { + match self { + #( + #pallet_attrs + #variant_patterns => + #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(call, origin), + )* + } + } + } + + #( + #pallet_attrs + impl #scrate::traits::IsSubType<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { + #[allow(unreachable_patterns)] + fn is_sub_type(&self) -> Option<&#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> { + match self { + #variant_patterns => Some(call), + // May be unreachable + _ => None, + } + } + } + + #pallet_attrs + impl From<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { + fn from(call: #scrate::dispatch::CallableCallFor<#pallet_names, #runtime>) -> Self { + #variant_patterns + } + } + )* + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs b/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs new file mode 100644 index 000000000..101a476fb --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs @@ -0,0 +1,101 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::parse::PalletPath; +use proc_macro2::{Ident, TokenStream}; +use quote::quote; + +pub(crate) fn expand_conversion_fn( + composite_name: &str, + path: &PalletPath, + instance: Option<&Ident>, + variant_name: &Ident, +) -> TokenStream { + let composite_name = quote::format_ident!("{}", composite_name); + let runtime_composite_name = quote::format_ident!("Runtime{}", composite_name); + + if let Some(inst) = instance { + quote! { + impl From<#path::#composite_name<#path::#inst>> for #runtime_composite_name { + fn from(hr: #path::#composite_name<#path::#inst>) -> Self { + #runtime_composite_name::#variant_name(hr) + } + } + } + } else { + quote! { + impl From<#path::#composite_name> for #runtime_composite_name { + fn from(hr: #path::#composite_name) -> Self { + #runtime_composite_name::#variant_name(hr) + } + } + } + } +} + +pub(crate) fn expand_variant( + composite_name: &str, + index: u8, + path: &PalletPath, + instance: Option<&Ident>, + variant_name: &Ident, +) -> TokenStream { + let composite_name = quote::format_ident!("{}", composite_name); + + if let Some(inst) = instance { + quote! { + #[codec(index = #index)] + #variant_name(#path::#composite_name<#path::#inst>), + } + } else { + quote! { + #[codec(index = #index)] + #variant_name(#path::#composite_name), + } + } +} + +pub(crate) fn expand_variant_count( + composite_name: &str, + path: &PalletPath, + instance: Option<&Ident>, +) -> TokenStream { + let composite_name = quote::format_ident!("{}", composite_name); + + if let Some(inst) = instance { + quote! { + #path::#composite_name::<#path::#inst>::VARIANT_COUNT + } + } else { + // Wrapped `<`..`>` means: use default type parameter for enum. 
+ // + // This is used for pallets without instance support or pallets with instance support when + // we don't specify instance: + // + // ``` + // pub struct Pallet{..} + // + // #[pallet::composite_enum] + // pub enum HoldReason {..} + // + // Pallet1: pallet_x, // <- default type parameter + // ``` + quote! { + <#path::#composite_name>::VARIANT_COUNT + } + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/config.rs b/support/procedural-fork/src/construct_runtime/expand/config.rs new file mode 100644 index 000000000..dbbe6ba6e --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/config.rs @@ -0,0 +1,147 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::Pallet; +use inflector::Inflector; +use proc_macro2::TokenStream; +use quote::{format_ident, quote, ToTokens}; +use std::str::FromStr; +use syn::Ident; + +pub fn expand_outer_config( + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, +) -> TokenStream { + let mut types = TokenStream::new(); + let mut fields = TokenStream::new(); + let mut genesis_build_calls = TokenStream::new(); + let mut query_genesis_config_part_macros = Vec::new(); + + for decl in pallet_decls { + if let Some(pallet_entry) = decl.find_part("Config") { + let path = &decl.path; + let pallet_name = &decl.name; + let path_str = path.into_token_stream().to_string(); + let config = format_ident!("{}Config", pallet_name); + let field_name = + &Ident::new(&pallet_name.to_string().to_snake_case(), decl.name.span()); + let part_is_generic = !pallet_entry.generics.params.is_empty(); + let attr = &decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + types.extend(expand_config_types(attr, runtime, decl, &config, part_is_generic)); + fields.extend(quote!(#attr pub #field_name: #config,)); + genesis_build_calls + .extend(expand_config_build_storage_call(scrate, &config, attr, field_name)); + query_genesis_config_part_macros.push(quote! { + #path::__substrate_genesis_config_check::is_genesis_config_defined!(#pallet_name); + #[cfg(feature = "std")] + #path::__substrate_genesis_config_check::is_std_enabled_for_genesis!(#pallet_name, #path_str); + }); + } + } + + quote! 
{ + #( #query_genesis_config_part_macros )* + + #types + + use #scrate::__private::serde as __genesis_config_serde_import__; + #[derive(#scrate::__private::serde::Serialize, #scrate::__private::serde::Deserialize, Default)] + #[serde(rename_all = "camelCase")] + #[serde(deny_unknown_fields)] + #[serde(crate = "__genesis_config_serde_import__")] + pub struct RuntimeGenesisConfig { + #fields + } + + #[cfg(any(feature = "std", test))] + impl #scrate::sp_runtime::BuildStorage for RuntimeGenesisConfig { + fn assimilate_storage( + &self, + storage: &mut #scrate::sp_runtime::Storage, + ) -> std::result::Result<(), String> { + #scrate::__private::BasicExternalities::execute_with_storage(storage, || { + ::build(&self); + Ok(()) + }) + } + } + + impl #scrate::traits::BuildGenesisConfig for RuntimeGenesisConfig { + fn build(&self) { + #genesis_build_calls + ::on_genesis(); + } + } + + /// Test the `Default` derive impl of the `RuntimeGenesisConfig`. + #[cfg(test)] + #[test] + fn test_genesis_config_builds() { + #scrate::__private::sp_io::TestExternalities::default().execute_with(|| { + ::build( + &RuntimeGenesisConfig::default() + ); + }); + } + } +} + +fn expand_config_types( + attr: &TokenStream, + runtime: &Ident, + decl: &Pallet, + config: &Ident, + part_is_generic: bool, +) -> TokenStream { + let path = &decl.path; + + match (decl.instance.as_ref(), part_is_generic) { + (Some(inst), true) => quote! { + #attr + pub type #config = #path::GenesisConfig<#runtime, #path::#inst>; + }, + (None, true) => quote! { + #attr + pub type #config = #path::GenesisConfig<#runtime>; + }, + (_, false) => quote! { + #attr + pub type #config = #path::GenesisConfig; + }, + } +} + +fn expand_config_build_storage_call( + scrate: &TokenStream, + pallet_genesis_config: &Ident, + attr: &TokenStream, + field_name: &Ident, +) -> TokenStream { + quote! 
{ + #attr + <#pallet_genesis_config as #scrate::traits::BuildGenesisConfig>::build(&self.#field_name); + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs new file mode 100644 index 000000000..f12f99526 --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs @@ -0,0 +1,75 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License + +use super::composite_helper; +use crate::construct_runtime::Pallet; +use proc_macro2::TokenStream; +use quote::quote; + +pub fn expand_outer_freeze_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { + let mut conversion_fns = Vec::new(); + let mut freeze_reason_variants = Vec::new(); + let mut freeze_reason_variants_count = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("FreezeReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); + + conversion_fns.push(composite_helper::expand_conversion_fn( + "FreezeReason", + path, + instance, + variant_name, + )); + + freeze_reason_variants.push(composite_helper::expand_variant( + "FreezeReason", + index, + path, + instance, + variant_name, + )); + + freeze_reason_variants_count.push(composite_helper::expand_variant_count( + "FreezeReason", + path, + instance, + )); + } + } + + quote! { + /// A reason for placing a freeze on funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeFreezeReason { + #( #freeze_reason_variants )* + } + + impl #scrate::traits::VariantCount for RuntimeFreezeReason { + const VARIANT_COUNT: u32 = 0 #( + #freeze_reason_variants_count )*; + } + + #( #conversion_fns )* + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs new file mode 100644 index 000000000..cdab92712 --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs @@ -0,0 +1,75 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License + +use super::composite_helper; +use crate::construct_runtime::Pallet; +use proc_macro2::TokenStream; +use quote::quote; + +pub fn expand_outer_hold_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { + let mut conversion_fns = Vec::new(); + let mut hold_reason_variants = Vec::new(); + let mut hold_reason_variants_count = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("HoldReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); + + conversion_fns.push(composite_helper::expand_conversion_fn( + "HoldReason", + path, + instance, + variant_name, + )); + + hold_reason_variants.push(composite_helper::expand_variant( + "HoldReason", + index, + path, + instance, + variant_name, + )); + + hold_reason_variants_count.push(composite_helper::expand_variant_count( + "HoldReason", + path, + instance, + )); + } + } + + quote! { + /// A reason for placing a hold on funds. 
+ #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeHoldReason { + #( #hold_reason_variants )* + } + + impl #scrate::traits::VariantCount for RuntimeHoldReason { + const VARIANT_COUNT: u32 = 0 #( + #hold_reason_variants_count )*; + } + + #( #conversion_fns )* + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/inherent.rs b/support/procedural-fork/src/construct_runtime/expand/inherent.rs new file mode 100644 index 000000000..da483fa6c --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/inherent.rs @@ -0,0 +1,254 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::Pallet; +use proc_macro2::TokenStream; +use quote::quote; +use std::str::FromStr; +use syn::Ident; + +pub fn expand_outer_inherent( + runtime: &Ident, + block: &TokenStream, + unchecked_extrinsic: &TokenStream, + pallet_decls: &[Pallet], + scrate: &TokenStream, +) -> TokenStream { + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let mut query_inherent_part_macros = Vec::new(); + + for pallet_decl in pallet_decls { + if pallet_decl.exists_part("Inherent") { + let name = &pallet_decl.name; + let path = &pallet_decl.path; + let attr = pallet_decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + pallet_names.push(name); + pallet_attrs.push(attr); + query_inherent_part_macros.push(quote! { + #path::__substrate_inherent_check::is_inherent_part_defined!(#name); + }); + } + } + + quote! 
{
+        #( #query_inherent_part_macros )*
+
+        trait InherentDataExt {
+            fn create_extrinsics(&self) ->
+                #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>;
+            fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult;
+        }
+
+        impl InherentDataExt for #scrate::inherent::InherentData {
+            fn create_extrinsics(&self) ->
+                #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>
+            {
+                use #scrate::inherent::ProvideInherent;
+
+                let mut inherents = #scrate::__private::sp_std::vec::Vec::new();
+
+                #(
+                    #pallet_attrs
+                    if let Some(inherent) = #pallet_names::create_inherent(self) {
+                        let inherent = <#unchecked_extrinsic as #scrate::sp_runtime::traits::Extrinsic>::new(
+                            inherent.into(),
+                            None,
+                        ).expect("Runtime UncheckedExtrinsic is not Opaque, so it has to return \
+                            `Some`; qed");
+
+                        inherents.push(inherent);
+                    }
+                )*
+
+                inherents
+            }
+
+            fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult {
+                use #scrate::inherent::{ProvideInherent, IsFatalError};
+                use #scrate::traits::{IsSubType, ExtrinsicCall};
+                use #scrate::sp_runtime::traits::Block as _;
+                use #scrate::__private::{sp_inherents::Error, log};
+
+                let mut result = #scrate::inherent::CheckInherentsResult::new();
+
+                // This handler assumes we abort on the first fatal error.
+ fn handle_put_error_result(res: Result<(), Error>) { + const LOG_TARGET: &str = "runtime::inherent"; + match res { + Ok(()) => (), + Err(Error::InherentDataExists(id)) => + log::debug!( + target: LOG_TARGET, + "Some error already reported for inherent {:?}, new non fatal \ + error is ignored", + id + ), + Err(Error::FatalErrorReported) => + log::error!( + target: LOG_TARGET, + "Fatal error already reported, unexpected considering there is \ + only one fatal error", + ), + Err(_) => + log::error!( + target: LOG_TARGET, + "Unexpected error from `put_error` operation", + ), + } + } + + for xt in block.extrinsics() { + // Inherents are before any other extrinsics. + // And signed extrinsics are not inherents. + if #scrate::sp_runtime::traits::Extrinsic::is_signed(xt).unwrap_or(false) { + break + } + + let mut is_inherent = false; + + #( + #pallet_attrs + { + let call = <#unchecked_extrinsic as ExtrinsicCall>::call(xt); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + if #pallet_names::is_inherent(call) { + is_inherent = true; + if let Err(e) = #pallet_names::check_inherent(call, self) { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + } + } + } + } + )* + + // Inherents are before any other extrinsics. + // No module marked it as inherent thus it is not. + if !is_inherent { + break + } + } + + #( + #pallet_attrs + match #pallet_names::is_inherent_required(self) { + Ok(Some(e)) => { + let found = block.extrinsics().iter().any(|xt| { + let is_signed = #scrate::sp_runtime::traits::Extrinsic::is_signed(xt) + .unwrap_or(false); + + if !is_signed { + let call = < + #unchecked_extrinsic as ExtrinsicCall + >::call(xt); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + #pallet_names::is_inherent(&call) + } else { + false + } + } else { + // Signed extrinsics are not inherents. 
+ false + } + }); + + if !found { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + } + }, + Ok(None) => (), + Err(e) => { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + }, + } + )* + + result + } + } + + impl #scrate::traits::IsInherent<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> for #runtime { + fn is_inherent(ext: &<#block as #scrate::sp_runtime::traits::Block>::Extrinsic) -> bool { + use #scrate::inherent::ProvideInherent; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + + if #scrate::sp_runtime::traits::Extrinsic::is_signed(ext).unwrap_or(false) { + // Signed extrinsics are never inherents. + return false + } + + #( + #pallet_attrs + { + let call = <#unchecked_extrinsic as ExtrinsicCall>::call(ext); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + if <#pallet_names as ProvideInherent>::is_inherent(&call) { + return true; + } + } + } + )* + false + } + } + + impl #scrate::traits::EnsureInherentsAreFirst<#block> for #runtime { + fn ensure_inherents_are_first(block: &#block) -> Result { + use #scrate::inherent::ProvideInherent; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + use #scrate::sp_runtime::traits::Block as _; + + let mut num_inherents = 0u32; + + for (i, xt) in block.extrinsics().iter().enumerate() { + if >::is_inherent(xt) { + if num_inherents != i as u32 { + return Err(i as u32); + } + + num_inherents += 1; // Safe since we are in an `enumerate` loop. + } + } + + Ok(num_inherents) + } + } + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs new file mode 100644 index 000000000..e67c0da00 --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs @@ -0,0 +1,64 @@ +// This file is part of Substrate. 
+ +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License + +use super::composite_helper; +use crate::construct_runtime::Pallet; +use proc_macro2::TokenStream; +use quote::quote; + +pub fn expand_outer_lock_id(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { + let mut conversion_fns = Vec::new(); + let mut lock_id_variants = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("LockId") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); + + conversion_fns.push(composite_helper::expand_conversion_fn( + "LockId", + path, + instance, + variant_name, + )); + + lock_id_variants.push(composite_helper::expand_variant( + "LockId", + index, + path, + instance, + variant_name, + )); + } + } + + quote! { + /// An identifier for each lock placed on funds. 
+ #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeLockId { + #( #lock_id_variants )* + } + + #( #conversion_fns )* + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/metadata.rs b/support/procedural-fork/src/construct_runtime/expand/metadata.rs new file mode 100644 index 000000000..0e76f9a92 --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/metadata.rs @@ -0,0 +1,258 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::{parse::PalletPath, Pallet}; +use proc_macro2::TokenStream; +use quote::quote; +use std::str::FromStr; +use syn::Ident; + +pub fn expand_runtime_metadata( + runtime: &Ident, + pallet_declarations: &[Pallet], + scrate: &TokenStream, + extrinsic: &TokenStream, + system_path: &PalletPath, +) -> TokenStream { + let pallets = pallet_declarations + .iter() + .filter_map(|pallet_declaration| { + pallet_declaration.find_part("Pallet").map(|_| { + let filtered_names: Vec<_> = pallet_declaration + .pallet_parts() + .iter() + .filter(|part| part.name() != "Pallet") + .map(|part| part.name()) + .collect(); + (pallet_declaration, filtered_names) + }) + }) + .map(|(decl, filtered_names)| { + let name = &decl.name; + let index = &decl.index; + let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); + let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); + let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); + let constants = expand_pallet_metadata_constants(runtime, decl); + let errors = expand_pallet_metadata_errors(runtime, decl); + let docs = expand_pallet_metadata_docs(runtime, decl); + let attr = decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + quote! { + #attr + #scrate::__private::metadata_ir::PalletMetadataIR { + name: stringify!(#name), + index: #index, + storage: #storage, + calls: #calls, + event: #event, + constants: #constants, + error: #errors, + docs: #docs, + } + } + }) + .collect::>(); + + quote! { + impl #runtime { + fn metadata_ir() -> #scrate::__private::metadata_ir::MetadataIR { + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. 
+ // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. + // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` + // is called. + // + // `Deref` needs a reference for resolving the function call. + let rt = #runtime; + + let ty = #scrate::__private::scale_info::meta_type::<#extrinsic>(); + let address_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureAddress + >(); + let call_ty = #scrate::__private::scale_info::meta_type::< + <#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::Call + >(); + let signature_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::Signature + >(); + let extra_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureExtra + >(); + + #scrate::__private::metadata_ir::MetadataIR { + pallets: #scrate::__private::sp_std::vec![ #(#pallets),* ], + extrinsic: 
#scrate::__private::metadata_ir::ExtrinsicMetadataIR { + ty, + version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + address_ty, + call_ty, + signature_ty, + extra_ty, + signed_extensions: < + < + #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata + >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension + >::metadata() + .into_iter() + .map(|meta| #scrate::__private::metadata_ir::SignedExtensionMetadataIR { + identifier: meta.identifier, + ty: meta.ty, + additional_signed: meta.additional_signed, + }) + .collect(), + }, + ty: #scrate::__private::scale_info::meta_type::<#runtime>(), + apis: (&rt).runtime_metadata(), + outer_enums: #scrate::__private::metadata_ir::OuterEnumsIR { + call_enum_ty: #scrate::__private::scale_info::meta_type::< + <#runtime as #system_path::Config>::RuntimeCall + >(), + event_enum_ty: #scrate::__private::scale_info::meta_type::(), + error_enum_ty: #scrate::__private::scale_info::meta_type::(), + } + } + } + + pub fn metadata() -> #scrate::__private::metadata::RuntimeMetadataPrefixed { + // Note: this always returns the V14 version. The runtime API function + // must be deprecated. + #scrate::__private::metadata_ir::into_v14(#runtime::metadata_ir()) + } + + pub fn metadata_at_version(version: u32) -> Option<#scrate::__private::OpaqueMetadata> { + #scrate::__private::metadata_ir::into_version(#runtime::metadata_ir(), version).map(|prefixed| { + #scrate::__private::OpaqueMetadata::new(prefixed.into()) + }) + } + + pub fn metadata_versions() -> #scrate::__private::sp_std::vec::Vec { + #scrate::__private::metadata_ir::supported_versions() + } + } + } +} + +fn expand_pallet_metadata_storage( + filtered_names: &[&'static str], + runtime: &Ident, + decl: &Pallet, +) -> TokenStream { + if filtered_names.contains(&"Storage") { + let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + + quote! 
{ + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) + } + } else { + quote!(None) + } +} + +fn expand_pallet_metadata_calls( + filtered_names: &[&'static str], + runtime: &Ident, + decl: &Pallet, +) -> TokenStream { + if filtered_names.contains(&"Call") { + let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + + quote! { + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) + } + } else { + quote!(None) + } +} + +fn expand_pallet_metadata_events( + filtered_names: &[&'static str], + runtime: &Ident, + scrate: &TokenStream, + decl: &Pallet, +) -> TokenStream { + if filtered_names.contains(&"Event") { + let path = &decl.path; + let part_is_generic = !decl + .find_part("Event") + .expect("Event part exists; qed") + .generics + .params + .is_empty(); + let pallet_event = match (decl.instance.as_ref(), part_is_generic) { + (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), + (Some(inst), false) => quote!(#path::Event::<#path::#inst>), + (None, true) => quote!(#path::Event::<#runtime>), + (None, false) => quote!(#path::Event), + }; + + quote! { + Some( + #scrate::__private::metadata_ir::PalletEventMetadataIR { + ty: #scrate::__private::scale_info::meta_type::<#pallet_event>() + } + ) + } + } else { + quote!(None) + } +} + +fn expand_pallet_metadata_constants(runtime: &Ident, decl: &Pallet) -> TokenStream { + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); + + quote! { + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() + } +} + +fn expand_pallet_metadata_errors(runtime: &Ident, decl: &Pallet) -> TokenStream { + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); + + quote! 
{ + #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() + } +} + +fn expand_pallet_metadata_docs(runtime: &Ident, decl: &Pallet) -> TokenStream { + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); + + quote! { + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_documentation_metadata() + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/mod.rs b/support/procedural-fork/src/construct_runtime/expand/mod.rs new file mode 100644 index 000000000..88f9a3c6e --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/mod.rs @@ -0,0 +1,43 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License + +mod call; +pub mod composite_helper; +mod config; +mod freeze_reason; +mod hold_reason; +mod inherent; +mod lock_id; +mod metadata; +mod origin; +mod outer_enums; +mod slash_reason; +mod task; +mod unsigned; + +pub use call::expand_outer_dispatch; +pub use config::expand_outer_config; +pub use freeze_reason::expand_outer_freeze_reason; +pub use hold_reason::expand_outer_hold_reason; +pub use inherent::expand_outer_inherent; +pub use lock_id::expand_outer_lock_id; +pub use metadata::expand_runtime_metadata; +pub use origin::expand_outer_origin; +pub use outer_enums::{expand_outer_enum, OuterEnumType}; +pub use slash_reason::expand_outer_slash_reason; +pub use task::expand_outer_task; +pub use unsigned::expand_outer_validate_unsigned; diff --git a/support/procedural-fork/src/construct_runtime/expand/origin.rs b/support/procedural-fork/src/construct_runtime/expand/origin.rs new file mode 100644 index 000000000..83049919d --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/origin.rs @@ -0,0 +1,455 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::{Pallet, SYSTEM_PALLET_NAME}; +use proc_macro2::TokenStream; +use quote::quote; +use std::str::FromStr; +use syn::{Generics, Ident}; + +pub fn expand_outer_origin( + runtime: &Ident, + system_pallet: &Pallet, + pallets: &[Pallet], + scrate: &TokenStream, +) -> syn::Result { + let mut caller_variants = TokenStream::new(); + let mut pallet_conversions = TokenStream::new(); + let mut query_origin_part_macros = Vec::new(); + + for pallet_decl in pallets.iter().filter(|pallet| pallet.name != SYSTEM_PALLET_NAME) { + if let Some(pallet_entry) = pallet_decl.find_part("Origin") { + let instance = pallet_decl.instance.as_ref(); + let index = pallet_decl.index; + let generics = &pallet_entry.generics; + let name = &pallet_decl.name; + let path = &pallet_decl.path; + + if instance.is_some() && generics.params.is_empty() { + let msg = format!( + "Instantiable pallet with no generic `Origin` cannot \ + be constructed: pallet `{}` must have generic `Origin`", + name + ); + return Err(syn::Error::new(name.span(), msg)) + } + + caller_variants.extend(expand_origin_caller_variant( + runtime, + pallet_decl, + index, + instance, + generics, + )); + pallet_conversions.extend(expand_origin_pallet_conversions( + scrate, + runtime, + pallet_decl, + instance, + generics, + )); + query_origin_part_macros.push(quote! 
{ + #path::__substrate_origin_check::is_origin_part_defined!(#name); + }); + } + } + + let system_path = &system_pallet.path; + + let system_index = system_pallet.index; + + let system_path_name = system_path.module_name(); + + let doc_string = get_intra_doc_string( + "Origin is always created with the base filter configured in", + &system_path_name, + ); + + let doc_string_none_origin = + get_intra_doc_string("Create with system none origin and", &system_path_name); + + let doc_string_root_origin = + get_intra_doc_string("Create with system root origin and", &system_path_name); + + let doc_string_signed_origin = + get_intra_doc_string("Create with system signed origin and", &system_path_name); + + let doc_string_runtime_origin = + get_intra_doc_string("Convert to runtime origin, using as filter:", &system_path_name); + + let doc_string_runtime_origin_with_caller = get_intra_doc_string( + "Convert to runtime origin with caller being system signed or none and use filter", + &system_path_name, + ); + + Ok(quote! { + #( #query_origin_part_macros )* + + /// The runtime origin type representing the origin of a call. 
+ /// + #[doc = #doc_string] + #[derive(Clone)] + pub struct RuntimeOrigin { + pub caller: OriginCaller, + filter: #scrate::__private::sp_std::rc::Rc::RuntimeCall) -> bool>>, + } + + #[cfg(not(feature = "std"))] + impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + fn fmt( + &self, + fmt: &mut #scrate::__private::sp_std::fmt::Formatter, + ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt.write_str("") + } + } + + #[cfg(feature = "std")] + impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + fn fmt( + &self, + fmt: &mut #scrate::__private::sp_std::fmt::Formatter, + ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt.debug_struct("Origin") + .field("caller", &self.caller) + .field("filter", &"[function ptr]") + .finish() + } + } + + impl #scrate::traits::OriginTrait for RuntimeOrigin { + type Call = <#runtime as #system_path::Config>::RuntimeCall; + type PalletsOrigin = OriginCaller; + type AccountId = <#runtime as #system_path::Config>::AccountId; + + fn add_filter(&mut self, filter: impl Fn(&Self::Call) -> bool + 'static) { + let f = self.filter.clone(); + + self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(move |call| { + f(call) && filter(call) + })); + } + + fn reset_filter(&mut self) { + let filter = < + <#runtime as #system_path::Config>::BaseCallFilter + as #scrate::traits::Contains<<#runtime as #system_path::Config>::RuntimeCall> + >::contains; + + self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(filter)); + } + + fn set_caller_from(&mut self, other: impl Into) { + self.caller = other.into().caller; + } + + fn filter_call(&self, call: &Self::Call) -> bool { + match self.caller { + // Root bypasses all filters + OriginCaller::system(#system_path::Origin::<#runtime>::Root) => true, + _ => (self.filter)(call), + } + } + + fn caller(&self) -> &Self::PalletsOrigin { + &self.caller + } + + fn into_caller(self) -> 
Self::PalletsOrigin { + self.caller + } + + fn try_with_caller( + mut self, + f: impl FnOnce(Self::PalletsOrigin) -> Result, + ) -> Result { + match f(self.caller) { + Ok(r) => Ok(r), + Err(caller) => { self.caller = caller; Err(self) } + } + } + + fn none() -> Self { + #system_path::RawOrigin::None.into() + } + + fn root() -> Self { + #system_path::RawOrigin::Root.into() + } + + fn signed(by: Self::AccountId) -> Self { + #system_path::RawOrigin::Signed(by).into() + } + } + + #[derive( + Clone, PartialEq, Eq, #scrate::__private::RuntimeDebug, #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, #scrate::__private::scale_info::TypeInfo, #scrate::__private::codec::MaxEncodedLen, + )] + #[allow(non_camel_case_types)] + pub enum OriginCaller { + #[codec(index = #system_index)] + system(#system_path::Origin<#runtime>), + #caller_variants + #[allow(dead_code)] + Void(#scrate::__private::Void) + } + + // For backwards compatibility and ease of accessing these functions. + #[allow(dead_code)] + impl RuntimeOrigin { + #[doc = #doc_string_none_origin] + pub fn none() -> Self { + ::none() + } + + #[doc = #doc_string_root_origin] + pub fn root() -> Self { + ::root() + } + + #[doc = #doc_string_signed_origin] + pub fn signed(by: <#runtime as #system_path::Config>::AccountId) -> Self { + ::signed(by) + } + } + + impl From<#system_path::Origin<#runtime>> for OriginCaller { + fn from(x: #system_path::Origin<#runtime>) -> Self { + OriginCaller::system(x) + } + } + + impl #scrate::traits::CallerTrait<<#runtime as #system_path::Config>::AccountId> for OriginCaller { + fn into_system(self) -> Option<#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { + match self { + OriginCaller::system(x) => Some(x), + _ => None, + } + } + fn as_system_ref(&self) -> Option<&#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { + match &self { + OriginCaller::system(o) => Some(o), + _ => None, + } + } + } + + impl TryFrom for 
#system_path::Origin<#runtime> { + type Error = OriginCaller; + fn try_from(x: OriginCaller) + -> #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, OriginCaller> + { + if let OriginCaller::system(l) = x { + Ok(l) + } else { + Err(x) + } + } + } + + impl From<#system_path::Origin<#runtime>> for RuntimeOrigin { + + #[doc = #doc_string_runtime_origin] + fn from(x: #system_path::Origin<#runtime>) -> Self { + let o: OriginCaller = x.into(); + o.into() + } + } + + impl From for RuntimeOrigin { + fn from(x: OriginCaller) -> Self { + let mut o = RuntimeOrigin { + caller: x, + filter: #scrate::__private::sp_std::rc::Rc::new(Box::new(|_| true)), + }; + + #scrate::traits::OriginTrait::reset_filter(&mut o); + + o + } + } + + impl From for #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { + /// NOTE: converting to pallet origin loses the origin filter information. + fn from(val: RuntimeOrigin) -> Self { + if let OriginCaller::system(l) = val.caller { + Ok(l) + } else { + Err(val) + } + } + } + impl From::AccountId>> for RuntimeOrigin { + #[doc = #doc_string_runtime_origin_with_caller] + fn from(x: Option<<#runtime as #system_path::Config>::AccountId>) -> Self { + <#system_path::Origin<#runtime>>::from(x).into() + } + } + + #pallet_conversions + }) +} + +fn expand_origin_caller_variant( + runtime: &Ident, + pallet: &Pallet, + index: u8, + instance: Option<&Ident>, + generics: &Generics, +) -> TokenStream { + let part_is_generic = !generics.params.is_empty(); + let variant_name = &pallet.name; + let path = &pallet.path; + let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + match instance { + Some(inst) if part_is_generic => quote! 
{ + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#runtime, #path::#inst>), + }, + Some(inst) => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#path::#inst>), + }, + None if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#runtime>), + }, + None => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin), + }, + } +} + +fn expand_origin_pallet_conversions( + scrate: &TokenStream, + runtime: &Ident, + pallet: &Pallet, + instance: Option<&Ident>, + generics: &Generics, +) -> TokenStream { + let path = &pallet.path; + let variant_name = &pallet.name; + + let part_is_generic = !generics.params.is_empty(); + let pallet_origin = match instance { + Some(inst) if part_is_generic => quote!(#path::Origin<#runtime, #path::#inst>), + Some(inst) => quote!(#path::Origin<#path::#inst>), + None if part_is_generic => quote!(#path::Origin<#runtime>), + None => quote!(#path::Origin), + }; + + let doc_string = get_intra_doc_string(" Convert to runtime origin using", &path.module_name()); + let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + quote! { + #attr + impl From<#pallet_origin> for OriginCaller { + fn from(x: #pallet_origin) -> Self { + OriginCaller::#variant_name(x) + } + } + + #attr + impl From<#pallet_origin> for RuntimeOrigin { + #[doc = #doc_string] + fn from(x: #pallet_origin) -> Self { + let x: OriginCaller = x.into(); + x.into() + } + } + + #attr + impl From for #scrate::__private::sp_std::result::Result<#pallet_origin, RuntimeOrigin> { + /// NOTE: converting to pallet origin loses the origin filter information. 
+ fn from(val: RuntimeOrigin) -> Self { + if let OriginCaller::#variant_name(l) = val.caller { + Ok(l) + } else { + Err(val) + } + } + } + + #attr + impl TryFrom for #pallet_origin { + type Error = OriginCaller; + fn try_from( + x: OriginCaller, + ) -> #scrate::__private::sp_std::result::Result<#pallet_origin, OriginCaller> { + if let OriginCaller::#variant_name(l) = x { + Ok(l) + } else { + Err(x) + } + } + } + + #attr + impl<'a> TryFrom<&'a OriginCaller> for &'a #pallet_origin { + type Error = (); + fn try_from( + x: &'a OriginCaller, + ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + if let OriginCaller::#variant_name(l) = x { + Ok(&l) + } else { + Err(()) + } + } + } + + #attr + impl<'a> TryFrom<&'a RuntimeOrigin> for &'a #pallet_origin { + type Error = (); + fn try_from( + x: &'a RuntimeOrigin, + ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + if let OriginCaller::#variant_name(l) = &x.caller { + Ok(&l) + } else { + Err(()) + } + } + } + } +} + +// Get the actual documentation using the doc information and system path name +fn get_intra_doc_string(doc_info: &str, system_path_name: &String) -> String { + format!(" {} [`{}::Config::BaseCallFilter`].", doc_info, system_path_name) +} diff --git a/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs b/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs new file mode 100644 index 000000000..80b242ccb --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs @@ -0,0 +1,279 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::Pallet; +use proc_macro2::{Span, TokenStream}; +use quote::{quote, ToTokens}; +use std::str::FromStr; +use syn::{Generics, Ident}; + +/// Represents the types supported for creating an outer enum. +#[derive(Clone, Copy, PartialEq)] +pub enum OuterEnumType { + /// Collects the Event enums from all pallets. + Event, + /// Collects the Error enums from all pallets. + Error, +} + +impl OuterEnumType { + /// The name of the structure this enum represents. + fn struct_name(&self) -> &str { + match self { + OuterEnumType::Event => "RuntimeEvent", + OuterEnumType::Error => "RuntimeError", + } + } + + /// The name of the variant (ie `Event` or `Error`). + fn variant_name(&self) -> &str { + match self { + OuterEnumType::Event => "Event", + OuterEnumType::Error => "Error", + } + } +} + +impl ToTokens for OuterEnumType { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + OuterEnumType::Event => quote!(Event).to_tokens(tokens), + OuterEnumType::Error => quote!(Error).to_tokens(tokens), + } + } +} + +/// Create an outer enum that encapsulates all pallets as variants. +/// +/// Each variant represents a pallet and contains the corresponding type declared with either: +/// - #[pallet::event] for the [`OuterEnumType::Event`] variant +/// - #[pallet::error] for the [`OuterEnumType::Error`] variant +/// +/// The name of the outer enum is prefixed with Runtime, resulting in names like RuntimeEvent +/// or RuntimeError. +/// +/// This structure facilitates the decoding process by leveraging the metadata. 
+/// +/// # Example +/// +/// The code generate looks like the following for [`OuterEnumType::Event`]. +/// +/// ```ignore +/// enum RuntimeEvent { +/// #[codec(index = 0)] +/// System(pallet_system::Event), +/// +/// #[codec(index = 5)] +/// Balances(pallet_system::Event), +/// } +/// ``` +/// +/// Notice that the pallet index is preserved using the `#[codec(index = ..)]` attribute. +pub fn expand_outer_enum( + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, + enum_ty: OuterEnumType, +) -> syn::Result { + // Stores all pallet variants. + let mut enum_variants = TokenStream::new(); + // Generates the enum conversion between the `Runtime` outer enum and the pallet's enum. + let mut enum_conversions = TokenStream::new(); + // Specific for events to query via `is_event_part_defined!`. + let mut query_enum_part_macros = Vec::new(); + + let enum_name_str = enum_ty.variant_name(); + let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); + + for pallet_decl in pallet_decls { + let Some(pallet_entry) = pallet_decl.find_part(enum_name_str) else { continue }; + + let path = &pallet_decl.path; + let pallet_name = &pallet_decl.name; + let index = pallet_decl.index; + let instance = pallet_decl.instance.as_ref(); + let generics = &pallet_entry.generics; + + if instance.is_some() && generics.params.is_empty() { + let msg = format!( + "Instantiable pallet with no generic `{}` cannot \ + be constructed: pallet `{}` must have generic `{}`", + enum_name_str, pallet_name, enum_name_str, + ); + return Err(syn::Error::new(pallet_name.span(), msg)) + } + + let part_is_generic = !generics.params.is_empty(); + let pallet_enum = match (instance, part_is_generic) { + (Some(inst), true) => quote!(#path::#enum_ty::<#runtime, #path::#inst>), + (Some(inst), false) => quote!(#path::#enum_ty::<#path::#inst>), + (None, true) => quote!(#path::#enum_ty::<#runtime>), + (None, false) => quote!(#path::#enum_ty), + }; + + enum_variants.extend(expand_enum_variant( 
+ runtime, + pallet_decl, + index, + instance, + generics, + enum_ty, + )); + enum_conversions.extend(expand_enum_conversion( + pallet_decl, + &pallet_enum, + &enum_name_ident, + )); + + if enum_ty == OuterEnumType::Event { + query_enum_part_macros.push(quote! { + #path::__substrate_event_check::is_event_part_defined!(#pallet_name); + }); + } + } + + // Derives specific for the event. + let event_custom_derives = + if enum_ty == OuterEnumType::Event { quote!(Clone, PartialEq, Eq,) } else { quote!() }; + + // Implementation specific for errors. + let error_custom_impl = generate_error_impl(scrate, enum_ty); + + Ok(quote! { + #( #query_enum_part_macros )* + + #[derive( + #event_custom_derives + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + #[allow(non_camel_case_types)] + pub enum #enum_name_ident { + #enum_variants + } + + #enum_conversions + + #error_custom_impl + }) +} + +fn expand_enum_variant( + runtime: &Ident, + pallet: &Pallet, + index: u8, + instance: Option<&Ident>, + generics: &Generics, + enum_ty: OuterEnumType, +) -> TokenStream { + let path = &pallet.path; + let variant_name = &pallet.name; + let part_is_generic = !generics.params.is_empty(); + let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + match instance { + Some(inst) if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#runtime, #path::#inst>), + }, + Some(inst) => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#path::#inst>), + }, + None if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#runtime>), + }, + None => quote! 
{ + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty), + }, + } +} + +fn expand_enum_conversion( + pallet: &Pallet, + pallet_enum: &TokenStream, + enum_name_ident: &Ident, +) -> TokenStream { + let variant_name = &pallet.name; + let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + quote! { + #attr + impl From<#pallet_enum> for #enum_name_ident { + fn from(x: #pallet_enum) -> Self { + #enum_name_ident + ::#variant_name(x) + } + } + #attr + impl TryInto<#pallet_enum> for #enum_name_ident { + type Error = (); + + fn try_into(self) -> ::core::result::Result<#pallet_enum, Self::Error> { + match self { + Self::#variant_name(evt) => Ok(evt), + _ => Err(()), + } + } + } + } +} + +fn generate_error_impl(scrate: &TokenStream, enum_ty: OuterEnumType) -> TokenStream { + // Implementation is specific to `Error`s. + if enum_ty == OuterEnumType::Event { + return quote! {} + } + + let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); + + quote! { + impl #enum_name_ident { + /// Optionally convert the `DispatchError` into the `RuntimeError`. + /// + /// Returns `Some` if the error matches the `DispatchError::Module` variant, otherwise `None`. 
+ pub fn from_dispatch_error(err: #scrate::sp_runtime::DispatchError) -> Option { + let #scrate::sp_runtime::DispatchError::Module(module_error) = err else { return None }; + + let bytes = #scrate::__private::codec::Encode::encode(&module_error); + #scrate::__private::codec::Decode::decode(&mut &bytes[..]).ok() + } + } + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs new file mode 100644 index 000000000..892b842b1 --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs @@ -0,0 +1,64 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License + +use super::composite_helper; +use crate::construct_runtime::Pallet; +use proc_macro2::TokenStream; +use quote::quote; + +pub fn expand_outer_slash_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { + let mut conversion_fns = Vec::new(); + let mut slash_reason_variants = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("SlashReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); + + conversion_fns.push(composite_helper::expand_conversion_fn( + "SlashReason", + path, + instance, + variant_name, + )); + + slash_reason_variants.push(composite_helper::expand_variant( + "SlashReason", + index, + path, + instance, + variant_name, + )); + } + } + + quote! { + /// A reason for slashing funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeSlashReason { + #( #slash_reason_variants )* + } + + #( #conversion_fns )* + } +} diff --git a/support/procedural-fork/src/construct_runtime/expand/task.rs b/support/procedural-fork/src/construct_runtime/expand/task.rs new file mode 100644 index 000000000..6531c0e9e --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/task.rs @@ -0,0 +1,131 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::Pallet; +use proc_macro2::{Ident, TokenStream as TokenStream2}; +use quote::quote; + +/// Expands aggregate `RuntimeTask` enum. +pub fn expand_outer_task( + runtime_name: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, +) -> TokenStream2 { + let mut from_impls = Vec::new(); + let mut task_variants = Vec::new(); + let mut variant_names = Vec::new(); + let mut task_paths = Vec::new(); + for decl in pallet_decls { + if decl.find_part("Task").is_none() { + continue + } + + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + + from_impls.push(quote! { + impl From<#path::Task<#runtime_name>> for RuntimeTask { + fn from(hr: #path::Task<#runtime_name>) -> Self { + RuntimeTask::#variant_name(hr) + } + } + + impl TryInto<#path::Task<#runtime_name>> for RuntimeTask { + type Error = (); + + fn try_into(self) -> Result<#path::Task<#runtime_name>, Self::Error> { + match self { + RuntimeTask::#variant_name(hr) => Ok(hr), + _ => Err(()), + } + } + } + }); + + task_variants.push(quote! { + #[codec(index = #index)] + #variant_name(#path::Task<#runtime_name>), + }); + + variant_names.push(quote!(#variant_name)); + + task_paths.push(quote!(#path::Task)); + } + + let prelude = quote!(#scrate::traits::tasks::__private); + + const INCOMPLETE_MATCH_QED: &'static str = + "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; + + let output = quote! { + /// An aggregation of all `Task` enums across all pallets included in the current runtime. 
+ #[derive( + Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeTask { + #( #task_variants )* + } + + #[automatically_derived] + impl #scrate::traits::Task for RuntimeTask { + type Enumeration = #prelude::IntoIter; + + fn is_valid(&self) -> bool { + match self { + #(RuntimeTask::#variant_names(val) => val.is_valid(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn run(&self) -> Result<(), #scrate::traits::tasks::__private::DispatchError> { + match self { + #(RuntimeTask::#variant_names(val) => val.run(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn weight(&self) -> #scrate::pallet_prelude::Weight { + match self { + #(RuntimeTask::#variant_names(val) => val.weight(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn task_index(&self) -> u32 { + match self { + #(RuntimeTask::#variant_names(val) => val.task_index(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn iter() -> Self::Enumeration { + let mut all_tasks = Vec::new(); + #(all_tasks.extend(#task_paths::iter().map(RuntimeTask::from).collect::>());)* + all_tasks.into_iter() + } + } + + #( #from_impls )* + }; + + output +} diff --git a/support/procedural-fork/src/construct_runtime/expand/unsigned.rs b/support/procedural-fork/src/construct_runtime/expand/unsigned.rs new file mode 100644 index 000000000..33aadba0d --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/expand/unsigned.rs @@ -0,0 +1,89 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::Pallet; +use proc_macro2::TokenStream; +use quote::quote; +use std::str::FromStr; +use syn::Ident; + +pub fn expand_outer_validate_unsigned( + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, +) -> TokenStream { + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let mut query_validate_unsigned_part_macros = Vec::new(); + + for pallet_decl in pallet_decls { + if pallet_decl.exists_part("ValidateUnsigned") { + let name = &pallet_decl.name; + let path = &pallet_decl.path; + let attr = pallet_decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + pallet_names.push(name); + pallet_attrs.push(attr); + query_validate_unsigned_part_macros.push(quote! { + #path::__substrate_validate_unsigned_check::is_validate_unsigned_part_defined!(#name); + }); + } + } + + quote! { + #( #query_validate_unsigned_part_macros )* + + impl #scrate::unsigned::ValidateUnsigned for #runtime { + type Call = RuntimeCall; + + fn pre_dispatch(call: &Self::Call) -> Result<(), #scrate::unsigned::TransactionValidityError> { + #[allow(unreachable_patterns)] + match call { + #( + #pallet_attrs + RuntimeCall::#pallet_names(inner_call) => #pallet_names::pre_dispatch(inner_call), + )* + // pre-dispatch should not stop inherent extrinsics, validation should prevent + // including arbitrary (non-inherent) extrinsics to blocks. 
+ _ => Ok(()), + } + } + + fn validate_unsigned( + #[allow(unused_variables)] + source: #scrate::unsigned::TransactionSource, + call: &Self::Call, + ) -> #scrate::unsigned::TransactionValidity { + #[allow(unreachable_patterns)] + match call { + #( + #pallet_attrs + RuntimeCall::#pallet_names(inner_call) => #pallet_names::validate_unsigned(source, inner_call), + )* + _ => #scrate::unsigned::UnknownTransaction::NoUnsignedValidator.into(), + } + } + } + } +} diff --git a/support/procedural-fork/src/construct_runtime/mod.rs b/support/procedural-fork/src/construct_runtime/mod.rs new file mode 100644 index 000000000..b083abbb2 --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/mod.rs @@ -0,0 +1,809 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Implementation of `construct_runtime`. +//! +//! `construct_runtime` implementation is recursive and can generate code which will call itself in +//! order to get all the pallet parts for each pallet. +//! +//! Pallets can define their parts: +//! - Implicitly: `System: frame_system` +//! - Explicitly: `System: frame_system::{Pallet, Call}` +//! +//! The `construct_runtime` transitions from the implicit definition to the explicit one. +//! From the explicit state, Substrate expands the pallets with additional information +//! that is to be included in the runtime metadata. 
This expansion makes visible some extra +//! parts of the pallets, mainly the `Error` if defined. The expanded state looks like +//! `System: frame_system expanded::{Error} ::{Pallet, Call}` and concatenates the extra expanded +//! parts with the user-provided parts. For example, the `Pallet`, `Call` and `Error` parts are +//! collected. +//! +//! Pallets must provide the `tt_extra_parts` and `tt_default_parts` macros for these transitions. +//! These are automatically implemented by the `#[pallet::pallet]` macro. +//! +//! This macro also generates the following enums for ease of decoding: +//! - `enum RuntimeCall`: This type contains the information needed to decode extrinsics. +//! - `enum RuntimeEvent`: This type contains the information needed to decode events. +//! - `enum RuntimeError`: While this cannot be used directly to decode `sp_runtime::DispatchError` +//! from the chain, it contains the information needed to decode the +//! `sp_runtime::DispatchError::Module`. +//! +//! # State Transitions +//! +//! ```ignore +//! +----------+ +//! | Implicit | -----------+ +//! +----------+ | +//! | | +//! v v +//! +----------+ +------------------+ +//! | Explicit | --> | ExplicitExpanded | +//! +----------+ +------------------+ +//! ``` +//! +//! When all pallet parts are implicit, then the `construct_runtime!` macro expands to its final +//! state, the `ExplicitExpanded`. Otherwise, all implicit parts are converted to an explicit +//! expanded part allow the `construct_runtime!` to expand any remaining explicit parts to an +//! explicit expanded part. +//! +//! # Implicit to Explicit +//! +//! The `construct_runtime` macro transforms the implicit declaration of each pallet +//! `System: frame_system` to an explicit one `System: frame_system::{Pallet, Call}` using the +//! `tt_default_parts` macro. +//! +//! The `tt_default_parts` macro exposes a comma separated list of pallet parts. For example, the +//! 
`Event` part is exposed only if the pallet implements an event via `#[pallet::event]` macro.
+//! The tokens generated by this macro are ` expanded :: { Pallet, Call }` for our example.
+//!
+//! The `match_and_insert` macro takes in 3 arguments:
+//! - target: This is the `TokenStream` that contains the `construct_runtime!` macro.
+//! - pattern: The pattern to match against in the target stream.
+//! - tokens: The tokens to be added after the pattern match.
+//!
+//! The `construct_runtime` macro uses the `tt_call` to get the default pallet parts via
+//! the `tt_default_parts` macro defined by each pallet. The pallet parts are then returned as
+//! input to the `match_and_insert` macro.
+//! The `match_and_insert` macro will then modify the `construct_runtime!` to expand the implicit
+//! definition to the explicit one.
+//!
+//! For example,
+//!
+//! ```ignore
+//! construct_runtime!(
+//! //...
+//! {
+//! System: frame_system = 0, // Implicit definition of parts
+//! Balances: pallet_balances = 1, // Implicit definition of parts
+//! }
+//! );
+//! ```
+//! This call has some implicit pallet parts, thus it will expand to:
+//! ```ignore
+//! frame_support::__private::tt_call! {
+//! macro = [{ pallet_balances::tt_default_parts }]
+//! ~~> frame_support::match_and_insert! {
+//! target = [{
+//! frame_support::__private::tt_call! {
+//! macro = [{ frame_system::tt_default_parts }]
+//! ~~> frame_support::match_and_insert! {
+//! target = [{
+//! construct_runtime!(
+//! //...
+//! {
+//! System: frame_system = 0,
+//! Balances: pallet_balances = 1,
+//! }
+//! );
+//! }]
+//! pattern = [{ System: frame_system }]
+//! }
+//! }
+//! }]
+//! pattern = [{ Balances: pallet_balances }]
+//! }
+//! }
+//! ```
+//! `tt_default_parts` must be defined. It returns the pallet parts inside some tokens, and
+//! then `tt_call` will pipe the returned pallet parts into the input of `match_and_insert`.
+//!
Thus `match_and_insert` will initially receive the following inputs: +//! ```ignore +//! frame_support::match_and_insert! { +//! target = [{ +//! frame_support::match_and_insert! { +//! target = [{ +//! construct_runtime!( +//! //... +//! { +//! System: frame_system = 0, +//! Balances: pallet_balances = 1, +//! } +//! ) +//! }] +//! pattern = [{ System: frame_system }] +//! tokens = [{ ::{Pallet, Call} }] +//! } +//! }] +//! pattern = [{ Balances: pallet_balances }] +//! tokens = [{ ::{Pallet, Call} }] +//! } +//! ``` +//! After dealing with `pallet_balances`, the inner `match_and_insert` will expand to: +//! ```ignore +//! frame_support::match_and_insert! { +//! target = [{ +//! construct_runtime!( +//! //... +//! { +//! System: frame_system = 0, // Implicit definition of parts +//! Balances: pallet_balances::{Pallet, Call} = 1, // Explicit definition of parts +//! } +//! ) +//! }] +//! pattern = [{ System: frame_system }] +//! tokens = [{ ::{Pallet, Call} }] +//! } +//! ``` +//! +//! Which will then finally expand to the following: +//! ```ignore +//! construct_runtime!( +//! //... +//! { +//! System: frame_system::{Pallet, Call}, +//! Balances: pallet_balances::{Pallet, Call}, +//! } +//! ) +//! ``` +//! +//! This call has no implicit pallet parts, thus it will expand to the runtime construction: +//! ```ignore +//! pub enum Runtime { ... } +//! pub struct Call { ... } +//! impl Call ... +//! pub enum Origin { ... } +//! ... +//! ``` +//! +//! Visualizing the entire flow of `construct_runtime!`, it would look like the following: +//! +//! ```ignore +//! +--------------------+ +---------------------+ +-------------------+ +//! | | | (defined in pallet) | | | +//! | construct_runtime! | --> | tt_default_parts! | --> | match_and_insert! | +//! | w/ no pallet parts | | | | | +//! +--------------------+ +---------------------+ +-------------------+ +//! +//! +--------------------+ +//! | | +//! --> | construct_runtime! | +//! | w/ pallet parts | +//! 
+--------------------+ +//! ``` +//! +//! # Explicit to Explicit Expanded +//! +//! Users normally do not care about this transition. +//! +//! Similarly to the previous transition, the macro expansion transforms `System: +//! frame_system::{Pallet, Call}` into `System: frame_system expanded::{Error} ::{Pallet, Call}`. +//! The `expanded` section adds extra parts that the Substrate would like to expose for each pallet +//! by default. This is done to expose the appropriate types for metadata construction. +//! +//! This time, instead of calling `tt_default_parts` we are using the `tt_extra_parts` macro. +//! This macro returns the ` :: expanded { Error }` list of additional parts we would like to +//! expose. + +pub(crate) mod expand; +pub(crate) mod parse; + +use crate::pallet::parse::helper::two128_str; +use cfg_expr::Predicate; +use frame_support_procedural_tools::{ + generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, +}; +use itertools::Itertools; +use parse::{ExplicitRuntimeDeclaration, ImplicitRuntimeDeclaration, Pallet, RuntimeDeclaration}; +use proc_macro::TokenStream; +use proc_macro2::TokenStream as TokenStream2; +use quote::quote; +use std::{collections::HashSet, str::FromStr}; +use syn::{spanned::Spanned, Ident, Result}; + +/// The fixed name of the system pallet. +const SYSTEM_PALLET_NAME: &str = "System"; + +/// Implementation of `construct_runtime` macro. Either expand to some code which will call +/// `construct_runtime` again, or expand to the final runtime definition. 
+pub fn construct_runtime(input: TokenStream) -> TokenStream { + let input_copy = input.clone(); + let definition = syn::parse_macro_input!(input as RuntimeDeclaration); + + let (check_pallet_number_res, res) = match definition { + RuntimeDeclaration::Implicit(implicit_def) => ( + check_pallet_number(input_copy.clone().into(), implicit_def.pallets.len()), + construct_runtime_implicit_to_explicit(input_copy.into(), implicit_def), + ), + RuntimeDeclaration::Explicit(explicit_decl) => ( + check_pallet_number(input_copy.clone().into(), explicit_decl.pallets.len()), + construct_runtime_explicit_to_explicit_expanded(input_copy.into(), explicit_decl), + ), + RuntimeDeclaration::ExplicitExpanded(explicit_decl) => ( + check_pallet_number(input_copy.into(), explicit_decl.pallets.len()), + construct_runtime_final_expansion(explicit_decl), + ), + }; + + let res = res.unwrap_or_else(|e| e.to_compile_error()); + + // We want to provide better error messages to the user and thus, handle the error here + // separately. If there is an error, we print the error and still generate all of the code to + // get in overall less errors for the user. + let res = if let Err(error) = check_pallet_number_res { + let error = error.to_compile_error(); + + quote! { + #error + + #res + } + } else { + res + }; + + let res = expander::Expander::new("construct_runtime") + .dry(std::env::var("EXPAND_MACROS").is_err()) + .verbose(true) + .write_to_out_dir(res) + .expect("Does not fail because of IO in OUT_DIR; qed"); + + res.into() +} + +/// All pallets that have implicit pallet parts (ie `System: frame_system`) are +/// expanded with the default parts defined by the pallet's `tt_default_parts` macro. +/// +/// This function transforms the [`RuntimeDeclaration::Implicit`] into +/// [`RuntimeDeclaration::Explicit`] that is not yet fully expanded. +/// +/// For more details, please refer to the root documentation. 
+fn construct_runtime_implicit_to_explicit( + input: TokenStream2, + definition: ImplicitRuntimeDeclaration, +) -> Result { + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let mut expansion = quote::quote!( + #frame_support::construct_runtime! { #input } + ); + for pallet in definition.pallets.iter().filter(|pallet| pallet.pallet_parts.is_none()) { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(::<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_default_parts }] + your_tt_return = [{ #frame_support::__private::tt_return }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name: #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) +} + +/// All pallets that have +/// (I): explicit pallet parts (ie `System: frame_system::{Pallet, Call}`) and +/// (II): are not fully expanded (ie do not include the `Error` expansion part) +/// are fully expanded by including the parts from the pallet's `tt_extra_parts` macro. +/// +/// This function transforms the [`RuntimeDeclaration::Explicit`] that is not yet fully expanded +/// into [`RuntimeDeclaration::ExplicitExpanded`] fully expanded. +/// +/// For more details, please refer to the root documentation. +fn construct_runtime_explicit_to_explicit_expanded( + input: TokenStream2, + definition: ExplicitRuntimeDeclaration, +) -> Result { + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let mut expansion = quote::quote!( + #frame_support::construct_runtime! 
{ #input } + ); + for pallet in definition.pallets.iter().filter(|pallet| !pallet.is_expanded) { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(::<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_extra_parts }] + your_tt_return = [{ #frame_support::__private::tt_return }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name: #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) +} + +/// All pallets have explicit definition of parts, this will expand to the runtime declaration. +fn construct_runtime_final_expansion( + definition: ExplicitRuntimeDeclaration, +) -> Result { + let ExplicitRuntimeDeclaration { name, pallets, pallets_token, where_section } = definition; + + let system_pallet = + pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { + syn::Error::new( + pallets_token.span.join(), + "`System` pallet declaration is missing. 
\ + Please add this line: `System: frame_system,`", + ) + })?; + if !system_pallet.cfg_pattern.is_empty() { + return Err(syn::Error::new( + system_pallet.name.span(), + "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", + )) + } + + let features = pallets + .iter() + .filter_map(|decl| { + (!decl.cfg_pattern.is_empty()).then(|| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) + }) + .flatten() + .collect::>(); + + let hidden_crate_name = "construct_runtime"; + let scrate = generate_crate_access(hidden_crate_name, "frame-support"); + let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let block = quote!(<#name as #frame_system::Config>::Block); + let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); + + let outer_event = + expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Event)?; + let outer_error = + expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Error)?; + + let outer_origin = expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?; + let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); + let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); + + let dispatch = expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate); + let tasks = expand::expand_outer_task(&name, &pallets, &scrate); + let metadata = expand::expand_runtime_metadata( + &name, + &pallets, + &scrate, + &unchecked_extrinsic, + &system_pallet.path, + ); + let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); + let inherent = + expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); + let validate_unsigned = 
expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); + let freeze_reason = expand::expand_outer_freeze_reason(&pallets, &scrate); + let hold_reason = expand::expand_outer_hold_reason(&pallets, &scrate); + let lock_id = expand::expand_outer_lock_id(&pallets, &scrate); + let slash_reason = expand::expand_outer_slash_reason(&pallets, &scrate); + let integrity_test = decl_integrity_test(&scrate); + let static_assertions = decl_static_assertions(&name, &pallets, &scrate); + + let warning = where_section.map_or(None, |where_section| { + Some( + proc_macro_warning::Warning::new_deprecated("WhereSection") + .old("use a `where` clause in `construct_runtime`") + .new( + "use `frame_system::Config` to set the `Block` type and delete this clause. + It is planned to be removed in December 2023", + ) + .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) + .span(where_section.span) + .build_or_panic(), + ) + }); + + let res = quote!( + #warning + + #scrate_decl + + // Prevent UncheckedExtrinsic to print unused warning. + const _: () = { + #[allow(unused)] + type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + }; + + #[derive( + Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + pub struct #name; + impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { + type RuntimeBlock = #block; + } + + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. + // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. 
+ // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` + // is called. + + #[doc(hidden)] + trait InternalConstructRuntime { + #[inline(always)] + fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + Default::default() + } + } + #[doc(hidden)] + impl InternalConstructRuntime for &#name {} + + #outer_event + + #outer_error + + #outer_origin + + #all_pallets + + #pallet_to_index + + #dispatch + + #tasks + + #metadata + + #outer_config + + #inherent + + #validate_unsigned + + #freeze_reason + + #hold_reason + + #lock_id + + #slash_reason + + #integrity_test + + #static_assertions + ); + + Ok(res) +} + +pub(crate) fn decl_all_pallets<'a>( + runtime: &'a Ident, + pallet_declarations: impl Iterator, + features: &HashSet<&str>, +) -> TokenStream2 { + let mut types = TokenStream2::new(); + + // Every feature set to the pallet names that should be included by this feature set. 
+ let mut features_to_names = features + .iter() + .map(|f| *f) + .powerset() + .map(|feat| (HashSet::from_iter(feat), Vec::new())) + .collect::, Vec<_>)>>(); + + for pallet_declaration in pallet_declarations { + let type_name = &pallet_declaration.name; + let pallet = &pallet_declaration.path; + let mut generics = vec![quote!(#runtime)]; + generics.extend(pallet_declaration.instance.iter().map(|name| quote!(#pallet::#name))); + let mut attrs = Vec::new(); + for cfg in &pallet_declaration.cfg_pattern { + let feat = format!("#[cfg({})]\n", cfg.original()); + attrs.extend(TokenStream2::from_str(&feat).expect("was parsed successfully; qed")); + } + let type_decl = quote!( + #(#attrs)* + pub type #type_name = #pallet::Pallet <#(#generics),*>; + ); + types.extend(type_decl); + + if pallet_declaration.cfg_pattern.is_empty() { + for (_, names) in features_to_names.iter_mut() { + names.push(&pallet_declaration.name); + } + } else { + for (feature_set, names) in &mut features_to_names { + // Rust tidbit: if we have multiple `#[cfg]` feature on the same item, then the + // predicates listed in all `#[cfg]` attributes are effectively joined by `and()`, + // meaning that all of them must match in order to activate the item + let is_feature_active = pallet_declaration.cfg_pattern.iter().all(|expr| { + expr.eval(|pred| match pred { + Predicate::Feature(f) => feature_set.contains(f), + Predicate::Test => feature_set.contains(&"test"), + _ => false, + }) + }); + + if is_feature_active { + names.push(&pallet_declaration.name); + } + } + } + } + + // All possible features. This will be used below for the empty feature set. + let mut all_features = features_to_names + .iter() + .flat_map(|f| f.0.iter().cloned()) + .collect::>(); + let attribute_to_names = features_to_names + .into_iter() + .map(|(mut features, names)| { + // If this is the empty feature set, it needs to be changed to negate all available + // features. 
So, we ensure that there is some type declared when all features are not + // enabled. + if features.is_empty() { + let test_cfg = all_features.remove("test").then_some(quote!(test)).into_iter(); + let features = all_features.iter(); + let attr = quote!(#[cfg(all( #(not(#test_cfg)),* #(not(feature = #features)),* ))]); + + (attr, names) + } else { + let test_cfg = features.remove("test").then_some(quote!(test)).into_iter(); + let disabled_features = all_features.difference(&features); + let features = features.iter(); + let attr = quote!(#[cfg(all( #(#test_cfg,)* #(feature = #features,)* #(not(feature = #disabled_features)),* ))]); + + (attr, names) + } + }) + .collect::>(); + + let all_pallets_without_system = attribute_to_names.iter().map(|(attr, names)| { + let names = names.iter().filter(|n| **n != SYSTEM_PALLET_NAME); + quote! { + #attr + /// All pallets included in the runtime as a nested tuple of types. + /// Excludes the System pallet. + pub type AllPalletsWithoutSystem = ( #(#names,)* ); + } + }); + + let all_pallets_with_system = attribute_to_names.iter().map(|(attr, names)| { + quote! { + #attr + /// All pallets included in the runtime as a nested tuple of types. 
+ pub type AllPalletsWithSystem = ( #(#names,)* ); + } + }); + + quote!( + #types + + #( #all_pallets_with_system )* + + #( #all_pallets_without_system )* + ) +} + +pub(crate) fn decl_pallet_runtime_setup( + runtime: &Ident, + pallet_declarations: &[Pallet], + scrate: &TokenStream2, +) -> TokenStream2 { + let names = pallet_declarations.iter().map(|d| &d.name).collect::>(); + let name_strings = pallet_declarations.iter().map(|d| d.name.to_string()); + let name_hashes = pallet_declarations.iter().map(|d| two128_str(&d.name.to_string())); + let module_names = pallet_declarations.iter().map(|d| d.path.module_name()); + let indices = pallet_declarations.iter().map(|pallet| pallet.index as usize); + let pallet_structs = pallet_declarations + .iter() + .map(|pallet| { + let path = &pallet.path; + match pallet.instance.as_ref() { + Some(inst) => quote!(#path::Pallet<#runtime, #path::#inst>), + None => quote!(#path::Pallet<#runtime>), + } + }) + .collect::>(); + let pallet_attrs = pallet_declarations + .iter() + .map(|pallet| { + pallet.cfg_pattern.iter().fold(TokenStream2::new(), |acc, pattern| { + let attr = TokenStream2::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }) + }) + .collect::>(); + + quote!( + /// Provides an implementation of `PalletInfo` to provide information + /// about the pallet setup in the runtime. + pub struct PalletInfo; + + impl #scrate::traits::PalletInfo for PalletInfo { + + fn index() -> Option { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#indices) + } + )* + + None + } + + fn name() -> Option<&'static str> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#name_strings) + } + )* + + None + } + + fn name_hash() -> Option<[u8; 16]> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#name_hashes) + } + )* + + None + } + + fn module_name() -> Option<&'static str> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#module_names) + } + )* + + None + } + + fn crate_version() -> Option<#scrate::traits::CrateVersion> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some( + <#pallet_structs as #scrate::traits::PalletInfoAccess>::crate_version() + ) + } + )* + + None + } + } + ) +} + +pub(crate) fn decl_integrity_test(scrate: &TokenStream2) -> TokenStream2 { + quote!( + #[cfg(test)] + mod __construct_runtime_integrity_test { + use super::*; + + #[test] + pub fn runtime_integrity_tests() { + #scrate::__private::sp_tracing::try_init_simple(); + ::integrity_test(); + } + } + ) +} + +pub(crate) fn decl_static_assertions( + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, +) -> TokenStream2 { + let error_encoded_size_check = pallet_decls.iter().map(|decl| { + let path = &decl.path; + let assert_message = format!( + "The maximum encoded size of the error type in the `{}` pallet exceeds \ + `MAX_MODULE_ERROR_ENCODED_SIZE`", + decl.name, + ); + + quote! { + #scrate::__private::tt_call! { + macro = [{ #path::tt_error_token }] + your_tt_return = [{ #scrate::__private::tt_return }] + ~~> #scrate::assert_error_encoded_size! { + path = [{ #path }] + runtime = [{ #runtime }] + assert_message = [{ #assert_message }] + } + } + } + }); + + quote! { + #(#error_encoded_size_check)* + } +} + +pub(crate) fn check_pallet_number(input: TokenStream2, pallet_num: usize) -> Result<()> { + let max_pallet_num = { + if cfg!(feature = "tuples-96") { + 96 + } else if cfg!(feature = "tuples-128") { + 128 + } else { + 64 + } + }; + + if pallet_num > max_pallet_num { + let no_feature = max_pallet_num == 128; + return Err(syn::Error::new( + input.span(), + format!( + "{} To increase this limit, enable the tuples-{} feature of [frame_support]. {}", + "The number of pallets exceeds the maximum number of tuple elements.", + max_pallet_num + 32, + if no_feature { + "If the feature does not exist - it needs to be implemented." 
+ } else { + "" + }, + ), + )) + } + + Ok(()) +} diff --git a/support/procedural-fork/src/construct_runtime/parse.rs b/support/procedural-fork/src/construct_runtime/parse.rs new file mode 100644 index 000000000..31866c787 --- /dev/null +++ b/support/procedural-fork/src/construct_runtime/parse.rs @@ -0,0 +1,786 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use frame_support_procedural_tools::syn_ext as ext; +use proc_macro2::{Span, TokenStream}; +use quote::ToTokens; +use std::collections::{HashMap, HashSet}; +use syn::{ + ext::IdentExt, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned, + token, Attribute, Error, Ident, Path, Result, Token, +}; + +mod keyword { + syn::custom_keyword!(Block); + syn::custom_keyword!(NodeBlock); + syn::custom_keyword!(UncheckedExtrinsic); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(Call); + syn::custom_keyword!(Storage); + syn::custom_keyword!(Event); + syn::custom_keyword!(Error); + syn::custom_keyword!(Config); + syn::custom_keyword!(Origin); + syn::custom_keyword!(Inherent); + syn::custom_keyword!(ValidateUnsigned); + syn::custom_keyword!(FreezeReason); + syn::custom_keyword!(HoldReason); + syn::custom_keyword!(Task); + syn::custom_keyword!(LockId); + syn::custom_keyword!(SlashReason); + syn::custom_keyword!(exclude_parts); + syn::custom_keyword!(use_parts); + syn::custom_keyword!(expanded); +} + +/// Declaration of a runtime. +/// +/// Pallet declare their part either explicitly or implicitly (using no part declaration) +/// If all pallet have explicit parts then the runtime declaration is explicit, otherwise it is +/// implicit. +#[derive(Debug)] +pub enum RuntimeDeclaration { + Implicit(ImplicitRuntimeDeclaration), + Explicit(ExplicitRuntimeDeclaration), + ExplicitExpanded(ExplicitRuntimeDeclaration), +} + +/// Declaration of a runtime with some pallet with implicit declaration of parts. +#[derive(Debug)] +pub struct ImplicitRuntimeDeclaration { + pub name: Ident, + pub where_section: Option, + pub pallets: Vec, +} + +/// Declaration of a runtime with all pallet having explicit declaration of parts. 
+#[derive(Debug)] +pub struct ExplicitRuntimeDeclaration { + pub name: Ident, + pub where_section: Option, + pub pallets: Vec, + pub pallets_token: token::Brace, +} + +impl Parse for RuntimeDeclaration { + fn parse(input: ParseStream) -> Result { + input.parse::()?; + + // Support either `enum` or `struct`. + if input.peek(Token![struct]) { + input.parse::()?; + } else { + input.parse::()?; + } + + let name = input.parse::()?; + let where_section = if input.peek(token::Where) { Some(input.parse()?) } else { None }; + let pallets = + input.parse::>>()?; + let pallets_token = pallets.token; + + match convert_pallets(pallets.content.inner.into_iter().collect())? { + PalletsConversion::Implicit(pallets) => + Ok(RuntimeDeclaration::Implicit(ImplicitRuntimeDeclaration { + name, + where_section, + pallets, + })), + PalletsConversion::Explicit(pallets) => + Ok(RuntimeDeclaration::Explicit(ExplicitRuntimeDeclaration { + name, + where_section, + pallets, + pallets_token, + })), + PalletsConversion::ExplicitExpanded(pallets) => + Ok(RuntimeDeclaration::ExplicitExpanded(ExplicitRuntimeDeclaration { + name, + where_section, + pallets, + pallets_token, + })), + } + } +} + +#[derive(Debug)] +pub struct WhereSection { + pub span: Span, + pub block: syn::TypePath, + pub node_block: syn::TypePath, + pub unchecked_extrinsic: syn::TypePath, +} + +impl Parse for WhereSection { + fn parse(input: ParseStream) -> Result { + input.parse::()?; + + let mut definitions = Vec::new(); + while !input.peek(token::Brace) { + let definition: WhereDefinition = input.parse()?; + definitions.push(definition); + if !input.peek(Token![,]) { + if !input.peek(token::Brace) { + return Err(input.error("Expected `,` or `{`")) + } + break + } + input.parse::()?; + } + let block = remove_kind(input, WhereKind::Block, &mut definitions)?.value; + let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; + let unchecked_extrinsic = + remove_kind(input, WhereKind::UncheckedExtrinsic, &mut 
definitions)?.value; + if let Some(WhereDefinition { ref kind_span, ref kind, .. }) = definitions.first() { + let msg = format!( + "`{:?}` was declared above. Please use exactly one declaration for `{:?}`.", + kind, kind + ); + return Err(Error::new(*kind_span, msg)) + } + Ok(Self { span: input.span(), block, node_block, unchecked_extrinsic }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub enum WhereKind { + Block, + NodeBlock, + UncheckedExtrinsic, +} + +#[derive(Debug)] +pub struct WhereDefinition { + pub kind_span: Span, + pub kind: WhereKind, + pub value: syn::TypePath, +} + +impl Parse for WhereDefinition { + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + let (kind_span, kind) = if lookahead.peek(keyword::Block) { + (input.parse::()?.span(), WhereKind::Block) + } else if lookahead.peek(keyword::NodeBlock) { + (input.parse::()?.span(), WhereKind::NodeBlock) + } else if lookahead.peek(keyword::UncheckedExtrinsic) { + (input.parse::()?.span(), WhereKind::UncheckedExtrinsic) + } else { + return Err(lookahead.error()) + }; + + Ok(Self { + kind_span, + kind, + value: { + let _: Token![=] = input.parse()?; + input.parse()? + }, + }) + } +} + +/// The declaration of a pallet. +#[derive(Debug, Clone)] +pub struct PalletDeclaration { + /// Is this pallet fully expanded? + pub is_expanded: bool, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Optional attributes tagged right above a pallet declaration. + pub attrs: Vec, + /// Optional fixed index, e.g. `MyPallet ... = 3,`. + pub index: Option, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: PalletPath, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, + /// The declared pallet parts, + /// e.g. `Some([Pallet, Call])` for `System: system::{Pallet, Call}` + /// or `None` for `System: system`. 
+ pub pallet_parts: Option>, + /// The specified parts, either use_parts or exclude_parts. + pub specified_parts: SpecifiedParts, +} + +/// The possible declaration of pallet parts to use. +#[derive(Debug, Clone)] +pub enum SpecifiedParts { + /// Use all the pallet parts except those specified. + Exclude(Vec), + /// Use only the specified pallet parts. + Use(Vec), + /// Use the all the pallet parts. + All, +} + +impl Parse for PalletDeclaration { + fn parse(input: ParseStream) -> Result { + let attrs = input.call(Attribute::parse_outer)?; + + let name = input.parse()?; + let _: Token![:] = input.parse()?; + let path = input.parse()?; + + // Parse for instance. + let instance = if input.peek(Token![::]) && input.peek3(Token![<]) { + let _: Token![::] = input.parse()?; + let _: Token![<] = input.parse()?; + let res = Some(input.parse()?); + let _: Token![>] = input.parse()?; + res + } else if !(input.peek(Token![::]) && input.peek3(token::Brace)) && + !input.peek(keyword::expanded) && + !input.peek(keyword::exclude_parts) && + !input.peek(keyword::use_parts) && + !input.peek(Token![=]) && + !input.peek(Token![,]) && + !input.is_empty() + { + return Err(input.error( + "Unexpected tokens, expected one of `::$ident` `::{`, `exclude_parts`, `use_parts`, `=`, `,`", + )); + } else { + None + }; + + // Check if the pallet is fully expanded. + let (is_expanded, extra_parts) = if input.peek(keyword::expanded) { + let _: keyword::expanded = input.parse()?; + let _: Token![::] = input.parse()?; + (true, parse_pallet_parts(input)?) 
+ } else { + (false, vec![]) + }; + + // Parse for explicit parts + let pallet_parts = if input.peek(Token![::]) && input.peek3(token::Brace) { + let _: Token![::] = input.parse()?; + let mut parts = parse_pallet_parts(input)?; + parts.extend(extra_parts.into_iter()); + Some(parts) + } else if !input.peek(keyword::exclude_parts) && + !input.peek(keyword::use_parts) && + !input.peek(Token![=]) && + !input.peek(Token![,]) && + !input.is_empty() + { + return Err(input.error( + "Unexpected tokens, expected one of `::{`, `exclude_parts`, `use_parts`, `=`, `,`", + )) + } else { + is_expanded.then_some(extra_parts) + }; + + // Parse for specified parts + let specified_parts = if input.peek(keyword::exclude_parts) { + let _: keyword::exclude_parts = input.parse()?; + SpecifiedParts::Exclude(parse_pallet_parts_no_generic(input)?) + } else if input.peek(keyword::use_parts) { + let _: keyword::use_parts = input.parse()?; + SpecifiedParts::Use(parse_pallet_parts_no_generic(input)?) + } else if !input.peek(Token![=]) && !input.peek(Token![,]) && !input.is_empty() { + return Err(input.error("Unexpected tokens, expected one of `exclude_parts`, `=`, `,`")) + } else { + SpecifiedParts::All + }; + + // Parse for pallet index + let index = if input.peek(Token![=]) { + input.parse::()?; + let index = input.parse::()?; + let index = index.base10_parse::()?; + Some(index) + } else if !input.peek(Token![,]) && !input.is_empty() { + return Err(input.error("Unexpected tokens, expected one of `=`, `,`")) + } else { + None + }; + + Ok(Self { is_expanded, attrs, name, path, instance, pallet_parts, specified_parts, index }) + } +} + +/// A struct representing a path to a pallet. 
`PalletPath` is almost identical to the standard +/// Rust path with a few restrictions: +/// - No leading colons allowed +/// - Path segments can only consist of identifiers separated by colons +#[derive(Debug, Clone)] +pub struct PalletPath { + pub inner: Path, +} + +impl PalletPath { + pub fn module_name(&self) -> String { + self.inner.segments.iter().fold(String::new(), |mut acc, segment| { + if !acc.is_empty() { + acc.push_str("::"); + } + acc.push_str(&segment.ident.to_string()); + acc + }) + } +} + +impl Parse for PalletPath { + fn parse(input: ParseStream) -> Result { + let mut res = + PalletPath { inner: Path { leading_colon: None, segments: Punctuated::new() } }; + + let lookahead = input.lookahead1(); + if lookahead.peek(Token![crate]) || + lookahead.peek(Token![self]) || + lookahead.peek(Token![super]) || + lookahead.peek(Ident) + { + let ident = input.call(Ident::parse_any)?; + res.inner.segments.push(ident.into()); + } else { + return Err(lookahead.error()) + } + + while input.peek(Token![::]) && input.peek3(Ident) { + input.parse::()?; + let ident = input.parse::()?; + res.inner.segments.push(ident.into()); + } + Ok(res) + } +} + +impl quote::ToTokens for PalletPath { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.inner.to_tokens(tokens); + } +} + +/// Parse [`PalletPart`]'s from a braces enclosed list that is split by commas, e.g. +/// +/// `{ Call, Event }` +fn parse_pallet_parts(input: ParseStream) -> Result> { + let pallet_parts: ext::Braces> = input.parse()?; + + let mut resolved = HashSet::new(); + for part in pallet_parts.content.inner.iter() { + if !resolved.insert(part.name()) { + let msg = format!( + "`{}` was already declared before. 
Please remove the duplicate declaration", + part.name(), + ); + return Err(Error::new(part.keyword.span(), msg)) + } + } + + Ok(pallet_parts.content.inner.into_iter().collect()) +} + +#[derive(Debug, Clone)] +pub enum PalletPartKeyword { + Pallet(keyword::Pallet), + Call(keyword::Call), + Storage(keyword::Storage), + Event(keyword::Event), + Error(keyword::Error), + Config(keyword::Config), + Origin(keyword::Origin), + Inherent(keyword::Inherent), + ValidateUnsigned(keyword::ValidateUnsigned), + FreezeReason(keyword::FreezeReason), + HoldReason(keyword::HoldReason), + Task(keyword::Task), + LockId(keyword::LockId), + SlashReason(keyword::SlashReason), +} + +impl Parse for PalletPartKeyword { + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(keyword::Pallet) { + Ok(Self::Pallet(input.parse()?)) + } else if lookahead.peek(keyword::Call) { + Ok(Self::Call(input.parse()?)) + } else if lookahead.peek(keyword::Storage) { + Ok(Self::Storage(input.parse()?)) + } else if lookahead.peek(keyword::Event) { + Ok(Self::Event(input.parse()?)) + } else if lookahead.peek(keyword::Error) { + Ok(Self::Error(input.parse()?)) + } else if lookahead.peek(keyword::Config) { + Ok(Self::Config(input.parse()?)) + } else if lookahead.peek(keyword::Origin) { + Ok(Self::Origin(input.parse()?)) + } else if lookahead.peek(keyword::Inherent) { + Ok(Self::Inherent(input.parse()?)) + } else if lookahead.peek(keyword::ValidateUnsigned) { + Ok(Self::ValidateUnsigned(input.parse()?)) + } else if lookahead.peek(keyword::FreezeReason) { + Ok(Self::FreezeReason(input.parse()?)) + } else if lookahead.peek(keyword::HoldReason) { + Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(keyword::Task) { + Ok(Self::Task(input.parse()?)) + } else if lookahead.peek(keyword::LockId) { + Ok(Self::LockId(input.parse()?)) + } else if lookahead.peek(keyword::SlashReason) { + Ok(Self::SlashReason(input.parse()?)) + } else { + Err(lookahead.error()) + } + } 
+} + +impl PalletPartKeyword { + /// Returns the name of `Self`. + fn name(&self) -> &'static str { + match self { + Self::Pallet(_) => "Pallet", + Self::Call(_) => "Call", + Self::Storage(_) => "Storage", + Self::Event(_) => "Event", + Self::Error(_) => "Error", + Self::Config(_) => "Config", + Self::Origin(_) => "Origin", + Self::Inherent(_) => "Inherent", + Self::ValidateUnsigned(_) => "ValidateUnsigned", + Self::FreezeReason(_) => "FreezeReason", + Self::HoldReason(_) => "HoldReason", + Self::Task(_) => "Task", + Self::LockId(_) => "LockId", + Self::SlashReason(_) => "SlashReason", + } + } + + /// Returns `true` if this pallet part is allowed to have generic arguments. + fn allows_generic(&self) -> bool { + Self::all_generic_arg().iter().any(|n| *n == self.name()) + } + + /// Returns the names of all pallet parts that allow to have a generic argument. + fn all_generic_arg() -> &'static [&'static str] { + &["Event", "Error", "Origin", "Config", "Task"] + } +} + +impl ToTokens for PalletPartKeyword { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Pallet(inner) => inner.to_tokens(tokens), + Self::Call(inner) => inner.to_tokens(tokens), + Self::Storage(inner) => inner.to_tokens(tokens), + Self::Event(inner) => inner.to_tokens(tokens), + Self::Error(inner) => inner.to_tokens(tokens), + Self::Config(inner) => inner.to_tokens(tokens), + Self::Origin(inner) => inner.to_tokens(tokens), + Self::Inherent(inner) => inner.to_tokens(tokens), + Self::ValidateUnsigned(inner) => inner.to_tokens(tokens), + Self::FreezeReason(inner) => inner.to_tokens(tokens), + Self::HoldReason(inner) => inner.to_tokens(tokens), + Self::Task(inner) => inner.to_tokens(tokens), + Self::LockId(inner) => inner.to_tokens(tokens), + Self::SlashReason(inner) => inner.to_tokens(tokens), + } + } +} + +#[derive(Debug, Clone)] +pub struct PalletPart { + pub keyword: PalletPartKeyword, + pub generics: syn::Generics, +} + +impl Parse for PalletPart { + fn parse(input: ParseStream) 
-> Result { + let keyword: PalletPartKeyword = input.parse()?; + + let generics: syn::Generics = input.parse()?; + if !generics.params.is_empty() && !keyword.allows_generic() { + let valid_generics = PalletPart::format_names(PalletPartKeyword::all_generic_arg()); + let msg = format!( + "`{}` is not allowed to have generics. \ + Only the following pallets are allowed to have generics: {}.", + keyword.name(), + valid_generics, + ); + return Err(syn::Error::new(keyword.span(), msg)) + } + + Ok(Self { keyword, generics }) + } +} + +impl PalletPart { + pub fn format_names(names: &[&'static str]) -> String { + let res: Vec<_> = names.iter().map(|s| format!("`{}`", s)).collect(); + res.join(", ") + } + + /// The name of this pallet part. + pub fn name(&self) -> &'static str { + self.keyword.name() + } +} + +fn remove_kind( + input: ParseStream, + kind: WhereKind, + definitions: &mut Vec, +) -> Result { + if let Some(pos) = definitions.iter().position(|d| d.kind == kind) { + Ok(definitions.remove(pos)) + } else { + let msg = format!( + "Missing associated type for `{:?}`. Add `{:?}` = ... to where section.", + kind, kind + ); + Err(input.error(msg)) + } +} + +/// The declaration of a part without its generics +#[derive(Debug, Clone)] +pub struct PalletPartNoGeneric { + keyword: PalletPartKeyword, +} + +impl Parse for PalletPartNoGeneric { + fn parse(input: ParseStream) -> Result { + Ok(Self { keyword: input.parse()? }) + } +} + +/// Parse [`PalletPartNoGeneric`]'s from a braces enclosed list that is split by commas, e.g. +/// +/// `{ Call, Event }` +fn parse_pallet_parts_no_generic(input: ParseStream) -> Result> { + let pallet_parts: ext::Braces> = + input.parse()?; + + let mut resolved = HashSet::new(); + for part in pallet_parts.content.inner.iter() { + if !resolved.insert(part.keyword.name()) { + let msg = format!( + "`{}` was already declared before. 
Please remove the duplicate declaration", + part.keyword.name(), + ); + return Err(Error::new(part.keyword.span(), msg)) + } + } + + Ok(pallet_parts.content.inner.into_iter().collect()) +} + +/// The final definition of a pallet with the resulting fixed index and explicit parts. +#[derive(Debug, Clone)] +pub struct Pallet { + /// Is this pallet fully expanded? + pub is_expanded: bool, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Either automatically inferred, or defined (e.g. `MyPallet ... = 3,`). + pub index: u8, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: PalletPath, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, + /// The pallet parts to use for the pallet. + pub pallet_parts: Vec, + /// Expressions specified inside of a #[cfg] attribute. + pub cfg_pattern: Vec, +} + +impl Pallet { + /// Get resolved pallet parts + pub fn pallet_parts(&self) -> &[PalletPart] { + &self.pallet_parts + } + + /// Find matching parts + pub fn find_part(&self, name: &str) -> Option<&PalletPart> { + self.pallet_parts.iter().find(|part| part.name() == name) + } + + /// Return whether pallet contains part + pub fn exists_part(&self, name: &str) -> bool { + self.find_part(name).is_some() + } +} + +/// Result of a conversion of a declaration of pallets. +/// +/// # State Transitions +/// +/// ```ignore +/// +----------+ +----------+ +------------------+ +/// | Implicit | -> | Explicit | -> | ExplicitExpanded | +/// +----------+ +----------+ +------------------+ +/// ``` +enum PalletsConversion { + /// Pallets implicitly declare parts. + /// + /// `System: frame_system`. + Implicit(Vec), + /// Pallets explicitly declare parts. + /// + /// `System: frame_system::{Pallet, Call}` + /// + /// However, for backwards compatibility with Polkadot/Kusama + /// we must propagate some other parts to the pallet by default. 
+ Explicit(Vec), + /// Pallets explicitly declare parts that are fully expanded. + /// + /// This is the end state that contains extra parts included by + /// default by Substrate. + /// + /// `System: frame_system expanded::{Error} ::{Pallet, Call}` + /// + /// For this example, the `Pallet`, `Call` and `Error` parts are collected. + ExplicitExpanded(Vec), +} + +/// Convert from the parsed pallet declaration to their final information. +/// +/// Check if all pallet have explicit declaration of their parts, if so then assign index to each +/// pallet using same rules as rust for fieldless enum. I.e. implicit are assigned number +/// incrementally from last explicit or 0. +fn convert_pallets(pallets: Vec) -> syn::Result { + if pallets.iter().any(|pallet| pallet.pallet_parts.is_none()) { + return Ok(PalletsConversion::Implicit(pallets)) + } + + let mut indices = HashMap::new(); + let mut last_index: Option = None; + let mut names = HashMap::new(); + let mut is_expanded = true; + + let pallets = pallets + .into_iter() + .map(|pallet| { + let final_index = match pallet.index { + Some(i) => i, + None => last_index.map_or(Some(0), |i| i.checked_add(1)).ok_or_else(|| { + let msg = "Pallet index doesn't fit into u8, index is 256"; + syn::Error::new(pallet.name.span(), msg) + })?, + }; + + last_index = Some(final_index); + + if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { + let msg = format!( + "Pallet indices are conflicting: Both pallets {} and {} are at index {}", + used_pallet, pallet.name, final_index, + ); + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err) + } + + if let Some(used_pallet) = names.insert(pallet.name.clone(), pallet.name.span()) { + let msg = "Two pallets with the same name!"; + + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet.name.span(), &msg)); + return Err(err) + } + + let mut pallet_parts = 
pallet.pallet_parts.expect("Checked above"); + + let available_parts = + pallet_parts.iter().map(|part| part.keyword.name()).collect::>(); + + // Check parts are correctly specified + match &pallet.specified_parts { + SpecifiedParts::Exclude(parts) | SpecifiedParts::Use(parts) => + for part in parts { + if !available_parts.contains(part.keyword.name()) { + let msg = format!( + "Invalid pallet part specified, the pallet `{}` doesn't have the \ + `{}` part. Available parts are: {}.", + pallet.name, + part.keyword.name(), + pallet_parts.iter().fold(String::new(), |fold, part| { + if fold.is_empty() { + format!("`{}`", part.keyword.name()) + } else { + format!("{}, `{}`", fold, part.keyword.name()) + } + }) + ); + return Err(syn::Error::new(part.keyword.span(), msg)) + } + }, + SpecifiedParts::All => (), + } + + // Set only specified parts. + match pallet.specified_parts { + SpecifiedParts::Exclude(excluded_parts) => pallet_parts.retain(|part| { + !excluded_parts + .iter() + .any(|excluded_part| excluded_part.keyword.name() == part.keyword.name()) + }), + SpecifiedParts::Use(used_parts) => pallet_parts.retain(|part| { + used_parts.iter().any(|use_part| use_part.keyword.name() == part.keyword.name()) + }), + SpecifiedParts::All => (), + } + + let cfg_pattern = pallet + .attrs + .iter() + .map(|attr| { + if attr.path().segments.first().map_or(false, |s| s.ident != "cfg") { + let msg = "Unsupported attribute, only #[cfg] is supported on pallet \ + declarations in `construct_runtime`"; + return Err(syn::Error::new(attr.span(), msg)) + } + + attr.parse_args_with(|input: syn::parse::ParseStream| { + // Required, otherwise the parse stream doesn't advance and will result in + // an error. 
+ let input = input.parse::()?; + cfg_expr::Expression::parse(&input.to_string()) + .map_err(|e| syn::Error::new(attr.span(), e.to_string())) + }) + }) + .collect::>>()?; + + is_expanded &= pallet.is_expanded; + + Ok(Pallet { + is_expanded: pallet.is_expanded, + name: pallet.name, + index: final_index, + path: pallet.path, + instance: pallet.instance, + cfg_pattern, + pallet_parts, + }) + }) + .collect::>>()?; + + if is_expanded { + Ok(PalletsConversion::ExplicitExpanded(pallets)) + } else { + Ok(PalletsConversion::Explicit(pallets)) + } +} diff --git a/support/procedural-fork/src/crate_version.rs b/support/procedural-fork/src/crate_version.rs new file mode 100644 index 000000000..8c8975a42 --- /dev/null +++ b/support/procedural-fork/src/crate_version.rs @@ -0,0 +1,54 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Implementation of macros related to crate versioning. + +use super::get_cargo_env_var; +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use proc_macro2::{Span, TokenStream}; +use syn::{Error, Result}; + +/// Create an error that will be shown by rustc at the call site of the macro. +fn create_error(message: &str) -> Error { + Error::new(Span::call_site(), message) +} + +/// Implementation of the `crate_to_crate_version!` macro. 
+pub fn crate_to_crate_version(input: proc_macro::TokenStream) -> Result { + if !input.is_empty() { + return Err(create_error("No arguments expected!")) + } + + let major_version = get_cargo_env_var::("CARGO_PKG_VERSION_MAJOR") + .map_err(|_| create_error("Major version needs to fit into `u16`"))?; + + let minor_version = get_cargo_env_var::("CARGO_PKG_VERSION_MINOR") + .map_err(|_| create_error("Minor version needs to fit into `u8`"))?; + + let patch_version = get_cargo_env_var::("CARGO_PKG_VERSION_PATCH") + .map_err(|_| create_error("Patch version needs to fit into `u8`"))?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + + Ok(quote::quote! { + #crate_::traits::CrateVersion { + major: #major_version, + minor: #minor_version, + patch: #patch_version, + } + }) +} diff --git a/support/procedural-fork/src/derive_impl.rs b/support/procedural-fork/src/derive_impl.rs new file mode 100644 index 000000000..54755f116 --- /dev/null +++ b/support/procedural-fork/src/derive_impl.rs @@ -0,0 +1,303 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Implementation of the `derive_impl` attribute macro. 
+ +use derive_syn_parse::Parse; +use macro_magic::mm_core::ForeignPath; +use proc_macro2::TokenStream as TokenStream2; +use quote::{quote, ToTokens}; +use std::collections::HashSet; +use syn::{ + parse2, parse_quote, spanned::Spanned, token, Ident, ImplItem, ItemImpl, Path, Result, Token, +}; + +mod keyword { + syn::custom_keyword!(inject_runtime_type); + syn::custom_keyword!(no_aggregated_types); +} + +#[derive(derive_syn_parse::Parse, PartialEq, Eq)] +pub enum PalletAttrType { + #[peek(keyword::inject_runtime_type, name = "inject_runtime_type")] + RuntimeType(keyword::inject_runtime_type), +} + +#[derive(derive_syn_parse::Parse)] +pub struct PalletAttr { + _pound: Token![#], + #[bracket] + _bracket: token::Bracket, + #[inside(_bracket)] + typ: PalletAttrType, +} + +fn is_runtime_type(item: &syn::ImplItemType) -> bool { + item.attrs.iter().any(|attr| { + if let Ok(PalletAttr { typ: PalletAttrType::RuntimeType(_), .. }) = + parse2::(attr.into_token_stream()) + { + return true + } + false + }) +} + +#[derive(Parse, Debug)] +pub struct DeriveImplAttrArgs { + pub default_impl_path: Path, + _as: Option, + #[parse_if(_as.is_some())] + pub disambiguation_path: Option, + _comma: Option, + #[parse_if(_comma.is_some())] + pub no_aggregated_types: Option, +} + +impl ForeignPath for DeriveImplAttrArgs { + fn foreign_path(&self) -> &Path { + &self.default_impl_path + } +} + +impl ToTokens for DeriveImplAttrArgs { + fn to_tokens(&self, tokens: &mut TokenStream2) { + tokens.extend(self.default_impl_path.to_token_stream()); + tokens.extend(self._as.to_token_stream()); + tokens.extend(self.disambiguation_path.to_token_stream()); + tokens.extend(self._comma.to_token_stream()); + tokens.extend(self.no_aggregated_types.to_token_stream()); + } +} + +/// Gets the [`Ident`] representation of the given [`ImplItem`], if one exists. Otherwise +/// returns [`None`]. +/// +/// Used by [`combine_impls`] to determine whether we can compare [`ImplItem`]s by [`Ident`] +/// or not. 
+fn impl_item_ident(impl_item: &ImplItem) -> Option<&Ident> { + match impl_item { + ImplItem::Const(item) => Some(&item.ident), + ImplItem::Fn(item) => Some(&item.sig.ident), + ImplItem::Type(item) => Some(&item.ident), + ImplItem::Macro(item) => item.mac.path.get_ident(), + _ => None, + } +} + +/// The real meat behind `derive_impl`. Takes in a `local_impl`, which is the impl for which we +/// want to implement defaults (i.e. the one the attribute macro is attached to), and a +/// `foreign_impl`, which is the impl containing the defaults we want to use, and returns an +/// [`ItemImpl`] containing the final generated impl. +/// +/// This process has the following caveats: +/// * Colliding items that have an ident are not copied into `local_impl` +/// * Uncolliding items that have an ident are copied into `local_impl` but are qualified as `type +/// #ident = <#default_impl_path as #disambiguation_path>::#ident;` +/// * Items that lack an ident are de-duplicated so only unique items that lack an ident are copied +/// into `local_impl`. Items that lack an ident and also exist verbatim in `local_impl` are not +/// copied over. 
+fn combine_impls( + local_impl: ItemImpl, + foreign_impl: ItemImpl, + default_impl_path: Path, + disambiguation_path: Path, + inject_runtime_types: bool, +) -> ItemImpl { + let (existing_local_keys, existing_unsupported_items): (HashSet, HashSet) = + local_impl + .items + .iter() + .cloned() + .partition(|impl_item| impl_item_ident(impl_item).is_some()); + let existing_local_keys: HashSet = existing_local_keys + .into_iter() + .filter_map(|item| impl_item_ident(&item).cloned()) + .collect(); + let mut final_impl = local_impl; + let extended_items = foreign_impl.items.into_iter().filter_map(|item| { + if let Some(ident) = impl_item_ident(&item) { + if existing_local_keys.contains(&ident) { + // do not copy colliding items that have an ident + return None + } + if let ImplItem::Type(typ) = item.clone() { + let cfg_attrs = typ + .attrs + .iter() + .filter(|attr| attr.path().get_ident().map_or(false, |ident| ident == "cfg")) + .map(|attr| attr.to_token_stream()); + if is_runtime_type(&typ) { + let item: ImplItem = if inject_runtime_types { + parse_quote! { + #( #cfg_attrs )* + type #ident = #ident; + } + } else { + item + }; + return Some(item) + } + // modify and insert uncolliding type items + let modified_item: ImplItem = parse_quote! { + #( #cfg_attrs )* + type #ident = <#default_impl_path as #disambiguation_path>::#ident; + }; + return Some(modified_item) + } + // copy uncolliding non-type items that have an ident + Some(item) + } else { + // do not copy colliding items that lack an ident + (!existing_unsupported_items.contains(&item)) + // copy uncolliding items without an ident verbatim + .then_some(item) + } + }); + final_impl.items.extend(extended_items); + final_impl +} + +/// Computes the disambiguation path for the `derive_impl` attribute macro. +/// +/// When specified explicitly using `as [disambiguation_path]` in the macro attr, the +/// disambiguation is used as is. 
If not, we infer the disambiguation path from the +/// `foreign_impl_path` and the computed scope. +fn compute_disambiguation_path( + disambiguation_path: Option, + foreign_impl: ItemImpl, + default_impl_path: Path, +) -> Result { + match (disambiguation_path, foreign_impl.clone().trait_) { + (Some(disambiguation_path), _) => Ok(disambiguation_path), + (None, Some((_, foreign_impl_path, _))) => + if default_impl_path.segments.len() > 1 { + let scope = default_impl_path.segments.first(); + Ok(parse_quote!(#scope :: #foreign_impl_path)) + } else { + Ok(foreign_impl_path) + }, + _ => Err(syn::Error::new( + default_impl_path.span(), + "Impl statement must have a defined type being implemented \ + for a defined type such as `impl A for B`", + )), + } +} + +/// Internal implementation behind [`#[derive_impl(..)]`](`macro@crate::derive_impl`). +/// +/// `default_impl_path`: the module path of the external `impl` statement whose tokens we are +/// importing via `macro_magic` +/// +/// `foreign_tokens`: the tokens for the external `impl` statement +/// +/// `local_tokens`: the tokens for the local `impl` statement this attribute is attached to +/// +/// `disambiguation_path`: the module path of the external trait we will use to qualify +/// defaults imported from the external `impl` statement +pub fn derive_impl( + default_impl_path: TokenStream2, + foreign_tokens: TokenStream2, + local_tokens: TokenStream2, + disambiguation_path: Option, + no_aggregated_types: Option, +) -> Result { + let local_impl = parse2::(local_tokens)?; + let foreign_impl = parse2::(foreign_tokens)?; + let default_impl_path = parse2::(default_impl_path)?; + + let disambiguation_path = compute_disambiguation_path( + disambiguation_path, + foreign_impl.clone(), + default_impl_path.clone(), + )?; + + // generate the combined impl + let combined_impl = combine_impls( + local_impl, + foreign_impl, + default_impl_path, + disambiguation_path, + no_aggregated_types.is_none(), + ); + + 
Ok(quote!(#combined_impl)) +} + +#[test] +fn test_derive_impl_attr_args_parsing() { + parse2::(quote!( + some::path::TestDefaultConfig as some::path::DefaultConfig + )) + .unwrap(); + parse2::(quote!( + frame_system::prelude::testing::TestDefaultConfig as DefaultConfig + )) + .unwrap(); + parse2::(quote!(Something as some::path::DefaultConfig)).unwrap(); + parse2::(quote!(Something as DefaultConfig)).unwrap(); + parse2::(quote!(DefaultConfig)).unwrap(); + assert!(parse2::(quote!()).is_err()); + assert!(parse2::(quote!(Config Config)).is_err()); +} + +#[test] +fn test_runtime_type_with_doc() { + trait TestTrait { + type Test; + } + #[allow(unused)] + struct TestStruct; + let p = parse2::(quote!( + impl TestTrait for TestStruct { + /// Some doc + #[inject_runtime_type] + type Test = u32; + } + )) + .unwrap(); + for item in p.items { + if let ImplItem::Type(typ) = item { + assert_eq!(is_runtime_type(&typ), true); + } + } +} + +#[test] +fn test_disambiguation_path() { + let foreign_impl: ItemImpl = parse_quote!(impl SomeTrait for SomeType {}); + let default_impl_path: Path = parse_quote!(SomeScope::SomeType); + + // disambiguation path is specified + let disambiguation_path = compute_disambiguation_path( + Some(parse_quote!(SomeScope::SomePath)), + foreign_impl.clone(), + default_impl_path.clone(), + ); + assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeScope::SomePath)); + + // disambiguation path is not specified and the default_impl_path has more than one segment + let disambiguation_path = + compute_disambiguation_path(None, foreign_impl.clone(), default_impl_path.clone()); + assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeScope::SomeTrait)); + + // disambiguation path is not specified and the default_impl_path has only one segment + let disambiguation_path = + compute_disambiguation_path(None, foreign_impl.clone(), parse_quote!(SomeType)); + assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeTrait)); +} diff --git 
a/support/procedural-fork/src/dummy_part_checker.rs b/support/procedural-fork/src/dummy_part_checker.rs new file mode 100644 index 000000000..34d9a3e23 --- /dev/null +++ b/support/procedural-fork/src/dummy_part_checker.rs @@ -0,0 +1,79 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::COUNTER; +use proc_macro::TokenStream; + +pub fn generate_dummy_part_checker(input: TokenStream) -> TokenStream { + if !input.is_empty() { + return syn::Error::new(proc_macro2::Span::call_site(), "No arguments expected") + .to_compile_error() + .into() + } + + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let no_op_macro_ident = + syn::Ident::new(&format!("__dummy_part_checker_{}", count), proc_macro2::Span::call_site()); + + quote::quote!( + #[macro_export] + #[doc(hidden)] + macro_rules! 
#no_op_macro_ident { + ( $( $tt:tt )* ) => {}; + } + + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #no_op_macro_ident as is_std_enabled_for_genesis; + } + + #[doc(hidden)] + pub mod __substrate_event_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_event_part_defined; + } + + #[doc(hidden)] + pub mod __substrate_inherent_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_inherent_part_defined; + } + + #[doc(hidden)] + pub mod __substrate_validate_unsigned_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_validate_unsigned_part_defined; + } + + #[doc(hidden)] + pub mod __substrate_call_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_call_part_defined; + } + + #[doc(hidden)] + pub mod __substrate_origin_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_origin_part_defined; + } + ) + .into() +} diff --git a/support/procedural-fork/src/dynamic_params.rs b/support/procedural-fork/src/dynamic_params.rs new file mode 100644 index 000000000..29399a885 --- /dev/null +++ b/support/procedural-fork/src/dynamic_params.rs @@ -0,0 +1,563 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Code for the `#[dynamic_params]`, `#[dynamic_pallet_params]` and +//! 
`#[dynamic_aggregated_params_internal]` macros. + +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use inflector::Inflector; +use proc_macro2::{Span, TokenStream}; +use quote::{format_ident, quote, ToTokens}; +use syn::{parse2, spanned::Spanned, visit_mut, visit_mut::VisitMut, Result, Token}; + +/// Parse and expand a `#[dynamic_params(..)]` module. +pub fn dynamic_params(attr: TokenStream, item: TokenStream) -> Result { + DynamicParamModAttr::parse(attr, item).map(ToTokens::into_token_stream) +} + +/// Parse and expand `#[dynamic_pallet_params(..)]` attribute. +pub fn dynamic_pallet_params(attr: TokenStream, item: TokenStream) -> Result { + DynamicPalletParamAttr::parse(attr, item).map(ToTokens::into_token_stream) +} + +/// Parse and expand `#[dynamic_aggregated_params_internal]` attribute. +pub fn dynamic_aggregated_params_internal( + _attr: TokenStream, + item: TokenStream, +) -> Result { + parse2::(item).map(ToTokens::into_token_stream) +} + +/// A top `#[dynamic_params(..)]` attribute together with a mod. +#[derive(derive_syn_parse::Parse)] +pub struct DynamicParamModAttr { + params_mod: syn::ItemMod, + meta: DynamicParamModAttrMeta, +} + +/// The inner meta of a `#[dynamic_params(..)]` attribute. 
+#[derive(derive_syn_parse::Parse)] +pub struct DynamicParamModAttrMeta { + name: syn::Ident, + _comma: Option, + #[parse_if(_comma.is_some())] + params_pallet: Option, +} + +impl DynamicParamModAttr { + pub fn parse(attr: TokenStream, item: TokenStream) -> Result { + let params_mod = parse2(item)?; + let meta = parse2(attr)?; + Ok(Self { params_mod, meta }) + } + + pub fn inner_mods(&self) -> Vec { + self.params_mod.content.as_ref().map_or(Vec::new(), |(_, items)| { + items + .iter() + .filter_map(|i| match i { + syn::Item::Mod(m) => Some(m), + _ => None, + }) + .cloned() + .collect() + }) + } +} + +impl ToTokens for DynamicParamModAttr { + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let (mut params_mod, name) = (self.params_mod.clone(), &self.meta.name); + let dynam_params_ident = ¶ms_mod.ident; + + let mut quoted_enum = quote! {}; + for m in self.inner_mods() { + let aggregate_name = + syn::Ident::new(&m.ident.to_string().to_class_case(), m.ident.span()); + let mod_name = &m.ident; + + let mut attrs = m.attrs.clone(); + attrs.retain(|attr| !attr.path().is_ident("dynamic_pallet_params")); + if let Err(err) = ensure_codec_index(&attrs, m.span()) { + tokens.extend(err.into_compile_error()); + return + } + + quoted_enum.extend(quote! { + #(#attrs)* + #aggregate_name(#dynam_params_ident::#mod_name::Parameters), + }); + } + + // Inject the outer args into the inner `#[dynamic_pallet_params(..)]` attribute. + if let Some(params_pallet) = &self.meta.params_pallet { + MacroInjectArgs { runtime_params: name.clone(), params_pallet: params_pallet.clone() } + .visit_item_mod_mut(&mut params_mod); + } + + tokens.extend(quote! { + #params_mod + + #[#scrate::dynamic_params::dynamic_aggregated_params_internal] + pub enum #name { + #quoted_enum + } + }); + } +} + +/// Ensure there is a `#[codec(index = ..)]` attribute. 
+fn ensure_codec_index(attrs: &Vec, span: Span) -> Result<()> { + let mut found = false; + + for attr in attrs.iter() { + if attr.path().is_ident("codec") { + let meta: syn::ExprAssign = attr.parse_args()?; + if meta.left.to_token_stream().to_string() == "index" { + found = true; + break + } + } + } + + if !found { + Err(syn::Error::new(span, "Missing explicit `#[codec(index = ..)]` attribute")) + } else { + Ok(()) + } +} + +/// Used to inject arguments into the inner `#[dynamic_pallet_params(..)]` attribute. +/// +/// This allows the outer `#[dynamic_params(..)]` attribute to specify some arguments that don't +/// need to be repeated every time. +struct MacroInjectArgs { + runtime_params: syn::Ident, + params_pallet: syn::Type, +} +impl VisitMut for MacroInjectArgs { + fn visit_item_mod_mut(&mut self, item: &mut syn::ItemMod) { + // Check if the mod has a `#[dynamic_pallet_params(..)]` attribute. + let attr = item.attrs.iter_mut().find(|attr| attr.path().is_ident("dynamic_pallet_params")); + + if let Some(attr) = attr { + match &attr.meta { + syn::Meta::Path(path) => + assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params"), + _ => (), + } + + let runtime_params = &self.runtime_params; + let params_pallet = &self.params_pallet; + + attr.meta = syn::parse2::(quote! { + dynamic_pallet_params(#runtime_params, #params_pallet) + }) + .unwrap() + .into(); + } + + visit_mut::visit_item_mod_mut(self, item); + } +} +/// The helper attribute of a `#[dynamic_pallet_params(runtime_params, params_pallet)]` +/// attribute. +#[derive(derive_syn_parse::Parse)] +pub struct DynamicPalletParamAttr { + inner_mod: syn::ItemMod, + meta: DynamicPalletParamAttrMeta, +} + +/// The inner meta of a `#[dynamic_pallet_params(..)]` attribute. 
+#[derive(derive_syn_parse::Parse)] +pub struct DynamicPalletParamAttrMeta { + runtime_params: syn::Ident, + _comma: Token![,], + parameter_pallet: syn::Type, +} + +impl DynamicPalletParamAttr { + pub fn parse(attr: TokenStream, item: TokenStream) -> Result { + Ok(Self { inner_mod: parse2(item)?, meta: parse2(attr)? }) + } + + pub fn statics(&self) -> Vec { + self.inner_mod.content.as_ref().map_or(Vec::new(), |(_, items)| { + items + .iter() + .filter_map(|i| match i { + syn::Item::Static(s) => Some(s), + _ => None, + }) + .cloned() + .collect() + }) + } +} + +impl ToTokens for DynamicPalletParamAttr { + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let (params_mod, parameter_pallet, runtime_params) = + (&self.inner_mod, &self.meta.parameter_pallet, &self.meta.runtime_params); + + let aggregate_name = + syn::Ident::new(¶ms_mod.ident.to_string().to_class_case(), params_mod.ident.span()); + let (mod_name, vis) = (¶ms_mod.ident, ¶ms_mod.vis); + let statics = self.statics(); + + let (mut key_names, mut key_values, mut defaults, mut attrs, mut value_types): ( + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + ) = Default::default(); + + for s in statics.iter() { + if let Err(err) = ensure_codec_index(&s.attrs, s.span()) { + tokens.extend(err.into_compile_error()); + return + } + + key_names.push(&s.ident); + key_values.push(format_ident!("{}Value", &s.ident)); + defaults.push(&s.expr); + attrs.push(&s.attrs); + value_types.push(&s.ty); + } + + let key_ident = syn::Ident::new("ParametersKey", params_mod.ident.span()); + let value_ident = syn::Ident::new("ParametersValue", params_mod.ident.span()); + let runtime_key_ident = format_ident!("{}Key", runtime_params); + let runtime_value_ident = format_ident!("{}Value", runtime_params); + + tokens.extend(quote! 
{ + pub mod #mod_name { + use super::*; + + #[doc(hidden)] + #[derive( + Clone, + PartialEq, + Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + #vis enum Parameters { + #( + #(#attrs)* + #key_names(#key_names, Option<#value_types>), + )* + } + + #[doc(hidden)] + #[derive( + Clone, + PartialEq, + Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + #vis enum #key_ident { + #( + #(#attrs)* + #key_names(#key_names), + )* + } + + #[doc(hidden)] + #[derive( + Clone, + PartialEq, + Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + #vis enum #value_ident { + #( + #(#attrs)* + #key_names(#value_types), + )* + } + + impl #scrate::traits::dynamic_params::AggregatedKeyValue for Parameters { + type Key = #key_ident; + type Value = #value_ident; + + fn into_parts(self) -> (Self::Key, Option) { + match self { + #( + Parameters::#key_names(key, value) => { + (#key_ident::#key_names(key), value.map(#value_ident::#key_names)) + }, + )* + } + } + } + + #( + #[doc(hidden)] + #[derive( + Clone, + PartialEq, + Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + #vis struct #key_names; + + impl #scrate::__private::Get<#value_types> for #key_names { + fn get() -> #value_types { + match + <#parameter_pallet as + #scrate::storage::StorageMap<#runtime_key_ident, #runtime_value_ident> + >::get(#runtime_key_ident::#aggregate_name(#key_ident::#key_names(#key_names))) + { + 
Some(#runtime_value_ident::#aggregate_name( + #value_ident::#key_names(inner))) => inner, + Some(_) => { + #scrate::defensive!("Unexpected value type at key - returning default"); + #defaults + }, + None => #defaults, + } + } + } + + impl #scrate::traits::dynamic_params::Key for #key_names { + type Value = #value_types; + type WrappedValue = #key_values; + } + + impl From<#key_names> for #key_ident { + fn from(key: #key_names) -> Self { + #key_ident::#key_names(key) + } + } + + impl TryFrom<#key_ident> for #key_names { + type Error = (); + + fn try_from(key: #key_ident) -> Result { + match key { + #key_ident::#key_names(key) => Ok(key), + _ => Err(()), + } + } + } + + #[doc(hidden)] + #[derive( + Clone, + PartialEq, + Eq, + #scrate::sp_runtime::RuntimeDebug, + )] + #vis struct #key_values(pub #value_types); + + impl From<#key_values> for #value_ident { + fn from(value: #key_values) -> Self { + #value_ident::#key_names(value.0) + } + } + + impl From<(#key_names, #value_types)> for Parameters { + fn from((key, value): (#key_names, #value_types)) -> Self { + Parameters::#key_names(key, Some(value)) + } + } + + impl From<#key_names> for Parameters { + fn from(key: #key_names) -> Self { + Parameters::#key_names(key, None) + } + } + + impl TryFrom<#value_ident> for #key_values { + type Error = (); + + fn try_from(value: #value_ident) -> Result { + match value { + #value_ident::#key_names(value) => Ok(#key_values(value)), + _ => Err(()), + } + } + } + + impl From<#key_values> for #value_types { + fn from(value: #key_values) -> Self { + value.0 + } + } + )* + } + }); + } +} + +#[derive(derive_syn_parse::Parse)] +pub struct DynamicParamAggregatedEnum { + aggregated_enum: syn::ItemEnum, +} + +impl ToTokens for DynamicParamAggregatedEnum { + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let params_enum = &self.aggregated_enum; + let (name, vis) = (¶ms_enum.ident, 
¶ms_enum.vis); + + let (mut indices, mut param_names, mut param_types): (Vec<_>, Vec<_>, Vec<_>) = + Default::default(); + let mut attributes = Vec::new(); + for (i, variant) in params_enum.variants.iter().enumerate() { + indices.push(i); + param_names.push(&variant.ident); + attributes.push(&variant.attrs); + + param_types.push(match &variant.fields { + syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => &fields.unnamed[0].ty, + _ => { + *tokens = quote! { compile_error!("Only unnamed enum variants with one inner item are supported") }; + return + }, + }); + } + + let params_key_ident = format_ident!("{}Key", params_enum.ident); + let params_value_ident = format_ident!("{}Value", params_enum.ident); + + tokens.extend(quote! { + #[doc(hidden)] + #[derive( + Clone, + PartialEq, + Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::codec::MaxEncodedLen, + #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + #vis enum #name { + #( + //#[codec(index = #indices)] + #(#attributes)* + #param_names(#param_types), + )* + } + + #[doc(hidden)] + #[derive( + Clone, + PartialEq, + Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::codec::MaxEncodedLen, + #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + #vis enum #params_key_ident { + #( + #(#attributes)* + #param_names(<#param_types as #scrate::traits::dynamic_params::AggregatedKeyValue>::Key), + )* + } + + #[doc(hidden)] + #[derive( + Clone, + PartialEq, + Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::codec::MaxEncodedLen, + #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + #vis enum #params_value_ident { + #( + #(#attributes)* + #param_names(<#param_types as #scrate::traits::dynamic_params::AggregatedKeyValue>::Value), + )* + } + + impl 
#scrate::traits::dynamic_params::AggregatedKeyValue for #name { + type Key = #params_key_ident; + type Value = #params_value_ident; + + fn into_parts(self) -> (Self::Key, Option) { + match self { + #( + #name::#param_names(parameter) => { + let (key, value) = parameter.into_parts(); + (#params_key_ident::#param_names(key), value.map(#params_value_ident::#param_names)) + }, + )* + } + } + } + + #( + impl ::core::convert::From<<#param_types as #scrate::traits::dynamic_params::AggregatedKeyValue>::Key> for #params_key_ident { + fn from(key: <#param_types as #scrate::traits::dynamic_params::AggregatedKeyValue>::Key) -> Self { + #params_key_ident::#param_names(key) + } + } + + impl ::core::convert::TryFrom<#params_value_ident> for <#param_types as #scrate::traits::dynamic_params::AggregatedKeyValue>::Value { + type Error = (); + + fn try_from(value: #params_value_ident) -> Result { + match value { + #params_value_ident::#param_names(value) => Ok(value), + _ => Err(()), + } + } + } + )* + }); + } +} + +/// Get access to the current crate and convert the error to a compile error. +fn crate_access() -> core::result::Result { + generate_access_from_frame_or_crate("frame-support").map_err(|e| e.to_compile_error()) +} diff --git a/support/procedural-fork/src/key_prefix.rs b/support/procedural-fork/src/key_prefix.rs new file mode 100644 index 000000000..7f1ab6866 --- /dev/null +++ b/support/procedural-fork/src/key_prefix.rs @@ -0,0 +1,104 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use proc_macro2::{Span, TokenStream}; +use quote::{format_ident, quote, ToTokens}; +use syn::{Ident, Result}; + +const MAX_IDENTS: usize = 18; + +pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result { + if !input.is_empty() { + return Err(syn::Error::new(Span::call_site(), "No arguments expected")) + } + + let mut all_trait_impls = TokenStream::new(); + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + + for i in 2..=MAX_IDENTS { + let current_tuple = (0..i) + .map(|n| Ident::new(&format!("Tuple{}", n), Span::call_site())) + .collect::>(); + + for prefix_count in 1..i { + let (prefixes, suffixes) = current_tuple.split_at(prefix_count); + + let hashers = current_tuple + .iter() + .map(|ident| format_ident!("Hasher{}", ident)) + .collect::>(); + let kargs = + prefixes.iter().map(|ident| format_ident!("KArg{}", ident)).collect::>(); + let partial_keygen = generate_keygen(prefixes); + let suffix_keygen = generate_keygen(suffixes); + let suffix_tuple = generate_tuple(suffixes); + + let trait_impls = quote! 
{ + impl< + #(#current_tuple: FullCodec + StaticTypeInfo,)* + #(#hashers: StorageHasher,)* + #(#kargs: EncodeLike<#prefixes>),* + > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { + type Suffix = #suffix_tuple; + + fn partial_key(prefix: ( #( #kargs, )* )) -> Vec { + <#partial_keygen>::final_key(prefix) + } + } + + impl< + #(#current_tuple: FullCodec + StaticTypeInfo,)* + #(#hashers: ReversibleStorageHasher,)* + #(#kargs: EncodeLike<#prefixes>),* + > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { + fn decode_partial_key(key_material: &[u8]) -> Result< + Self::Suffix, + #frame_support::__private::codec::Error, + > { + <#suffix_keygen>::decode_final_key(key_material).map(|k| k.0) + } + } + }; + + all_trait_impls.extend(trait_impls); + } + } + + Ok(all_trait_impls) +} + +fn generate_tuple(idents: &[Ident]) -> TokenStream { + if idents.len() == 1 { + idents[0].to_token_stream() + } else { + quote!((#(#idents),*)) + } +} + +fn generate_keygen(idents: &[Ident]) -> TokenStream { + if idents.len() == 1 { + let key = &idents[0]; + let hasher = format_ident!("Hasher{}", key); + + quote!(Key<#hasher, #key>) + } else { + let hashers = idents.iter().map(|ident| format_ident!("Hasher{}", ident)); + + quote!((#(Key<#hashers, #idents>),*)) + } +} diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index 8b1378917..08ce0a73c 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -1 +1,67 @@ +#![recursion_limit = "512"] +#![deny(rustdoc::broken_intra_doc_links)] +extern crate proc_macro; + +mod benchmark; +mod construct_runtime; +mod crate_version; +mod derive_impl; +mod dummy_part_checker; +mod dynamic_params; +mod key_prefix; +mod match_and_insert; +mod no_bound; +mod pallet; +mod pallet_error; +mod runtime; +mod storage_alias; +mod transactional; +mod tt_macro; + +use frame_support_procedural_tools::generate_access_from_frame_or_crate; 
+use macro_magic::{import_tokens_attr, import_tokens_attr_verbatim}; +use proc_macro::TokenStream; +use quote::{quote, ToTokens}; +use std::{cell::RefCell, str::FromStr}; +use syn::{parse_macro_input, Error, ItemImpl, ItemMod, TraitItemType}; + +pub(crate) const INHERENT_INSTANCE_NAME: &str = "__InherentHiddenInstance"; + +/// The number of module instances supported by the runtime, starting at index 1, +/// and up to `NUMBER_OF_INSTANCE`. +pub(crate) const NUMBER_OF_INSTANCE: u8 = 16; + +thread_local! { + /// A global counter, can be used to generate a relatively unique identifier. + static COUNTER: RefCell = RefCell::new(Counter(0)); +} + +/// Counter to generate a relatively unique identifier for macros. This is necessary because +/// declarative macros gets hoisted to the crate root, which shares the namespace with other pallets +/// containing the very same macros. +struct Counter(u64); + +impl Counter { + fn inc(&mut self) -> u64 { + let ret = self.0; + self.0 += 1; + ret + } +} + +/// Get the value from the given environment variable set by cargo. +/// +/// The value is parsed into the requested destination type. +fn get_cargo_env_var(version_env: &str) -> std::result::Result { + let version = std::env::var(version_env) + .unwrap_or_else(|_| panic!("`{}` is always set by cargo; qed", version_env)); + + T::from_str(&version).map_err(drop) +} + +/// Generate the counter_prefix related to the storage. +/// counter_prefix is used by counted storage map. +fn counter_prefix(prefix: &str) -> String { + format!("CounterFor{}", prefix) +} diff --git a/support/procedural-fork/src/match_and_insert.rs b/support/procedural-fork/src/match_and_insert.rs new file mode 100644 index 000000000..aa9cc56d7 --- /dev/null +++ b/support/procedural-fork/src/match_and_insert.rs @@ -0,0 +1,159 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Implementation of the `match_and_insert` macro. + +use proc_macro2::{Group, Span, TokenStream, TokenTree}; +use std::iter::once; +use syn::spanned::Spanned; + +mod keyword { + syn::custom_keyword!(target); + syn::custom_keyword!(pattern); + syn::custom_keyword!(tokens); +} + +pub fn match_and_insert(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let MatchAndInsertDef { pattern, tokens, target } = + syn::parse_macro_input!(input as MatchAndInsertDef); + + match expand_in_stream(&pattern, &mut Some(tokens), target) { + Ok(stream) => stream.into(), + Err(err) => err.to_compile_error().into(), + } +} + +struct MatchAndInsertDef { + // Token stream to search and insert tokens into. + target: TokenStream, + // Pattern to match against, this is ensured to have no TokenTree::Group nor TokenTree::Literal + // (i.e. contains only Punct or Ident), and not being empty. + pattern: Vec, + // Token stream to insert after the match pattern. 
+ tokens: TokenStream, +} + +impl syn::parse::Parse for MatchAndInsertDef { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut target; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(target in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(target in target); + let target = target.parse()?; + + let mut pattern; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(pattern in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(pattern in pattern); + let pattern = pattern.parse::()?.into_iter().collect::>(); + + if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Group(_))) { + return Err(syn::Error::new(t.span(), "Unexpected group token tree")) + } + if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Literal(_))) { + return Err(syn::Error::new(t.span(), "Unexpected literal token tree")) + } + + if pattern.is_empty() { + return Err(syn::Error::new(Span::call_site(), "empty match pattern is invalid")) + } + + let mut tokens; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(tokens in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(tokens in tokens); + let tokens = tokens.parse()?; + + Ok(Self { tokens, pattern, target }) + } +} + +// Insert `tokens` after the first matching `pattern`. +// `tokens` must be some (Option is used for internal simplification). +// `pattern` must not be empty and should only contain Ident or Punct. 
+fn expand_in_stream( + pattern: &[TokenTree], + tokens: &mut Option, + stream: TokenStream, +) -> syn::Result { + assert!( + tokens.is_some(), + "`tokens` must be some, Option is used because `tokens` is used only once" + ); + assert!( + !pattern.is_empty(), + "`pattern` must not be empty, otherwise there is nothing to match against" + ); + + let stream_span = stream.span(); + let mut stream = stream.into_iter(); + let mut extended = TokenStream::new(); + let mut match_cursor = 0; + + while let Some(token) = stream.next() { + match token { + TokenTree::Group(group) => { + match_cursor = 0; + let group_stream = group.stream(); + match expand_in_stream(pattern, tokens, group_stream) { + Ok(s) => { + extended.extend(once(TokenTree::Group(Group::new(group.delimiter(), s)))); + extended.extend(stream); + return Ok(extended) + }, + Err(_) => { + extended.extend(once(TokenTree::Group(group))); + }, + } + }, + other => { + advance_match_cursor(&other, pattern, &mut match_cursor); + + extended.extend(once(other)); + + if match_cursor == pattern.len() { + extended + .extend(once(tokens.take().expect("tokens is used to replace only once"))); + extended.extend(stream); + return Ok(extended) + } + }, + } + } + // if we reach this point, it means the stream is empty and we haven't found a matching pattern + let msg = format!("Cannot find pattern `{:?}` in given token stream", pattern); + Err(syn::Error::new(stream_span, msg)) +} + +fn advance_match_cursor(other: &TokenTree, pattern: &[TokenTree], match_cursor: &mut usize) { + use TokenTree::{Ident, Punct}; + + let does_match_other_pattern = match (other, &pattern[*match_cursor]) { + (Ident(i1), Ident(i2)) => i1 == i2, + (Punct(p1), Punct(p2)) => p1.as_char() == p2.as_char(), + _ => false, + }; + + if does_match_other_pattern { + *match_cursor += 1; + } else { + *match_cursor = 0; + } +} diff --git a/support/procedural-fork/src/no_bound/clone.rs b/support/procedural-fork/src/no_bound/clone.rs new file mode 100644 index 
000000000..346bf450f --- /dev/null +++ b/support/procedural-fork/src/no_bound/clone.rs @@ -0,0 +1,107 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use syn::spanned::Spanned; + +/// Derive Clone but do not bound any generic. +pub fn derive_clone_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input = syn::parse_macro_input!(input as syn::DeriveInput); + + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named.named.iter().map(|i| &i.ident).map(|i| { + quote::quote_spanned!(i.span() => + #i: ::core::clone::Clone::clone(&self.#i) + ) + }); + + quote::quote!( Self { #( #fields, )* } ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = + unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map(|i| { + quote::quote_spanned!(i.span() => + ::core::clone::Clone::clone(&self.#i) + ) + }); + + quote::quote!( Self ( #( #fields, )* ) ) + }, + syn::Fields::Unit => { + quote::quote!(Self) + }, + }, + syn::Data::Enum(enum_) => { + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let captured = 
named.named.iter().map(|i| &i.ident); + let cloned = captured.clone().map(|i| { + ::quote::quote_spanned!(i.span() => + #i: ::core::clone::Clone::clone(#i) + ) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} + ) + }, + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let cloned = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + ::core::clone::Clone::clone(#i) + ) + }); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) + ) + }, + syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), + } + }); + + quote::quote!(match self { + #( #variants, )* + }) + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(CloneNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; + + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::clone::Clone for #name #ty_generics #where_clause { + fn clone(&self) -> Self { + #impl_ + } + } + }; + ) + .into() +} diff --git a/support/procedural-fork/src/no_bound/debug.rs b/support/procedural-fork/src/no_bound/debug.rs new file mode 100644 index 000000000..a1b3f4f0d --- /dev/null +++ b/support/procedural-fork/src/no_bound/debug.rs @@ -0,0 +1,121 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +use syn::spanned::Spanned; + +/// Derive Debug but do not bound any generics. +pub fn derive_debug_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input = syn::parse_macro_input!(input as syn::DeriveInput); + + let input_ident = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = + named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) ), + ); + + quote::quote!( + fmt.debug_struct(stringify!(#input_ident)) + #( #fields )* + .finish() + ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => .field(&self.#i) )); + + quote::quote!( + fmt.debug_tuple(stringify!(#input_ident)) + #( #fields )* + .finish() + ) + }, + syn::Fields::Unit => quote::quote!(fmt.write_str(stringify!(#input_ident))), + }, + syn::Data::Enum(enum_) => { + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + let full_variant_str = format!("{}::{}", input_ident, ident); + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let debugged = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + .field(stringify!(#i), &#i) + ) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => { + fmt.debug_struct(#full_variant_str) + #( #debugged )* + .finish() + } + ) + }, + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let debugged = captured + .clone() + .map(|i| 
quote::quote_spanned!(i.span() => .field(&#i))); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => { + fmt.debug_tuple(#full_variant_str) + #( #debugged )* + .finish() + } + ) + }, + syn::Fields::Unit => quote::quote!( + Self::#ident => fmt.write_str(#full_variant_str) + ), + } + }); + + quote::quote!(match *self { + #( #variants, )* + }) + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(DebugNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; + + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::fmt::Debug for #input_ident #ty_generics #where_clause { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + #impl_ + } + } + }; + ) + .into() +} diff --git a/support/procedural-fork/src/no_bound/default.rs b/support/procedural-fork/src/no_bound/default.rs new file mode 100644 index 000000000..0524247d2 --- /dev/null +++ b/support/procedural-fork/src/no_bound/default.rs @@ -0,0 +1,161 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use proc_macro2::Span; +use quote::{quote, quote_spanned}; +use syn::{spanned::Spanned, Data, DeriveInput, Fields}; + +/// Derive Default but do not bound any generic. 
+pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input = syn::parse_macro_input!(input as DeriveInput); + + let name = &input.ident; + + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = match input.data { + Data::Struct(struct_) => match struct_.fields { + Fields::Named(named) => { + let fields = named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! {ident.span() => + #ident: ::core::default::Default::default() + } + }); + + quote!(Self { #( #fields, )* }) + }, + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! {field.span()=> + ::core::default::Default::default() + } + }); + + quote!(Self( #( #fields, )* )) + }, + Fields::Unit => { + quote!(Self) + }, + }, + Data::Enum(enum_) => { + if enum_.variants.is_empty() { + return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") + .to_compile_error() + .into() + } + + // all #[default] attrs with the variant they're on; i.e. a var + let default_variants = enum_ + .variants + .into_iter() + .filter(|variant| variant.attrs.iter().any(|attr| attr.path().is_ident("default"))) + .collect::>(); + + match &*default_variants { + [] => return syn::Error::new( + name.clone().span(), + "no default declared, make a variant default by placing `#[default]` above it", + ) + .into_compile_error() + .into(), + // only one variant with the #[default] attribute set + [default_variant] => { + let variant_attrs = default_variant + .attrs + .iter() + .filter(|a| a.path().is_ident("default")) + .collect::>(); + + // check that there is only one #[default] attribute on the variant + if let [first_attr, second_attr, additional_attrs @ ..] 
= &*variant_attrs { + let mut err = + syn::Error::new(Span::call_site(), "multiple `#[default]` attributes"); + + err.combine(syn::Error::new_spanned(first_attr, "`#[default]` used here")); + + err.extend([second_attr].into_iter().chain(additional_attrs).map( + |variant| { + syn::Error::new_spanned(variant, "`#[default]` used again here") + }, + )); + + return err.into_compile_error().into() + } + + let variant_ident = &default_variant.ident; + + let fully_qualified_variant_path = quote!(Self::#variant_ident); + + match &default_variant.fields { + Fields::Named(named) => { + let fields = + named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! {ident.span()=> + #ident: ::core::default::Default::default() + } + }); + + quote!(#fully_qualified_variant_path { #( #fields, )* }) + }, + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! {field.span()=> + ::core::default::Default::default() + } + }); + + quote!(#fully_qualified_variant_path( #( #fields, )* )) + }, + Fields::Unit => fully_qualified_variant_path, + } + }, + [first, additional @ ..] 
=> { + let mut err = syn::Error::new(Span::call_site(), "multiple declared defaults"); + + err.combine(syn::Error::new_spanned(first, "first default")); + + err.extend( + additional + .into_iter() + .map(|variant| syn::Error::new_spanned(variant, "additional default")), + ); + + return err.into_compile_error().into() + }, + } + }, + Data::Union(union_) => + return syn::Error::new_spanned( + union_.union_token, + "Union type not supported by `derive(DefaultNoBound)`", + ) + .to_compile_error() + .into(), + }; + + quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::default::Default for #name #ty_generics #where_clause { + fn default() -> Self { + #impl_ + } + } + }; + ) + .into() +} diff --git a/support/procedural-fork/src/no_bound/mod.rs b/support/procedural-fork/src/no_bound/mod.rs new file mode 100644 index 000000000..9e0377dda --- /dev/null +++ b/support/procedural-fork/src/no_bound/mod.rs @@ -0,0 +1,25 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Derive macros to derive traits without bounding generic parameters. 
+ +pub mod clone; +pub mod debug; +pub mod default; +pub mod ord; +pub mod partial_eq; +pub mod partial_ord; diff --git a/support/procedural-fork/src/no_bound/ord.rs b/support/procedural-fork/src/no_bound/ord.rs new file mode 100644 index 000000000..b24d27c04 --- /dev/null +++ b/support/procedural-fork/src/no_bound/ord.rs @@ -0,0 +1,75 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use syn::spanned::Spanned; + +/// Derive Ord but do not bound any generic. 
+pub fn derive_ord_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input: syn::DeriveInput = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; + + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named + .named + .iter() + .map(|i| &i.ident) + .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); + + quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); + + quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) + }, + syn::Fields::Unit => { + quote::quote!(core::cmp::Ordering::Equal) + }, + }, + syn::Data::Enum(_) => { + let msg = "Enum type not supported by `derive(OrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(OrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; + + quote::quote!( + const _: () = { + impl #impl_generics core::cmp::Ord for #name #ty_generics #where_clause { + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + #impl_ + } + } + }; + ) + .into() +} diff --git a/support/procedural-fork/src/no_bound/partial_eq.rs b/support/procedural-fork/src/no_bound/partial_eq.rs new file mode 100644 index 000000000..a1be71a96 --- /dev/null +++ b/support/procedural-fork/src/no_bound/partial_eq.rs @@ -0,0 +1,137 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use syn::spanned::Spanned; + +/// Derive PartialEq but do not bound any generic. +pub fn derive_partial_eq_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input = syn::parse_macro_input!(input as syn::DeriveInput); + + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named + .named + .iter() + .map(|i| &i.ident) + .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); + + quote::quote!( true #( && #fields )* ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); + + quote::quote!( true #( && #fields )* ) + }, + syn::Fields::Unit => { + quote::quote!(true) + }, + }, + syn::Data::Enum(enum_) => { + let variants = + enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let names = named.named.iter().map(|i| &i.ident); + let other_names = names.clone().enumerate().map(|(n, ident)| { + syn::Ident::new(&format!("_{}", n), ident.span()) + }); + + let capture = names.clone(); + let other_capture = names + .clone() + 
.zip(other_names.clone()) + .map(|(i, other_i)| quote::quote!(#i: #other_i)); + let eq = names.zip(other_names).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); + quote::quote!( + ( + Self::#ident { #( #capture, )* }, + Self::#ident { #( #other_capture, )* }, + ) => true #( && #eq )* + ) + }, + syn::Fields::Unnamed(unnamed) => { + let names = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let other_names = + unnamed.unnamed.iter().enumerate().map(|(i, f)| { + syn::Ident::new(&format!("_{}_other", i), f.span()) + }); + let eq = names.clone().zip(other_names.clone()).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); + quote::quote!( + ( + Self::#ident ( #( #names, )* ), + Self::#ident ( #( #other_names, )* ), + ) => true #( && #eq )* + ) + }, + syn::Fields::Unit => quote::quote!( (Self::#ident, Self::#ident) => true ), + } + }); + + let mut different_variants = vec![]; + for (i, i_variant) in enum_.variants.iter().enumerate() { + for (j, j_variant) in enum_.variants.iter().enumerate() { + if i != j { + let i_ident = &i_variant.ident; + let j_ident = &j_variant.ident; + different_variants.push(quote::quote!( + (Self::#i_ident { .. }, Self::#j_ident { .. 
}) => false + )) + } + } + } + + quote::quote!( match (self, other) { + #( #variants, )* + #( #different_variants, )* + }) + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(PartialEqNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; + + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause { + fn eq(&self, other: &Self) -> bool { + #impl_ + } + } + }; + ) + .into() +} diff --git a/support/procedural-fork/src/no_bound/partial_ord.rs b/support/procedural-fork/src/no_bound/partial_ord.rs new file mode 100644 index 000000000..86aa42be9 --- /dev/null +++ b/support/procedural-fork/src/no_bound/partial_ord.rs @@ -0,0 +1,89 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use syn::spanned::Spanned; + +/// Derive PartialOrd but do not bound any generic. 
+pub fn derive_partial_ord_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input: syn::DeriveInput = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; + + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = match input.data { + syn::Data::Struct(struct_) => + match struct_.fields { + syn::Fields::Named(named) => { + let fields = + named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), + ); + + quote::quote!( + Some(core::cmp::Ordering::Equal) + #( + .and_then(|order| { + let next_order = #fields?; + Some(order.then(next_order)) + }) + )* + ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = + unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map( + |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), + ); + + quote::quote!( + Some(core::cmp::Ordering::Equal) + #( + .and_then(|order| { + let next_order = #fields?; + Some(order.then(next_order)) + }) + )* + ) + }, + syn::Fields::Unit => { + quote::quote!(Some(core::cmp::Ordering::Equal)) + }, + }, + syn::Data::Enum(_) => { + let msg = "Enum type not supported by `derive(PartialOrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(PartialOrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; + + quote::quote!( + const _: () = { + impl #impl_generics core::cmp::PartialOrd for #name #ty_generics #where_clause { + fn partial_cmp(&self, other: &Self) -> Option { + #impl_ + } + } + }; + ) + .into() +} diff --git a/support/procedural-fork/src/pallet/expand/call.rs b/support/procedural-fork/src/pallet/expand/call.rs new file mode 100644 index 000000000..f395872c8 --- /dev/null +++ 
b/support/procedural-fork/src/pallet/expand/call.rs @@ -0,0 +1,452 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{ + pallet::{ + expand::warnings::{weight_constant_warning, weight_witness_warning}, + parse::call::CallWeightDef, + Def, + }, + COUNTER, +}; +use proc_macro2::TokenStream as TokenStream2; +use proc_macro_warning::Warning; +use quote::{quote, ToTokens}; +use syn::spanned::Spanned; + +/// +/// * Generate enum call and implement various trait on it. 
+/// * Implement Callable and call_function on `Pallet` +pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { + let (span, where_clause, methods, docs) = match def.call.as_ref() { + Some(call) => { + let span = call.attr_span; + let where_clause = call.where_clause.clone(); + let methods = call.methods.clone(); + let docs = call.docs.clone(); + + (span, where_clause, methods, docs) + }, + None => (def.item.span(), def.config.where_clause.clone(), Vec::new(), Vec::new()), + }; + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let type_impl_gen = &def.type_impl_generics(span); + let type_decl_bounded_gen = &def.type_decl_bounded_generics(span); + let type_use_gen = &def.type_use_generics(span); + let call_ident = syn::Ident::new("Call", span); + let pallet_ident = &def.pallet_struct.pallet; + + let fn_name = methods.iter().map(|method| &method.name).collect::>(); + let call_index = methods.iter().map(|method| method.call_index).collect::>(); + let new_call_variant_fn_name = fn_name + .iter() + .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) + .collect::>(); + + let new_call_variant_doc = fn_name + .iter() + .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) + .collect::>(); + + let mut call_index_warnings = Vec::new(); + // Emit a warning for each call that is missing `call_index` when not in dev-mode. 
+ for method in &methods { + if method.explicit_call_index || def.dev_mode { + continue + } + + let warning = Warning::new_deprecated("ImplicitCallIndex") + .index(call_index_warnings.len()) + .old("use implicit call indices") + .new("ensure that all calls have a `pallet::call_index` attribute or put the pallet into `dev` mode") + .help_links(&[ + "https://github.com/paritytech/substrate/pull/12891", + "https://github.com/paritytech/substrate/pull/11381" + ]) + .span(method.name.span()) + .build_or_panic(); + call_index_warnings.push(warning); + } + + let mut fn_weight = Vec::::new(); + let mut weight_warnings = Vec::new(); + for method in &methods { + match &method.weight { + CallWeightDef::DevModeDefault => fn_weight.push(syn::parse_quote!(0)), + CallWeightDef::Immediate(e) => { + weight_constant_warning(e, def.dev_mode, &mut weight_warnings); + weight_witness_warning(method, def.dev_mode, &mut weight_warnings); + + fn_weight.push(e.into_token_stream()); + }, + CallWeightDef::Inherited => { + let pallet_weight = def + .call + .as_ref() + .expect("we have methods; we have calls; qed") + .inherited_call_weight + .as_ref() + .expect("the parser prevents this"); + + // Expand `<::WeightInfo>::call_name()`. 
+ let t = &pallet_weight.typename; + let n = &method.name; + fn_weight.push(quote!({ < #t > :: #n () })); + }, + } + } + debug_assert_eq!(fn_weight.len(), methods.len()); + + let fn_doc = methods.iter().map(|method| &method.docs).collect::>(); + + let args_name = methods + .iter() + .map(|method| method.args.iter().map(|(_, name, _)| name.clone()).collect::>()) + .collect::>(); + + let args_name_stripped = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, name, _)| { + syn::Ident::new(name.to_string().trim_start_matches('_'), name.span()) + }) + .collect::>() + }) + .collect::>(); + + let make_args_name_pattern = |ref_tok| { + args_name + .iter() + .zip(args_name_stripped.iter()) + .map(|(args_name, args_name_stripped)| { + args_name + .iter() + .zip(args_name_stripped) + .map(|(args_name, args_name_stripped)| { + if args_name == args_name_stripped { + quote::quote!( #ref_tok #args_name ) + } else { + quote::quote!( #args_name_stripped: #ref_tok #args_name ) + } + }) + .collect::>() + }) + .collect::>() + }; + + let args_name_pattern = make_args_name_pattern(None); + let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); + + let args_type = methods + .iter() + .map(|method| method.args.iter().map(|(_, _, type_)| type_.clone()).collect::>()) + .collect::>(); + + let args_compact_attr = methods.iter().map(|method| { + method + .args + .iter() + .map(|(is_compact, _, type_)| { + if *is_compact { + quote::quote_spanned!(type_.span() => #[codec(compact)] ) + } else { + quote::quote!() + } + }) + .collect::>() + }); + + let default_docs = + [syn::parse_quote!(r"Contains a variant per dispatchable extrinsic that this pallet has.")]; + let docs = if docs.is_empty() { &default_docs[..] } else { &docs[..] }; + + let maybe_compile_error = if def.call.is_none() { + quote::quote! 
{ + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::call] defined, perhaps you should remove `Call` from \ + construct_runtime?", + )); + } + } else { + proc_macro2::TokenStream::new() + }; + + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = syn::Ident::new(&format!("__is_call_part_defined_{}", count), span); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; + + // Wrap all calls inside of storage layers + if let Some(syn::Item::Impl(item_impl)) = def + .call + .as_ref() + .map(|c| &mut def.item.content.as_mut().expect("Checked by def parser").1[c.index]) + { + item_impl.items.iter_mut().for_each(|i| { + if let syn::ImplItem::Fn(method) = i { + let block = &method.block; + method.block = syn::parse_quote! {{ + // We execute all dispatchable in a new storage layer, allowing them + // to return an error at any point, and undoing any storage changes. + #frame_support::storage::with_storage_layer(|| #block) + }}; + } + }); + } + + // Extracts #[allow] attributes, necessary so that we don't run into compiler warnings + let maybe_allow_attrs = methods + .iter() + .map(|method| { + method + .attrs + .iter() + .find(|attr| attr.path().is_ident("allow")) + .map_or(proc_macro2::TokenStream::new(), |attr| attr.to_token_stream()) + }) + .collect::>(); + + let cfg_attrs = methods + .iter() + .map(|method| { + let attrs = + method.cfg_attrs.iter().map(|attr| attr.to_token_stream()).collect::>(); + quote::quote!( #( #attrs )* ) + }) + .collect::>(); + + let feeless_check = methods.iter().map(|method| &method.feeless_check).collect::>(); + let feeless_check_result = + feeless_check.iter().zip(args_name.iter()).map(|(feeless_check, arg_name)| { + if let Some(feeless_check) = feeless_check { + quote::quote!(#feeless_check(origin, #( #arg_name, )*)) + } else { + quote::quote!(false) + } + }); + + quote::quote_spanned!(span => + #[doc(hidden)] + mod warnings { + #( + 
#call_index_warnings + )* + #( + #weight_warnings + )* + } + + #[allow(unused_imports)] + #[doc(hidden)] + pub mod __substrate_call_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + }; + } + + #[doc(hidden)] + pub use #macro_ident as is_call_part_defined; + } + + #( #[doc = #docs] )* + #[derive( + #frame_support::RuntimeDebugNoBound, + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] + #[allow(non_camel_case_types)] + pub enum #call_ident<#type_decl_bounded_gen> #where_clause { + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData<(#type_use_gen,)>, + #frame_support::Never, + ), + #( + #cfg_attrs + #( #[doc = #fn_doc] )* + #[codec(index = #call_index)] + #fn_name { + #( + #[allow(missing_docs)] + #args_compact_attr #args_name_stripped: #args_type + ),* + }, + )* + } + + impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { + #( + #cfg_attrs + #[doc = #new_call_variant_doc] + pub fn #new_call_variant_fn_name( + #( #args_name_stripped: #args_type ),* + ) -> Self { + Self::#fn_name { + #( #args_name_stripped ),* + } + } + )* + } + + impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo + for #call_ident<#type_use_gen> + #where_clause + { + fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { + match *self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern_ref, )* } => { + let __pallet_base_weight = #fn_weight; + + let __pallet_weight = < + dyn #frame_support::dispatch::WeighData<( #( & #args_type, )* )> + >::weigh_data(&__pallet_base_weight, ( #( #args_name, )* )); + + let __pallet_class = < + dyn 
#frame_support::dispatch::ClassifyDispatch< + ( #( & #args_type, )* ) + > + >::classify_dispatch(&__pallet_base_weight, ( #( #args_name, )* )); + + let __pallet_pays_fee = < + dyn #frame_support::dispatch::PaysFee<( #( & #args_type, )* )> + >::pays_fee(&__pallet_base_weight, ( #( #args_name, )* )); + + #frame_support::dispatch::DispatchInfo { + weight: __pallet_weight, + class: __pallet_class, + pays_fee: __pallet_pays_fee, + } + }, + )* + Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), + } + } + } + + impl<#type_impl_gen> #frame_support::dispatch::CheckIfFeeless for #call_ident<#type_use_gen> + #where_clause + { + type Origin = #frame_system::pallet_prelude::OriginFor; + #[allow(unused_variables)] + fn is_feeless(&self, origin: &Self::Origin) -> bool { + match *self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern_ref, )* } => { + #feeless_check_result + }, + )* + Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), + } + } + } + + impl<#type_impl_gen> #frame_support::traits::GetCallName for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_name(&self) -> &'static str { + match *self { + #( #cfg_attrs Self::#fn_name { .. } => stringify!(#fn_name), )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_names() -> &'static [&'static str] { + &[ #( #cfg_attrs stringify!(#fn_name), )* ] + } + } + + impl<#type_impl_gen> #frame_support::traits::GetCallIndex for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_index(&self) -> u8 { + match *self { + #( #cfg_attrs Self::#fn_name { .. 
} => #call_index, )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_indices() -> &'static [u8] { + &[ #( #cfg_attrs #call_index, )* ] + } + } + + impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable + for #call_ident<#type_use_gen> + #where_clause + { + type RuntimeOrigin = #frame_system::pallet_prelude::OriginFor; + fn dispatch_bypass_filter( + self, + origin: Self::RuntimeOrigin + ) -> #frame_support::dispatch::DispatchResultWithPostInfo { + #frame_support::dispatch_context::run_in_context(|| { + match self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern, )* } => { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!(stringify!(#fn_name)) + ); + #maybe_allow_attrs + <#pallet_ident<#type_use_gen>>::#fn_name(origin, #( #args_name, )* ) + .map(Into::into).map_err(Into::into) + }, + )* + Self::__Ignore(_, _) => { + let _ = origin; // Use origin for empty Call enum + unreachable!("__PhantomItem cannot be used."); + }, + } + }) + } + } + + impl<#type_impl_gen> #frame_support::dispatch::Callable for #pallet_ident<#type_use_gen> + #where_clause + { + type RuntimeCall = #call_ident<#type_use_gen>; + } + + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { + #[allow(dead_code)] + #[doc(hidden)] + pub fn call_functions() -> #frame_support::__private::metadata_ir::PalletCallMetadataIR { + #frame_support::__private::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() + } + } + ) +} diff --git a/support/procedural-fork/src/pallet/expand/composite.rs b/support/procedural-fork/src/pallet/expand/composite.rs new file mode 100644 index 000000000..d449afe8f --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/composite.rs @@ -0,0 +1,40 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::pallet::Def; +use proc_macro2::TokenStream; + +/// Expands `composite_enum` and adds the `VariantCount` implementation for it. +pub fn expand_composites(def: &mut Def) -> TokenStream { + let mut expand = quote::quote!(); + let frame_support = &def.frame_support; + + for composite in &def.composites { + let name = &composite.ident; + let (impl_generics, ty_generics, where_clause) = composite.generics.split_for_impl(); + let variants_count = composite.variant_count; + + // add `VariantCount` implementation for `composite_enum` + expand.extend(quote::quote_spanned!(composite.attr_span => + impl #impl_generics #frame_support::traits::VariantCount for #name #ty_generics #where_clause { + const VARIANT_COUNT: u32 = #variants_count; + } + )); + } + + expand +} diff --git a/support/procedural-fork/src/pallet/expand/config.rs b/support/procedural-fork/src/pallet/expand/config.rs new file mode 100644 index 000000000..5cf4035a8 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/config.rs @@ -0,0 +1,97 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::pallet::Def; +use proc_macro2::TokenStream; +use quote::quote; +use syn::{parse_quote, Item}; + +/// +/// * Generate default rust doc +pub fn expand_config(def: &mut Def) -> TokenStream { + let config = &def.config; + let config_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[config.index]; + if let Item::Trait(item) = item { + item + } else { + unreachable!("Checked by config parser") + } + }; + + config_item.attrs.insert( + 0, + parse_quote!( + #[doc = r" +Configuration trait of this pallet. + +The main purpose of this trait is to act as an interface between this pallet and the runtime in +which it is embedded in. A type, function, or constant in this trait is essentially left to be +configured by the runtime that includes this pallet. + +Consequently, a runtime that wants to include this pallet must implement this trait." + ] + ), + ); + + // we only emit `DefaultConfig` if there are trait items, so an empty `DefaultConfig` is + // impossible consequently. 
+ match &config.default_sub_trait { + Some(default_sub_trait) if default_sub_trait.items.len() > 0 => { + let trait_items = &default_sub_trait + .items + .iter() + .map(|item| { + if item.1 { + if let syn::TraitItem::Type(item) = item.0.clone() { + let mut item = item.clone(); + item.bounds.clear(); + syn::TraitItem::Type(item) + } else { + item.0.clone() + } + } else { + item.0.clone() + } + }) + .collect::>(); + + let type_param_bounds = if default_sub_trait.has_system { + let system = &def.frame_system; + quote::quote!(: #system::DefaultConfig) + } else { + quote::quote!() + }; + + quote!( + /// Based on [`Config`]. Auto-generated by + /// [`#[pallet::config(with_default)]`](`frame_support::pallet_macros::config`). + /// Can be used in tandem with + /// [`#[register_default_config]`](`frame_support::register_default_config`) and + /// [`#[derive_impl]`](`frame_support::derive_impl`) to derive test config traits + /// based on existing pallet config traits in a safe and developer-friendly way. + /// + /// See [here](`frame_support::pallet_macros::config`) for more information and caveats about + /// the auto-generated `DefaultConfig` trait and how it is generated. + pub trait DefaultConfig #type_param_bounds { + #(#trait_items)* + } + ) + }, + _ => Default::default(), + } +} diff --git a/support/procedural-fork/src/pallet/expand/constants.rs b/support/procedural-fork/src/pallet/expand/constants.rs new file mode 100644 index 000000000..57fa8b7f3 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/constants.rs @@ -0,0 +1,108 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::pallet::Def; + +struct ConstDef { + /// Name of the associated type. + pub ident: syn::Ident, + /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, + /// default_byte implementation + pub default_byte_impl: proc_macro2::TokenStream, + /// Constant name for Metadata (optional) + pub metadata_name: Option, +} + +/// +/// * Impl fn module_constant_metadata for pallet. +pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + let pallet_ident = &def.pallet_struct.pallet; + let trait_use_gen = &def.trait_use_generics(proc_macro2::Span::call_site()); + + let mut where_clauses = vec![&def.config.where_clause]; + where_clauses.extend(def.extra_constants.iter().map(|d| &d.where_clause)); + let completed_where_clause = super::merge_where_clauses(&where_clauses); + + let config_consts = def.config.consts_metadata.iter().map(|const_| { + let ident = &const_.ident; + let const_type = &const_.type_; + + ConstDef { + ident: const_.ident.clone(), + type_: const_.type_.clone(), + doc: const_.doc.clone(), + default_byte_impl: quote::quote!( + let value = <::#ident as + #frame_support::traits::Get<#const_type>>::get(); + #frame_support::__private::codec::Encode::encode(&value) + ), + metadata_name: None, + } + }); + + let extra_consts = 
def.extra_constants.iter().flat_map(|d| &d.extra_constants).map(|const_| { + let ident = &const_.ident; + + ConstDef { + ident: const_.ident.clone(), + type_: const_.type_.clone(), + doc: const_.doc.clone(), + default_byte_impl: quote::quote!( + let value = >::#ident(); + #frame_support::__private::codec::Encode::encode(&value) + ), + metadata_name: const_.metadata_name.clone(), + } + }); + + let consts = config_consts.chain(extra_consts).map(|const_| { + let const_type = &const_.type_; + let ident_str = format!("{}", const_.metadata_name.unwrap_or(const_.ident)); + + let no_docs = vec![]; + let doc = if cfg!(feature = "no-metadata-docs") { &no_docs } else { &const_.doc }; + + let default_byte_impl = &const_.default_byte_impl; + + quote::quote!({ + #frame_support::__private::metadata_ir::PalletConstantMetadataIR { + name: #ident_str, + ty: #frame_support::__private::scale_info::meta_type::<#const_type>(), + value: { #default_byte_impl }, + docs: #frame_support::__private::sp_std::vec![ #( #doc ),* ], + } + }) + }); + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ + + #[doc(hidden)] + pub fn pallet_constants_metadata() + -> #frame_support::__private::sp_std::vec::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> + { + #frame_support::__private::sp_std::vec![ #( #consts ),* ] + } + } + ) +} diff --git a/support/procedural-fork/src/pallet/expand/doc_only.rs b/support/procedural-fork/src/pallet/expand/doc_only.rs new file mode 100644 index 000000000..621a051ac --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/doc_only.rs @@ -0,0 +1,103 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use proc_macro2::Span; + +use crate::pallet::Def; + +pub fn expand_doc_only(def: &mut Def) -> proc_macro2::TokenStream { + let dispatchables = if let Some(call_def) = &def.call { + let type_impl_generics = def.type_impl_generics(Span::call_site()); + call_def + .methods + .iter() + .map(|method| { + let name = &method.name; + let args = &method + .args + .iter() + .map(|(_, arg_name, arg_type)| quote::quote!( #arg_name: #arg_type, )) + .collect::(); + let docs = &method.docs; + + let real = format!(" [`Pallet::{}`].", name); + quote::quote!( + #( #[doc = #docs] )* + /// + /// # Warning: Doc-Only + /// + /// This function is an automatically generated, and is doc-only, uncallable + /// stub. See the real version in + #[ doc = #real ] + pub fn #name<#type_impl_generics>(#args) { unreachable!(); } + ) + }) + .collect::() + } else { + quote::quote!() + }; + + let storage_types = def + .storages + .iter() + .map(|storage| { + let storage_name = &storage.ident; + let storage_type_docs = &storage.docs; + let real = format!("[`pallet::{}`].", storage_name); + quote::quote!( + #( #[doc = #storage_type_docs] )* + /// + /// # Warning: Doc-Only + /// + /// This type is automatically generated, and is doc-only. See the real version in + #[ doc = #real ] + pub struct #storage_name(); + ) + }) + .collect::(); + + quote::quote!( + /// Auto-generated docs-only module listing all (public and private) defined storage types + /// for this pallet. 
+ /// + /// # Warning: Doc-Only + /// + /// Members of this module cannot be used directly and are only provided for documentation + /// purposes. + /// + /// To see the actual storage type, find a struct with the same name at the root of the + /// pallet, in the list of [*Type Definitions*](../index.html#types). + #[cfg(doc)] + pub mod storage_types { + use super::*; + #storage_types + } + + /// Auto-generated docs-only module listing all defined dispatchables for this pallet. + /// + /// # Warning: Doc-Only + /// + /// Members of this module cannot be used directly and are only provided for documentation + /// purposes. To see the real version of each dispatchable, look for them in [`Pallet`] or + /// [`Call`]. + #[cfg(doc)] + pub mod dispatchables { + use super::*; + #dispatchables + } + ) +} diff --git a/support/procedural-fork/src/pallet/expand/documentation.rs b/support/procedural-fork/src/pallet/expand/documentation.rs new file mode 100644 index 000000000..ec19f889a --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/documentation.rs @@ -0,0 +1,172 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use crate::pallet::Def; +use proc_macro2::TokenStream; +use quote::ToTokens; +use syn::{spanned::Spanned, Attribute, Lit, LitStr}; + +const DOC: &'static str = "doc"; +const PALLET_DOC: &'static str = "pallet_doc"; + +/// Get the documentation file path from the `pallet_doc` attribute. +/// +/// Supported format: +/// `#[pallet_doc(PATH)]`: The path of the file from which the documentation is loaded +fn parse_pallet_doc_value(attr: &Attribute) -> syn::Result { + let lit: syn::LitStr = attr.parse_args().map_err(|_| { + let msg = "The `pallet_doc` received an unsupported argument. Supported format: `pallet_doc(\"PATH\")`"; + syn::Error::new(attr.span(), msg) + })?; + + Ok(DocMetaValue::Path(lit)) +} + +/// Get the value from the `doc` comment attribute: +/// +/// Supported formats: +/// - `#[doc = "A doc string"]`: Documentation as a string literal +/// - `#[doc = include_str!(PATH)]`: Documentation obtained from a path +fn parse_doc_value(attr: &Attribute) -> syn::Result> { + if !attr.path().is_ident(DOC) { + return Ok(None) + } + + let meta = attr.meta.require_name_value()?; + + match &meta.value { + syn::Expr::Lit(lit) => Ok(Some(DocMetaValue::Lit(lit.lit.clone()))), + syn::Expr::Macro(mac) if mac.mac.path.is_ident("include_str") => + Ok(Some(DocMetaValue::Path(mac.mac.parse_body()?))), + _ => + Err(syn::Error::new(attr.span(), "Expected `= \"docs\"` or `= include_str!(\"PATH\")`")), + } +} + +/// Supported documentation tokens. +#[derive(Debug)] +enum DocMetaValue { + /// Documentation with string literals. + /// + /// `#[doc = "Lit"]` + Lit(Lit), + /// Documentation with `include_str!` macro. + /// + /// The string literal represents the file `PATH`. 
+ /// + /// `#[doc = include_str!(PATH)]` + Path(LitStr), +} + +impl ToTokens for DocMetaValue { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + DocMetaValue::Lit(lit) => lit.to_tokens(tokens), + DocMetaValue::Path(path_lit) => { + let decl = quote::quote!(include_str!(#path_lit)); + tokens.extend(decl) + }, + } + } +} + +/// Extract the documentation from the given pallet definition +/// to include in the runtime metadata. +/// +/// Implement a `pallet_documentation_metadata` function to fetch the +/// documentation that is included in the metadata. +/// +/// The documentation is placed on the pallet similar to: +/// +/// ```ignore +/// #[pallet] +/// /// Documentation for pallet +/// #[doc = "Documentation for pallet"] +/// #[doc = include_str!("../README.md")] +/// #[pallet_doc("../documentation1.md")] +/// #[pallet_doc("../documentation2.md")] +/// pub mod pallet {} +/// ``` +/// +/// # pallet_doc +/// +/// The `pallet_doc` attribute can only be provided with one argument, +/// which is the file path that holds the documentation to be added to the metadata. +/// +/// Unlike the `doc` attribute, the documentation provided to the `proc_macro` attribute is +/// not added to the pallet. +pub fn expand_documentation(def: &mut Def) -> proc_macro2::TokenStream { + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + let pallet_ident = &def.pallet_struct.pallet; + let where_clauses = &def.config.where_clause; + + // TODO: Use [drain_filter](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter) when it is stable. + + // The `pallet_doc` attributes are excluded from the generation of the pallet, + // but they are included in the runtime metadata. 
+ let mut pallet_docs = Vec::with_capacity(def.item.attrs.len()); + let mut index = 0; + while index < def.item.attrs.len() { + let attr = &def.item.attrs[index]; + if attr.path().get_ident().map_or(false, |i| *i == PALLET_DOC) { + pallet_docs.push(def.item.attrs.remove(index)); + // Do not increment the index, we have just removed the + // element from the attributes. + continue + } + + index += 1; + } + + // Capture the `#[doc = include_str!("../README.md")]` and `#[doc = "Documentation"]`. + let docs = match def + .item + .attrs + .iter() + .filter_map(|v| parse_doc_value(v).transpose()) + .collect::>>() + { + Ok(r) => r, + Err(err) => return err.into_compile_error(), + }; + + // Capture the `#[pallet_doc("../README.md")]`. + let pallet_docs = match pallet_docs + .into_iter() + .map(|attr| parse_pallet_doc_value(&attr)) + .collect::>>() + { + Ok(docs) => docs, + Err(err) => return err.into_compile_error(), + }; + + let docs = docs.iter().chain(pallet_docs.iter()); + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clauses{ + + #[doc(hidden)] + pub fn pallet_documentation_metadata() + -> #frame_support::__private::sp_std::vec::Vec<&'static str> + { + #frame_support::__private::sp_std::vec![ #( #docs ),* ] + } + } + ) +} diff --git a/support/procedural-fork/src/pallet/expand/error.rs b/support/procedural-fork/src/pallet/expand/error.rs new file mode 100644 index 000000000..72fb6e923 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/error.rs @@ -0,0 +1,191 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{ + pallet::{ + parse::error::{VariantDef, VariantField}, + Def, + }, + COUNTER, +}; +use frame_support_procedural_tools::get_doc_literals; +use quote::ToTokens; +use syn::spanned::Spanned; + +/// +/// * impl various trait on Error +pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let error_token_unique_id = + syn::Ident::new(&format!("__tt_error_token_{}", count), def.item.span()); + + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let config_where_clause = &def.config.where_clause; + + let error = if let Some(error) = &def.error { + error + } else { + return quote::quote! { + #[macro_export] + #[doc(hidden)] + macro_rules! #error_token_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! 
{ + $caller + } + }; + } + + pub use #error_token_unique_id as tt_error_token; + } + }; + + let error_ident = &error.error; + let type_impl_gen = &def.type_impl_generics(error.attr_span); + let type_use_gen = &def.type_use_generics(error.attr_span); + + let phantom_variant: syn::Variant = syn::parse_quote!( + #[doc(hidden)] + #[codec(skip)] + __Ignore( + #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)>, + #frame_support::Never, + ) + ); + + let as_str_matches = error.variants.iter().map( + |VariantDef { ident: variant, field: field_ty, docs: _, cfg_attrs }| { + let variant_str = variant.to_string(); + let cfg_attrs = cfg_attrs.iter().map(|attr| attr.to_token_stream()); + match field_ty { + Some(VariantField { is_named: true }) => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant { .. } => #variant_str,) + }, + Some(VariantField { is_named: false }) => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant(..) => #variant_str,) + }, + None => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant => #variant_str,) + }, + } + }, + ); + + let error_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; + if let syn::Item::Enum(item) = item { + item + } else { + unreachable!("Checked by error parser") + } + }; + + error_item.variants.insert(0, phantom_variant); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; + + // derive TypeInfo for error metadata + error_item.attrs.push(syn::parse_quote! 
{ + #[derive( + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + #frame_support::PalletError, + )] + }); + error_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] + )); + + if get_doc_literals(&error_item.attrs).is_empty() { + error_item.attrs.push(syn::parse_quote!( + #[doc = "The `Error` enum of this pallet."] + )); + } + + quote::quote_spanned!(error.attr_span => + impl<#type_impl_gen> #frame_support::__private::sp_std::fmt::Debug for #error_ident<#type_use_gen> + #config_where_clause + { + fn fmt(&self, f: &mut #frame_support::__private::sp_std::fmt::Formatter<'_>) + -> #frame_support::__private::sp_std::fmt::Result + { + f.write_str(self.as_str()) + } + } + + impl<#type_impl_gen> #error_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn as_str(&self) -> &'static str { + match &self { + Self::__Ignore(_, _) => unreachable!("`__Ignore` can never be constructed"), + #( #as_str_matches )* + } + } + } + + impl<#type_impl_gen> From<#error_ident<#type_use_gen>> for &'static str + #config_where_clause + { + fn from(err: #error_ident<#type_use_gen>) -> &'static str { + err.as_str() + } + } + + impl<#type_impl_gen> From<#error_ident<#type_use_gen>> + for #frame_support::sp_runtime::DispatchError + #config_where_clause + { + fn from(err: #error_ident<#type_use_gen>) -> Self { + use #frame_support::__private::codec::Encode; + let index = < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::index::>() + .expect("Every active module has an index in the runtime; qed") as u8; + let mut encoded = err.encode(); + encoded.resize(#frame_support::MAX_MODULE_ERROR_ENCODED_SIZE, 0); + + #frame_support::sp_runtime::DispatchError::Module(#frame_support::sp_runtime::ModuleError { + index, + error: TryInto::try_into(encoded).expect("encoded error is resized to be equal to the maximum encoded error size; 
qed"), + message: Some(err.as_str()), + }) + } + } + + #[macro_export] + #[doc(hidden)] + macro_rules! #error_token_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + error = [{ #error_ident }] + } + }; + } + + pub use #error_token_unique_id as tt_error_token; + ) +} diff --git a/support/procedural-fork/src/pallet/expand/event.rs b/support/procedural-fork/src/pallet/expand/event.rs new file mode 100644 index 000000000..655fc5507 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/event.rs @@ -0,0 +1,174 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{ + pallet::{parse::event::PalletEventDepositAttr, Def}, + COUNTER, +}; +use frame_support_procedural_tools::get_doc_literals; +use syn::{spanned::Spanned, Ident}; + +/// +/// * Add __Ignore variant on Event +/// * Impl various trait on Event including metadata +/// * if deposit_event is defined, implement deposit_event on module. 
+pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let (event, macro_ident) = if let Some(event) = &def.event { + let ident = Ident::new(&format!("__is_event_part_defined_{}", count), event.attr_span); + (event, ident) + } else { + let macro_ident = + Ident::new(&format!("__is_event_part_defined_{}", count), def.item.span()); + + return quote::quote! { + #[doc(hidden)] + pub mod __substrate_event_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::event] defined, perhaps you should \ + remove `Event` from construct_runtime?", + )); + } + } + + #[doc(hidden)] + pub use #macro_ident as is_event_part_defined; + } + } + }; + + let event_where_clause = &event.where_clause; + + // NOTE: actually event where clause must be a subset of config where clause because of + // `type RuntimeEvent: From>`. But we merge either way for potential better error + // message + let completed_where_clause = + super::merge_where_clauses(&[&event.where_clause, &def.config.where_clause]); + + let event_ident = &event.event; + let frame_system = &def.frame_system; + let frame_support = &def.frame_support; + let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); + let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); + + let event_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; + if let syn::Item::Enum(item) = item { + item + } else { + unreachable!("Checked by event parser") + } + }; + + // Phantom data is added for generic event. + if event.gen_kind.is_generic() { + let variant = syn::parse_quote!( + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData<(#event_use_gen)>, + #frame_support::Never, + ) + ); + + // Push ignore variant at the end. 
+ event_item.variants.push(variant); + } + + if get_doc_literals(&event_item.attrs).is_empty() { + event_item + .attrs + .push(syn::parse_quote!(#[doc = "The `Event` enum of this pallet"])); + } + + // derive some traits because system event require Clone, FullCodec, Eq, PartialEq and Debug + event_item.attrs.push(syn::parse_quote!( + #[derive( + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::RuntimeDebugNoBound, + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + )] + )); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; + + // skip requirement for type params to implement `TypeInfo`, and set docs capture + event_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#event_use_gen), capture_docs = #capture_docs)] + )); + + let deposit_event = if let Some(deposit_event) = &event.deposit_event { + let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); + let trait_use_gen = &def.trait_use_generics(event.attr_span); + let type_impl_gen = &def.type_impl_generics(event.attr_span); + let type_use_gen = &def.type_use_generics(event.attr_span); + let pallet_ident = &def.pallet_struct.pallet; + + let PalletEventDepositAttr { fn_vis, fn_span, .. } = deposit_event; + + quote::quote_spanned!(*fn_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #fn_vis fn deposit_event(event: Event<#event_use_gen>) { + let event = < + ::RuntimeEvent as + From> + >::from(event); + + let event = < + ::RuntimeEvent as + Into<::RuntimeEvent> + >::into(event); + + <#frame_system::Pallet>::deposit_event(event) + } + } + ) + } else { + Default::default() + }; + + quote::quote_spanned!(event.attr_span => + #[doc(hidden)] + pub mod __substrate_event_check { + #[macro_export] + #[doc(hidden)] + macro_rules! 
#macro_ident { + ($pallet_name:ident) => {}; + } + + #[doc(hidden)] + pub use #macro_ident as is_event_part_defined; + } + + #deposit_event + + impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { + fn from(_: #event_ident<#event_use_gen>) {} + } + ) +} diff --git a/support/procedural-fork/src/pallet/expand/genesis_build.rs b/support/procedural-fork/src/pallet/expand/genesis_build.rs new file mode 100644 index 000000000..248e83469 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/genesis_build.rs @@ -0,0 +1,49 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use crate::pallet::Def; + +/// +/// * implement the trait `sp_runtime::BuildStorage` +pub fn expand_genesis_build(def: &mut Def) -> proc_macro2::TokenStream { + let genesis_config = if let Some(genesis_config) = &def.genesis_config { + genesis_config + } else { + return Default::default() + }; + let genesis_build = def.genesis_build.as_ref().expect("Checked by def parser"); + + let frame_support = &def.frame_support; + let type_impl_gen = &genesis_config.gen_kind.type_impl_gen(genesis_build.attr_span); + let gen_cfg_ident = &genesis_config.genesis_config; + let gen_cfg_use_gen = &genesis_config.gen_kind.type_use_gen(genesis_build.attr_span); + + let where_clause = &genesis_build.where_clause; + + quote::quote_spanned!(genesis_build.attr_span => + #[cfg(feature = "std")] + impl<#type_impl_gen> #frame_support::sp_runtime::BuildStorage for #gen_cfg_ident<#gen_cfg_use_gen> #where_clause + { + fn assimilate_storage(&self, storage: &mut #frame_support::sp_runtime::Storage) -> std::result::Result<(), std::string::String> { + #frame_support::__private::BasicExternalities::execute_with_storage(storage, || { + self.build(); + Ok(()) + }) + } + } + ) +} diff --git a/support/procedural-fork/src/pallet/expand/genesis_config.rs b/support/procedural-fork/src/pallet/expand/genesis_config.rs new file mode 100644 index 000000000..31d519ef2 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/genesis_config.rs @@ -0,0 +1,147 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{pallet::Def, COUNTER}; +use frame_support_procedural_tools::get_doc_literals; +use quote::ToTokens; +use syn::{spanned::Spanned, Ident}; + +/// +/// * add various derive trait on GenesisConfig struct. +pub fn expand_genesis_config(def: &mut Def) -> proc_macro2::TokenStream { + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let (genesis_config, def_macro_ident, std_macro_ident) = + if let Some(genesis_config) = &def.genesis_config { + let def_macro_ident = Ident::new( + &format!("__is_genesis_config_defined_{}", count), + genesis_config.genesis_config.span(), + ); + + let std_macro_ident = Ident::new( + &format!("__is_std_macro_defined_for_genesis_{}", count), + genesis_config.genesis_config.span(), + ); + + (genesis_config, def_macro_ident, std_macro_ident) + } else { + let def_macro_ident = + Ident::new(&format!("__is_genesis_config_defined_{}", count), def.item.span()); + + let std_macro_ident = + Ident::new(&format!("__is_std_enabled_for_genesis_{}", count), def.item.span()); + + return quote::quote! { + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #def_macro_ident { + ($pallet_name:ident) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::genesis_config] defined, perhaps you should \ + remove `Config` from construct_runtime?", + )); + } + } + + #[macro_export] + #[doc(hidden)] + macro_rules! 
#std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => {}; + } + + #[doc(hidden)] + pub use #def_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #std_macro_ident as is_std_enabled_for_genesis; + } + } + }; + + let frame_support = &def.frame_support; + + let genesis_config_item = + &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_config.index]; + + let serde_crate = format!("{}::__private::serde", frame_support.to_token_stream()); + + match genesis_config_item { + syn::Item::Enum(syn::ItemEnum { attrs, .. }) | + syn::Item::Struct(syn::ItemStruct { attrs, .. }) | + syn::Item::Type(syn::ItemType { attrs, .. }) => { + if get_doc_literals(attrs).is_empty() { + attrs.push(syn::parse_quote!( + #[doc = r" + Can be used to configure the + [genesis state](https://docs.substrate.io/build/genesis-configuration/) + of this pallet. + "] + )); + } + attrs.push(syn::parse_quote!( + #[derive(#frame_support::Serialize, #frame_support::Deserialize)] + )); + attrs.push(syn::parse_quote!( #[serde(rename_all = "camelCase")] )); + attrs.push(syn::parse_quote!( #[serde(deny_unknown_fields)] )); + attrs.push(syn::parse_quote!( #[serde(bound(serialize = ""))] )); + attrs.push(syn::parse_quote!( #[serde(bound(deserialize = ""))] )); + attrs.push(syn::parse_quote!( #[serde(crate = #serde_crate)] )); + }, + _ => unreachable!("Checked by genesis_config parser"), + } + + quote::quote! { + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #def_macro_ident { + ($pallet_name:ident) => {}; + } + + #[cfg(not(feature = "std"))] + #[macro_export] + #[doc(hidden)] + macro_rules! #std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have the std feature enabled, this will cause the `", + $pallet_path, + "::GenesisConfig` type to not implement serde traits." 
+ )); + }; + } + + #[cfg(feature = "std")] + #[macro_export] + #[doc(hidden)] + macro_rules! #std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => {}; + } + + #[doc(hidden)] + pub use #def_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #std_macro_ident as is_std_enabled_for_genesis; + } + } +} diff --git a/support/procedural-fork/src/pallet/expand/hooks.rs b/support/procedural-fork/src/pallet/expand/hooks.rs new file mode 100644 index 000000000..3623b5952 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/hooks.rs @@ -0,0 +1,340 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use crate::pallet::Def; + +/// * implement the individual traits using the Hooks trait +pub fn expand_hooks(def: &mut Def) -> proc_macro2::TokenStream { + let (where_clause, span, has_runtime_upgrade) = match def.hooks.as_ref() { + Some(hooks) => { + let where_clause = hooks.where_clause.clone(); + let span = hooks.attr_span; + let has_runtime_upgrade = hooks.has_runtime_upgrade; + (where_clause, span, has_runtime_upgrade) + }, + None => (def.config.where_clause.clone(), def.pallet_struct.attr_span, false), + }; + + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(span); + let type_use_gen = &def.type_use_generics(span); + let pallet_ident = &def.pallet_struct.pallet; + let frame_system = &def.frame_system; + let pallet_name = quote::quote! { + < + ::PalletInfo + as + #frame_support::traits::PalletInfo + >::name::().unwrap_or("") + }; + + let initialize_on_chain_storage_version = if let Some(in_code_version) = + &def.pallet_struct.storage_version + { + quote::quote! { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🐥 New pallet {:?} detected in the runtime. Initializing the on-chain storage version to match the storage version defined in the pallet: {:?}", + #pallet_name, + #in_code_version + ); + #in_code_version.put::(); + } + } else { + quote::quote! { + let default_version = #frame_support::traits::StorageVersion::new(0); + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🐥 New pallet {:?} detected in the runtime. The pallet has no defined storage version, so the on-chain version is being initialized to {:?}.", + #pallet_name, + default_version + ); + default_version.put::(); + } + }; + + let log_runtime_upgrade = if has_runtime_upgrade { + // a migration is defined here. + quote::quote! { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "⚠️ {} declares internal migrations (which *might* execute). 
\ + On-chain `{:?}` vs in-code storage version `{:?}`", + #pallet_name, + ::on_chain_storage_version(), + ::in_code_storage_version(), + ); + } + } else { + // default. + quote::quote! { + #frame_support::__private::log::debug!( + target: #frame_support::LOG_TARGET, + "✅ no migration for {}", + #pallet_name, + ); + } + }; + + let hooks_impl = if def.hooks.is_none() { + let frame_system = &def.frame_system; + quote::quote! { + impl<#type_impl_gen> + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause {} + } + } else { + proc_macro2::TokenStream::new() + }; + + // If a storage version is set, we should ensure that the storage version on chain matches the + // in-code storage version. This assumes that `Executive` is running custom migrations before + // the pallets are called. + let post_storage_version_check = if def.pallet_struct.storage_version.is_some() { + quote::quote! { + let on_chain_version = ::on_chain_storage_version(); + let in_code_version = ::in_code_storage_version(); + + if on_chain_version != in_code_version { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "{}: On chain storage version {:?} doesn't match in-code storage version {:?}.", + #pallet_name, + on_chain_version, + in_code_version, + ); + + return Err("On chain and in-code storage version do not match. Missing runtime upgrade?".into()); + } + } + } else { + quote::quote! 
{ + let on_chain_version = ::on_chain_storage_version(); + + if on_chain_version != #frame_support::traits::StorageVersion::new(0) { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "{}: On chain storage version {:?} is set to non zero, \ + while the pallet is missing the `#[pallet::storage_version(VERSION)]` attribute.", + #pallet_name, + on_chain_version, + ); + + return Err("On chain storage version set, while the pallet doesn't \ + have the `#[pallet::storage_version(VERSION)]` attribute.".into()); + } + } + }; + + quote::quote_spanned!(span => + #hooks_impl + + impl<#type_impl_gen> + #frame_support::traits::OnFinalize<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_finalize(n: #frame_system::pallet_prelude::BlockNumberFor::) { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_finalize") + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_finalize(n) + } + } + + impl<#type_impl_gen> + #frame_support::traits::OnIdle<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_idle( + n: #frame_system::pallet_prelude::BlockNumberFor::, + remaining_weight: #frame_support::weights::Weight + ) -> #frame_support::weights::Weight { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_idle(n, remaining_weight) + } + } + + impl<#type_impl_gen> + #frame_support::traits::OnPoll<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_poll( + n: #frame_system::pallet_prelude::BlockNumberFor::, + weight: &mut #frame_support::weights::WeightMeter + ) { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_poll(n, weight); + } + } + + impl<#type_impl_gen> + 
#frame_support::traits::OnInitialize<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_initialize( + n: #frame_system::pallet_prelude::BlockNumberFor:: + ) -> #frame_support::weights::Weight { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_initialize") + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_initialize(n) + } + } + + impl<#type_impl_gen> + #frame_support::traits::BeforeAllRuntimeMigrations + for #pallet_ident<#type_use_gen> #where_clause + { + fn before_all_runtime_migrations() -> #frame_support::weights::Weight { + use #frame_support::traits::{Get, PalletInfoAccess}; + use #frame_support::__private::hashing::twox_128; + use #frame_support::storage::unhashed::contains_prefixed_key; + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("before_all") + ); + + // Check if the pallet has any keys set, including the storage version. If there are + // no keys set, the pallet was just added to the runtime and needs to have its + // version initialized. + let pallet_hashed_prefix = ::name_hash(); + let exists = contains_prefixed_key(&pallet_hashed_prefix); + if !exists { + #initialize_on_chain_storage_version + ::DbWeight::get().reads_writes(1, 1) + } else { + ::DbWeight::get().reads(1) + } + } + } + + impl<#type_impl_gen> + #frame_support::traits::OnRuntimeUpgrade + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_runtime_upgrade() -> #frame_support::weights::Weight { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_runtime_update") + ); + + // log info about the upgrade. 
+ #log_runtime_upgrade + + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_runtime_upgrade() + } + + #[cfg(feature = "try-runtime")] + fn pre_upgrade() -> Result<#frame_support::__private::sp_std::vec::Vec, #frame_support::sp_runtime::TryRuntimeError> { + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::pre_upgrade() + } + + #[cfg(feature = "try-runtime")] + fn post_upgrade(state: #frame_support::__private::sp_std::vec::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #post_storage_version_check + + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::post_upgrade(state) + } + } + + impl<#type_impl_gen> + #frame_support::traits::OffchainWorker<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn offchain_worker(n: #frame_system::pallet_prelude::BlockNumberFor::) { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::offchain_worker(n) + } + } + + // Integrity tests are only required for when `std` is enabled. + #frame_support::std_enabled! 
{ + impl<#type_impl_gen> + #frame_support::traits::IntegrityTest + for #pallet_ident<#type_use_gen> #where_clause + { + fn integrity_test() { + #frame_support::__private::sp_io::TestExternalities::default().execute_with(|| { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::integrity_test() + }); + } + } + } + + #[cfg(feature = "try-runtime")] + impl<#type_impl_gen> + #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn try_state( + n: #frame_system::pallet_prelude::BlockNumberFor::, + _s: #frame_support::traits::TryStateSelect + ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🩺 Running {:?} try-state checks", + #pallet_name, + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::try_state(n).map_err(|err| { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "❌ {:?} try_state checks failed: {:?}", + #pallet_name, + err + ); + + err + }) + } + } + ) +} diff --git a/support/procedural-fork/src/pallet/expand/inherent.rs b/support/procedural-fork/src/pallet/expand/inherent.rs new file mode 100644 index 000000000..182d79f5b --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/inherent.rs @@ -0,0 +1,55 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{pallet::Def, COUNTER}; +use proc_macro2::TokenStream; +use quote::quote; +use syn::{spanned::Spanned, Ident}; + +pub fn expand_inherents(def: &mut Def) -> TokenStream { + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new(&format!("__is_inherent_part_defined_{}", count), def.item.span()); + + let maybe_compile_error = if def.inherent.is_none() { + quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::inherent] defined, perhaps you should \ + remove `Inherent` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; + + quote! { + #[doc(hidden)] + pub mod __substrate_inherent_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } + + #[doc(hidden)] + pub use #macro_ident as is_inherent_part_defined; + } + } +} diff --git a/support/procedural-fork/src/pallet/expand/instances.rs b/support/procedural-fork/src/pallet/expand/instances.rs new file mode 100644 index 000000000..b6dfa7e6d --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/instances.rs @@ -0,0 +1,43 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{pallet::Def, NUMBER_OF_INSTANCE}; +use proc_macro2::Span; + +/// +/// * Provide inherent instance to be used by construct_runtime +/// * Provide Instance1 ..= Instance16 for instantiable pallet +pub fn expand_instances(def: &mut Def) -> proc_macro2::TokenStream { + let frame_support = &def.frame_support; + let inherent_ident = syn::Ident::new(crate::INHERENT_INSTANCE_NAME, Span::call_site()); + let instances = if def.config.has_instance { + (1..=NUMBER_OF_INSTANCE) + .map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())) + .collect() + } else { + vec![] + }; + + quote::quote!( + /// Hidden instance generated to be internally used when module is used without + /// instance. + #[doc(hidden)] + pub type #inherent_ident = (); + + #( pub use #frame_support::instances::#instances; )* + ) +} diff --git a/support/procedural-fork/src/pallet/expand/mod.rs b/support/procedural-fork/src/pallet/expand/mod.rs new file mode 100644 index 000000000..067839c28 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/mod.rs @@ -0,0 +1,130 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +mod call; +mod composite; +mod config; +mod constants; +mod doc_only; +mod documentation; +mod error; +mod event; +mod genesis_build; +mod genesis_config; +mod hooks; +mod inherent; +mod instances; +mod origin; +mod pallet_struct; +mod storage; +mod tasks; +mod tt_default_parts; +mod type_value; +mod validate_unsigned; +mod warnings; + +use crate::pallet::Def; +use quote::ToTokens; + +/// Merge where clause together, `where` token span is taken from the first not none one. +pub fn merge_where_clauses(clauses: &[&Option]) -> Option { + let mut clauses = clauses.iter().filter_map(|f| f.as_ref()); + let mut res = clauses.next()?.clone(); + for other in clauses { + res.predicates.extend(other.predicates.iter().cloned()) + } + Some(res) +} + +/// Expand definition, in particular: +/// * add some bounds and variants to type defined, +/// * create some new types, +/// * impl stuff on them. +pub fn expand(mut def: Def) -> proc_macro2::TokenStream { + // Remove the `pallet_doc` attribute first. 
+ let metadata_docs = documentation::expand_documentation(&mut def); + let constants = constants::expand_constants(&mut def); + let pallet_struct = pallet_struct::expand_pallet_struct(&mut def); + let config = config::expand_config(&mut def); + let call = call::expand_call(&mut def); + let tasks = tasks::expand_tasks(&mut def); + let error = error::expand_error(&mut def); + let event = event::expand_event(&mut def); + let storages = storage::expand_storages(&mut def); + let inherents = inherent::expand_inherents(&mut def); + let instances = instances::expand_instances(&mut def); + let hooks = hooks::expand_hooks(&mut def); + let genesis_build = genesis_build::expand_genesis_build(&mut def); + let genesis_config = genesis_config::expand_genesis_config(&mut def); + let type_values = type_value::expand_type_values(&mut def); + let origins = origin::expand_origins(&mut def); + let validate_unsigned = validate_unsigned::expand_validate_unsigned(&mut def); + let tt_default_parts = tt_default_parts::expand_tt_default_parts(&mut def); + let doc_only = doc_only::expand_doc_only(&mut def); + let composites = composite::expand_composites(&mut def); + + def.item.attrs.insert( + 0, + syn::parse_quote!( + #[doc = r"The `pallet` module in each FRAME pallet hosts the most important items needed +to construct this pallet. + +The main components of this pallet are: +- [`Pallet`], which implements all of the dispatchable extrinsics of the pallet, among +other public functions. + - The subset of the functions that are dispatchable can be identified either in the + [`dispatchables`] module or in the [`Call`] enum. +- [`storage_types`], which contains the list of all types that are representing a +storage item. Otherwise, all storage items are listed among [*Type Definitions*](#types). +- [`Config`], which contains the configuration trait of this pallet. +- [`Event`] and [`Error`], which are listed among the [*Enums*](#enums). 
+ "] + ), + ); + + let new_items = quote::quote!( + #metadata_docs + #constants + #pallet_struct + #config + #call + #tasks + #error + #event + #storages + #inherents + #instances + #hooks + #genesis_build + #genesis_config + #type_values + #origins + #validate_unsigned + #tt_default_parts + #doc_only + #composites + ); + + def.item + .content + .as_mut() + .expect("This is checked by parsing") + .1 + .push(syn::Item::Verbatim(new_items)); + + def.item.into_token_stream() +} diff --git a/support/procedural-fork/src/pallet/expand/origin.rs b/support/procedural-fork/src/pallet/expand/origin.rs new file mode 100644 index 000000000..55865b424 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/origin.rs @@ -0,0 +1,55 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{pallet::Def, COUNTER}; +use proc_macro2::TokenStream; +use quote::quote; +use syn::{spanned::Spanned, Ident}; + +pub fn expand_origins(def: &mut Def) -> TokenStream { + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new(&format!("__is_origin_part_defined_{}", count), def.item.span()); + + let maybe_compile_error = if def.origin.is_none() { + quote! 
{ + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::origin] defined, perhaps you should \ + remove `Origin` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; + + quote! { + #[doc(hidden)] + pub mod __substrate_origin_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } + + #[doc(hidden)] + pub use #macro_ident as is_origin_part_defined; + } + } +} diff --git a/support/procedural-fork/src/pallet/expand/pallet_struct.rs b/support/procedural-fork/src/pallet/expand/pallet_struct.rs new file mode 100644 index 000000000..7cdf6bde9 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/pallet_struct.rs @@ -0,0 +1,290 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use crate::pallet::{expand::merge_where_clauses, Def}; +use frame_support_procedural_tools::get_doc_literals; + +/// +/// * Add derive trait on Pallet +/// * Implement GetStorageVersion on Pallet +/// * Implement OnGenesis on Pallet +/// * Implement `fn error_metadata` on Pallet +/// * declare Module type alias for construct_runtime +/// * replace the first field type of `struct Pallet` with `PhantomData` if it is `_` +/// * implementation of `PalletInfoAccess` information +/// * implementation of `StorageInfoTrait` on Pallet +pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); + let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); + let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); + let pallet_ident = &def.pallet_struct.pallet; + let config_where_clause = &def.config.where_clause; + + let mut storages_where_clauses = vec![&def.config.where_clause]; + storages_where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); + let storages_where_clauses = merge_where_clauses(&storages_where_clauses); + + let pallet_item = { + let pallet_module_items = &mut def.item.content.as_mut().expect("Checked by def").1; + let item = &mut pallet_module_items[def.pallet_struct.index]; + if let syn::Item::Struct(item) = item { + item + } else { + unreachable!("Checked by pallet struct parser") + } + }; + + // If the first field type is `_` then we replace with `PhantomData` + if let Some(field) = pallet_item.fields.iter_mut().next() { + if field.ty == syn::parse_quote!(_) { + field.ty = syn::parse_quote!( + #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)> + ); + } + } + + if get_doc_literals(&pallet_item.attrs).is_empty() { + pallet_item.attrs.push(syn::parse_quote!( + #[doc = r" + The `Pallet` struct, the main type that 
implements traits and standalone + functions within the pallet. + "] + )); + } + + pallet_item.attrs.push(syn::parse_quote!( + #[derive( + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::RuntimeDebugNoBound, + )] + )); + + let pallet_error_metadata = if let Some(error_def) = &def.error { + let error_ident = &error_def.error; + quote::quote_spanned!(def.pallet_struct.attr_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { + Some(#frame_support::__private::metadata_ir::PalletErrorMetadataIR { + ty: #frame_support::__private::scale_info::meta_type::<#error_ident<#type_use_gen>>() + }) + } + } + ) + } else { + quote::quote_spanned!(def.pallet_struct.attr_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { + None + } + } + ) + }; + + let storage_info_span = + def.pallet_struct.without_storage_info.unwrap_or(def.pallet_struct.attr_span); + + let storage_names = &def.storages.iter().map(|storage| &storage.ident).collect::>(); + let storage_cfg_attrs = + &def.storages.iter().map(|storage| &storage.cfg_attrs).collect::>(); + + // Depending on the flag `without_storage_info` and the storage attribute `unbounded`, we use + // partial or full storage info from storage. 
+ let storage_info_traits = &def + .storages + .iter() + .map(|storage| { + if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { + quote::quote_spanned!(storage_info_span => PartialStorageInfoTrait) + } else { + quote::quote_spanned!(storage_info_span => StorageInfoTrait) + } + }) + .collect::>(); + + let storage_info_methods = &def + .storages + .iter() + .map(|storage| { + if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { + quote::quote_spanned!(storage_info_span => partial_storage_info) + } else { + quote::quote_spanned!(storage_info_span => storage_info) + } + }) + .collect::>(); + + let storage_info = quote::quote_spanned!(storage_info_span => + impl<#type_impl_gen> #frame_support::traits::StorageInfoTrait + for #pallet_ident<#type_use_gen> + #storages_where_clauses + { + fn storage_info() + -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::StorageInfo> + { + #[allow(unused_mut)] + let mut res = #frame_support::__private::sp_std::vec![]; + + #( + #(#storage_cfg_attrs)* + { + let mut storage_info = < + #storage_names<#type_use_gen> + as #frame_support::traits::#storage_info_traits + >::#storage_info_methods(); + res.append(&mut storage_info); + } + )* + + res + } + } + ); + + let (storage_version, in_code_storage_version_ty) = + if let Some(v) = def.pallet_struct.storage_version.as_ref() { + (quote::quote! { #v }, quote::quote! { #frame_support::traits::StorageVersion }) + } else { + ( + quote::quote! { core::default::Default::default() }, + quote::quote! 
{ #frame_support::traits::NoStorageVersionSet }, + ) + }; + + let whitelisted_storage_idents: Vec = def + .storages + .iter() + .filter_map(|s| s.whitelisted.then_some(s.ident.clone())) + .collect(); + + let whitelisted_storage_keys_impl = quote::quote![ + use #frame_support::traits::{StorageInfoTrait, TrackedStorageKey, WhitelistedStorageKeys}; + impl<#type_impl_gen> WhitelistedStorageKeys for #pallet_ident<#type_use_gen> #storages_where_clauses { + fn whitelisted_storage_keys() -> #frame_support::__private::sp_std::vec::Vec { + use #frame_support::__private::sp_std::vec; + vec![#( + TrackedStorageKey::new(#whitelisted_storage_idents::<#type_use_gen>::hashed_key().to_vec()) + ),*] + } + } + ]; + + quote::quote_spanned!(def.pallet_struct.attr_span => + #pallet_error_metadata + + /// Type alias to `Pallet`, to be used by `construct_runtime`. + /// + /// Generated by `pallet` attribute macro. + #[deprecated(note = "use `Pallet` instead")] + #[allow(dead_code)] + pub type Module<#type_decl_gen> = #pallet_ident<#type_use_gen>; + + // Implement `GetStorageVersion` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::GetStorageVersion + for #pallet_ident<#type_use_gen> + #config_where_clause + { + type InCodeStorageVersion = #in_code_storage_version_ty; + + fn in_code_storage_version() -> Self::InCodeStorageVersion { + #storage_version + } + + fn on_chain_storage_version() -> #frame_support::traits::StorageVersion { + #frame_support::traits::StorageVersion::get::() + } + } + + // Implement `OnGenesis` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::OnGenesis + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn on_genesis() { + let storage_version: #frame_support::traits::StorageVersion = #storage_version; + storage_version.put::(); + } + } + + // Implement `PalletInfoAccess` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::PalletInfoAccess + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn index() -> usize { + < 
+ ::PalletInfo as #frame_support::traits::PalletInfo + >::index::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn name() -> &'static str { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::name::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn name_hash() -> [u8; 16] { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::name_hash::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn module_name() -> &'static str { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::module_name::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn crate_version() -> #frame_support::traits::CrateVersion { + #frame_support::crate_to_crate_version!() + } + } + + impl<#type_impl_gen> #frame_support::traits::PalletsInfoAccess + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn count() -> usize { 1 } + fn infos() -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::PalletInfoData> { + use #frame_support::traits::PalletInfoAccess; + let item = #frame_support::traits::PalletInfoData { + index: Self::index(), + name: Self::name(), + module_name: Self::module_name(), + crate_version: Self::crate_version(), + }; + #frame_support::__private::sp_std::vec![item] + } + } + + #storage_info + #whitelisted_storage_keys_impl + ) +} diff --git a/support/procedural-fork/src/pallet/expand/storage.rs b/support/procedural-fork/src/pallet/expand/storage.rs new file mode 100644 index 000000000..937b068cf --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/storage.rs @@ -0,0 +1,919 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{ + counter_prefix, + pallet::{ + parse::{ + helper::two128_str, + storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, + }, + Def, + }, +}; +use quote::ToTokens; +use std::{collections::HashMap, ops::IndexMut}; +use syn::spanned::Spanned; + +/// Generate the prefix_ident related to the storage. +/// prefix_ident is used for the prefix struct to be given to storage as first generic param. +fn prefix_ident(storage: &StorageDef) -> syn::Ident { + let storage_ident = &storage.ident; + syn::Ident::new(&format!("_GeneratedPrefixForStorage{}", storage_ident), storage_ident.span()) +} + +/// Generate the counter_prefix_ident related to the storage. +/// counter_prefix_ident is used for the prefix struct to be given to counted storage map. +fn counter_prefix_ident(storage_ident: &syn::Ident) -> syn::Ident { + syn::Ident::new( + &format!("_GeneratedCounterPrefixForStorage{}", storage_ident), + storage_ident.span(), + ) +} + +/// Check for duplicated storage prefixes. This step is necessary since users can specify an +/// alternative storage prefix using the #[pallet::storage_prefix] syntax, and we need to ensure +/// that the prefix specified by the user is not a duplicate of an existing one. 
+fn check_prefix_duplicates( + storage_def: &StorageDef, + // A hashmap of all already used prefix and their associated error if duplication + used_prefixes: &mut HashMap, +) -> syn::Result<()> { + let prefix = storage_def.prefix(); + let dup_err = syn::Error::new( + storage_def.prefix_span(), + format!("Duplicate storage prefixes found for `{}`", prefix), + ); + + if let Some(other_dup_err) = used_prefixes.insert(prefix.clone(), dup_err.clone()) { + let mut err = dup_err; + err.combine(other_dup_err); + return Err(err) + } + + if let Metadata::CountedMap { .. } = storage_def.metadata { + let counter_prefix = counter_prefix(&prefix); + let counter_dup_err = syn::Error::new( + storage_def.prefix_span(), + format!( + "Duplicate storage prefixes found for `{}`, used for counter associated to \ + counted storage map", + counter_prefix, + ), + ); + + if let Some(other_dup_err) = used_prefixes.insert(counter_prefix, counter_dup_err.clone()) { + let mut err = counter_dup_err; + err.combine(other_dup_err); + return Err(err) + } + } + + Ok(()) +} + +pub struct ResultOnEmptyStructMetadata { + /// The Rust ident that is going to be used as the name of the OnEmpty struct. + pub name: syn::Ident, + /// The path to the error type being returned by the ResultQuery. + pub error_path: syn::Path, + /// The visibility of the OnEmpty struct. + pub visibility: syn::Visibility, + /// The type of the storage item. + pub value_ty: syn::Type, + /// The name of the pallet error enum variant that is going to be returned. + pub variant_name: syn::Ident, + /// The span used to report compilation errors about the OnEmpty struct. + pub span: proc_macro2::Span, +} + +/// +/// * if generics are unnamed: replace the first generic `_` by the generated prefix structure +/// * if generics are named: reorder the generic, remove their name, and add the missing ones. 
+/// * Add `#[allow(type_alias_bounds)]` +pub fn process_generics(def: &mut Def) -> syn::Result> { + let frame_support = &def.frame_support; + let mut on_empty_struct_metadata = Vec::new(); + + for storage_def in def.storages.iter_mut() { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage_def.index]; + + let typ_item = match item { + syn::Item::Type(t) => t, + _ => unreachable!("Checked by def"), + }; + + typ_item.attrs.push(syn::parse_quote!(#[allow(type_alias_bounds)])); + + let typ_path = match &mut *typ_item.ty { + syn::Type::Path(p) => p, + _ => unreachable!("Checked by def"), + }; + + let args = match &mut typ_path.path.segments[0].arguments { + syn::PathArguments::AngleBracketed(args) => args, + _ => unreachable!("Checked by def"), + }; + + let prefix_ident = prefix_ident(storage_def); + let type_use_gen = if def.config.has_instance { + quote::quote_spanned!(storage_def.attr_span => T, I) + } else { + quote::quote_spanned!(storage_def.attr_span => T) + }; + + let default_query_kind: syn::Type = + syn::parse_quote!(#frame_support::storage::types::OptionQuery); + let mut default_on_empty = |value_ty: syn::Type| -> syn::Type { + if let Some(QueryKind::ResultQuery(error_path, variant_name)) = + storage_def.query_kind.as_ref() + { + let on_empty_ident = + quote::format_ident!("__Frame_Internal_Get{}Result", storage_def.ident); + on_empty_struct_metadata.push(ResultOnEmptyStructMetadata { + name: on_empty_ident.clone(), + visibility: storage_def.vis.clone(), + value_ty, + error_path: error_path.clone(), + variant_name: variant_name.clone(), + span: storage_def.attr_span, + }); + return syn::parse_quote!(#on_empty_ident) + } + syn::parse_quote!(#frame_support::traits::GetDefault) + }; + let default_max_values: syn::Type = syn::parse_quote!(#frame_support::traits::GetDefault); + + let set_result_query_type_parameter = |query_type: &mut syn::Type| -> syn::Result<()> { + if let Some(QueryKind::ResultQuery(error_path, _)) = 
storage_def.query_kind.as_ref() { + if let syn::Type::Path(syn::TypePath { path: syn::Path { segments, .. }, .. }) = + query_type + { + if let Some(seg) = segments.last_mut() { + if let syn::PathArguments::AngleBracketed( + syn::AngleBracketedGenericArguments { args, .. }, + ) = &mut seg.arguments + { + args.clear(); + args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))); + } + } + } else { + let msg = format!( + "Invalid pallet::storage, unexpected type for query, expected ResultQuery \ + with 1 type parameter, found `{}`", + query_type.to_token_stream().to_string() + ); + return Err(syn::Error::new(query_type.span(), msg)) + } + } + Ok(()) + }; + + if let Some(named_generics) = storage_def.named_generics.clone() { + args.args.clear(); + args.args.push(syn::parse_quote!( #prefix_ident<#type_use_gen> )); + match named_generics { + StorageGenerics::Value { value, query_kind, on_empty } => { + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + }, + StorageGenerics::Map { hasher, key, value, query_kind, on_empty, max_values } | + StorageGenerics::CountedMap { + hasher, + key, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher)); + args.args.push(syn::GenericArgument::Type(key)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = 
max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + }, + StorageGenerics::DoubleMap { + hasher1, + key1, + hasher2, + key2, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher1)); + args.args.push(syn::GenericArgument::Type(key1)); + args.args.push(syn::GenericArgument::Type(hasher2)); + args.args.push(syn::GenericArgument::Type(key2)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + }, + StorageGenerics::NMap { keygen, value, query_kind, on_empty, max_values } | + StorageGenerics::CountedNMap { + keygen, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(keygen)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + }, + } + } else { + args.args[0] = syn::parse_quote!( #prefix_ident<#type_use_gen> ); + + let (value_idx, query_idx, on_empty_idx) = match storage_def.metadata { + Metadata::Value { .. } => (1, 2, 3), + Metadata::NMap { .. } | Metadata::CountedNMap { .. 
} => (2, 3, 4), + Metadata::Map { .. } | Metadata::CountedMap { .. } => (3, 4, 5), + Metadata::DoubleMap { .. } => (5, 6, 7), + }; + + if storage_def.use_default_hasher { + let hasher_indices: Vec = match storage_def.metadata { + Metadata::Map { .. } | Metadata::CountedMap { .. } => vec![1], + Metadata::DoubleMap { .. } => vec![1, 3], + _ => vec![], + }; + for hasher_idx in hasher_indices { + args.args[hasher_idx] = syn::GenericArgument::Type( + syn::parse_quote!(#frame_support::Blake2_128Concat), + ); + } + } + + if query_idx < args.args.len() { + if let syn::GenericArgument::Type(query_kind) = args.args.index_mut(query_idx) { + set_result_query_type_parameter(query_kind)?; + } + } else if let Some(QueryKind::ResultQuery(error_path, _)) = + storage_def.query_kind.as_ref() + { + args.args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))) + } + + // Here, we only need to check if OnEmpty is *not* specified, and if so, then we have to + // generate a default OnEmpty struct for it. + if on_empty_idx >= args.args.len() && + matches!(storage_def.query_kind.as_ref(), Some(QueryKind::ResultQuery(_, _))) + { + let value_ty = match args.args[value_idx].clone() { + syn::GenericArgument::Type(ty) => ty, + _ => unreachable!(), + }; + let on_empty = default_on_empty(value_ty); + args.args.push(syn::GenericArgument::Type(on_empty)); + } + } + } + + Ok(on_empty_struct_metadata) +} + +fn augment_final_docs(def: &mut Def) { + // expand the docs with a new line showing the storage type (value, map, double map, etc), and + // the key/value type(s). 
+ let mut push_string_literal = |doc_line: &str, storage: &mut StorageDef| { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage.index]; + let typ_item = match item { + syn::Item::Type(t) => t, + _ => unreachable!("Checked by def"), + }; + typ_item.attrs.push(syn::parse_quote!(#[doc = ""])); + typ_item.attrs.push(syn::parse_quote!(#[doc = #doc_line])); + }; + def.storages.iter_mut().for_each(|storage| match &storage.metadata { + Metadata::Value { value } => { + let doc_line = format!( + "Storage type is [`StorageValue`] with value type `{}`.", + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::Map { key, value } => { + let doc_line = format!( + "Storage type is [`StorageMap`] with key type `{}` and value type `{}`.", + key.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::DoubleMap { key1, key2, value } => { + let doc_line = format!( + "Storage type is [`StorageDoubleMap`] with key1 type {}, key2 type {} and value type {}.", + key1.to_token_stream(), + key2.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::NMap { keys, value, .. } => { + let doc_line = format!( + "Storage type is [`StorageNMap`] with keys type ({}) and value type {}.", + keys.iter() + .map(|k| k.to_token_stream().to_string()) + .collect::>() + .join(", "), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::CountedNMap { keys, value, .. 
} => { + let doc_line = format!( + "Storage type is [`CountedStorageNMap`] with keys type ({}) and value type {}.", + keys.iter() + .map(|k| k.to_token_stream().to_string()) + .collect::>() + .join(", "), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::CountedMap { key, value } => { + let doc_line = format!( + "Storage type is [`CountedStorageMap`] with key type {} and value type {}.", + key.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + }); +} + +/// +/// * generate StoragePrefix structs (e.g. for a storage `MyStorage` a struct with the name +/// `_GeneratedPrefixForStorage$NameOfStorage` is generated) and implements StorageInstance trait. +/// * if generics are unnamed: replace the first generic `_` by the generated prefix structure +/// * if generics are named: reorder the generic, remove their name, and add the missing ones. +/// * Add `#[allow(type_alias_bounds)]` on storages type alias +/// * generate metadatas +pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { + let on_empty_struct_metadata = match process_generics(def) { + Ok(idents) => idents, + Err(e) => return e.into_compile_error(), + }; + + augment_final_docs(def); + + // Check for duplicate prefixes + let mut prefix_set = HashMap::new(); + let mut errors = def + .storages + .iter() + .filter_map(|storage_def| check_prefix_duplicates(storage_def, &mut prefix_set).err()); + if let Some(mut final_error) = errors.next() { + errors.for_each(|error| final_error.combine(error)); + return final_error.into_compile_error() + } + + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let pallet_ident = &def.pallet_struct.pallet; + + let entries_builder = def.storages.iter().map(|storage| { + let no_docs = vec![]; + let docs = if cfg!(feature = "no-metadata-docs") { &no_docs } else { &storage.docs }; + + let ident = &storage.ident; + let gen = 
&def.type_use_generics(storage.attr_span); + let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); + + let cfg_attrs = &storage.cfg_attrs; + + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + { + <#full_ident as #frame_support::storage::StorageEntryMetadataBuilder>::build_metadata( + #frame_support::__private::sp_std::vec![ + #( #docs, )* + ], + &mut entries, + ); + } + ) + }); + + let getters = def.storages.iter().map(|storage| { + if let Some(getter) = &storage.getter { + let completed_where_clause = + super::merge_where_clauses(&[&storage.where_clause, &def.config.where_clause]); + + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + let type_impl_gen = &def.type_impl_generics(storage.attr_span); + let type_use_gen = &def.type_use_generics(storage.attr_span); + let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); + + let cfg_attrs = &storage.cfg_attrs; + + // If the storage item is public, link it and otherwise just mention it. 
+ // + // We can not just copy the docs from a non-public type as it may links to internal + // types which makes the compiler very unhappy :( + let getter_doc_line = if matches!(storage.vis, syn::Visibility::Public(_)) { + format!("An auto-generated getter for [`{}`].", storage.ident) + } else { + format!("An auto-generated getter for `{}`.", storage.ident) + }; + + match &storage.metadata { + Metadata::Value { value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter() -> #query { + < + #full_ident as #frame_support::storage::StorageValue<#value> + >::get() + } + } + ) + }, + Metadata::Map { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::__private::codec::EncodeLike<#key>, + { + < + #full_ident as #frame_support::storage::StorageMap<#key, #value> + >::get(k) + } + } + ) + }, + Metadata::CountedMap { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => 
quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::__private::codec::EncodeLike<#key>, + { + // NOTE: we can't use any trait here because CountedStorageMap + // doesn't implement any. + <#full_ident>::get(k) + } + } + ) + }, + Metadata::DoubleMap { key1, key2, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k1: KArg1, k2: KArg2) -> #query where + KArg1: #frame_support::__private::codec::EncodeLike<#key1>, + KArg2: #frame_support::__private::codec::EncodeLike<#key2>, + { + < + #full_ident as + #frame_support::storage::StorageDoubleMap<#key1, #key2, #value> + >::get(k1, k2) + } + } + ) + }, + Metadata::NMap { keygen, value, .. 
} => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(key: KArg) -> #query + where + KArg: #frame_support::storage::types::EncodeLikeTuple< + <#keygen as #frame_support::storage::types::KeyGenerator>::KArg + > + + #frame_support::storage::types::TupleToEncodedIter, + { + < + #full_ident as + #frame_support::storage::StorageNMap<#keygen, #value> + >::get(key) + } + } + ) + }, + Metadata::CountedNMap { keygen, value, .. } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(key: KArg) -> #query + where + KArg: #frame_support::storage::types::EncodeLikeTuple< + <#keygen as #frame_support::storage::types::KeyGenerator>::KArg + > + + #frame_support::storage::types::TupleToEncodedIter, + { + // NOTE: we can't use any trait here because CountedStorageNMap + // doesn't implement any. 
+ <#full_ident>::get(key) + } + } + ) + }, + } + } else { + Default::default() + } + }); + + let prefix_structs = def.storages.iter().map(|storage_def| { + let type_impl_gen = &def.type_impl_generics(storage_def.attr_span); + let type_use_gen = &def.type_use_generics(storage_def.attr_span); + let prefix_struct_ident = prefix_ident(storage_def); + let prefix_struct_vis = &storage_def.vis; + let prefix_struct_const = storage_def.prefix(); + let config_where_clause = &def.config.where_clause; + + let cfg_attrs = &storage_def.cfg_attrs; + + let maybe_counter = match storage_def.metadata { + Metadata::CountedMap { .. } => { + let counter_prefix_struct_ident = counter_prefix_ident(&storage_def.ident); + let counter_prefix_struct_const = counter_prefix(&prefix_struct_const); + let storage_prefix_hash = two128_str(&counter_prefix_struct_const); + quote::quote_spanned!(storage_def.attr_span => + #(#cfg_attrs)* + #[doc(hidden)] + #prefix_struct_vis struct #counter_prefix_struct_ident<#type_use_gen>( + core::marker::PhantomData<(#type_use_gen,)> + ); + #(#cfg_attrs)* + impl<#type_impl_gen> #frame_support::traits::StorageInstance + for #counter_prefix_struct_ident<#type_use_gen> + #config_where_clause + { + fn pallet_prefix() -> &'static str { + < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::name::>() + .expect("No name found for the pallet in the runtime! This usually means that the pallet wasn't added to `construct_runtime!`.") + } + + fn pallet_prefix_hash() -> [u8; 16] { + < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::name_hash::>() + .expect("No name_hash found for the pallet in the runtime! 
This usually means that the pallet wasn't added to `construct_runtime!`.") + } + + const STORAGE_PREFIX: &'static str = #counter_prefix_struct_const; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + #(#cfg_attrs)* + impl<#type_impl_gen> #frame_support::storage::types::CountedStorageMapInstance + for #prefix_struct_ident<#type_use_gen> + #config_where_clause + { + type CounterPrefix = #counter_prefix_struct_ident<#type_use_gen>; + } + ) + }, + Metadata::CountedNMap { .. } => { + let counter_prefix_struct_ident = counter_prefix_ident(&storage_def.ident); + let counter_prefix_struct_const = counter_prefix(&prefix_struct_const); + let storage_prefix_hash = two128_str(&counter_prefix_struct_const); + quote::quote_spanned!(storage_def.attr_span => + #(#cfg_attrs)* + #[doc(hidden)] + #prefix_struct_vis struct #counter_prefix_struct_ident<#type_use_gen>( + core::marker::PhantomData<(#type_use_gen,)> + ); + #(#cfg_attrs)* + impl<#type_impl_gen> #frame_support::traits::StorageInstance + for #counter_prefix_struct_ident<#type_use_gen> + #config_where_clause + { + fn pallet_prefix() -> &'static str { + < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::name::>() + .expect("No name found for the pallet in the runtime! This usually means that the pallet wasn't added to `construct_runtime!`.") + } + fn pallet_prefix_hash() -> [u8; 16] { + < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::name_hash::>() + .expect("No name_hash found for the pallet in the runtime! 
This usually means that the pallet wasn't added to `construct_runtime!`.") + } + const STORAGE_PREFIX: &'static str = #counter_prefix_struct_const; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + #(#cfg_attrs)* + impl<#type_impl_gen> #frame_support::storage::types::CountedStorageNMapInstance + for #prefix_struct_ident<#type_use_gen> + #config_where_clause + { + type CounterPrefix = #counter_prefix_struct_ident<#type_use_gen>; + } + ) + }, + _ => proc_macro2::TokenStream::default(), + }; + + let storage_prefix_hash = two128_str(&prefix_struct_const); + quote::quote_spanned!(storage_def.attr_span => + #maybe_counter + + #(#cfg_attrs)* + #[doc(hidden)] + #prefix_struct_vis struct #prefix_struct_ident<#type_use_gen>( + core::marker::PhantomData<(#type_use_gen,)> + ); + #(#cfg_attrs)* + impl<#type_impl_gen> #frame_support::traits::StorageInstance + for #prefix_struct_ident<#type_use_gen> + #config_where_clause + { + fn pallet_prefix() -> &'static str { + < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::name::>() + .expect("No name found for the pallet in the runtime! This usually means that the pallet wasn't added to `construct_runtime!`.") + } + + fn pallet_prefix_hash() -> [u8; 16] { + < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::name_hash::>() + .expect("No name_hash found for the pallet in the runtime! 
This usually means that the pallet wasn't added to `construct_runtime!`.") + } + + const STORAGE_PREFIX: &'static str = #prefix_struct_const; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + ) + }); + + let on_empty_structs = on_empty_struct_metadata.into_iter().map(|metadata| { + use crate::pallet::parse::GenericKind; + use syn::{GenericArgument, Path, PathArguments, PathSegment, Type, TypePath}; + + let ResultOnEmptyStructMetadata { + name, + visibility, + value_ty, + error_path, + variant_name, + span, + } = metadata; + + let generic_kind = match error_path.segments.last() { + Some(PathSegment { arguments: PathArguments::AngleBracketed(args), .. }) => { + let (has_config, has_instance) = + args.args.iter().fold((false, false), |(has_config, has_instance), arg| { + match arg { + GenericArgument::Type(Type::Path(TypePath { + path: Path { segments, .. }, + .. + })) => { + let maybe_config = + segments.first().map_or(false, |seg| seg.ident == "T"); + let maybe_instance = + segments.first().map_or(false, |seg| seg.ident == "I"); + + (has_config || maybe_config, has_instance || maybe_instance) + }, + _ => (has_config, has_instance), + } + }); + GenericKind::from_gens(has_config, has_instance).unwrap_or(GenericKind::None) + }, + _ => GenericKind::None, + }; + let type_impl_gen = generic_kind.type_impl_gen(proc_macro2::Span::call_site()); + let config_where_clause = &def.config.where_clause; + + quote::quote_spanned!(span => + #[doc(hidden)] + #[allow(non_camel_case_types)] + #visibility struct #name; + + impl<#type_impl_gen> #frame_support::traits::Get> + for #name + #config_where_clause + { + fn get() -> Result<#value_ty, #error_path> { + Err(<#error_path>::#variant_name) + } + } + ) + }); + + // aggregated where clause of all storage types and the whole pallet. 
+ let mut where_clauses = vec![&def.config.where_clause]; + where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); + let completed_where_clause = super::merge_where_clauses(&where_clauses); + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + + let try_decode_entire_state = { + let mut storage_names = def + .storages + .iter() + .filter_map(|storage| { + // A little hacky; don't generate for cfg gated storages to not get compile errors + // when building "frame-feature-testing" gated storages in the "frame-support-test" + // crate. + if storage.try_decode && storage.cfg_attrs.is_empty() { + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + Some(quote::quote_spanned!(storage.attr_span => #ident<#gen> )) + } else { + None + } + }) + .collect::>(); + storage_names.sort_by_cached_key(|ident| ident.to_string()); + + quote::quote!( + #[cfg(feature = "try-runtime")] + impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage + for #pallet_ident<#type_use_gen> #completed_where_clause + { + fn try_decode_entire_state() -> Result> { + let pallet_name = <::PalletInfo as frame_support::traits::PalletInfo> + ::name::<#pallet_ident<#type_use_gen>>() + .expect("Every active pallet has a name in the runtime; qed"); + + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); + + // NOTE: for now, we have to exclude storage items that are feature gated. 
+ let mut errors = #frame_support::__private::sp_std::vec::Vec::new(); + let mut decoded = 0usize; + + #( + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ + {pallet_name}::{}", stringify!(#storage_names)); + + match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { + Ok(count) => { + decoded += count; + }, + Err(err) => { + errors.extend(err); + }, + } + )* + + if errors.is_empty() { + Ok(decoded) + } else { + Err(errors) + } + } + } + ) + }; + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> + #completed_where_clause + { + #[doc(hidden)] + pub fn storage_metadata() -> #frame_support::__private::metadata_ir::PalletStorageMetadataIR { + #frame_support::__private::metadata_ir::PalletStorageMetadataIR { + prefix: < + ::PalletInfo as + #frame_support::traits::PalletInfo + >::name::<#pallet_ident<#type_use_gen>>() + .expect("No name found for the pallet in the runtime! This usually means that the pallet wasn't added to `construct_runtime!`."), + entries: { + #[allow(unused_mut)] + let mut entries = #frame_support::__private::sp_std::vec![]; + #( #entries_builder )* + entries + }, + } + } + } + + #( #getters )* + #( #prefix_structs )* + #( #on_empty_structs )* + + #try_decode_entire_state + ) +} diff --git a/support/procedural-fork/src/pallet/expand/tasks.rs b/support/procedural-fork/src/pallet/expand/tasks.rs new file mode 100644 index 000000000..6697e5c82 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/tasks.rs @@ -0,0 +1,267 @@ +//! Contains logic for expanding task-related items. + +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Home of the expansion code for the Tasks API + +use crate::pallet::{parse::tasks::*, Def}; +use derive_syn_parse::Parse; +use inflector::Inflector; +use proc_macro2::TokenStream as TokenStream2; +use quote::{format_ident, quote, ToTokens}; +use syn::{parse_quote, spanned::Spanned, ItemEnum, ItemImpl}; + +impl TaskEnumDef { + /// Since we optionally allow users to manually specify a `#[pallet::task_enum]`, in the + /// event they _don't_ specify one (which is actually the most common behavior) we have to + /// generate one based on the existing [`TasksDef`]. This method performs that generation. + pub fn generate( + tasks: &TasksDef, + type_decl_bounded_generics: TokenStream2, + type_use_generics: TokenStream2, + ) -> Self { + let variants = if tasks.tasks_attr.is_some() { + tasks + .tasks + .iter() + .map(|task| { + let ident = &task.item.sig.ident; + let ident = + format_ident!("{}", ident.to_string().to_class_case(), span = ident.span()); + + let args = task.item.sig.inputs.iter().collect::>(); + + if args.is_empty() { + quote!(#ident) + } else { + quote!(#ident { + #(#args),* + }) + } + }) + .collect::>() + } else { + Vec::new() + }; + let mut task_enum_def: TaskEnumDef = parse_quote! { + /// Auto-generated enum that encapsulates all tasks defined by this pallet. + /// + /// Conceptually similar to the [`Call`] enum, but for tasks. This is only + /// generated if there are tasks present in this pallet. 
+ #[pallet::task_enum] + pub enum Task<#type_decl_bounded_generics> { + #( + #variants, + )* + } + }; + task_enum_def.type_use_generics = type_use_generics; + task_enum_def + } +} + +impl ToTokens for TaskEnumDef { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let item_enum = &self.item_enum; + let ident = &item_enum.ident; + let vis = &item_enum.vis; + let attrs = &item_enum.attrs; + let generics = &item_enum.generics; + let variants = &item_enum.variants; + let scrate = &self.scrate; + let type_use_generics = &self.type_use_generics; + if self.attr.is_some() { + // `item_enum` is short-hand / generated enum + tokens.extend(quote! { + #(#attrs)* + #[derive( + #scrate::CloneNoBound, + #scrate::EqNoBound, + #scrate::PartialEqNoBound, + #scrate::pallet_prelude::Encode, + #scrate::pallet_prelude::Decode, + #scrate::pallet_prelude::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_generics))] + #vis enum #ident #generics { + #variants + #[doc(hidden)] + #[codec(skip)] + __Ignore(core::marker::PhantomData, #scrate::Never), + } + + impl core::fmt::Debug for #ident<#type_use_generics> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct(stringify!(#ident)).field("value", self).finish() + } + } + }); + } else { + // `item_enum` is a manually specified enum (no attribute) + tokens.extend(item_enum.to_token_stream()); + } + } +} + +/// Represents an already-expanded [`TasksDef`]. 
+#[derive(Parse)] +pub struct ExpandedTasksDef { + pub task_item_impl: ItemImpl, + pub task_trait_impl: ItemImpl, +} + +impl ToTokens for TasksDef { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let scrate = &self.scrate; + let enum_ident = syn::Ident::new("Task", self.enum_ident.span()); + let enum_arguments = &self.enum_arguments; + let enum_use = quote!(#enum_ident #enum_arguments); + + let task_fn_idents = self + .tasks + .iter() + .map(|task| { + format_ident!( + "{}", + &task.item.sig.ident.to_string().to_class_case(), + span = task.item.sig.ident.span() + ) + }) + .collect::>(); + let task_indices = self.tasks.iter().map(|task| &task.index_attr.meta.index); + let task_conditions = self.tasks.iter().map(|task| &task.condition_attr.meta.expr); + let task_weights = self.tasks.iter().map(|task| &task.weight_attr.meta.expr); + let task_iters = self.tasks.iter().map(|task| &task.list_attr.meta.expr); + + let task_fn_impls = self.tasks.iter().map(|task| { + let mut task_fn_impl = task.item.clone(); + task_fn_impl.attrs = vec![]; + task_fn_impl + }); + + let task_fn_names = self.tasks.iter().map(|task| &task.item.sig.ident); + let task_arg_names = self.tasks.iter().map(|task| &task.arg_names).collect::>(); + + let sp_std = quote!(#scrate::__private::sp_std); + let impl_generics = &self.item_impl.generics; + tokens.extend(quote! { + impl #impl_generics #enum_use + { + #(#task_fn_impls)* + } + + impl #impl_generics #scrate::traits::Task for #enum_use + { + type Enumeration = #sp_std::vec::IntoIter<#enum_use>; + + fn iter() -> Self::Enumeration { + let mut all_tasks = #sp_std::vec![]; + #(all_tasks + .extend(#task_iters.map(|(#(#task_arg_names),*)| #enum_ident::#task_fn_idents { #(#task_arg_names: #task_arg_names.clone()),* }) + .collect::<#sp_std::vec::Vec<_>>()); + )* + all_tasks.into_iter() + } + + fn task_index(&self) -> u32 { + match self.clone() { + #(#enum_ident::#task_fn_idents { .. 
} => #task_indices,)* + Task::__Ignore(_, _) => unreachable!(), + } + } + + fn is_valid(&self) -> bool { + match self.clone() { + #(#enum_ident::#task_fn_idents { #(#task_arg_names),* } => (#task_conditions)(#(#task_arg_names),* ),)* + Task::__Ignore(_, _) => unreachable!(), + } + } + + fn run(&self) -> Result<(), #scrate::pallet_prelude::DispatchError> { + match self.clone() { + #(#enum_ident::#task_fn_idents { #(#task_arg_names),* } => { + <#enum_use>::#task_fn_names(#( #task_arg_names, )* ) + },)* + Task::__Ignore(_, _) => unreachable!(), + } + } + + #[allow(unused_variables)] + fn weight(&self) -> #scrate::pallet_prelude::Weight { + match self.clone() { + #(#enum_ident::#task_fn_idents { #(#task_arg_names),* } => #task_weights,)* + Task::__Ignore(_, _) => unreachable!(), + } + } + } + }); + } +} + +/// Expands the [`TasksDef`] in the enclosing [`Def`], if present, and returns its tokens. +/// +/// This modifies the underlying [`Def`] in addition to returning any tokens that were added. +pub fn expand_tasks_impl(def: &mut Def) -> TokenStream2 { + let Some(tasks) = &mut def.tasks else { return quote!() }; + let ExpandedTasksDef { task_item_impl, task_trait_impl } = parse_quote!(#tasks); + quote! { + #task_item_impl + #task_trait_impl + } +} + +/// Represents a fully-expanded [`TaskEnumDef`]. +#[derive(Parse)] +pub struct ExpandedTaskEnum { + pub item_enum: ItemEnum, + pub debug_impl: ItemImpl, +} + +/// Modifies a [`Def`] to expand the underlying [`TaskEnumDef`] if present, and also returns +/// its tokens. A blank [`TokenStream2`] is returned if no [`TaskEnumDef`] has been generated +/// or defined. +pub fn expand_task_enum(def: &mut Def) -> TokenStream2 { + let Some(task_enum) = &mut def.task_enum else { return quote!() }; + let ExpandedTaskEnum { item_enum, debug_impl } = parse_quote!(#task_enum); + quote! { + #item_enum + #debug_impl + } +} + +/// Modifies a [`Def`] to expand the underlying [`TasksDef`] and also generate a +/// [`TaskEnumDef`] if applicable. 
The tokens for these items are returned if they are created. +pub fn expand_tasks(def: &mut Def) -> TokenStream2 { + if let Some(tasks_def) = &def.tasks { + if def.task_enum.is_none() { + def.task_enum = Some(TaskEnumDef::generate( + &tasks_def, + def.type_decl_bounded_generics(tasks_def.item_impl.span()), + def.type_use_generics(tasks_def.item_impl.span()), + )); + } + } + let tasks_extra_output = expand_tasks_impl(def); + let task_enum_extra_output = expand_task_enum(def); + quote! { + #tasks_extra_output + #task_enum_extra_output + } +} diff --git a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs new file mode 100644 index 000000000..99364aaa9 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs @@ -0,0 +1,216 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{ + pallet::{CompositeKeyword, Def}, + COUNTER, +}; +use syn::spanned::Spanned; + +/// Generate the `tt_default_parts` macro. 
+pub fn expand_tt_default_parts(def: &mut Def) -> proc_macro2::TokenStream { + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let default_parts_unique_id = + syn::Ident::new(&format!("__tt_default_parts_{}", count), def.item.span()); + let extra_parts_unique_id = + syn::Ident::new(&format!("__tt_extra_parts_{}", count), def.item.span()); + let default_parts_unique_id_v2 = + syn::Ident::new(&format!("__tt_default_parts_v2_{}", count), def.item.span()); + + let call_part = def.call.as_ref().map(|_| quote::quote!(Call,)); + + let task_part = def.task_enum.as_ref().map(|_| quote::quote!(Task,)); + + let storage_part = (!def.storages.is_empty()).then(|| quote::quote!(Storage,)); + + let event_part = def.event.as_ref().map(|event| { + let gen = event.gen_kind.is_generic().then(|| quote::quote!( )); + quote::quote!( Event #gen , ) + }); + + let error_part = def.error.as_ref().map(|_| quote::quote!(Error,)); + + let origin_part = def.origin.as_ref().map(|origin| { + let gen = origin.is_generic.then(|| quote::quote!( )); + quote::quote!( Origin #gen , ) + }); + + let config_part = def.genesis_config.as_ref().map(|genesis_config| { + let gen = genesis_config.gen_kind.is_generic().then(|| quote::quote!( )); + quote::quote!( Config #gen , ) + }); + + let inherent_part = def.inherent.as_ref().map(|_| quote::quote!(Inherent,)); + + let validate_unsigned_part = + def.validate_unsigned.as_ref().map(|_| quote::quote!(ValidateUnsigned,)); + + let freeze_reason_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) + .then_some(quote::quote!(FreezeReason,)); + + let hold_reason_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) + .then_some(quote::quote!(HoldReason,)); + + let lock_id_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) + .then_some(quote::quote!(LockId,)); + + let slash_reason_part 
= def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) + .then_some(quote::quote!(SlashReason,)); + + let call_part_v2 = def.call.as_ref().map(|_| quote::quote!(+ Call)); + + let task_part_v2 = def.task_enum.as_ref().map(|_| quote::quote!(+ Task)); + + let storage_part_v2 = (!def.storages.is_empty()).then(|| quote::quote!(+ Storage)); + + let event_part_v2 = def.event.as_ref().map(|event| { + let gen = event.gen_kind.is_generic().then(|| quote::quote!()); + quote::quote!(+ Event #gen) + }); + + let error_part_v2 = def.error.as_ref().map(|_| quote::quote!(+ Error)); + + let origin_part_v2 = def.origin.as_ref().map(|origin| { + let gen = origin.is_generic.then(|| quote::quote!()); + quote::quote!(+ Origin #gen) + }); + + let config_part_v2 = def.genesis_config.as_ref().map(|genesis_config| { + let gen = genesis_config.gen_kind.is_generic().then(|| quote::quote!()); + quote::quote!(+ Config #gen) + }); + + let inherent_part_v2 = def.inherent.as_ref().map(|_| quote::quote!(+ Inherent)); + + let validate_unsigned_part_v2 = + def.validate_unsigned.as_ref().map(|_| quote::quote!(+ ValidateUnsigned)); + + let freeze_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) + .then_some(quote::quote!(+ FreezeReason)); + + let hold_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) + .then_some(quote::quote!(+ HoldReason)); + + let lock_id_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) + .then_some(quote::quote!(+ LockId)); + + let slash_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) + .then_some(quote::quote!(+ SlashReason)); + + quote::quote!( + // This macro follows the conventions as laid out by the `tt-call` crate. 
It does not + // accept any arguments and simply returns the pallet parts, separated by commas, then + // wrapped inside of braces and finally prepended with double colons, to the caller inside + // of a key named `tokens`. + // + // We need to accept a path argument here, because this macro gets expanded on the + // crate that called the `construct_runtime!` macro, and the actual path is unknown. + #[macro_export] + #[doc(hidden)] + macro_rules! #default_parts_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + tokens = [{ + expanded::{ + Pallet, #call_part #storage_part #event_part #error_part #origin_part #config_part + #inherent_part #validate_unsigned_part #freeze_reason_part #task_part + #hold_reason_part #lock_id_part #slash_reason_part + } + }] + } + }; + } + + pub use #default_parts_unique_id as tt_default_parts; + + + // This macro is similar to the `tt_default_parts!`. It expands the pallets that are declared + // explicitly (`System: frame_system::{Pallet, Call}`) with extra parts. + // + // For example, after expansion an explicit pallet would look like: + // `System: expanded::{Error} ::{Pallet, Call}`. + // + // The `expanded` keyword is a marker of the final state of the `construct_runtime!`. + #[macro_export] + #[doc(hidden)] + macro_rules! #extra_parts_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + tokens = [{ + expanded::{ + #error_part + } + }] + } + }; + } + + pub use #extra_parts_unique_id as tt_extra_parts; + + #[macro_export] + #[doc(hidden)] + macro_rules! #default_parts_unique_id_v2 { + { + $caller:tt + frame_support = [{ $($frame_support:ident)::* }] + } => { + $($frame_support)*::__private::tt_return! 
{ + $caller + tokens = [{ + + Pallet #call_part_v2 #storage_part_v2 #event_part_v2 #error_part_v2 #origin_part_v2 #config_part_v2 + #inherent_part_v2 #validate_unsigned_part_v2 #freeze_reason_part_v2 #task_part_v2 + #hold_reason_part_v2 #lock_id_part_v2 #slash_reason_part_v2 + }] + } + }; + } + + pub use #default_parts_unique_id_v2 as tt_default_parts_v2; + ) +} diff --git a/support/procedural-fork/src/pallet/expand/type_value.rs b/support/procedural-fork/src/pallet/expand/type_value.rs new file mode 100644 index 000000000..5dc6309c0 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/type_value.rs @@ -0,0 +1,77 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use crate::pallet::Def; + +/// +/// * Generate the struct +/// * implement the `Get<..>` on it +/// * Rename the name of the function to internal name +pub fn expand_type_values(def: &mut Def) -> proc_macro2::TokenStream { + let mut expand = quote::quote!(); + let frame_support = &def.frame_support; + + for type_value in &def.type_values { + let fn_name_str = &type_value.ident.to_string(); + let fn_name_snakecase = inflector::cases::snakecase::to_snake_case(fn_name_str); + let fn_ident_renamed = syn::Ident::new( + &format!("__type_value_for_{}", fn_name_snakecase), + type_value.ident.span(), + ); + + let type_value_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[type_value.index]; + if let syn::Item::Fn(item) = item { + item + } else { + unreachable!("Checked by error parser") + } + }; + + // Rename the type_value function name + type_value_item.sig.ident = fn_ident_renamed.clone(); + + let vis = &type_value.vis; + let ident = &type_value.ident; + let type_ = &type_value.type_; + let where_clause = &type_value.where_clause; + + let (struct_impl_gen, struct_use_gen) = if type_value.is_generic { + ( + def.type_impl_generics(type_value.attr_span), + def.type_use_generics(type_value.attr_span), + ) + } else { + (Default::default(), Default::default()) + }; + + let docs = &type_value.docs; + + expand.extend(quote::quote_spanned!(type_value.attr_span => + #( #[doc = #docs] )* + #vis struct #ident<#struct_use_gen>(core::marker::PhantomData<((), #struct_use_gen)>); + impl<#struct_impl_gen> #frame_support::traits::Get<#type_> for #ident<#struct_use_gen> + #where_clause + { + fn get() -> #type_ { + #fn_ident_renamed::<#struct_use_gen>() + } + } + )); + } + expand +} diff --git a/support/procedural-fork/src/pallet/expand/validate_unsigned.rs b/support/procedural-fork/src/pallet/expand/validate_unsigned.rs new file mode 100644 index 000000000..876995585 --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/validate_unsigned.rs @@ -0,0 
+1,56 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{pallet::Def, COUNTER}; +use proc_macro2::TokenStream; +use quote::quote; +use syn::{spanned::Spanned, Ident}; + +pub fn expand_validate_unsigned(def: &mut Def) -> TokenStream { + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = + Ident::new(&format!("__is_validate_unsigned_part_defined_{}", count), def.item.span()); + + let maybe_compile_error = if def.validate_unsigned.is_none() { + quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::validate_unsigned] defined, perhaps you should \ + remove `ValidateUnsigned` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; + + quote! { + #[doc(hidden)] + pub mod __substrate_validate_unsigned_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } + + #[doc(hidden)] + pub use #macro_ident as is_validate_unsigned_part_defined; + } + } +} diff --git a/support/procedural-fork/src/pallet/expand/warnings.rs b/support/procedural-fork/src/pallet/expand/warnings.rs new file mode 100644 index 000000000..030e3ddaf --- /dev/null +++ b/support/procedural-fork/src/pallet/expand/warnings.rs @@ -0,0 +1,98 @@ +// This file is part of Substrate. 
+ +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Generates warnings for undesirable pallet code. + +use crate::pallet::parse::call::{CallVariantDef, CallWeightDef}; +use proc_macro_warning::Warning; +use syn::{ + spanned::Spanned, + visit::{self, Visit}, +}; + +/// Warn if any of the call arguments starts with a underscore and is used in a weight formula. +pub(crate) fn weight_witness_warning( + method: &CallVariantDef, + dev_mode: bool, + warnings: &mut Vec, +) { + if dev_mode { + return + } + let CallWeightDef::Immediate(w) = &method.weight else { return }; + + let partial_warning = Warning::new_deprecated("UncheckedWeightWitness") + .old("not check weight witness data") + .new("ensure that all witness data for weight calculation is checked before usage") + .help_link("https://github.com/paritytech/polkadot-sdk/pull/1818"); + + for (_, arg_ident, _) in method.args.iter() { + if !arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), &arg_ident) { + continue + } + + let warning = partial_warning + .clone() + .index(warnings.len()) + .span(arg_ident.span()) + .build_or_panic(); + + warnings.push(warning); + } +} + +/// Warn if the weight is a constant and the pallet not in `dev_mode`. 
+pub(crate) fn weight_constant_warning( + weight: &syn::Expr, + dev_mode: bool, + warnings: &mut Vec, +) { + if dev_mode { + return + } + let syn::Expr::Lit(lit) = weight else { return }; + + let warning = Warning::new_deprecated("ConstantWeight") + .index(warnings.len()) + .old("use hard-coded constant as call weight") + .new("benchmark all calls or put the pallet into `dev` mode") + .help_link("https://github.com/paritytech/substrate/pull/13798") + .span(lit.span()) + .build_or_panic(); + + warnings.push(warning); +} + +/// Returns whether `expr` contains `ident`. +fn contains_ident(mut expr: syn::Expr, ident: &syn::Ident) -> bool { + struct ContainsIdent { + ident: syn::Ident, + found: bool, + } + + impl<'a> Visit<'a> for ContainsIdent { + fn visit_ident(&mut self, i: &syn::Ident) { + if *i == self.ident { + self.found = true; + } + } + } + + let mut visitor = ContainsIdent { ident: ident.clone(), found: false }; + visit::visit_expr(&mut visitor, &mut expr); + visitor.found +} diff --git a/support/procedural-fork/src/pallet/mod.rs b/support/procedural-fork/src/pallet/mod.rs new file mode 100644 index 000000000..42d8272fb --- /dev/null +++ b/support/procedural-fork/src/pallet/mod.rs @@ -0,0 +1,61 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Implementation for pallet attribute macro. +//! +//! General workflow: +//! 
1 - parse all pallet attributes: +//! This step removes all attributes `#[pallet::*]` from the ItemMod and build the `Def` struct +//! which holds the ItemMod without `#[pallet::*]` and information given by those attributes +//! 2 - expand from the parsed information +//! This step will modify the ItemMod by adding some derive attributes or phantom data variants +//! to user defined types. And also create new types and implement blocks. + +mod expand; +pub(crate) mod parse; + +pub use parse::{composite::keyword::CompositeKeyword, Def}; +use syn::spanned::Spanned; + +mod keyword { + syn::custom_keyword!(dev_mode); +} + +pub fn pallet( + attr: proc_macro::TokenStream, + item: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + let mut dev_mode = false; + if !attr.is_empty() { + if let Ok(_) = syn::parse::(attr.clone()) { + dev_mode = true; + } else { + let msg = "Invalid pallet macro call: unexpected attribute. Macro call must be \ + bare, such as `#[frame_support::pallet]` or `#[pallet]`, or must specify the \ + `dev_mode` attribute, such as `#[frame_support::pallet(dev_mode)]` or \ + #[pallet(dev_mode)]."; + let span = proc_macro2::TokenStream::from(attr).span(); + return syn::Error::new(span, msg).to_compile_error().into() + } + } + + let item = syn::parse_macro_input!(item as syn::ItemMod); + match parse::Def::try_from(item, dev_mode) { + Ok(def) => expand::expand(def).into(), + Err(e) => e.to_compile_error().into(), + } +} diff --git a/support/procedural-fork/src/pallet/parse/call.rs b/support/procedural-fork/src/pallet/parse/call.rs new file mode 100644 index 000000000..4e09b86fd --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/call.rs @@ -0,0 +1,467 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::{helper, InheritedCallWeightAttr}; +use frame_support_procedural_tools::get_doc_literals; +use proc_macro2::Span; +use quote::ToTokens; +use std::collections::HashMap; +use syn::{spanned::Spanned, ExprClosure}; + +/// List of additional token to be used for parsing. +mod keyword { + syn::custom_keyword!(Call); + syn::custom_keyword!(OriginFor); + syn::custom_keyword!(RuntimeOrigin); + syn::custom_keyword!(weight); + syn::custom_keyword!(call_index); + syn::custom_keyword!(compact); + syn::custom_keyword!(T); + syn::custom_keyword!(pallet); + syn::custom_keyword!(feeless_if); +} + +/// Definition of dispatchables typically `impl Pallet { ... }` +pub struct CallDef { + /// The where_clause used. + pub where_clause: Option, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The index of call item in pallet module. + pub index: usize, + /// Information on methods (used for expansion). + pub methods: Vec, + /// The span of the pallet::call attribute. + pub attr_span: proc_macro2::Span, + /// Docs, specified on the impl Block. + pub docs: Vec, + /// The optional `weight` attribute on the `pallet::call`. + pub inherited_call_weight: Option, +} + +/// The weight of a call. +#[derive(Clone)] +pub enum CallWeightDef { + /// Explicitly set on the call itself with `#[pallet::weight(…)]`. This value is used. + Immediate(syn::Expr), + + /// The default value that should be set for dev-mode pallets. Usually zero. + DevModeDefault, + + /// Inherits whatever value is configured on the pallet level. 
+ /// + /// The concrete value is not known at this point. + Inherited, +} + +/// Definition of dispatchable typically: `#[weight...] fn foo(origin .., param1: ...) -> ..` +#[derive(Clone)] +pub struct CallVariantDef { + /// Function name. + pub name: syn::Ident, + /// Information on args: `(is_compact, name, type)` + pub args: Vec<(bool, syn::Ident, Box)>, + /// Weight for the call. + pub weight: CallWeightDef, + /// Call index of the dispatchable. + pub call_index: u8, + /// Whether an explicit call index was specified. + pub explicit_call_index: bool, + /// Docs, used for metadata. + pub docs: Vec, + /// Attributes annotated at the top of the dispatchable function. + pub attrs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, + /// The optional `feeless_if` attribute on the `pallet::call`. + pub feeless_check: Option, +} + +/// Attributes for functions in call impl block. +pub enum FunctionAttr { + /// Parse for `#[pallet::call_index(expr)]` + CallIndex(u8), + /// Parse for `#[pallet::weight(expr)]` + Weight(syn::Expr), + /// Parse for `#[pallet::feeless_if(expr)]` + FeelessIf(Span, syn::ExprClosure), +} + +impl syn::parse::Parse for FunctionAttr { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::weight) { + content.parse::()?; + let weight_content; + syn::parenthesized!(weight_content in content); + Ok(FunctionAttr::Weight(weight_content.parse::()?)) + } else if lookahead.peek(keyword::call_index) { + content.parse::()?; + let call_index_content; + syn::parenthesized!(call_index_content in content); + let index = call_index_content.parse::()?; + if !index.suffix().is_empty() { + let msg = "Number literal must not have a suffix"; + return Err(syn::Error::new(index.span(), msg)) + } + Ok(FunctionAttr::CallIndex(index.base10_parse()?)) + } else if 
lookahead.peek(keyword::feeless_if) { + content.parse::()?; + let closure_content; + syn::parenthesized!(closure_content in content); + Ok(FunctionAttr::FeelessIf( + closure_content.span(), + closure_content.parse::().map_err(|e| { + let msg = "Invalid feeless_if attribute: expected a closure"; + let mut err = syn::Error::new(closure_content.span(), msg); + err.combine(e); + err + })?, + )) + } else { + Err(lookahead.error()) + } + } +} + +/// Attribute for arguments in function in call impl block. +/// Parse for `#[pallet::compact]` +pub struct ArgAttrIsCompact; + +impl syn::parse::Parse for ArgAttrIsCompact { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + content.parse::()?; + Ok(ArgAttrIsCompact) + } +} + +/// Check the syntax is `OriginFor`, `&OriginFor` or `T::RuntimeOrigin`. +pub fn check_dispatchable_first_arg_type(ty: &syn::Type, is_ref: bool) -> syn::Result<()> { + pub struct CheckOriginFor(bool); + impl syn::parse::Parse for CheckOriginFor { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let is_ref = input.parse::().is_ok(); + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(is_ref)) + } + } + + pub struct CheckRuntimeOrigin; + impl syn::parse::Parse for CheckRuntimeOrigin { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self) + } + } + + let result_origin_for = syn::parse2::(ty.to_token_stream()); + let result_runtime_origin = syn::parse2::(ty.to_token_stream()); + return match (result_origin_for, result_runtime_origin) { + (Ok(CheckOriginFor(has_ref)), _) if is_ref == has_ref => Ok(()), + (_, Ok(_)) => Ok(()), + (_, _) => { + let msg = if is_ref { + "Invalid type: expected `&OriginFor`" + } else { + "Invalid type: expected `OriginFor` or `T::RuntimeOrigin`" + }; + return
Err(syn::Error::new(ty.span(), msg)) + }, + } +} + +impl CallDef { + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + dev_mode: bool, + inherited_call_weight: Option, + ) -> syn::Result { + let item_impl = if let syn::Item::Impl(item) = item { + item + } else { + return Err(syn::Error::new(item.span(), "Invalid pallet::call, expected item impl")) + }; + + let instances = vec![ + helper::check_impl_gen(&item_impl.generics, item_impl.impl_token.span())?, + helper::check_pallet_struct_usage(&item_impl.self_ty)?, + ]; + + if let Some((_, _, for_)) = item_impl.trait_ { + let msg = "Invalid pallet::call, expected no trait ident as in \ + `impl<..> Pallet<..> { .. }`"; + return Err(syn::Error::new(for_.span(), msg)) + } + + let mut methods = vec![]; + let mut indices = HashMap::new(); + let mut last_index: Option = None; + for item in &mut item_impl.items { + if let syn::ImplItem::Fn(method) = item { + if !matches!(method.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::call, dispatchable function must be public: \ + `pub fn`"; + + let span = match method.vis { + syn::Visibility::Inherited => method.sig.span(), + _ => method.vis.span(), + }; + + return Err(syn::Error::new(span, msg)) + } + + match method.sig.inputs.first() { + None => { + let msg = "Invalid pallet::call, must have at least origin arg"; + return Err(syn::Error::new(method.sig.span(), msg)) + }, + Some(syn::FnArg::Receiver(_)) => { + let msg = "Invalid pallet::call, first argument must be a typed argument, \ + e.g. 
`origin: OriginFor`"; + return Err(syn::Error::new(method.sig.span(), msg)) + }, + Some(syn::FnArg::Typed(arg)) => { + check_dispatchable_first_arg_type(&arg.ty, false)?; + }, + } + + if let syn::ReturnType::Type(_, type_) = &method.sig.output { + helper::check_pallet_call_return_type(type_)?; + } else { + let msg = "Invalid pallet::call, require return type \ + DispatchResultWithPostInfo"; + return Err(syn::Error::new(method.sig.span(), msg)) + } + + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&method.attrs); + let mut call_idx_attrs = vec![]; + let mut weight_attrs = vec![]; + let mut feeless_attrs = vec![]; + for attr in helper::take_item_pallet_attrs(&mut method.attrs)?.into_iter() { + match attr { + FunctionAttr::CallIndex(_) => { + call_idx_attrs.push(attr); + }, + FunctionAttr::Weight(_) => { + weight_attrs.push(attr); + }, + FunctionAttr::FeelessIf(span, _) => { + feeless_attrs.push((span, attr)); + }, + } + } + + if weight_attrs.is_empty() && dev_mode { + // inject a default O(1) weight when dev mode is enabled and no weight has + // been specified on the call + let empty_weight: syn::Expr = syn::parse_quote!(0); + weight_attrs.push(FunctionAttr::Weight(empty_weight)); + } + + let weight = match weight_attrs.len() { + 0 if inherited_call_weight.is_some() => CallWeightDef::Inherited, + 0 if dev_mode => CallWeightDef::DevModeDefault, + 0 => return Err(syn::Error::new( + method.sig.span(), + "A pallet::call requires either a concrete `#[pallet::weight($expr)]` or an + inherited weight from the `#[pallet:call(weight($type))]` attribute, but + none were given.", + )), + 1 => match weight_attrs.pop().unwrap() { + FunctionAttr::Weight(w) => CallWeightDef::Immediate(w), + _ => unreachable!("checked during creation of the let binding"), + }, + _ => { + let msg = "Invalid pallet::call, too many weight attributes given"; + return Err(syn::Error::new(method.sig.span(), msg)) + }, + }; + + if call_idx_attrs.len() > 1 { + let msg = "Invalid pallet::call, too many 
call_index attributes given"; + return Err(syn::Error::new(method.sig.span(), msg)) + } + let call_index = call_idx_attrs.pop().map(|attr| match attr { + FunctionAttr::CallIndex(idx) => idx, + _ => unreachable!("checked during creation of the let binding"), + }); + let explicit_call_index = call_index.is_some(); + + let final_index = match call_index { + Some(i) => i, + None => + last_index.map_or(Some(0), |idx| idx.checked_add(1)).ok_or_else(|| { + let msg = "Call index doesn't fit into u8, index is 256"; + syn::Error::new(method.sig.span(), msg) + })?, + }; + last_index = Some(final_index); + + if let Some(used_fn) = indices.insert(final_index, method.sig.ident.clone()) { + let msg = format!( + "Call indices are conflicting: Both functions {} and {} are at index {}", + used_fn, method.sig.ident, final_index, + ); + let mut err = syn::Error::new(used_fn.span(), &msg); + err.combine(syn::Error::new(method.sig.ident.span(), msg)); + return Err(err) + } + + let mut args = vec![]; + for arg in method.sig.inputs.iter_mut().skip(1) { + let arg = if let syn::FnArg::Typed(arg) = arg { + arg + } else { + unreachable!("Only first argument can be receiver"); + }; + + let arg_attrs: Vec = + helper::take_item_pallet_attrs(&mut arg.attrs)?; + + if arg_attrs.len() > 1 { + let msg = "Invalid pallet::call, argument has too many attributes"; + return Err(syn::Error::new(arg.span(), msg)) + } + + let arg_ident = if let syn::Pat::Ident(pat) = &*arg.pat { + pat.ident.clone() + } else { + let msg = "Invalid pallet::call, argument must be ident"; + return Err(syn::Error::new(arg.pat.span(), msg)) + }; + + args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); + } + + let docs = get_doc_literals(&method.attrs); + + if feeless_attrs.len() > 1 { + let msg = "Invalid pallet::call, there can only be one feeless_if attribute"; + return Err(syn::Error::new(feeless_attrs[1].0, msg)) + } + let feeless_check: Option = + feeless_attrs.pop().map(|(_, attr)| match attr { + 
FunctionAttr::FeelessIf(_, closure) => closure, + _ => unreachable!("checked during creation of the let binding"), + }); + + if let Some(ref feeless_check) = feeless_check { + if feeless_check.inputs.len() != args.len() + 1 { + let msg = "Invalid pallet::call, feeless_if closure must have same \ + number of arguments as the dispatchable function"; + return Err(syn::Error::new(feeless_check.span(), msg)) + } + + match feeless_check.inputs.first() { + None => { + let msg = "Invalid pallet::call, feeless_if closure must have at least origin arg"; + return Err(syn::Error::new(feeless_check.span(), msg)) + }, + Some(syn::Pat::Type(arg)) => { + check_dispatchable_first_arg_type(&arg.ty, true)?; + }, + _ => { + let msg = "Invalid pallet::call, feeless_if closure first argument must be a typed argument, \ + e.g. `origin: OriginFor`"; + return Err(syn::Error::new(feeless_check.span(), msg)) + }, + } + + for (feeless_arg, arg) in feeless_check.inputs.iter().skip(1).zip(args.iter()) { + let feeless_arg_type = + if let syn::Pat::Type(syn::PatType { ty, .. }) = feeless_arg.clone() { + if let syn::Type::Reference(pat) = *ty { + pat.elem.clone() + } else { + let msg = "Invalid pallet::call, feeless_if closure argument must be a reference"; + return Err(syn::Error::new(ty.span(), msg)) + } + } else { + let msg = "Invalid pallet::call, feeless_if closure argument must be a type ascription pattern"; + return Err(syn::Error::new(feeless_arg.span(), msg)) + }; + + if feeless_arg_type != arg.2 { + let msg = + "Invalid pallet::call, feeless_if closure argument must have \ + a reference to the same type as the dispatchable function argument"; + return Err(syn::Error::new(feeless_arg.span(), msg)) + } + } + + let valid_return = match &feeless_check.output { + syn::ReturnType::Type(_, type_) => match *(type_.clone()) { + syn::Type::Path(syn::TypePath { path, .. 
}) => path.is_ident("bool"), + _ => false, + }, + _ => false, + }; + if !valid_return { + let msg = "Invalid pallet::call, feeless_if closure must return `bool`"; + return Err(syn::Error::new(feeless_check.output.span(), msg)) + } + } + + methods.push(CallVariantDef { + name: method.sig.ident.clone(), + weight, + call_index: final_index, + explicit_call_index, + args, + docs, + attrs: method.attrs.clone(), + cfg_attrs, + feeless_check, + }); + } else { + let msg = "Invalid pallet::call, only method accepted"; + return Err(syn::Error::new(item.span(), msg)) + } + } + + Ok(Self { + index, + attr_span, + instances, + methods, + where_clause: item_impl.generics.where_clause.clone(), + docs: get_doc_literals(&item_impl.attrs), + inherited_call_weight, + }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/composite.rs b/support/procedural-fork/src/pallet/parse/composite.rs new file mode 100644 index 000000000..c3ac74846 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/composite.rs @@ -0,0 +1,191 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use super::helper; +use quote::ToTokens; +use syn::spanned::Spanned; + +pub mod keyword { + use super::*; + + syn::custom_keyword!(FreezeReason); + syn::custom_keyword!(HoldReason); + syn::custom_keyword!(LockId); + syn::custom_keyword!(SlashReason); + syn::custom_keyword!(Task); + + pub enum CompositeKeyword { + FreezeReason(FreezeReason), + HoldReason(HoldReason), + LockId(LockId), + SlashReason(SlashReason), + Task(Task), + } + + impl ToTokens for CompositeKeyword { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + use CompositeKeyword::*; + match self { + FreezeReason(inner) => inner.to_tokens(tokens), + HoldReason(inner) => inner.to_tokens(tokens), + LockId(inner) => inner.to_tokens(tokens), + SlashReason(inner) => inner.to_tokens(tokens), + Task(inner) => inner.to_tokens(tokens), + } + } + } + + impl syn::parse::Parse for CompositeKeyword { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + if lookahead.peek(FreezeReason) { + Ok(Self::FreezeReason(input.parse()?)) + } else if lookahead.peek(HoldReason) { + Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(LockId) { + Ok(Self::LockId(input.parse()?)) + } else if lookahead.peek(SlashReason) { + Ok(Self::SlashReason(input.parse()?)) + } else if lookahead.peek(Task) { + Ok(Self::Task(input.parse()?)) + } else { + Err(lookahead.error()) + } + } + } + + impl std::fmt::Display for CompositeKeyword { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use CompositeKeyword::*; + write!( + f, + "{}", + match self { + FreezeReason(_) => "FreezeReason", + HoldReason(_) => "HoldReason", + Task(_) => "Task", + LockId(_) => "LockId", + SlashReason(_) => "SlashReason", + } + ) + } + } +} + +pub struct CompositeDef { + /// The index of the CompositeDef item in the pallet module. + pub index: usize, + /// The composite keyword used (contains span). 
+ pub composite_keyword: keyword::CompositeKeyword, + /// Name of the associated type. + pub ident: syn::Ident, + /// Type parameters and where clause attached to a declaration of the pallet::composite_enum. + pub generics: syn::Generics, + /// The span of the pallet::composite_enum attribute. + pub attr_span: proc_macro2::Span, + /// Variant count of the pallet::composite_enum. + pub variant_count: u32, +} + +impl CompositeDef { + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + scrate: &syn::Path, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + // check variants: composite enums support only field-less enum variants. This is + // because fields can introduce too many possibilities, making it challenging to compute + // a fixed variant count. + for variant in &item.variants { + match variant.fields { + syn::Fields::Named(_) | syn::Fields::Unnamed(_) => + return Err(syn::Error::new( + variant.ident.span(), + "The composite enum does not support variants with fields!", + )), + syn::Fields::Unit => (), + } + } + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::composite_enum, expected enum item", + )) + }; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = format!("Invalid pallet::composite_enum, `{}` must be public", item.ident); + return Err(syn::Error::new(item.span(), msg)) + } + + let has_instance = if item.generics.params.first().is_some() { + helper::check_config_def_gen(&item.generics, item.ident.span())?; + true + } else { + false + }; + + let has_derive_attr = item.attrs.iter().any(|attr| { + if let syn::Meta::List(syn::MetaList { path, .. }) = &attr.meta { + path.get_ident().map(|ident| ident == "derive").unwrap_or(false) + } else { + false + } + }); + + if !has_derive_attr { + let derive_attr: syn::Attribute = syn::parse_quote! 
{ + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + }; + item.attrs.push(derive_attr); + } + + if has_instance { + item.attrs.push(syn::parse_quote! { + #[scale_info(skip_type_params(I))] + }); + + item.variants.push(syn::parse_quote! { + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData, + ) + }); + } + + let composite_keyword = + syn::parse2::(item.ident.to_token_stream())?; + + Ok(CompositeDef { + index, + composite_keyword, + attr_span, + generics: item.generics.clone(), + variant_count: item.variants.len() as u32, + ident: item.ident.clone(), + }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/config.rs b/support/procedural-fork/src/pallet/parse/config.rs new file mode 100644 index 000000000..fbab92db1 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/config.rs @@ -0,0 +1,590 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use frame_support_procedural_tools::{get_doc_literals, is_using_frame_crate}; +use quote::ToTokens; +use syn::{spanned::Spanned, token, Token}; + +/// List of additional token to be used for parsing. 
+mod keyword { + syn::custom_keyword!(Config); + syn::custom_keyword!(From); + syn::custom_keyword!(T); + syn::custom_keyword!(I); + syn::custom_keyword!(config); + syn::custom_keyword!(pallet); + syn::custom_keyword!(IsType); + syn::custom_keyword!(RuntimeEvent); + syn::custom_keyword!(Event); + syn::custom_keyword!(frame_system); + syn::custom_keyword!(disable_frame_system_supertrait_check); + syn::custom_keyword!(no_default); + syn::custom_keyword!(no_default_bounds); + syn::custom_keyword!(constant); +} + +#[derive(Default)] +pub struct DefaultTrait { + /// A bool for each sub-trait item indicates whether the item has + /// `#[pallet::no_default_bounds]` attached to it. If true, the item will not have any bounds + /// in the generated default sub-trait. + pub items: Vec<(syn::TraitItem, bool)>, + pub has_system: bool, +} + +/// Input definition for the pallet config. +pub struct ConfigDef { + /// The index of item in pallet module. + pub index: usize, + /// Whether the trait has instance (i.e. define with `Config`) + pub has_instance: bool, + /// Const associated type. + pub consts_metadata: Vec, + /// Whether the trait has the associated type `Event`, note that those bounds are + /// checked: + /// * `IsType::RuntimeEvent` + /// * `From` or `From>` or `From>` + pub has_event_type: bool, + /// The where clause on trait definition but modified so `Self` is `T`. + pub where_clause: Option, + /// The span of the pallet::config attribute. + pub attr_span: proc_macro2::Span, + /// Whether a default sub-trait should be generated. + /// + /// Contains default sub-trait items (instantiated by `#[pallet::config(with_default)]`). + /// Vec will be empty if `#[pallet::config(with_default)]` is not specified or if there are + /// no trait items. + pub default_sub_trait: Option, +} + +/// Input definition for a constant in pallet config. +pub struct ConstMetadataDef { + /// Name of the associated type. + pub ident: syn::Ident, + /// The type in Get, e.g. 
`u32` in `type Foo: Get;`, but `Self` is replaced by `T` + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, +} + +impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { + type Error = syn::Error; + + fn try_from(trait_ty: &syn::TraitItemType) -> Result { + let err = |span, msg| { + syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)) + }; + let doc = get_doc_literals(&trait_ty.attrs); + let ident = trait_ty.ident.clone(); + let bound = trait_ty + .bounds + .iter() + .find_map(|b| { + if let syn::TypeParamBound::Trait(tb) = b { + tb.path + .segments + .last() + .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) + } else { + None + } + }) + .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; + let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { + if ab.args.len() == 1 { + if let syn::GenericArgument::Type(ref ty) = ab.args[0] { + Ok(ty) + } else { + Err(err(ab.args[0].span(), "Expected a type argument")) + } + } else { + Err(err(bound.span(), "Expected a single type argument")) + } + } else { + Err(err(bound.span(), "Expected trait generic args")) + }?; + let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) + .expect("Internal error: replacing `Self` by `T` should result in valid type"); + + Ok(Self { ident, type_, doc }) + } +} + +/// Parse for `#[pallet::disable_frame_system_supertrait_check]` +pub struct DisableFrameSystemSupertraitCheck; + +impl syn::parse::Parse for DisableFrameSystemSupertraitCheck { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + content.parse::()?; + Ok(Self) + } +} + +/// Parsing for the `typ` portion of `PalletAttr` +#[derive(derive_syn_parse::Parse, PartialEq, Eq)] +pub enum PalletAttrType { + #[peek(keyword::no_default, name = "no_default")] + NoDefault(keyword::no_default), + 
#[peek(keyword::no_default_bounds, name = "no_default_bounds")] + NoBounds(keyword::no_default_bounds), + #[peek(keyword::constant, name = "constant")] + Constant(keyword::constant), +} + +/// Parsing for `#[pallet::X]` +#[derive(derive_syn_parse::Parse)] +pub struct PalletAttr { + _pound: Token![#], + #[bracket] + _bracket: token::Bracket, + #[inside(_bracket)] + _pallet: keyword::pallet, + #[prefix(Token![::] in _bracket)] + #[inside(_bracket)] + typ: PalletAttrType, +} + +/// Parse for `IsType<::RuntimeEvent>` and retrieve `$path` +pub struct IsTypeBoundEventParse(syn::Path); + +impl syn::parse::Parse for IsTypeBoundEventParse { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + let config_path = input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(config_path)) + } +} + +/// Parse for `From` or `From>` or `From>` +pub struct FromEventParse { + is_generic: bool, + has_instance: bool, +} + +impl syn::parse::Parse for FromEventParse { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut is_generic = false; + let mut has_instance = false; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![<]) { + is_generic = true; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + input.parse::()?; + input.parse::()?; + has_instance = true; + } + input.parse::]>()?; + } + input.parse::]>()?; + + Ok(Self { is_generic, has_instance }) + } +} + +/// Check if trait_item is `type RuntimeEvent`, if so checks its bounds are those expected. 
+/// (Event type is reserved type) +fn check_event_type( + frame_system: &syn::Path, + trait_item: &syn::TraitItem, + trait_has_instance: bool, +) -> syn::Result { + if let syn::TraitItem::Type(type_) = trait_item { + if type_.ident == "RuntimeEvent" { + // Check event has no generics + if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ + no generics nor where_clause"; + return Err(syn::Error::new(trait_item.span(), msg)) + } + + // Check bound contains IsType and From + let has_is_type_bound = type_.bounds.iter().any(|s| { + syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b.0, frame_system)) + }); + + if !has_is_type_bound { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ + bound: `IsType<::RuntimeEvent>`".to_string(); + return Err(syn::Error::new(type_.span(), msg)) + } + + let from_event_bound = type_ + .bounds + .iter() + .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); + + let from_event_bound = if let Some(b) = from_event_bound { + b + } else { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ + bound: `From` or `From>` or `From>`"; + return Err(syn::Error::new(type_.span(), msg)) + }; + + if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) + { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ + `From`. Config and generic Event must be both with instance or \ + without instance"; + return Err(syn::Error::new(type_.span(), msg)) + } + + Ok(true) + } else { + Ok(false) + } + } else { + Ok(false) + } +} + +/// Check that the path to `frame_system::Config` is valid, this is that the path is just +/// `frame_system::Config` or when using the `frame` crate it is `frame::xyz::frame_system::Config`. 
+fn has_expected_system_config(path: syn::Path, frame_system: &syn::Path) -> bool { + // Check if `frame_system` is actually 'frame_system'. + if path.segments.iter().all(|s| s.ident != "frame_system") { + return false + } + + let mut expected_system_config = + match (is_using_frame_crate(&path), is_using_frame_crate(&frame_system)) { + (true, false) => + // We can't use the path to `frame_system` from `frame` if `frame_system` is not being + // in scope through `frame`. + return false, + (false, true) => + // We know that the only valid frame_system path is one that is `frame_system`, as + // `frame` re-exports it as such. + syn::parse2::(quote::quote!(frame_system)).expect("is a valid path; qed"), + (_, _) => + // They are either both `frame_system` or both `frame::xyz::frame_system`. + frame_system.clone(), + }; + + expected_system_config + .segments + .push(syn::PathSegment::from(syn::Ident::new("Config", path.span()))); + + // the parse path might be something like `frame_system::Config<...>`, so we + // only compare the idents along the path. 
+ expected_system_config + .segments + .into_iter() + .map(|ps| ps.ident) + .collect::>() == + path.segments.into_iter().map(|ps| ps.ident).collect::>() +} + +/// Replace ident `Self` by `T` +pub fn replace_self_by_t(input: proc_macro2::TokenStream) -> proc_macro2::TokenStream { + input + .into_iter() + .map(|token_tree| match token_tree { + proc_macro2::TokenTree::Group(group) => + proc_macro2::Group::new(group.delimiter(), replace_self_by_t(group.stream())).into(), + proc_macro2::TokenTree::Ident(ident) if ident == "Self" => + proc_macro2::Ident::new("T", ident.span()).into(), + other => other, + }) + .collect() +} + +impl ConfigDef { + pub fn try_from( + frame_system: &syn::Path, + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + enable_default: bool, + ) -> syn::Result { + let item = if let syn::Item::Trait(item) = item { + item + } else { + let msg = "Invalid pallet::config, expected trait definition"; + return Err(syn::Error::new(item.span(), msg)) + }; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::config, trait must be public"; + return Err(syn::Error::new(item.span(), msg)) + } + + syn::parse2::(item.ident.to_token_stream())?; + + let where_clause = { + let stream = replace_self_by_t(item.generics.where_clause.to_token_stream()); + syn::parse2::>(stream).expect( + "Internal error: replacing `Self` by `T` should result in valid where + clause", + ) + }; + + if item.generics.params.len() > 1 { + let msg = "Invalid pallet::config, expected no more than one generic"; + return Err(syn::Error::new(item.generics.params[2].span(), msg)) + } + + let has_instance = if item.generics.params.first().is_some() { + helper::check_config_def_gen(&item.generics, item.ident.span())?; + true + } else { + false + }; + + let has_frame_system_supertrait = item.supertraits.iter().any(|s| { + syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b, frame_system)) + }); + + let mut 
has_event_type = false; + let mut consts_metadata = vec![]; + let mut default_sub_trait = if enable_default { + Some(DefaultTrait { + items: Default::default(), + has_system: has_frame_system_supertrait, + }) + } else { + None + }; + for trait_item in &mut item.items { + let is_event = check_event_type(frame_system, trait_item, has_instance)?; + has_event_type = has_event_type || is_event; + + let mut already_no_default = false; + let mut already_constant = false; + let mut already_no_default_bounds = false; + + while let Ok(Some(pallet_attr)) = + helper::take_first_item_pallet_attr::(trait_item) + { + match (pallet_attr.typ, &trait_item) { + (PalletAttrType::Constant(_), syn::TraitItem::Type(ref typ)) => { + if already_constant { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::constant] attribute not allowed.", + )) + } + already_constant = true; + consts_metadata.push(ConstMetadataDef::try_from(typ)?); + }, + (PalletAttrType::Constant(_), _) => + return Err(syn::Error::new( + trait_item.span(), + "Invalid #[pallet::constant] in #[pallet::config], expected type item", + )), + (PalletAttrType::NoDefault(_), _) => { + if !enable_default { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "`#[pallet:no_default]` can only be used if `#[pallet::config(with_default)]` \ + has been specified" + )) + } + if already_no_default { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::no_default] attribute not allowed.", + )) + } + + already_no_default = true; + }, + (PalletAttrType::NoBounds(_), _) => { + if !enable_default { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "`#[pallet:no_default_bounds]` can only be used if `#[pallet::config(with_default)]` \ + has been specified" + )) + } + if already_no_default_bounds { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::no_default_bounds] attribute not allowed.", + )) + } + 
already_no_default_bounds = true; + }, + } + } + + if !already_no_default && enable_default { + default_sub_trait + .as_mut() + .expect("is 'Some(_)' if 'enable_default'; qed") + .items + .push((trait_item.clone(), already_no_default_bounds)); + } + } + + let attr: Option = + helper::take_first_item_pallet_attr(&mut item.attrs)?; + let disable_system_supertrait_check = attr.is_some(); + + if !has_frame_system_supertrait && !disable_system_supertrait_check { + let found = if item.supertraits.is_empty() { + "none".to_string() + } else { + let mut found = item + .supertraits + .iter() + .fold(String::new(), |acc, s| format!("{}`{}`, ", acc, quote::quote!(#s))); + found.pop(); + found.pop(); + found + }; + + let msg = format!( + "Invalid pallet::trait, expected explicit `{}::Config` as supertrait, \ + found {}. \ + (try `pub trait Config: frame_system::Config {{ ...` or \ + `pub trait Config: frame_system::Config {{ ...`). \ + To disable this check, use `#[pallet::disable_frame_system_supertrait_check]`", + frame_system.to_token_stream(), + found, + ); + return Err(syn::Error::new(item.span(), msg)) + } + + Ok(Self { + index, + has_instance, + consts_metadata, + has_event_type, + where_clause, + attr_span, + default_sub_trait, + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn has_expected_system_config_works() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_assoc_type() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame_system::Config)) + .unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_frame() { + let frame_system = + 
syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_other_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_works_with_mixed_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_works_with_other_mixed_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_work_with_frame_full_path_if_not_frame_crate() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_unexpected_frame_system() { + let frame_system = + 
syn::parse2::(quote::quote!(framez::deps::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_unexpected_path() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::ConfigSystem)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_not_frame_system() { + let frame_system = syn::parse2::(quote::quote!(something)).unwrap(); + let path = syn::parse2::(quote::quote!(something::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } +} diff --git a/support/procedural-fork/src/pallet/parse/error.rs b/support/procedural-fork/src/pallet/parse/error.rs new file mode 100644 index 000000000..362df8d73 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/error.rs @@ -0,0 +1,115 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use frame_support_procedural_tools::get_doc_literals; +use quote::ToTokens; +use syn::{spanned::Spanned, Fields}; + +/// List of additional token to be used for parsing. 
+mod keyword { + syn::custom_keyword!(Error); +} + +/// Records information about the error enum variant field. +pub struct VariantField { + /// Whether or not the field is named, i.e. whether it is a tuple variant or struct variant. + pub is_named: bool, +} + +/// Records information about the error enum variants. +pub struct VariantDef { + /// The variant ident. + pub ident: syn::Ident, + /// The variant field, if any. + pub field: Option, + /// The variant doc literals. + pub docs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, +} + +/// This checks error declaration as a enum declaration with only variants without fields nor +/// discriminant. +pub struct ErrorDef { + /// The index of error item in pallet module. + pub index: usize, + /// Variant definitions. + pub variants: Vec, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The keyword error used (contains span). + pub error: keyword::Error, + /// The span of the pallet::error attribute. 
+ pub attr_span: proc_macro2::Span, +} + +impl ErrorDef { + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + item + } else { + return Err(syn::Error::new(item.span(), "Invalid pallet::error, expected item enum")) + }; + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::error, `Error` must be public"; + return Err(syn::Error::new(item.span(), msg)) + } + + let instances = + vec![helper::check_type_def_gen_no_bounds(&item.generics, item.ident.span())?]; + + if item.generics.where_clause.is_some() { + let msg = "Invalid pallet::error, where clause is not allowed on pallet error item"; + return Err(syn::Error::new(item.generics.where_clause.as_ref().unwrap().span(), msg)) + } + + let error = syn::parse2::(item.ident.to_token_stream())?; + + let variants = item + .variants + .iter() + .map(|variant| { + let field_ty = match &variant.fields { + Fields::Unit => None, + Fields::Named(_) => Some(VariantField { is_named: true }), + Fields::Unnamed(_) => Some(VariantField { is_named: false }), + }; + if variant.discriminant.is_some() { + let msg = "Invalid pallet::error, unexpected discriminant, discriminants \ + are not supported"; + let span = variant.discriminant.as_ref().unwrap().0.span(); + return Err(syn::Error::new(span, msg)) + } + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&variant.attrs); + + Ok(VariantDef { + ident: variant.ident.clone(), + field: field_ty, + docs: get_doc_literals(&variant.attrs), + cfg_attrs, + }) + }) + .collect::>()?; + + Ok(ErrorDef { attr_span, index, variants, instances, error }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/event.rs b/support/procedural-fork/src/pallet/parse/event.rs new file mode 100644 index 000000000..0fb8ee4f5 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/event.rs @@ -0,0 +1,141 @@ +// This file is part of Substrate. 
+ +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use quote::ToTokens; +use syn::spanned::Spanned; + +/// List of additional token to be used for parsing. +mod keyword { + syn::custom_keyword!(Event); + syn::custom_keyword!(pallet); + syn::custom_keyword!(generate_deposit); + syn::custom_keyword!(deposit_event); +} + +/// Definition for pallet event enum. +pub struct EventDef { + /// The index of event item in pallet module. + pub index: usize, + /// The keyword Event used (contains span). + pub event: keyword::Event, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The kind of generic the type `Event` has. + pub gen_kind: super::GenericKind, + /// Whether the function `deposit_event` must be generated. + pub deposit_event: Option, + /// Where clause used in event definition. + pub where_clause: Option, + /// The span of the pallet::event attribute. + pub attr_span: proc_macro2::Span, +} + +/// Attribute for a pallet's Event. 
+/// +/// Syntax is: +/// * `#[pallet::generate_deposit($vis fn deposit_event)]` +pub struct PalletEventDepositAttr { + pub fn_vis: syn::Visibility, + // Span for the keyword deposit_event + pub fn_span: proc_macro2::Span, + // Span of the attribute + pub span: proc_macro2::Span, +} + +impl syn::parse::Parse for PalletEventDepositAttr { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let span = content.parse::()?.span(); + let generate_content; + syn::parenthesized!(generate_content in content); + let fn_vis = generate_content.parse::()?; + generate_content.parse::()?; + let fn_span = generate_content.parse::()?.span(); + + Ok(PalletEventDepositAttr { fn_vis, span, fn_span }) + } +} + +struct PalletEventAttrInfo { + deposit_event: Option, +} + +impl PalletEventAttrInfo { + fn from_attrs(attrs: Vec) -> syn::Result { + let mut deposit_event = None; + for attr in attrs { + if deposit_event.is_none() { + deposit_event = Some(attr) + } else { + return Err(syn::Error::new(attr.span, "Duplicate attribute")) + } + } + + Ok(PalletEventAttrInfo { deposit_event }) + } +} + +impl EventDef { + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + item + } else { + return Err(syn::Error::new(item.span(), "Invalid pallet::event, expected enum item")) + }; + + let event_attrs: Vec = + helper::take_item_pallet_attrs(&mut item.attrs)?; + let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; + let deposit_event = attr_info.deposit_event; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::event, `Event` must be public"; + return Err(syn::Error::new(item.span(), msg)) + } + + let where_clause = item.generics.where_clause.clone(); + + let mut instances = vec![]; + // NOTE: Event is not allowed to be only 
generic on I because it is not supported + // by construct_runtime. + if let Some(u) = helper::check_type_def_optional_gen(&item.generics, item.ident.span())? { + instances.push(u); + } else { + // construct_runtime only allow non generic event for non instantiable pallet. + instances.push(helper::InstanceUsage { has_instance: false, span: item.ident.span() }) + } + + let has_instance = item.generics.type_params().any(|t| t.ident == "I"); + let has_config = item.generics.type_params().any(|t| t.ident == "T"); + let gen_kind = super::GenericKind::from_gens(has_config, has_instance) + .expect("Checked by `helper::check_type_def_optional_gen` above"); + + let event = syn::parse2::(item.ident.to_token_stream())?; + + Ok(EventDef { attr_span, index, instances, deposit_event, event, gen_kind, where_clause }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/extra_constants.rs b/support/procedural-fork/src/pallet/parse/extra_constants.rs new file mode 100644 index 000000000..2ba6c44b7 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/extra_constants.rs @@ -0,0 +1,160 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use frame_support_procedural_tools::get_doc_literals; +use syn::spanned::Spanned; + +/// List of additional token to be used for parsing. 
+mod keyword { + syn::custom_keyword!(DispatchResultWithPostInfo); + syn::custom_keyword!(Call); + syn::custom_keyword!(OriginFor); + syn::custom_keyword!(weight); + syn::custom_keyword!(compact); + syn::custom_keyword!(T); + syn::custom_keyword!(pallet); + syn::custom_keyword!(constant_name); +} + +/// Definition of extra constants typically `impl Pallet { ... }` +pub struct ExtraConstantsDef { + /// The where_clause used. + pub where_clause: Option, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The index of call item in pallet module. + pub index: usize, + /// The extra constant defined. + pub extra_constants: Vec, +} + +/// Input definition for an constant in pallet. +pub struct ExtraConstantDef { + /// Name of the function + pub ident: syn::Ident, + /// The type returned by the function + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, + /// Optional MetaData Name + pub metadata_name: Option, +} + +/// Attributes for functions in extra_constants impl block. +/// Parse for `#[pallet::constant_name(ConstantName)]` +pub struct ExtraConstAttr { + metadata_name: syn::Ident, +} + +impl syn::parse::Parse for ExtraConstAttr { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + content.parse::()?; + + let metadata_name; + syn::parenthesized!(metadata_name in content); + Ok(ExtraConstAttr { metadata_name: metadata_name.parse::()? 
}) + } +} + +impl ExtraConstantsDef { + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::extra_constants, expected item impl", + )) + }; + + let instances = vec![ + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + helper::check_pallet_struct_usage(&item.self_ty)?, + ]; + + if let Some((_, _, for_)) = item.trait_ { + let msg = "Invalid pallet::call, expected no trait ident as in \ + `impl<..> Pallet<..> { .. }`"; + return Err(syn::Error::new(for_.span(), msg)) + } + + let mut extra_constants = vec![]; + for impl_item in &mut item.items { + let method = if let syn::ImplItem::Fn(method) = impl_item { + method + } else { + let msg = "Invalid pallet::call, only method accepted"; + return Err(syn::Error::new(impl_item.span(), msg)) + }; + + if !method.sig.inputs.is_empty() { + let msg = "Invalid pallet::extra_constants, method must have 0 args"; + return Err(syn::Error::new(method.sig.span(), msg)) + } + + if !method.sig.generics.params.is_empty() { + let msg = "Invalid pallet::extra_constants, method must have 0 generics"; + return Err(syn::Error::new(method.sig.generics.params[0].span(), msg)) + } + + if method.sig.generics.where_clause.is_some() { + let msg = "Invalid pallet::extra_constants, method must have no where clause"; + return Err(syn::Error::new(method.sig.generics.where_clause.span(), msg)) + } + + let type_ = match &method.sig.output { + syn::ReturnType::Default => { + let msg = "Invalid pallet::extra_constants, method must have a return type"; + return Err(syn::Error::new(method.span(), msg)) + }, + syn::ReturnType::Type(_, type_) => *type_.clone(), + }; + + // parse metadata_name + let mut extra_constant_attrs: Vec = + helper::take_item_pallet_attrs(method)?; + + if extra_constant_attrs.len() > 1 { + let msg = + "Invalid attribute in pallet::constant_name, only one attribute is 
expected"; + return Err(syn::Error::new(extra_constant_attrs[1].metadata_name.span(), msg)) + } + + let metadata_name = extra_constant_attrs.pop().map(|attr| attr.metadata_name); + + extra_constants.push(ExtraConstantDef { + ident: method.sig.ident.clone(), + type_, + doc: get_doc_literals(&method.attrs), + metadata_name, + }); + } + + Ok(Self { + index, + instances, + where_clause: item.generics.where_clause.clone(), + extra_constants, + }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/genesis_build.rs b/support/procedural-fork/src/pallet/parse/genesis_build.rs new file mode 100644 index 000000000..d0e1d9ec9 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/genesis_build.rs @@ -0,0 +1,61 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use syn::spanned::Spanned; + +/// Definition for pallet genesis build implementation. +pub struct GenesisBuildDef { + /// The index of item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Option>, + /// The where_clause used. + pub where_clause: Option, + /// The span of the pallet::genesis_build attribute. 
+ pub attr_span: proc_macro2::Span, +} + +impl GenesisBuildDef { + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::genesis_build, expected item impl"; + return Err(syn::Error::new(item.span(), msg)) + }; + + let item_trait = &item + .trait_ + .as_ref() + .ok_or_else(|| { + let msg = "Invalid pallet::genesis_build, expected impl<..> GenesisBuild<..> \ + for GenesisConfig<..>"; + syn::Error::new(item.span(), msg) + })? + .1; + + let instances = + helper::check_genesis_builder_usage(item_trait)?.map(|instances| vec![instances]); + + Ok(Self { attr_span, index, instances, where_clause: item.generics.where_clause.clone() }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/genesis_config.rs b/support/procedural-fork/src/pallet/parse/genesis_config.rs new file mode 100644 index 000000000..62da6ba13 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/genesis_config.rs @@ -0,0 +1,73 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use syn::spanned::Spanned; + +/// Definition for pallet genesis config type. 
+/// +/// Either: +/// * `struct GenesisConfig` +/// * `enum GenesisConfig` +pub struct GenesisConfigDef { + /// The index of item in pallet module. + pub index: usize, + /// The kind of generic the type `GenesisConfig` has. + pub gen_kind: super::GenericKind, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The ident of genesis_config, can be used for span. + pub genesis_config: syn::Ident, +} + +impl GenesisConfigDef { + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item_span = item.span(); + let (vis, ident, generics) = match &item { + syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), + _ => { + let msg = "Invalid pallet::genesis_config, expected enum or struct"; + return Err(syn::Error::new(item.span(), msg)) + }, + }; + + let mut instances = vec![]; + // NOTE: GenesisConfig is not allowed to be only generic on I because it is not supported + // by construct_runtime. + if let Some(u) = helper::check_type_def_optional_gen(generics, ident.span())? 
{ + instances.push(u); + } + + let has_instance = generics.type_params().any(|t| t.ident == "I"); + let has_config = generics.type_params().any(|t| t.ident == "T"); + let gen_kind = super::GenericKind::from_gens(has_config, has_instance) + .expect("Checked by `helper::check_type_def_optional_gen` above"); + + if !matches!(vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::genesis_config, GenesisConfig must be public"; + return Err(syn::Error::new(item_span, msg)) + } + + if ident != "GenesisConfig" { + let msg = "Invalid pallet::genesis_config, ident must `GenesisConfig`"; + return Err(syn::Error::new(ident.span(), msg)) + } + + Ok(GenesisConfigDef { index, genesis_config: ident.clone(), instances, gen_kind }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/helper.rs b/support/procedural-fork/src/pallet/parse/helper.rs new file mode 100644 index 000000000..3187c9139 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/helper.rs @@ -0,0 +1,632 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use proc_macro2::TokenStream; +use quote::{quote, ToTokens}; +use syn::spanned::Spanned; + +/// List of additional token to be used for parsing. 
+mod keyword { + syn::custom_keyword!(I); + syn::custom_keyword!(compact); + syn::custom_keyword!(GenesisBuild); + syn::custom_keyword!(BuildGenesisConfig); + syn::custom_keyword!(Config); + syn::custom_keyword!(T); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(origin); + syn::custom_keyword!(DispatchResult); + syn::custom_keyword!(DispatchResultWithPostInfo); +} + +/// A usage of instance, either the trait `Config` has been used with instance or without instance. +/// Used to check for consistency. +#[derive(Clone)] +pub struct InstanceUsage { + pub has_instance: bool, + pub span: proc_macro2::Span, +} + +/// Trait implemented for syn items to get mutable references on their attributes. +/// +/// NOTE: verbatim variants are not supported. +pub trait MutItemAttrs { + fn mut_item_attrs(&mut self) -> Option<&mut Vec>; +} + +/// Take the first pallet attribute (e.g. attribute like `#[pallet..]`) and decode it to `Attr` +pub(crate) fn take_first_item_pallet_attr( + item: &mut impl MutItemAttrs, +) -> syn::Result> +where + Attr: syn::parse::Parse, +{ + let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; + + if let Some(index) = attrs.iter().position(|attr| { + attr.path().segments.first().map_or(false, |segment| segment.ident == "pallet") + }) { + let pallet_attr = attrs.remove(index); + Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) + } else { + Ok(None) + } +} + +/// Take all the pallet attributes (e.g. attribute like `#[pallet..]`) and decode them to `Attr` +pub(crate) fn take_item_pallet_attrs(item: &mut impl MutItemAttrs) -> syn::Result> +where + Attr: syn::parse::Parse, +{ + let mut pallet_attrs = Vec::new(); + + while let Some(attr) = take_first_item_pallet_attr(item)? { + pallet_attrs.push(attr) + } + + Ok(pallet_attrs) +} + +/// Get all the cfg attributes (e.g. 
attribute like `#[cfg..]`) and decode them to `Attr` +pub fn get_item_cfg_attrs(attrs: &[syn::Attribute]) -> Vec { + attrs + .iter() + .filter_map(|attr| { + if attr.path().segments.first().map_or(false, |segment| segment.ident == "cfg") { + Some(attr.clone()) + } else { + None + } + }) + .collect::>() +} + +impl MutItemAttrs for syn::Item { + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + match self { + Self::Const(item) => Some(item.attrs.as_mut()), + Self::Enum(item) => Some(item.attrs.as_mut()), + Self::ExternCrate(item) => Some(item.attrs.as_mut()), + Self::Fn(item) => Some(item.attrs.as_mut()), + Self::ForeignMod(item) => Some(item.attrs.as_mut()), + Self::Impl(item) => Some(item.attrs.as_mut()), + Self::Macro(item) => Some(item.attrs.as_mut()), + Self::Mod(item) => Some(item.attrs.as_mut()), + Self::Static(item) => Some(item.attrs.as_mut()), + Self::Struct(item) => Some(item.attrs.as_mut()), + Self::Trait(item) => Some(item.attrs.as_mut()), + Self::TraitAlias(item) => Some(item.attrs.as_mut()), + Self::Type(item) => Some(item.attrs.as_mut()), + Self::Union(item) => Some(item.attrs.as_mut()), + Self::Use(item) => Some(item.attrs.as_mut()), + _ => None, + } + } +} + +impl MutItemAttrs for syn::TraitItem { + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + match self { + Self::Const(item) => Some(item.attrs.as_mut()), + Self::Fn(item) => Some(item.attrs.as_mut()), + Self::Type(item) => Some(item.attrs.as_mut()), + Self::Macro(item) => Some(item.attrs.as_mut()), + _ => None, + } + } +} + +impl MutItemAttrs for Vec { + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(self) + } +} + +impl MutItemAttrs for syn::ItemMod { + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } +} + +impl MutItemAttrs for syn::ImplItemFn { + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } +} + +impl MutItemAttrs for syn::ItemType { + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + 
} +} + +/// Parse for `()` +struct Unit; +impl syn::parse::Parse for Unit { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + syn::parenthesized!(content in input); + if !content.is_empty() { + let msg = "unexpected tokens, expected nothing inside parenthesis as `()`"; + return Err(syn::Error::new(content.span(), msg)) + } + Ok(Self) + } +} + +/// Parse for `'static` +struct StaticLifetime; +impl syn::parse::Parse for StaticLifetime { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lifetime = input.parse::()?; + if lifetime.ident != "static" { + let msg = "unexpected tokens, expected `static`"; + return Err(syn::Error::new(lifetime.ident.span(), msg)) + } + Ok(Self) + } +} + +/// Check the syntax: `I: 'static = ()` +/// +/// `span` is used in case generics is empty (empty generics has span == call_site). +/// +/// return the instance if found. +pub fn check_config_def_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Result<()> { + let expected = "expected `I: 'static = ()`"; + pub struct CheckTraitDefGenerics; + impl syn::parse::Parse for CheckTraitDefGenerics { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self) + } + } + + syn::parse2::(gen.params.to_token_stream()).map_err(|e| { + let msg = format!("Invalid generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })?; + + Ok(()) +} + +/// Check the syntax: +/// * either `T` +/// * or `T, I = ()` +/// +/// `span` is used in case generics is empty (empty generics has span == call_site). +/// +/// return the instance if found. 
+pub fn check_type_def_gen_no_bounds( + gen: &syn::Generics, + span: proc_macro2::Span, +) -> syn::Result { + let expected = "expected `T` or `T, I = ()`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { has_instance: false, span: input.span() }; + + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + } + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0; + + Ok(i) +} + +/// Check the syntax: +/// * either `` (no generics +/// * or `T` +/// * or `T: Config` +/// * or `T, I = ()` +/// * or `T: Config, I: 'static = ()` +/// +/// `span` is used in case generics is empty (empty generics has span == call_site). +/// +/// return some instance usage if there is some generic, or none otherwise. 
+pub fn check_type_def_optional_gen( + gen: &syn::Generics, + span: proc_macro2::Span, +) -> syn::Result> { + let expected = "expected `` or `T` or `T: Config` or `T, I = ()` or \ + `T: Config, I: 'static = ()`"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(Self(None)) + } + + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + input.parse::()?; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))) + } + + let lookahead = input.lookahead1(); + if lookahead.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } else if lookahead.peek(syn::Token![:]) { + input.parse::()?; + input.parse::()?; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))) + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } else { + Err(lookahead.error()) + } + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0 + // Span can be call_site if generic is empty. Thus we replace it. + .map(|mut i| { + i.span = span; + i + }); + + Ok(i) +} + +/// Check the syntax: +/// * either `Pallet` +/// * or `Pallet` +/// +/// return the instance if found. 
+pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result { + let expected = "expected `Pallet` or `Pallet`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + } + input.parse::]>()?; + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(type_.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid pallet struct: {}", expected); + let mut err = syn::Error::new(type_.span(), msg); + err.combine(e); + err + })? + .0; + + Ok(i) +} + +/// Check the generic is: +/// * either `T: Config` +/// * or `T: Config, I: 'static` +/// +/// `span` is used in case generics is empty (empty generics has span == call_site). +/// +/// return whether it contains instance. +pub fn check_impl_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Result { + let expected = "expected `impl` or `impl, I: 'static>`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![<]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + } + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let mut err = syn::Error::new(span, format!("Invalid generics: {}", expected)); + err.combine(e); + err + })? 
+ .0; + + Ok(i) +} + +/// Check the syntax: +/// * or `T` +/// * or `T: Config` +/// * or `T, I = ()` +/// * or `T: Config, I: 'static = ()` +/// +/// `span` is used in case generics is empty (empty generics has span == call_site). +/// +/// return the instance if found. +pub fn check_type_def_gen( + gen: &syn::Generics, + span: proc_macro2::Span, +) -> syn::Result { + let expected = "expected `T` or `T: Config` or `T, I = ()` or \ + `T: Config, I: 'static = ()`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + input.parse::()?; + + if input.is_empty() { + return Ok(Self(instance_usage)) + } + + let lookahead = input.lookahead1(); + if lookahead.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(instance_usage)) + } else if lookahead.peek(syn::Token![:]) { + input.parse::()?; + input.parse::()?; + + if input.is_empty() { + return Ok(Self(instance_usage)) + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(instance_usage)) + } else { + Err(lookahead.error()) + } + } + } + + let mut i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0; + + // Span can be call_site if generic is empty. Thus we replace it. 
+ i.span = span; + + Ok(i) +} + +/// Check the syntax: +/// * either `GenesisBuild` +/// * or `GenesisBuild` +/// * or `BuildGenesisConfig` +/// +/// return the instance if found for `GenesisBuild` +/// return None for BuildGenesisConfig +pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result> { + let expected = "expected `BuildGenesisConfig` (or the deprecated `GenesisBuild` or `GenesisBuild`)"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + if input.peek(keyword::GenesisBuild) { + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + } + input.parse::]>()?; + return Ok(Self(Some(instance_usage))) + } else { + input.parse::()?; + return Ok(Self(None)) + } + } + } + + let i = syn::parse2::(type_.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid genesis builder: {}", expected); + let mut err = syn::Error::new(type_.span(), msg); + err.combine(e); + err + })? + .0; + + Ok(i) +} + +/// Check the syntax: +/// * either `` (no generics) +/// * or `T: Config` +/// * or `T: Config, I: 'static` +/// +/// `span` is used in case generics is empty (empty generics has span == call_site). +/// +/// return the instance if found. 
+pub fn check_type_value_gen( + gen: &syn::Generics, + span: proc_macro2::Span, +) -> syn::Result> { + let expected = "expected `` or `T: Config` or `T: Config, I: 'static`"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(Self(None)) + } + + input.parse::()?; + input.parse::()?; + input.parse::()?; + + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))) + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0 + // Span can be call_site if generic is empty. Thus we replace it. + .map(|mut i| { + i.span = span; + i + }); + + Ok(i) +} + +/// Check the keyword `DispatchResultWithPostInfo` or `DispatchResult`. 
+pub fn check_pallet_call_return_type(type_: &syn::Type) -> syn::Result<()> { + pub struct Checker; + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keyword::DispatchResultWithPostInfo) { + input.parse::()?; + Ok(Self) + } else if lookahead.peek(keyword::DispatchResult) { + input.parse::()?; + Ok(Self) + } else { + Err(lookahead.error()) + } + } + } + + syn::parse2::(type_.to_token_stream()).map(|_| ()) +} + +pub(crate) fn two128_str(s: &str) -> TokenStream { + bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes()).into_iter()) +} + +pub(crate) fn bytes_to_array(bytes: impl IntoIterator) -> TokenStream { + let bytes = bytes.into_iter(); + + quote!( + [ #( #bytes ),* ] + ) + .into() +} diff --git a/support/procedural-fork/src/pallet/parse/hooks.rs b/support/procedural-fork/src/pallet/parse/hooks.rs new file mode 100644 index 000000000..37d7d22f4 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/hooks.rs @@ -0,0 +1,86 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use syn::spanned::Spanned; + +/// Implementation of the pallet hooks. +pub struct HooksDef { + /// The index of item in pallet. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. 
+ pub instances: Vec, + /// The where_clause used. + pub where_clause: Option, + /// The span of the pallet::hooks attribute. + pub attr_span: proc_macro2::Span, + /// Boolean flag, set to true if the `on_runtime_upgrade` method of hooks was implemented. + pub has_runtime_upgrade: bool, +} + +impl HooksDef { + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::hooks, expected item impl"; + return Err(syn::Error::new(item.span(), msg)) + }; + + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; + + let item_trait = &item + .trait_ + .as_ref() + .ok_or_else(|| { + let msg = "Invalid pallet::hooks, expected impl<..> Hooks \ + for Pallet<..>"; + syn::Error::new(item.span(), msg) + })? + .1; + + if item_trait.segments.len() != 1 || item_trait.segments[0].ident != "Hooks" { + let msg = format!( + "Invalid pallet::hooks, expected trait to be `Hooks` found `{}`\ + , you can import from `frame_support::pallet_prelude`", + quote::quote!(#item_trait) + ); + + return Err(syn::Error::new(item_trait.span(), msg)) + } + + let has_runtime_upgrade = item.items.iter().any(|i| match i { + syn::ImplItem::Fn(method) => method.sig.ident == "on_runtime_upgrade", + _ => false, + }); + + Ok(Self { + attr_span, + index, + instances, + has_runtime_upgrade, + where_clause: item.generics.where_clause.clone(), + }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/inherent.rs b/support/procedural-fork/src/pallet/parse/inherent.rs new file mode 100644 index 000000000..d8641691a --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/inherent.rs @@ -0,0 +1,60 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use syn::spanned::Spanned; + +/// The definition of the pallet inherent implementation. +pub struct InherentDef { + /// The index of inherent item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, +} + +impl InherentDef { + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::inherent, expected item impl"; + return Err(syn::Error::new(item.span(), msg)) + }; + + if item.trait_.is_none() { + let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)) + } + + if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { + if last.ident != "ProvideInherent" { + let msg = "Invalid pallet::inherent, expected trait ProvideInherent"; + return Err(syn::Error::new(last.span(), msg)) + } + } else { + let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)) + } + + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; + + Ok(InherentDef { index, instances }) + } +} diff --git 
a/support/procedural-fork/src/pallet/parse/mod.rs b/support/procedural-fork/src/pallet/parse/mod.rs new file mode 100644 index 000000000..6e1277461 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/mod.rs @@ -0,0 +1,749 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Parse for pallet macro. +//! +//! Parse the module into `Def` struct through `Def::try_from` function. + +pub mod call; +pub mod composite; +pub mod config; +pub mod error; +pub mod event; +pub mod extra_constants; +pub mod genesis_build; +pub mod genesis_config; +pub mod helper; +pub mod hooks; +pub mod inherent; +pub mod origin; +pub mod pallet_struct; +pub mod storage; +pub mod tasks; +pub mod type_value; +pub mod validate_unsigned; + +#[cfg(test)] +pub mod tests; + +use composite::{keyword::CompositeKeyword, CompositeDef}; +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use quote::ToTokens; +use syn::spanned::Spanned; + +/// Parsed definition of a pallet. +pub struct Def { + /// The module items. + /// (their order must not be modified because they are registered in individual definitions). 
+ pub item: syn::ItemMod, + pub config: config::ConfigDef, + pub pallet_struct: pallet_struct::PalletStructDef, + pub hooks: Option, + pub call: Option, + pub tasks: Option, + pub task_enum: Option, + pub storages: Vec, + pub error: Option, + pub event: Option, + pub origin: Option, + pub inherent: Option, + pub genesis_config: Option, + pub genesis_build: Option, + pub validate_unsigned: Option, + pub extra_constants: Option, + pub composites: Vec, + pub type_values: Vec, + pub frame_system: syn::Path, + pub frame_support: syn::Path, + pub dev_mode: bool, +} + +impl Def { + pub fn try_from(mut item: syn::ItemMod, dev_mode: bool) -> syn::Result { + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + + let item_span = item.span(); + let items = &mut item + .content + .as_mut() + .ok_or_else(|| { + let msg = "Invalid pallet definition, expected mod to be inlined."; + syn::Error::new(item_span, msg) + })? 
+ .1; + + let mut config = None; + let mut pallet_struct = None; + let mut hooks = None; + let mut call = None; + let mut tasks = None; + let mut task_enum = None; + let mut error = None; + let mut event = None; + let mut origin = None; + let mut inherent = None; + let mut genesis_config = None; + let mut genesis_build = None; + let mut validate_unsigned = None; + let mut extra_constants = None; + let mut storages = vec![]; + let mut type_values = vec![]; + let mut composites: Vec = vec![]; + + for (index, item) in items.iter_mut().enumerate() { + let pallet_attr: Option = helper::take_first_item_pallet_attr(item)?; + + match pallet_attr { + Some(PalletAttr::Config(span, with_default)) if config.is_none() => + config = Some(config::ConfigDef::try_from( + &frame_system, + span, + index, + item, + with_default, + )?), + Some(PalletAttr::Pallet(span)) if pallet_struct.is_none() => { + let p = pallet_struct::PalletStructDef::try_from(span, index, item)?; + pallet_struct = Some(p); + }, + Some(PalletAttr::Hooks(span)) if hooks.is_none() => { + let m = hooks::HooksDef::try_from(span, index, item)?; + hooks = Some(m); + }, + Some(PalletAttr::RuntimeCall(cw, span)) if call.is_none() => + call = Some(call::CallDef::try_from(span, index, item, dev_mode, cw)?), + Some(PalletAttr::Tasks(_)) if tasks.is_none() => { + let item_tokens = item.to_token_stream(); + // `TasksDef::parse` needs to know if attr was provided so we artificially + // re-insert it here + tasks = Some(syn::parse2::(quote::quote! { + #[pallet::tasks_experimental] + #item_tokens + })?); + + // replace item with a no-op because it will be handled by the expansion of tasks + *item = syn::Item::Verbatim(quote::quote!()); + } + Some(PalletAttr::TaskCondition(span)) => return Err(syn::Error::new( + span, + "`#[pallet::task_condition]` can only be used on items within an `impl` statement." 
+ )), + Some(PalletAttr::TaskIndex(span)) => return Err(syn::Error::new( + span, + "`#[pallet::task_index]` can only be used on items within an `impl` statement." + )), + Some(PalletAttr::TaskList(span)) => return Err(syn::Error::new( + span, + "`#[pallet::task_list]` can only be used on items within an `impl` statement." + )), + Some(PalletAttr::RuntimeTask(_)) if task_enum.is_none() => + task_enum = Some(syn::parse2::(item.to_token_stream())?), + Some(PalletAttr::Error(span)) if error.is_none() => + error = Some(error::ErrorDef::try_from(span, index, item)?), + Some(PalletAttr::RuntimeEvent(span)) if event.is_none() => + event = Some(event::EventDef::try_from(span, index, item)?), + Some(PalletAttr::GenesisConfig(_)) if genesis_config.is_none() => { + let g = genesis_config::GenesisConfigDef::try_from(index, item)?; + genesis_config = Some(g); + }, + Some(PalletAttr::GenesisBuild(span)) if genesis_build.is_none() => { + let g = genesis_build::GenesisBuildDef::try_from(span, index, item)?; + genesis_build = Some(g); + }, + Some(PalletAttr::RuntimeOrigin(_)) if origin.is_none() => + origin = Some(origin::OriginDef::try_from(index, item)?), + Some(PalletAttr::Inherent(_)) if inherent.is_none() => + inherent = Some(inherent::InherentDef::try_from(index, item)?), + Some(PalletAttr::Storage(span)) => + storages.push(storage::StorageDef::try_from(span, index, item, dev_mode)?), + Some(PalletAttr::ValidateUnsigned(_)) if validate_unsigned.is_none() => { + let v = validate_unsigned::ValidateUnsignedDef::try_from(index, item)?; + validate_unsigned = Some(v); + }, + Some(PalletAttr::TypeValue(span)) => + type_values.push(type_value::TypeValueDef::try_from(span, index, item)?), + Some(PalletAttr::ExtraConstants(_)) => + extra_constants = + Some(extra_constants::ExtraConstantsDef::try_from(index, item)?), + Some(PalletAttr::Composite(span)) => { + let composite = + composite::CompositeDef::try_from(span, index, &frame_support, item)?; + if composites.iter().any(|def| { + 
match (&def.composite_keyword, &composite.composite_keyword) { + ( + CompositeKeyword::FreezeReason(_), + CompositeKeyword::FreezeReason(_), + ) | + (CompositeKeyword::HoldReason(_), CompositeKeyword::HoldReason(_)) | + (CompositeKeyword::LockId(_), CompositeKeyword::LockId(_)) | + ( + CompositeKeyword::SlashReason(_), + CompositeKeyword::SlashReason(_), + ) => true, + _ => false, + } + }) { + let msg = format!( + "Invalid duplicated `{}` definition", + composite.composite_keyword + ); + return Err(syn::Error::new(composite.composite_keyword.span(), &msg)) + } + composites.push(composite); + }, + Some(attr) => { + let msg = "Invalid duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)) + }, + None => (), + } + } + + if genesis_config.is_some() != genesis_build.is_some() { + let msg = format!( + "`#[pallet::genesis_config]` and `#[pallet::genesis_build]` attributes must be \ + either both used or both not used, instead genesis_config is {} and genesis_build \ + is {}", + genesis_config.as_ref().map_or("unused", |_| "used"), + genesis_build.as_ref().map_or("unused", |_| "used"), + ); + return Err(syn::Error::new(item_span, msg)) + } + + Self::resolve_tasks(&item_span, &mut tasks, &mut task_enum, items)?; + + let def = Def { + item, + config: config + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, + pallet_struct: pallet_struct + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, + hooks, + call, + tasks, + task_enum, + extra_constants, + genesis_config, + genesis_build, + validate_unsigned, + error, + event, + origin, + inherent, + storages, + composites, + type_values, + frame_system, + frame_support, + dev_mode, + }; + + def.check_instance_usage()?; + def.check_event_usage()?; + + Ok(def) + } + + /// Performs extra logic checks necessary for the `#[pallet::tasks_experimental]` feature. 
+ fn resolve_tasks( + item_span: &proc_macro2::Span, + tasks: &mut Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + // fallback for manual (without macros) definition of tasks impl + Self::resolve_manual_tasks_impl(tasks, task_enum, items)?; + + // fallback for manual (without macros) definition of task enum + Self::resolve_manual_task_enum(tasks, task_enum, items)?; + + // ensure that if `task_enum` is specified, `tasks` is also specified + match (&task_enum, &tasks) { + (Some(_), None) => + return Err(syn::Error::new( + *item_span, + "Missing `#[pallet::tasks_experimental]` impl", + )), + (None, Some(tasks)) => + if tasks.tasks_attr.is_none() { + return Err(syn::Error::new( + tasks.item_impl.impl_token.span(), + "A `#[pallet::tasks_experimental]` attribute must be attached to your `Task` impl if the \ + task enum has been omitted", + )) + } else { + }, + _ => (), + } + + Ok(()) + } + + /// Tries to locate task enum based on the tasks impl target if attribute is not specified + /// but impl is present. If one is found, `task_enum` is set appropriately. + fn resolve_manual_task_enum( + tasks: &Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + let (None, Some(tasks)) = (&task_enum, &tasks) else { return Ok(()) }; + let syn::Type::Path(type_path) = &*tasks.item_impl.self_ty else { return Ok(()) }; + let type_path = type_path.path.segments.iter().collect::>(); + let (Some(seg), None) = (type_path.get(0), type_path.get(1)) else { return Ok(()) }; + let mut result = None; + for item in items { + let syn::Item::Enum(item_enum) = item else { continue }; + if item_enum.ident == seg.ident { + result = Some(syn::parse2::(item_enum.to_token_stream())?); + // replace item with a no-op because it will be handled by the expansion of + // `task_enum`. 
We use a no-op instead of simply removing it from the vec + // so that any indices collected by `Def::try_from` remain accurate + *item = syn::Item::Verbatim(quote::quote!()); + break + } + } + *task_enum = result; + Ok(()) + } + + /// Tries to locate a manual tasks impl (an impl implementing a trait whose last path segment is + /// `Task`) in the event that one has not been found already via the attribute macro + pub fn resolve_manual_tasks_impl( + tasks: &mut Option, + task_enum: &Option, + items: &Vec, + ) -> syn::Result<()> { + let None = tasks else { return Ok(()) }; + let mut result = None; + for item in items { + let syn::Item::Impl(item_impl) = item else { continue }; + let Some((_, path, _)) = &item_impl.trait_ else { continue }; + let Some(trait_last_seg) = path.segments.last() else { continue }; + let syn::Type::Path(target_path) = &*item_impl.self_ty else { continue }; + let target_path = target_path.path.segments.iter().collect::>(); + let (Some(target_ident), None) = (target_path.get(0), target_path.get(1)) else { + continue + }; + let matches_task_enum = match task_enum { + Some(task_enum) => task_enum.item_enum.ident == target_ident.ident, + None => true, + }; + if trait_last_seg.ident == "Task" && matches_task_enum { + result = Some(syn::parse2::(item_impl.to_token_stream())?); + break + } + } + *tasks = result; + Ok(()) + } + + /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared + /// and trait defines type RuntimeEvent, or not declared and no trait associated type. + fn check_event_usage(&self) -> syn::Result<()> { + match (self.config.has_event_type, self.event.is_some()) { + (true, false) => { + let msg = "Invalid usage of RuntimeEvent, `Config` contains associated type `RuntimeEvent`, \ + but enum `Event` is not declared (i.e. no use of `#[pallet::event]`). 
\ + Note that type `RuntimeEvent` in trait is reserved to work alongside pallet event."; + Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) + }, + (false, true) => { + let msg = "Invalid usage of RuntimeEvent, `Config` contains no associated type \ + `RuntimeEvent`, but enum `Event` is declared (in use of `#[pallet::event]`). \ + An RuntimeEvent associated type must be declare on trait `Config`."; + Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) + }, + _ => Ok(()), + } + } + + /// Check that usage of trait `Config` is consistent with the definition, i.e. it is used with + /// instance iff it is defined with instance. + fn check_instance_usage(&self) -> syn::Result<()> { + let mut instances = vec![]; + instances.extend_from_slice(&self.pallet_struct.instances[..]); + instances.extend(&mut self.storages.iter().flat_map(|s| s.instances.clone())); + if let Some(call) = &self.call { + instances.extend_from_slice(&call.instances[..]); + } + if let Some(hooks) = &self.hooks { + instances.extend_from_slice(&hooks.instances[..]); + } + if let Some(event) = &self.event { + instances.extend_from_slice(&event.instances[..]); + } + if let Some(error) = &self.error { + instances.extend_from_slice(&error.instances[..]); + } + if let Some(inherent) = &self.inherent { + instances.extend_from_slice(&inherent.instances[..]); + } + if let Some(origin) = &self.origin { + instances.extend_from_slice(&origin.instances[..]); + } + if let Some(genesis_config) = &self.genesis_config { + instances.extend_from_slice(&genesis_config.instances[..]); + } + if let Some(genesis_build) = &self.genesis_build { + genesis_build.instances.as_ref().map(|i| instances.extend_from_slice(&i)); + } + if let Some(extra_constants) = &self.extra_constants { + instances.extend_from_slice(&extra_constants.instances[..]); + } + + let mut errors = instances.into_iter().filter_map(|instances| { + if instances.has_instance == self.config.has_instance { + return None + } + let msg = if 
self.config.has_instance { + "Invalid generic declaration, trait is defined with instance but generic use none" + } else { + "Invalid generic declaration, trait is defined without instance but generic use \ + some" + }; + Some(syn::Error::new(instances.span, msg)) + }); + + if let Some(mut first_error) = errors.next() { + for error in errors { + first_error.combine(error) + } + Err(first_error) + } else { + Ok(()) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T: Config` + /// * or `T: Config, I: 'static` + pub fn type_impl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T: Config, I: 'static) + } else { + quote::quote_spanned!(span => T: Config) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T: Config` + /// * or `T: Config, I: 'static = ()` + pub fn type_decl_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T: Config, I: 'static = ()) + } else { + quote::quote_spanned!(span => T: Config) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T` + /// * or `T, I = ()` + pub fn type_decl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T, I = ()) + } else { + quote::quote_spanned!(span => T) + } + } + + /// Depending on if pallet is instantiable: + /// * either `` + /// * or `` + /// to be used when using pallet trait `Config` + pub fn trait_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => ) + } else { + quote::quote_spanned!(span => ) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T` + /// * or `T, I` + pub fn type_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + 
quote::quote_spanned!(span => T, I) + } else { + quote::quote_spanned!(span => T) + } + } +} + +/// Some generic kind for type which can be not generic, or generic over config, +/// or generic over config and instance, but not generic only over instance. +pub enum GenericKind { + None, + Config, + ConfigAndInstance, +} + +impl GenericKind { + /// Return Err if it is only generics over instance but not over config. + pub fn from_gens(has_config: bool, has_instance: bool) -> Result { + match (has_config, has_instance) { + (false, false) => Ok(GenericKind::None), + (true, false) => Ok(GenericKind::Config), + (true, true) => Ok(GenericKind::ConfigAndInstance), + (false, true) => Err(()), + } + } + + /// Return the generic to be used when using the type. + /// + /// Depending on its definition it can be: ``, `T` or `T, I` + pub fn type_use_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + match self { + GenericKind::None => quote::quote!(), + GenericKind::Config => quote::quote_spanned!(span => T), + GenericKind::ConfigAndInstance => quote::quote_spanned!(span => T, I), + } + } + + /// Return the generic to be used in `impl<..>` when implementing on the type. + pub fn type_impl_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + match self { + GenericKind::None => quote::quote!(), + GenericKind::Config => quote::quote_spanned!(span => T: Config), + GenericKind::ConfigAndInstance => { + quote::quote_spanned!(span => T: Config, I: 'static) + }, + } + } + + /// Return whereas the type has some generic. + pub fn is_generic(&self) -> bool { + match self { + GenericKind::None => false, + GenericKind::Config | GenericKind::ConfigAndInstance => true, + } + } +} + +/// List of additional token to be used for parsing. 
+mod keyword { + syn::custom_keyword!(origin); + syn::custom_keyword!(call); + syn::custom_keyword!(tasks_experimental); + syn::custom_keyword!(task_enum); + syn::custom_keyword!(task_list); + syn::custom_keyword!(task_condition); + syn::custom_keyword!(task_index); + syn::custom_keyword!(weight); + syn::custom_keyword!(event); + syn::custom_keyword!(config); + syn::custom_keyword!(with_default); + syn::custom_keyword!(hooks); + syn::custom_keyword!(inherent); + syn::custom_keyword!(error); + syn::custom_keyword!(storage); + syn::custom_keyword!(genesis_build); + syn::custom_keyword!(genesis_config); + syn::custom_keyword!(validate_unsigned); + syn::custom_keyword!(type_value); + syn::custom_keyword!(pallet); + syn::custom_keyword!(extra_constants); + syn::custom_keyword!(composite_enum); +} + +/// Parse attributes for item in pallet module +/// syntax must be `pallet::` (e.g. `#[pallet::config]`) +enum PalletAttr { + Config(proc_macro2::Span, bool), + Pallet(proc_macro2::Span), + Hooks(proc_macro2::Span), + /// A `#[pallet::call]` with optional attributes to specialize the behaviour. + /// + /// # Attributes + /// + /// Each attribute `attr` can take the form of `#[pallet::call(attr = …)]` or + /// `#[pallet::call(attr(…))]`. The possible attributes are: + /// + /// ## `weight` + /// + /// Can be used to reduce the repetitive weight annotation in the trivial case. It accepts one + /// argument that is expected to be an implementation of the `WeightInfo` or something that + /// behaves syntactically equivalent. This allows to annotate a `WeightInfo` for all the calls. + /// Now each call does not need to specify its own `#[pallet::weight]` but can instead use the + /// one from the `#[pallet::call]` definition. 
So instead of having to write it on each call: + /// + /// ```ignore + /// #[pallet::call] + /// impl Pallet { + /// #[pallet::weight(T::WeightInfo::create())] + /// pub fn create( + /// ``` + /// you can now omit it on the call itself, if the name of the weigh function matches the call: + /// + /// ```ignore + /// #[pallet::call(weight = ::WeightInfo)] + /// impl Pallet { + /// pub fn create( + /// ``` + /// + /// It is possible to use this syntax together with instantiated pallets by using `Config` + /// instead. + /// + /// ### Dev Mode + /// + /// Normally the `dev_mode` sets all weights of calls without a `#[pallet::weight]` annotation + /// to zero. Now when there is a `weight` attribute on the `#[pallet::call]`, then that is used + /// instead of the zero weight. So to say: it works together with `dev_mode`. + RuntimeCall(Option, proc_macro2::Span), + Error(proc_macro2::Span), + Tasks(proc_macro2::Span), + TaskList(proc_macro2::Span), + TaskCondition(proc_macro2::Span), + TaskIndex(proc_macro2::Span), + RuntimeTask(proc_macro2::Span), + RuntimeEvent(proc_macro2::Span), + RuntimeOrigin(proc_macro2::Span), + Inherent(proc_macro2::Span), + Storage(proc_macro2::Span), + GenesisConfig(proc_macro2::Span), + GenesisBuild(proc_macro2::Span), + ValidateUnsigned(proc_macro2::Span), + TypeValue(proc_macro2::Span), + ExtraConstants(proc_macro2::Span), + Composite(proc_macro2::Span), +} + +impl PalletAttr { + fn span(&self) -> proc_macro2::Span { + match self { + Self::Config(span, _) => *span, + Self::Pallet(span) => *span, + Self::Hooks(span) => *span, + Self::Tasks(span) => *span, + Self::TaskCondition(span) => *span, + Self::TaskIndex(span) => *span, + Self::TaskList(span) => *span, + Self::Error(span) => *span, + Self::RuntimeTask(span) => *span, + Self::RuntimeCall(_, span) => *span, + Self::RuntimeEvent(span) => *span, + Self::RuntimeOrigin(span) => *span, + Self::Inherent(span) => *span, + Self::Storage(span) => *span, + Self::GenesisConfig(span) => *span, + 
Self::GenesisBuild(span) => *span, + Self::ValidateUnsigned(span) => *span, + Self::TypeValue(span) => *span, + Self::ExtraConstants(span) => *span, + Self::Composite(span) => *span, + } + } +} + +impl syn::parse::Parse for PalletAttr { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::config) { + let span = content.parse::()?.span(); + let with_default = content.peek(syn::token::Paren); + if with_default { + let inside_config; + let _paren = syn::parenthesized!(inside_config in content); + inside_config.parse::()?; + } + Ok(PalletAttr::Config(span, with_default)) + } else if lookahead.peek(keyword::pallet) { + Ok(PalletAttr::Pallet(content.parse::()?.span())) + } else if lookahead.peek(keyword::hooks) { + Ok(PalletAttr::Hooks(content.parse::()?.span())) + } else if lookahead.peek(keyword::call) { + let span = content.parse::().expect("peeked").span(); + let attr = match content.is_empty() { + true => None, + false => Some(InheritedCallWeightAttr::parse(&content)?), + }; + Ok(PalletAttr::RuntimeCall(attr, span)) + } else if lookahead.peek(keyword::tasks_experimental) { + Ok(PalletAttr::Tasks(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_enum) { + Ok(PalletAttr::RuntimeTask(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_condition) { + Ok(PalletAttr::TaskCondition(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_index) { + Ok(PalletAttr::TaskIndex(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_list) { + Ok(PalletAttr::TaskList(content.parse::()?.span())) + } else if lookahead.peek(keyword::error) { + Ok(PalletAttr::Error(content.parse::()?.span())) + } else if lookahead.peek(keyword::event) { + Ok(PalletAttr::RuntimeEvent(content.parse::()?.span())) + } else if 
lookahead.peek(keyword::origin) { + Ok(PalletAttr::RuntimeOrigin(content.parse::()?.span())) + } else if lookahead.peek(keyword::inherent) { + Ok(PalletAttr::Inherent(content.parse::()?.span())) + } else if lookahead.peek(keyword::storage) { + Ok(PalletAttr::Storage(content.parse::()?.span())) + } else if lookahead.peek(keyword::genesis_config) { + Ok(PalletAttr::GenesisConfig(content.parse::()?.span())) + } else if lookahead.peek(keyword::genesis_build) { + Ok(PalletAttr::GenesisBuild(content.parse::()?.span())) + } else if lookahead.peek(keyword::validate_unsigned) { + Ok(PalletAttr::ValidateUnsigned(content.parse::()?.span())) + } else if lookahead.peek(keyword::type_value) { + Ok(PalletAttr::TypeValue(content.parse::()?.span())) + } else if lookahead.peek(keyword::extra_constants) { + Ok(PalletAttr::ExtraConstants(content.parse::()?.span())) + } else if lookahead.peek(keyword::composite_enum) { + Ok(PalletAttr::Composite(content.parse::()?.span())) + } else { + Err(lookahead.error()) + } + } +} + +/// The optional weight annotation on a `#[pallet::call]` like `#[pallet::call(weight($type))]`. +#[derive(Clone)] +pub struct InheritedCallWeightAttr { + pub typename: syn::Type, + pub span: proc_macro2::Span, +} + +impl syn::parse::Parse for InheritedCallWeightAttr { + // Parses `(weight($type))` or `(weight = $type)`. 
+ fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + syn::parenthesized!(content in input); + content.parse::()?; + let lookahead = content.lookahead1(); + + let buffer = if lookahead.peek(syn::token::Paren) { + let inner; + syn::parenthesized!(inner in content); + inner + } else if lookahead.peek(syn::Token![=]) { + content.parse::().expect("peeked"); + content + } else { + return Err(lookahead.error()) + }; + + Ok(Self { typename: buffer.parse()?, span: input.span() }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/origin.rs b/support/procedural-fork/src/pallet/parse/origin.rs new file mode 100644 index 000000000..76e2a8841 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/origin.rs @@ -0,0 +1,72 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use syn::spanned::Spanned; + +/// Definition of the pallet origin type. +/// +/// Either: +/// * `type Origin` +/// * `struct Origin` +/// * `enum Origin` +pub struct OriginDef { + /// The index of item in pallet module. + pub index: usize, + pub has_instance: bool, + pub is_generic: bool, + /// A set of usage of instance, must be check for consistency with trait. 
+ pub instances: Vec, +} + +impl OriginDef { + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item_span = item.span(); + let (vis, ident, generics) = match &item { + syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Type(item) => (&item.vis, &item.ident, &item.generics), + _ => { + let msg = "Invalid pallet::origin, expected enum or struct or type"; + return Err(syn::Error::new(item.span(), msg)) + }, + }; + + let has_instance = generics.params.len() == 2; + let is_generic = !generics.params.is_empty(); + + let mut instances = vec![]; + if let Some(u) = helper::check_type_def_optional_gen(generics, item.span())? { + instances.push(u); + } else { + // construct_runtime only allow generic event for instantiable pallet. + instances.push(helper::InstanceUsage { has_instance: false, span: ident.span() }) + } + + if !matches!(vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::origin, Origin must be public"; + return Err(syn::Error::new(item_span, msg)) + } + + if ident != "Origin" { + let msg = "Invalid pallet::origin, ident must `Origin`"; + return Err(syn::Error::new(ident.span(), msg)) + } + + Ok(OriginDef { index, has_instance, is_generic, instances }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/pallet_struct.rs b/support/procedural-fork/src/pallet/parse/pallet_struct.rs new file mode 100644 index 000000000..b64576099 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/pallet_struct.rs @@ -0,0 +1,149 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use quote::ToTokens; +use syn::spanned::Spanned; + +/// List of additional token to be used for parsing. +mod keyword { + syn::custom_keyword!(pallet); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(without_storage_info); + syn::custom_keyword!(storage_version); +} + +/// Definition of the pallet pallet. +pub struct PalletStructDef { + /// The index of item in pallet pallet. + pub index: usize, + /// A set of usage of instance, must be check for consistency with config trait. + pub instances: Vec, + /// The keyword Pallet used (contains span). + pub pallet: keyword::Pallet, + /// The span of the pallet::pallet attribute. + pub attr_span: proc_macro2::Span, + /// Whether to specify the storages max encoded len when implementing `StorageInfoTrait`. + /// Contains the span of the attribute. + pub without_storage_info: Option, + /// The in-code storage version of the pallet. + pub storage_version: Option, +} + +/// Parse for one variant of: +/// * `#[pallet::without_storage_info]` +/// * `#[pallet::storage_version(STORAGE_VERSION)]` +pub enum PalletStructAttr { + WithoutStorageInfoTrait(proc_macro2::Span), + StorageVersion { storage_version: syn::Path, span: proc_macro2::Span }, +} + +impl PalletStructAttr { + fn span(&self) -> proc_macro2::Span { + match self { + Self::WithoutStorageInfoTrait(span) | Self::StorageVersion { span, .. 
} => *span, + } + } +} + +impl syn::parse::Parse for PalletStructAttr { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::without_storage_info) { + let span = content.parse::()?.span(); + Ok(Self::WithoutStorageInfoTrait(span)) + } else if lookahead.peek(keyword::storage_version) { + let span = content.parse::()?.span(); + + let version_content; + syn::parenthesized!(version_content in content); + let storage_version = version_content.parse::()?; + + Ok(Self::StorageVersion { storage_version, span }) + } else { + Err(lookahead.error()) + } + } +} + +impl PalletStructDef { + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Struct(item) = item { + item + } else { + let msg = "Invalid pallet::pallet, expected struct definition"; + return Err(syn::Error::new(item.span(), msg)) + }; + + let mut without_storage_info = None; + let mut storage_version_found = None; + + let struct_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + for attr in struct_attrs { + match attr { + PalletStructAttr::WithoutStorageInfoTrait(span) + if without_storage_info.is_none() => + { + without_storage_info = Some(span); + }, + PalletStructAttr::StorageVersion { storage_version, .. 
} + if storage_version_found.is_none() => + { + storage_version_found = Some(storage_version); + }, + attr => { + let msg = "Unexpected duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)) + }, + } + } + + let pallet = syn::parse2::(item.ident.to_token_stream())?; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::pallet, Pallet must be public"; + return Err(syn::Error::new(item.span(), msg)) + } + + if item.generics.where_clause.is_some() { + let msg = "Invalid pallet::pallet, where clause not supported on Pallet declaration"; + return Err(syn::Error::new(item.generics.where_clause.span(), msg)) + } + + let instances = + vec![helper::check_type_def_gen_no_bounds(&item.generics, item.ident.span())?]; + + Ok(Self { + index, + instances, + pallet, + attr_span, + without_storage_info, + storage_version: storage_version_found, + }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/storage.rs b/support/procedural-fork/src/pallet/parse/storage.rs new file mode 100644 index 000000000..9d96a18b5 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/storage.rs @@ -0,0 +1,947 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use super::helper; +use frame_support_procedural_tools::get_doc_literals; +use quote::ToTokens; +use std::collections::HashMap; +use syn::spanned::Spanned; + +/// List of additional token to be used for parsing. +mod keyword { + syn::custom_keyword!(Error); + syn::custom_keyword!(pallet); + syn::custom_keyword!(getter); + syn::custom_keyword!(storage_prefix); + syn::custom_keyword!(unbounded); + syn::custom_keyword!(whitelist_storage); + syn::custom_keyword!(disable_try_decode_storage); + syn::custom_keyword!(OptionQuery); + syn::custom_keyword!(ResultQuery); + syn::custom_keyword!(ValueQuery); +} + +/// Parse for one of the following: +/// * `#[pallet::getter(fn dummy)]` +/// * `#[pallet::storage_prefix = "CustomName"]` +/// * `#[pallet::unbounded]` +/// * `#[pallet::whitelist_storage] +/// * `#[pallet::disable_try_decode_storage]` +pub enum PalletStorageAttr { + Getter(syn::Ident, proc_macro2::Span), + StorageName(syn::LitStr, proc_macro2::Span), + Unbounded(proc_macro2::Span), + WhitelistStorage(proc_macro2::Span), + DisableTryDecodeStorage(proc_macro2::Span), +} + +impl PalletStorageAttr { + fn attr_span(&self) -> proc_macro2::Span { + match self { + Self::Getter(_, span) | + Self::StorageName(_, span) | + Self::Unbounded(span) | + Self::WhitelistStorage(span) => *span, + Self::DisableTryDecodeStorage(span) => *span, + } + } +} + +impl syn::parse::Parse for PalletStorageAttr { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let attr_span = input.span(); + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::getter) { + content.parse::()?; + + let generate_content; + syn::parenthesized!(generate_content in content); + generate_content.parse::()?; + Ok(Self::Getter(generate_content.parse::()?, attr_span)) + } else if lookahead.peek(keyword::storage_prefix) { + content.parse::()?; + content.parse::()?; + + let 
renamed_prefix = content.parse::()?; + // Ensure the renamed prefix is a proper Rust identifier + syn::parse_str::(&renamed_prefix.value()).map_err(|_| { + let msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); + syn::Error::new(renamed_prefix.span(), msg) + })?; + + Ok(Self::StorageName(renamed_prefix, attr_span)) + } else if lookahead.peek(keyword::unbounded) { + content.parse::()?; + + Ok(Self::Unbounded(attr_span)) + } else if lookahead.peek(keyword::whitelist_storage) { + content.parse::()?; + Ok(Self::WhitelistStorage(attr_span)) + } else if lookahead.peek(keyword::disable_try_decode_storage) { + content.parse::()?; + Ok(Self::DisableTryDecodeStorage(attr_span)) + } else { + Err(lookahead.error()) + } + } +} + +struct PalletStorageAttrInfo { + getter: Option, + rename_as: Option, + unbounded: bool, + whitelisted: bool, + try_decode: bool, +} + +impl PalletStorageAttrInfo { + fn from_attrs(attrs: Vec) -> syn::Result { + let mut getter = None; + let mut rename_as = None; + let mut unbounded = false; + let mut whitelisted = false; + let mut disable_try_decode_storage = false; + for attr in attrs { + match attr { + PalletStorageAttr::Getter(ident, ..) if getter.is_none() => getter = Some(ident), + PalletStorageAttr::StorageName(name, ..) if rename_as.is_none() => + rename_as = Some(name), + PalletStorageAttr::Unbounded(..) if !unbounded => unbounded = true, + PalletStorageAttr::WhitelistStorage(..) if !whitelisted => whitelisted = true, + PalletStorageAttr::DisableTryDecodeStorage(..) if !disable_try_decode_storage => + disable_try_decode_storage = true, + attr => + return Err(syn::Error::new( + attr.attr_span(), + "Invalid attribute: Duplicate attribute", + )), + } + } + + Ok(PalletStorageAttrInfo { + getter, + rename_as, + unbounded, + whitelisted, + try_decode: !disable_try_decode_storage, + }) + } +} + +/// The value and key types used by storages. Needed to expand metadata. 
+pub enum Metadata { + Value { value: syn::Type }, + Map { value: syn::Type, key: syn::Type }, + CountedMap { value: syn::Type, key: syn::Type }, + DoubleMap { value: syn::Type, key1: syn::Type, key2: syn::Type }, + NMap { keys: Vec, keygen: syn::Type, value: syn::Type }, + CountedNMap { keys: Vec, keygen: syn::Type, value: syn::Type }, +} + +pub enum QueryKind { + OptionQuery, + ResultQuery(syn::Path, syn::Ident), + ValueQuery, +} + +/// Definition of a storage, storage is a storage type like +/// `type MyStorage = StorageValue` +/// The keys and values types are parsed in order to get metadata +pub struct StorageDef { + /// The index of storage item in pallet module. + pub index: usize, + /// Visibility of the storage type. + pub vis: syn::Visibility, + /// The type ident, to generate the StoragePrefix for. + pub ident: syn::Ident, + /// The keys and value metadata of the storage. + pub metadata: Metadata, + /// The doc associated to the storage. + pub docs: Vec, + /// A set of usage of instance, must be check for consistency with config. + pub instances: Vec, + /// Optional getter to generate. If some then query_kind is ensured to be some as well. + pub getter: Option, + /// Optional expression that evaluates to a type that can be used as StoragePrefix instead of + /// ident. + pub rename_as: Option, + /// Whereas the querytype of the storage is OptionQuery, ResultQuery or ValueQuery. + /// Note that this is best effort as it can't be determined when QueryKind is generic, and + /// result can be false if user do some unexpected type alias. + pub query_kind: Option, + /// Where clause of type definition. + pub where_clause: Option, + /// The span of the pallet::storage attribute. + pub attr_span: proc_macro2::Span, + /// The `cfg` attributes. + pub cfg_attrs: Vec, + /// If generics are named (e.g. `StorageValue`) then this contains all the + /// generics of the storage. + /// If generics are not named, this is none. 
+ pub named_generics: Option, + /// If the value stored in this storage is unbounded. + pub unbounded: bool, + /// Whether or not reads to this storage key will be ignored by benchmarking + pub whitelisted: bool, + /// Whether or not to try to decode the storage key when running try-runtime checks. + pub try_decode: bool, + /// Whether or not a default hasher is allowed to replace `_` + pub use_default_hasher: bool, +} + +/// The parsed generic from the +#[derive(Clone)] +pub enum StorageGenerics { + DoubleMap { + hasher1: syn::Type, + key1: syn::Type, + hasher2: syn::Type, + key2: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + Map { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + CountedMap { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + Value { + value: syn::Type, + query_kind: Option, + on_empty: Option, + }, + NMap { + keygen: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + CountedNMap { + keygen: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, +} + +impl StorageGenerics { + /// Return the metadata from the defined generics + fn metadata(&self) -> syn::Result { + let res = match self.clone() { + Self::DoubleMap { value, key1, key2, .. } => Metadata::DoubleMap { value, key1, key2 }, + Self::Map { value, key, .. } => Metadata::Map { value, key }, + Self::CountedMap { value, key, .. } => Metadata::CountedMap { value, key }, + Self::Value { value, .. } => Metadata::Value { value }, + Self::NMap { keygen, value, .. } => + Metadata::NMap { keys: collect_keys(&keygen)?, keygen, value }, + Self::CountedNMap { keygen, value, .. 
} => + Metadata::CountedNMap { keys: collect_keys(&keygen)?, keygen, value }, + }; + + Ok(res) + } + + /// Return the query kind from the defined generics + fn query_kind(&self) -> Option { + match &self { + Self::DoubleMap { query_kind, .. } | + Self::Map { query_kind, .. } | + Self::CountedMap { query_kind, .. } | + Self::Value { query_kind, .. } | + Self::NMap { query_kind, .. } | + Self::CountedNMap { query_kind, .. } => query_kind.clone(), + } + } +} + +enum StorageKind { + Value, + Map, + CountedMap, + DoubleMap, + NMap, + CountedNMap, +} + +/// Check the generics in the `map` contains the generics in `gen` may contains generics in +/// `optional_gen`, and doesn't contains any other. +fn check_generics( + map: &HashMap, + mandatory_generics: &[&str], + optional_generics: &[&str], + storage_type_name: &str, + args_span: proc_macro2::Span, +) -> syn::Result<()> { + let mut errors = vec![]; + + let expectation = { + let mut e = format!( + "`{}` expect generics {}and optional generics {}", + storage_type_name, + mandatory_generics + .iter() + .map(|name| format!("`{}`, ", name)) + .collect::(), + &optional_generics.iter().map(|name| format!("`{}`, ", name)).collect::(), + ); + e.pop(); + e.pop(); + e.push('.'); + e + }; + + for (gen_name, gen_binding) in map { + if !mandatory_generics.contains(&gen_name.as_str()) && + !optional_generics.contains(&gen_name.as_str()) + { + let msg = format!( + "Invalid pallet::storage, Unexpected generic `{}` for `{}`. 
{}", + gen_name, storage_type_name, expectation, + ); + errors.push(syn::Error::new(gen_binding.span(), msg)); + } + } + + for mandatory_generic in mandatory_generics { + if !map.contains_key(&mandatory_generic.to_string()) { + let msg = format!( + "Invalid pallet::storage, cannot find `{}` generic, required for `{}`.", + mandatory_generic, storage_type_name + ); + errors.push(syn::Error::new(args_span, msg)); + } + } + + let mut errors = errors.drain(..); + if let Some(mut error) = errors.next() { + for other_error in errors { + error.combine(other_error); + } + Err(error) + } else { + Ok(()) + } +} + +/// Returns `(named generics, metadata, query kind, use_default_hasher)` +fn process_named_generics( + storage: &StorageKind, + args_span: proc_macro2::Span, + args: &[syn::AssocType], + dev_mode: bool, +) -> syn::Result<(Option, Metadata, Option, bool)> { + let mut parsed = HashMap::::new(); + + // Ensure no duplicate. + for arg in args { + if let Some(other) = parsed.get(&arg.ident.to_string()) { + let msg = "Invalid pallet::storage, Duplicated named generic"; + let mut err = syn::Error::new(arg.ident.span(), msg); + err.combine(syn::Error::new(other.ident.span(), msg)); + return Err(err) + } + parsed.insert(arg.ident.to_string(), arg.clone()); + } + + let mut map_mandatory_generics = vec!["Key", "Value"]; + let mut map_optional_generics = vec!["QueryKind", "OnEmpty", "MaxValues"]; + if dev_mode { + map_optional_generics.push("Hasher"); + } else { + map_mandatory_generics.push("Hasher"); + } + + let generics = match storage { + StorageKind::Value => { + check_generics( + &parsed, + &["Value"], + &["QueryKind", "OnEmpty"], + "StorageValue", + args_span, + )?; + + StorageGenerics::Value { + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + } + }, + StorageKind::Map => { + 
check_generics( + &parsed, + &map_mandatory_generics, + &map_optional_generics, + "StorageMap", + args_span, + )?; + + StorageGenerics::Map { + hasher: parsed + .remove("Hasher") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + StorageKind::CountedMap => { + check_generics( + &parsed, + &map_mandatory_generics, + &map_optional_generics, + "CountedStorageMap", + args_span, + )?; + + StorageGenerics::CountedMap { + hasher: parsed + .remove("Hasher") + .map(|binding| binding.ty) + .unwrap_or(syn::Type::Verbatim(quote::quote! { Blake2_128Concat })), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + StorageKind::DoubleMap => { + let mut double_map_mandatory_generics = vec!["Key1", "Key2", "Value"]; + if dev_mode { + map_optional_generics.extend(["Hasher1", "Hasher2"]); + } else { + double_map_mandatory_generics.extend(["Hasher1", "Hasher2"]); + } + + check_generics( + &parsed, + &double_map_mandatory_generics, + &map_optional_generics, + "StorageDoubleMap", + args_span, + )?; + + StorageGenerics::DoubleMap { + hasher1: parsed + .remove("Hasher1") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key1: parsed 
+ .remove("Key1") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + hasher2: parsed + .remove("Hasher2") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key2: parsed + .remove("Key2") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + StorageKind::NMap => { + check_generics( + &parsed, + &["Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "StorageNMap", + args_span, + )?; + + StorageGenerics::NMap { + keygen: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + StorageKind::CountedNMap => { + check_generics( + &parsed, + &["Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "CountedStorageNMap", + args_span, + )?; + + StorageGenerics::CountedNMap { + keygen: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + }; + + let metadata = generics.metadata()?; + let query_kind = generics.query_kind(); + + 
Ok((Some(generics), metadata, query_kind, false)) +} + +/// Returns `(named generics, metadata, query kind, use_default_hasher)` +fn process_unnamed_generics( + storage: &StorageKind, + args_span: proc_macro2::Span, + args: &[syn::Type], + dev_mode: bool, +) -> syn::Result<(Option, Metadata, Option, bool)> { + let retrieve_arg = |arg_pos| { + args.get(arg_pos).cloned().ok_or_else(|| { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic argument, \ + expect at least {} args, found {}.", + arg_pos + 1, + args.len(), + ); + syn::Error::new(args_span, msg) + }) + }; + + let prefix_arg = retrieve_arg(0)?; + syn::parse2::(prefix_arg.to_token_stream()).map_err(|e| { + let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ + first generic argument must be `_`, the argument is then replaced by macro."; + let mut err = syn::Error::new(prefix_arg.span(), msg); + err.combine(e); + err + })?; + + let use_default_hasher = |arg_pos| { + let arg = retrieve_arg(arg_pos)?; + if syn::parse2::(arg.to_token_stream()).is_ok() { + if dev_mode { + Ok(true) + } else { + let msg = "`_` can only be used in dev_mode. Please specify an appropriate hasher."; + Err(syn::Error::new(arg.span(), msg)) + } + } else { + Ok(false) + } + }; + + let res = match storage { + StorageKind::Value => + (None, Metadata::Value { value: retrieve_arg(1)? }, retrieve_arg(2).ok(), false), + StorageKind::Map => ( + None, + Metadata::Map { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, + retrieve_arg(4).ok(), + use_default_hasher(1)?, + ), + StorageKind::CountedMap => ( + None, + Metadata::CountedMap { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, + retrieve_arg(4).ok(), + use_default_hasher(1)?, + ), + StorageKind::DoubleMap => ( + None, + Metadata::DoubleMap { + key1: retrieve_arg(2)?, + key2: retrieve_arg(4)?, + value: retrieve_arg(5)?, + }, + retrieve_arg(6).ok(), + use_default_hasher(1)? 
&& use_default_hasher(3)?, + ), + StorageKind::NMap => { + let keygen = retrieve_arg(1)?; + let keys = collect_keys(&keygen)?; + ( + None, + Metadata::NMap { keys, keygen, value: retrieve_arg(2)? }, + retrieve_arg(3).ok(), + false, + ) + }, + StorageKind::CountedNMap => { + let keygen = retrieve_arg(1)?; + let keys = collect_keys(&keygen)?; + ( + None, + Metadata::CountedNMap { keys, keygen, value: retrieve_arg(2)? }, + retrieve_arg(3).ok(), + false, + ) + }, + }; + + Ok(res) +} + +/// Returns `(named generics, metadata, query kind, use_default_hasher)` +fn process_generics( + segment: &syn::PathSegment, + dev_mode: bool, +) -> syn::Result<(Option, Metadata, Option, bool)> { + let storage_kind = match &*segment.ident.to_string() { + "StorageValue" => StorageKind::Value, + "StorageMap" => StorageKind::Map, + "CountedStorageMap" => StorageKind::CountedMap, + "StorageDoubleMap" => StorageKind::DoubleMap, + "StorageNMap" => StorageKind::NMap, + "CountedStorageNMap" => StorageKind::CountedNMap, + found => { + let msg = format!( + "Invalid pallet::storage, expected ident: `StorageValue` or \ + `StorageMap` or `CountedStorageMap` or `StorageDoubleMap` or `StorageNMap` or `CountedStorageNMap` \ + in order to expand metadata, found `{}`.", + found, + ); + return Err(syn::Error::new(segment.ident.span(), msg)) + }, + }; + + let args_span = segment.arguments.span(); + + let args = match &segment.arguments { + syn::PathArguments::AngleBracketed(args) if !args.args.is_empty() => args, + _ => { + let msg = "Invalid pallet::storage, invalid number of generic generic arguments, \ + expect more that 0 generic arguments."; + return Err(syn::Error::new(segment.span(), msg)) + }, + }; + + if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::Type(_))) { + let args = args + .args + .iter() + .map(|gen| match gen { + syn::GenericArgument::Type(gen) => gen.clone(), + _ => unreachable!("It is asserted above that all generics are types"), + }) + .collect::>(); + 
process_unnamed_generics(&storage_kind, args_span, &args, dev_mode) + } else if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::AssocType(_))) { + let args = args + .args + .iter() + .map(|gen| match gen { + syn::GenericArgument::AssocType(gen) => gen.clone(), + _ => unreachable!("It is asserted above that all generics are bindings"), + }) + .collect::>(); + process_named_generics(&storage_kind, args_span, &args, dev_mode) + } else { + let msg = "Invalid pallet::storage, invalid generic declaration for storage. Expect only \ + type generics or binding generics, e.g. `` or \ + ``."; + Err(syn::Error::new(segment.span(), msg)) + } +} + +/// Parse the 2nd type argument to `StorageNMap` and return its keys. +fn collect_keys(keygen: &syn::Type) -> syn::Result> { + if let syn::Type::Tuple(tup) = keygen { + tup.elems.iter().map(extract_key).collect::>>() + } else { + Ok(vec![extract_key(keygen)?]) + } +} + +/// In `Key`, extract K and return it. +fn extract_key(ty: &syn::Type) -> syn::Result { + let typ = if let syn::Type::Path(typ) = ty { + typ + } else { + let msg = "Invalid pallet::storage, expected type path"; + return Err(syn::Error::new(ty.span(), msg)) + }; + + let key_struct = typ.path.segments.last().ok_or_else(|| { + let msg = "Invalid pallet::storage, expected type path with at least one segment"; + syn::Error::new(typ.path.span(), msg) + })?; + if key_struct.ident != "Key" && key_struct.ident != "NMapKey" { + let msg = "Invalid pallet::storage, expected Key or NMapKey struct"; + return Err(syn::Error::new(key_struct.ident.span(), msg)) + } + + let ty_params = if let syn::PathArguments::AngleBracketed(args) = &key_struct.arguments { + args + } else { + let msg = "Invalid pallet::storage, expected angle bracketed arguments"; + return Err(syn::Error::new(key_struct.arguments.span(), msg)) + }; + + if ty_params.args.len() != 2 { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ + for Key struct, expected 2 
args, found {}", + ty_params.args.len() + ); + return Err(syn::Error::new(ty_params.span(), msg)) + } + + let key = match &ty_params.args[1] { + syn::GenericArgument::Type(key_ty) => key_ty.clone(), + _ => { + let msg = "Invalid pallet::storage, expected type"; + return Err(syn::Error::new(ty_params.args[1].span(), msg)) + }, + }; + + Ok(key) +} + +impl StorageDef { + /// Return the storage prefix for this storage item + pub fn prefix(&self) -> String { + self.rename_as + .as_ref() + .map(syn::LitStr::value) + .unwrap_or_else(|| self.ident.to_string()) + } + + /// Return either the span of the ident or the span of the literal in the + /// #[storage_prefix] attribute + pub fn prefix_span(&self) -> proc_macro2::Span { + self.rename_as + .as_ref() + .map(syn::LitStr::span) + .unwrap_or_else(|| self.ident.span()) + } + + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + dev_mode: bool, + ) -> syn::Result { + let item = if let syn::Item::Type(item) = item { + item + } else { + return Err(syn::Error::new(item.span(), "Invalid pallet::storage, expect item type.")) + }; + + let attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + let PalletStorageAttrInfo { getter, rename_as, mut unbounded, whitelisted, try_decode } = + PalletStorageAttrInfo::from_attrs(attrs)?; + + // set all storages to be unbounded if dev_mode is enabled + unbounded |= dev_mode; + let cfg_attrs = helper::get_item_cfg_attrs(&item.attrs); + + let instances = vec![helper::check_type_def_gen(&item.generics, item.ident.span())?]; + + let where_clause = item.generics.where_clause.clone(); + let docs = get_doc_literals(&item.attrs); + + let typ = if let syn::Type::Path(typ) = &*item.ty { + typ + } else { + let msg = "Invalid pallet::storage, expected type path"; + return Err(syn::Error::new(item.ty.span(), msg)) + }; + + if typ.path.segments.len() != 1 { + let msg = "Invalid pallet::storage, expected type path with one segment"; + return 
Err(syn::Error::new(item.ty.span(), msg)) + } + + let (named_generics, metadata, query_kind, use_default_hasher) = + process_generics(&typ.path.segments[0], dev_mode)?; + + let query_kind = query_kind + .map(|query_kind| { + use syn::{ + AngleBracketedGenericArguments, GenericArgument, Path, PathArguments, Type, + TypePath, + }; + + let result_query = match query_kind { + Type::Path(path) + if path + .path + .segments + .last() + .map_or(false, |s| s.ident == "OptionQuery") => + return Ok(Some(QueryKind::OptionQuery)), + Type::Path(TypePath { path: Path { segments, .. }, .. }) + if segments.last().map_or(false, |s| s.ident == "ResultQuery") => + segments + .last() + .expect("segments is checked to have the last value; qed") + .clone(), + Type::Path(path) + if path.path.segments.last().map_or(false, |s| s.ident == "ValueQuery") => + return Ok(Some(QueryKind::ValueQuery)), + _ => return Ok(None), + }; + + let error_type = match result_query.arguments { + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + args, .. + }) => { + if args.len() != 1 { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ + for ResultQuery, expected 1 type argument, found {}", + args.len(), + ); + return Err(syn::Error::new(args.span(), msg)) + } + + args[0].clone() + }, + args => { + let msg = format!( + "Invalid pallet::storage, unexpected generic args for ResultQuery, \ + expected angle-bracketed arguments, found `{}`", + args.to_token_stream().to_string() + ); + return Err(syn::Error::new(args.span(), msg)) + }, + }; + + match error_type { + GenericArgument::Type(Type::Path(TypePath { + path: Path { segments: err_variant, leading_colon }, + .. 
+ })) => { + if err_variant.len() < 2 { + let msg = format!( + "Invalid pallet::storage, unexpected number of path segments for \ + the generics in ResultQuery, expected a path with at least 2 \ + segments, found {}", + err_variant.len(), + ); + return Err(syn::Error::new(err_variant.span(), msg)) + } + let mut error = err_variant.clone(); + let err_variant = error + .pop() + .expect("Checked to have at least 2; qed") + .into_value() + .ident; + + // Necessary here to eliminate the last double colon + let last = + error.pop().expect("Checked to have at least 2; qed").into_value(); + error.push_value(last); + + Ok(Some(QueryKind::ResultQuery( + syn::Path { leading_colon, segments: error }, + err_variant, + ))) + }, + gen_arg => { + let msg = format!( + "Invalid pallet::storage, unexpected generic argument kind, expected a \ + type path to a `PalletError` enum variant, found `{}`", + gen_arg.to_token_stream().to_string(), + ); + Err(syn::Error::new(gen_arg.span(), msg)) + }, + } + }) + .transpose()? + .unwrap_or(Some(QueryKind::OptionQuery)); + + if let (None, Some(getter)) = (query_kind.as_ref(), getter.as_ref()) { + let msg = "Invalid pallet::storage, cannot generate getter because QueryKind is not \ + identifiable. QueryKind must be `OptionQuery`, `ResultQuery`, `ValueQuery`, or default \ + one to be identifiable."; + return Err(syn::Error::new(getter.span(), msg)) + } + + Ok(StorageDef { + attr_span, + index, + vis: item.vis.clone(), + ident: item.ident.clone(), + instances, + metadata, + docs, + getter, + rename_as, + query_kind, + where_clause, + cfg_attrs, + named_generics, + unbounded, + whitelisted, + try_decode, + use_default_hasher, + }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/tasks.rs b/support/procedural-fork/src/pallet/parse/tasks.rs new file mode 100644 index 000000000..6405bb415 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/tasks.rs @@ -0,0 +1,968 @@ +// This file is part of Substrate. 
+ +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Home of the parsing code for the Tasks API + +use std::collections::HashSet; + +#[cfg(test)] +use crate::assert_parse_error_matches; + +#[cfg(test)] +use crate::pallet::parse::tests::simulate_manifest_dir; + +use derive_syn_parse::Parse; +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use proc_macro2::TokenStream as TokenStream2; +use quote::{quote, ToTokens}; +use syn::{ + parse::ParseStream, + parse2, + spanned::Spanned, + token::{Bracket, Paren, PathSep, Pound}, + Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, + PathArguments, Result, TypePath, +}; + +pub mod keywords { + use syn::custom_keyword; + + custom_keyword!(tasks_experimental); + custom_keyword!(task_enum); + custom_keyword!(task_list); + custom_keyword!(task_condition); + custom_keyword!(task_index); + custom_keyword!(task_weight); + custom_keyword!(pallet); +} + +/// Represents the `#[pallet::tasks_experimental]` attribute and its attached item. Also includes +/// metadata about the linked [`TaskEnumDef`] if applicable. 
+#[derive(Clone, Debug)] +pub struct TasksDef { + pub tasks_attr: Option, + pub tasks: Vec, + pub item_impl: ItemImpl, + /// Path to `frame_support` + pub scrate: Path, + pub enum_ident: Ident, + pub enum_arguments: PathArguments, +} + +impl syn::parse::Parse for TasksDef { + fn parse(input: ParseStream) -> Result { + let item_impl: ItemImpl = input.parse()?; + let (tasks_attrs, normal_attrs) = partition_tasks_attrs(&item_impl); + let tasks_attr = match tasks_attrs.first() { + Some(attr) => Some(parse2::(attr.to_token_stream())?), + None => None, + }; + if let Some(extra_tasks_attr) = tasks_attrs.get(1) { + return Err(Error::new( + extra_tasks_attr.span(), + "unexpected extra `#[pallet::tasks_experimental]` attribute", + )) + } + let tasks: Vec = if tasks_attr.is_some() { + item_impl + .items + .clone() + .into_iter() + .filter(|impl_item| matches!(impl_item, ImplItem::Fn(_))) + .map(|item| parse2::(item.to_token_stream())) + .collect::>()? + } else { + Vec::new() + }; + let mut task_indices = HashSet::::new(); + for task in tasks.iter() { + let task_index = &task.index_attr.meta.index; + if !task_indices.insert(task_index.clone()) { + return Err(Error::new( + task_index.span(), + format!("duplicate task index `{}`", task_index), + )) + } + } + let mut item_impl = item_impl; + item_impl.attrs = normal_attrs; + + // we require the path on the impl to be a TypePath + let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; + let segments = enum_path.path.segments.iter().collect::>(); + let (Some(last_seg), None) = (segments.get(0), segments.get(1)) else { + return Err(Error::new( + enum_path.span(), + "if specified manually, the task enum must be defined locally in this \ + pallet and cannot be a re-export", + )) + }; + let enum_ident = last_seg.ident.clone(); + let enum_arguments = last_seg.arguments.clone(); + + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. 
Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; + + Ok(TasksDef { tasks_attr, item_impl, tasks, scrate, enum_ident, enum_arguments }) + } +} + +/// Parsing for a `#[pallet::tasks_experimental]` attr. +pub type PalletTasksAttr = PalletTaskAttr; + +/// Parsing for any of the attributes that can be used within a `#[pallet::tasks_experimental]` +/// [`ItemImpl`]. +pub type TaskAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet::task_index]` attr. +pub type TaskIndexAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet::task_condition]` attr. +pub type TaskConditionAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet::task_list]` attr. +pub type TaskListAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet::task_weight]` attr. +pub type TaskWeightAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet:task_enum]` attr. +pub type PalletTaskEnumAttr = PalletTaskAttr; + +/// Parsing for a manually-specified (or auto-generated) task enum, optionally including the +/// attached `#[pallet::task_enum]` attribute. +#[derive(Clone, Debug)] +pub struct TaskEnumDef { + pub attr: Option, + pub item_enum: ItemEnum, + pub scrate: Path, + pub type_use_generics: TokenStream2, +} + +impl syn::parse::Parse for TaskEnumDef { + fn parse(input: ParseStream) -> Result { + let mut item_enum = input.parse::()?; + let attr = extract_pallet_attr(&mut item_enum)?; + let attr = match attr { + Some(attr) => Some(parse2(attr)?), + None => None, + }; + + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; + + let type_use_generics = quote!(T); + + Ok(TaskEnumDef { attr, item_enum, scrate, type_use_generics }) + } +} + +/// Represents an individual tasks within a [`TasksDef`]. 
+#[derive(Debug, Clone)] +pub struct TaskDef { + pub index_attr: TaskIndexAttr, + pub condition_attr: TaskConditionAttr, + pub list_attr: TaskListAttr, + pub weight_attr: TaskWeightAttr, + pub normal_attrs: Vec, + pub item: ImplItemFn, + pub arg_names: Vec, +} + +impl syn::parse::Parse for TaskDef { + fn parse(input: ParseStream) -> Result { + let item = input.parse::()?; + // we only want to activate TaskAttrType parsing errors for tasks-related attributes, + // so we filter them here + let (task_attrs, normal_attrs) = partition_task_attrs(&item); + + let task_attrs: Vec = task_attrs + .into_iter() + .map(|attr| parse2(attr.to_token_stream())) + .collect::>()?; + + let Some(index_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_index(..)]` attribute", + )) + }; + + let Some(condition_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_condition(..)]` attribute", + )) + }; + + let Some(list_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_list(..)]` attribute", + )) + }; + + let Some(weight_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskWeight(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_weight(..)]` attribute", + )) + }; + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_condition(..)]` attribute", + )) + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| 
matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_list(..)]` attribute", + )) + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_index(..)]` attribute", + )) + } + + let mut arg_names = vec![]; + for input in item.sig.inputs.iter() { + match input { + syn::FnArg::Typed(pat_type) => match &*pat_type.pat { + syn::Pat::Ident(ident) => arg_names.push(ident.ident.clone()), + _ => return Err(Error::new(input.span(), "unexpected pattern type")), + }, + _ => return Err(Error::new(input.span(), "unexpected function argument type")), + } + } + + let index_attr = index_attr.try_into().expect("we check the type above; QED"); + let condition_attr = condition_attr.try_into().expect("we check the type above; QED"); + let list_attr = list_attr.try_into().expect("we check the type above; QED"); + let weight_attr = weight_attr.try_into().expect("we check the type above; QED"); + + Ok(TaskDef { + index_attr, + condition_attr, + list_attr, + weight_attr, + normal_attrs, + item, + arg_names, + }) + } +} + +/// The contents of a [`TasksDef`]-related attribute. +#[derive(Parse, Debug, Clone)] +pub enum TaskAttrMeta { + #[peek(keywords::task_list, name = "#[pallet::task_list(..)]")] + TaskList(TaskListAttrMeta), + #[peek(keywords::task_index, name = "#[pallet::task_index(..)")] + TaskIndex(TaskIndexAttrMeta), + #[peek(keywords::task_condition, name = "#[pallet::task_condition(..)")] + TaskCondition(TaskConditionAttrMeta), + #[peek(keywords::task_weight, name = "#[pallet::task_weight(..)")] + TaskWeight(TaskWeightAttrMeta), +} + +/// The contents of a `#[pallet::task_list]` attribute. 
+#[derive(Parse, Debug, Clone)] +pub struct TaskListAttrMeta { + pub task_list: keywords::task_list, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, +} + +/// The contents of a `#[pallet::task_index]` attribute. +#[derive(Parse, Debug, Clone)] +pub struct TaskIndexAttrMeta { + pub task_index: keywords::task_index, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub index: LitInt, +} + +/// The contents of a `#[pallet::task_condition]` attribute. +#[derive(Parse, Debug, Clone)] +pub struct TaskConditionAttrMeta { + pub task_condition: keywords::task_condition, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, +} + +/// The contents of a `#[pallet::task_weight]` attribute. +#[derive(Parse, Debug, Clone)] +pub struct TaskWeightAttrMeta { + pub task_weight: keywords::task_weight, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, +} + +/// The contents of a `#[pallet::task]` attribute. +#[derive(Parse, Debug, Clone)] +pub struct PalletTaskAttr { + pub pound: Pound, + #[bracket] + _bracket: Bracket, + #[inside(_bracket)] + pub pallet: keywords::pallet, + #[inside(_bracket)] + pub colons: PathSep, + #[inside(_bracket)] + pub meta: T, +} + +impl ToTokens for TaskListAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_list = self.task_list; + let expr = &self.expr; + tokens.extend(quote!(#task_list(#expr))); + } +} + +impl ToTokens for TaskConditionAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_condition = self.task_condition; + let expr = &self.expr; + tokens.extend(quote!(#task_condition(#expr))); + } +} + +impl ToTokens for TaskWeightAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_weight = self.task_weight; + let expr = &self.expr; + tokens.extend(quote!(#task_weight(#expr))); + } +} + +impl ToTokens for TaskIndexAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_index = self.task_index; + let index = &self.index; + 
tokens.extend(quote!(#task_index(#index))) + } +} + +impl ToTokens for TaskAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + TaskAttrMeta::TaskList(list) => tokens.extend(list.to_token_stream()), + TaskAttrMeta::TaskIndex(index) => tokens.extend(index.to_token_stream()), + TaskAttrMeta::TaskCondition(condition) => tokens.extend(condition.to_token_stream()), + TaskAttrMeta::TaskWeight(weight) => tokens.extend(weight.to_token_stream()), + } + } +} + +impl ToTokens for PalletTaskAttr { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let pound = self.pound; + let pallet = self.pallet; + let colons = self.colons; + let meta = &self.meta; + tokens.extend(quote!(#pound[#pallet #colons #meta])); + } +} + +impl TryFrom> for TaskIndexAttr { + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskIndexAttr`", value.meta), + )), + } + } +} + +impl TryFrom> for TaskConditionAttr { + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskConditionAttr`", value.meta), + )), + } + } +} + +impl TryFrom> for TaskWeightAttr { + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + 
format!("`{:?}` cannot be converted to a `TaskWeightAttr`", value.meta), + )), + } + } +} + +impl TryFrom> for TaskListAttr { + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), + )), + } + } +} + +fn extract_pallet_attr(item_enum: &mut ItemEnum) -> Result> { + let mut duplicate = None; + let mut attr = None; + item_enum.attrs = item_enum + .attrs + .iter() + .filter(|found_attr| { + let segs = found_attr + .path() + .segments + .iter() + .map(|seg| seg.ident.clone()) + .collect::>(); + let (Some(seg1), Some(_), None) = (segs.get(0), segs.get(1), segs.get(2)) else { + return true + }; + if seg1 != "pallet" { + return true + } + if attr.is_some() { + duplicate = Some(found_attr.span()); + } + attr = Some(found_attr.to_token_stream()); + false + }) + .cloned() + .collect(); + if let Some(span) = duplicate { + return Err(Error::new(span, "only one `#[pallet::_]` attribute is supported on this item")) + } + Ok(attr) +} + +fn partition_tasks_attrs(item_impl: &ItemImpl) -> (Vec, Vec) { + item_impl.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix), None) = + (path_segs.next(), path_segs.next(), path_segs.next()) + else { + return false + }; + prefix.ident == "pallet" && suffix.ident == "tasks_experimental" + }) +} + +fn partition_task_attrs(item: &ImplItemFn) -> (Vec, Vec) { + item.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix)) = (path_segs.next(), path_segs.next()) else { + return false + }; + // N.B: the `PartialEq` impl between `Ident` and `&str` is more efficient than + // 
parsing and makes no stack or heap allocations + prefix.ident == "pallet" && + (suffix.ident == "tasks_experimental" || + suffix.ident == "task_list" || + suffix.ident == "task_condition" || + suffix.ident == "task_weight" || + suffix.ident == "task_index") + }) +} + +#[test] +fn test_parse_task_list_() { + parse2::(quote!(#[pallet::task_list(Something::iter())])).unwrap(); + parse2::(quote!(#[pallet::task_list(Numbers::::iter_keys())])).unwrap(); + parse2::(quote!(#[pallet::task_list(iter())])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list()])), + "expected an expression" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list])), + "expected parentheses" + ); +} + +#[test] +fn test_parse_task_index() { + parse2::(quote!(#[pallet::task_index(3)])).unwrap(); + parse2::(quote!(#[pallet::task_index(0)])).unwrap(); + parse2::(quote!(#[pallet::task_index(17)])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index])), + "expected parentheses" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index("hey")])), + "expected integer literal" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index(0.3)])), + "expected integer literal" + ); +} + +#[test] +fn test_parse_task_condition() { + parse2::(quote!(#[pallet::task_condition(|x| x.is_some())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|_x| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(some_expr())])).unwrap(); +} + +#[test] +fn test_parse_tasks_attr() { + parse2::(quote!(#[pallet::tasks_experimental])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::taskss])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pal::tasks])), + "expected 
`pallet`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_experimental()])), + "unexpected token" + ); +} + +#[test] +fn test_parse_tasks_def_basic() { + simulate_manifest_dir("../../examples/basic", || { + let parsed = parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Add a pair of numbers into the totals and remove them. + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn add_number_into_total(i: u32) -> DispatchResult { + let v = Numbers::::take(i).ok_or(Error::::NotFound)?; + Total::::mutate(|(total_keys, total_values)| { + *total_keys += i; + *total_values += v; + }); + Ok(()) + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 1); + }); +} + +#[test] +fn test_parse_tasks_def_basic_increment_decrement() { + simulate_manifest_dir("../../examples/basic", || { + let parsed = parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Get the value and check if it can be incremented + #[pallet::task_index(0)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value < 255 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn increment() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value >= 255 { + Err(Error::::ValueOverflow.into()) + } else { + let new_val = value.checked_add(1).ok_or(Error::::ValueOverflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Incremented { new_val }); + Ok(()) + } + } + + // Get the value and check if it can be decremented + #[pallet::task_index(1)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value > 0 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn decrement() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value == 0 { + Err(Error::::ValueUnderflow.into()) + } 
else { + let new_val = value.checked_sub(1).ok_or(Error::::ValueUnderflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Decremented { new_val }); + Ok(()) + } + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 2); + }); +} + +#[test] +fn test_parse_tasks_def_duplicate_index() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn bar(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + "duplicate task index `0`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_missing_task_list() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_list\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_missing_task_condition() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_missing_task_index() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_index\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_missing_task_weight() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_weight\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_unexpected_extra_task_list_attr() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_list(SomethingElse::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_list\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_unexpected_extra_task_condition_attr() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_condition(|i| i % 4 == 0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_unexpected_extra_task_index_attr() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_index\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_extra_tasks_attribute() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + #[pallet::tasks_experimental] + impl, I: 'static> Pallet {} + }), + r"unexpected extra `#\[pallet::tasks_experimental\]` attribute" + ); + }); +} + +#[test] +fn test_parse_task_enum_def_basic() { + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); +} + +#[test] +fn test_parse_task_enum_def_non_task_name() { + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Something { + Foo + } + }) + .unwrap(); + }); +} + +#[test] +fn test_parse_task_enum_def_missing_attr_allowed() { + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! 
{ + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); +} + +#[test] +fn test_parse_task_enum_def_missing_attr_alternate_name_allowed() { + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + pub enum Foo { + Red, + } + }) + .unwrap(); + }); +} + +#[test] +fn test_parse_task_enum_def_wrong_attr() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::something] + pub enum Task { + Increment, + Decrement, + } + }), + "expected `task_enum`" + ); + }); +} + +#[test] +fn test_parse_task_enum_def_wrong_item() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::task_enum] + pub struct Something; + }), + "expected `enum`" + ); + }); +} diff --git a/support/procedural-fork/src/pallet/parse/tests/mod.rs b/support/procedural-fork/src/pallet/parse/tests/mod.rs new file mode 100644 index 000000000..a3661f307 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/tests/mod.rs @@ -0,0 +1,264 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use std::{panic, sync::Mutex}; +use syn::parse_quote; + +#[doc(hidden)] +pub mod __private { + pub use regex; +} + +/// Allows you to assert that the input expression resolves to an error whose string +/// representation matches the specified regex literal. +/// +/// ## Example: +/// +/// ``` +/// use super::tasks::*; +/// +/// assert_parse_error_matches!( +/// parse2::(quote! { +/// #[pallet::task_enum] +/// pub struct Something; +/// }), +/// "expected `enum`" +/// ); +/// ``` +/// +/// More complex regular expressions are also possible (anything that could pass as a regex for +/// use with the [`regex`] crate.): +/// +/// ```ignore +/// assert_parse_error_matches!( +/// parse2::(quote! { +/// #[pallet::tasks_experimental] +/// impl, I: 'static> Pallet { +/// #[pallet::task_condition(|i| i % 2 == 0)] +/// #[pallet::task_index(0)] +/// pub fn foo(i: u32) -> DispatchResult { +/// Ok(()) +/// } +/// } +/// }), +/// r"missing `#\[pallet::task_list\(\.\.\)\]`" +/// ); +/// ``` +/// +/// Although this is primarily intended to be used with parsing errors, this macro is general +/// enough that it will work with any error with a reasonable [`core::fmt::Display`] impl. +#[macro_export] +macro_rules! assert_parse_error_matches { + ($expr:expr, $reg:literal) => { + match $expr { + Ok(_) => panic!("Expected an `Error(..)`, but got Ok(..)"), + Err(e) => { + let error_message = e.to_string(); + let re = $crate::pallet::parse::tests::__private::regex::Regex::new($reg) + .expect("Invalid regex pattern"); + assert!( + re.is_match(&error_message), + "Error message \"{}\" does not match the pattern \"{}\"", + error_message, + $reg + ); + }, + } + }; +} + +/// Allows you to assert that an entire pallet parses successfully. A custom syntax is used for +/// specifying arguments so please pay attention to the docs below. +/// +/// The general syntax is: +/// +/// ```ignore +/// assert_pallet_parses! 
{ +/// #[manifest_dir("../../examples/basic")] +/// #[frame_support::pallet] +/// pub mod pallet { +/// #[pallet::config] +/// pub trait Config: frame_system::Config {} +/// +/// #[pallet::pallet] +/// pub struct Pallet(_); +/// } +/// }; +/// ``` +/// +/// The `#[manifest_dir(..)]` attribute _must_ be specified as the _first_ attribute on the +/// pallet module, and should reference the relative (to your current directory) path of a +/// directory containing containing the `Cargo.toml` of a valid pallet. Typically you will only +/// ever need to use the `examples/basic` pallet, but sometimes it might be advantageous to +/// specify a different one that has additional dependencies. +/// +/// The reason this must be specified is that our underlying parsing of pallets depends on +/// reaching out into the file system to look for particular `Cargo.toml` dependencies via the +/// [`generate_access_from_frame_or_crate`] method, so to simulate this properly in a proc +/// macro crate, we need to temporarily convince this function that we are running from the +/// directory of a valid pallet. +#[macro_export] +macro_rules! assert_pallet_parses { + ( + #[manifest_dir($manifest_dir:literal)] + $($tokens:tt)* + ) => { + { + let mut pallet: Option<$crate::pallet::parse::Def> = None; + $crate::pallet::parse::tests::simulate_manifest_dir($manifest_dir, core::panic::AssertUnwindSafe(|| { + pallet = Some($crate::pallet::parse::Def::try_from(syn::parse_quote! { + $($tokens)* + }, false).unwrap()); + })); + pallet.unwrap() + } + } +} + +/// Similar to [`assert_pallet_parses`], except this instead expects the pallet not to parse, +/// and allows you to specify a regex matching the expected parse error. 
+/// +/// This is identical syntactically to [`assert_pallet_parses`] in every way except there is a +/// second attribute that must be specified immediately after `#[manifest_dir(..)]` which is +/// `#[error_regex(..)]` which should contain a string/regex literal designed to match what you +/// consider to be the correct parsing error we should see when we try to parse this particular +/// pallet. +/// +/// ## Example: +/// +/// ``` +/// assert_pallet_parse_error! { +/// #[manifest_dir("../../examples/basic")] +/// #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] +/// #[frame_support::pallet] +/// pub mod pallet { +/// #[pallet::config] +/// pub trait Config: frame_system::Config {} +/// } +/// } +/// ``` +#[macro_export] +macro_rules! assert_pallet_parse_error { + ( + #[manifest_dir($manifest_dir:literal)] + #[error_regex($reg:literal)] + $($tokens:tt)* + ) => { + $crate::pallet::parse::tests::simulate_manifest_dir($manifest_dir, || { + $crate::assert_parse_error_matches!( + $crate::pallet::parse::Def::try_from( + parse_quote! { + $($tokens)* + }, + false + ), + $reg + ); + }); + } +} + +/// Safely runs the specified `closure` while simulating an alternative `CARGO_MANIFEST_DIR`, +/// restoring `CARGO_MANIFEST_DIR` to its original value upon completion regardless of whether +/// the closure panics. +/// +/// This is useful in tests of `Def::try_from` and other pallet-related methods that internally +/// make use of [`generate_access_from_frame_or_crate`], which is sensitive to entries in the +/// "current" `Cargo.toml` files. +/// +/// This function uses a [`Mutex`] to avoid a race condition created when multiple tests try to +/// modify and then restore the `CARGO_MANIFEST_DIR` ENV var in an overlapping way. 
+pub fn simulate_manifest_dir, F: FnOnce() + std::panic::UnwindSafe>( + path: P, + closure: F, +) { + use std::{env::*, path::*}; + + /// Ensures that only one thread can modify/restore the `CARGO_MANIFEST_DIR` ENV var at a time, + /// avoiding a race condition because `cargo test` runs tests in parallel. + /// + /// Although this forces all tests that use [`simulate_manifest_dir`] to run sequentially with + /// respect to each other, this is still several orders of magnitude faster than using UI + /// tests, even if they are run in parallel. + static MANIFEST_DIR_LOCK: Mutex<()> = Mutex::new(()); + + // avoid race condition when swapping out `CARGO_MANIFEST_DIR` + let guard = MANIFEST_DIR_LOCK.lock().unwrap(); + + // obtain the current/original `CARGO_MANIFEST_DIR` + let orig = PathBuf::from( + var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`"), + ); + + // set `CARGO_MANIFEST_DIR` to the provided path, relative to current working dir + set_var("CARGO_MANIFEST_DIR", orig.join(path.as_ref())); + + // safely run closure catching any panics + let result = panic::catch_unwind(closure); + + // restore original `CARGO_MANIFEST_DIR` before unwinding + set_var("CARGO_MANIFEST_DIR", &orig); + + // unlock the mutex so we don't poison it if there is a panic + drop(guard); + + // unwind any panics originally encountered when running closure + result.unwrap(); +} + +mod tasks; + +#[test] +fn test_parse_minimal_pallet() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_missing_pallet() { + assert_pallet_parse_error! 
{ + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} + } + } +} + +#[test] +fn test_parse_pallet_missing_config() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::config\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::pallet] + pub struct Pallet(_); + } + } +} diff --git a/support/procedural-fork/src/pallet/parse/tests/tasks.rs b/support/procedural-fork/src/pallet/parse/tests/tasks.rs new file mode 100644 index 000000000..9f1436284 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/tests/tasks.rs @@ -0,0 +1,240 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use syn::parse_quote; + +#[test] +fn test_parse_pallet_with_task_enum_missing_impl() { + assert_pallet_parse_error! 
{ + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::tasks_experimental\\]` impl")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum Task { + Something, + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } +} + +#[test] +fn test_parse_pallet_with_task_enum_wrong_attribute() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("expected one of")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::wrong_attribute] + pub enum Task { + Something, + } + + #[pallet::task_list] + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } +} + +#[test] +fn test_parse_pallet_missing_task_enum() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::tasks_experimental] + #[cfg(test)] // aha, this means it's being eaten + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_task_list_in_wrong_place() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("can only be used on items within an `impl` statement.")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::task_list] + pub fn something() { + println!("hey"); + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } +} + +#[test] +fn test_parse_pallet_manual_tasks_impl_without_manual_tasks_enum() { + assert_pallet_parse_error! 
{ + #[manifest_dir("../../examples/basic")] + #[error_regex(".*attribute must be attached to your.*")] + #[frame_support::pallet] + pub mod pallet { + + impl frame_support::traits::Task for Task + where + T: TypeInfo, + { + type Enumeration = sp_std::vec::IntoIter>; + + fn iter() -> Self::Enumeration { + sp_std::vec![Task::increment, Task::decrement].into_iter() + } + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } +} + +#[test] +fn test_parse_pallet_manual_task_enum_non_manual_impl() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_non_manual_task_enum_manual_impl() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_manual_task_enum_manual_impl() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_manual_task_enum_mismatch_ident() { + assert_pallet_parses! 
{ + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum WrongIdent { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} diff --git a/support/procedural-fork/src/pallet/parse/type_value.rs b/support/procedural-fork/src/pallet/parse/type_value.rs new file mode 100644 index 000000000..4d9db30b3 --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/type_value.rs @@ -0,0 +1,123 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use syn::spanned::Spanned; + +/// Definition of type value. Just a function which is expanded to a struct implementing `Get`. +pub struct TypeValueDef { + /// The index of error item in pallet module. + pub index: usize, + /// Visibility of the struct to generate. + pub vis: syn::Visibility, + /// Ident of the struct to generate. + pub ident: syn::Ident, + /// The type return by Get. + pub type_: Box, + /// The block returning the value to get + pub block: Box, + /// If type value is generic over `T` (or `T` and `I` for instantiable pallet) + pub is_generic: bool, + /// A set of usage of instance, must be check for consistency with config. 
+ pub instances: Vec, + /// The where clause of the function. + pub where_clause: Option, + /// The span of the pallet::type_value attribute. + pub attr_span: proc_macro2::Span, + /// Docs on the item. + pub docs: Vec, +} + +impl TypeValueDef { + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Fn(item) = item { + item + } else { + let msg = "Invalid pallet::type_value, expected item fn"; + return Err(syn::Error::new(item.span(), msg)) + }; + + let mut docs = vec![]; + for attr in &item.attrs { + if let syn::Meta::NameValue(meta) = &attr.meta { + if meta.path.get_ident().map_or(false, |ident| ident == "doc") { + docs.push(meta.value.clone()); + continue + } + } + + let msg = "Invalid pallet::type_value, unexpected attribute, only doc attribute are \ + allowed"; + return Err(syn::Error::new(attr.span(), msg)) + } + + if let Some(span) = item + .sig + .constness + .as_ref() + .map(|t| t.span()) + .or_else(|| item.sig.asyncness.as_ref().map(|t| t.span())) + .or_else(|| item.sig.unsafety.as_ref().map(|t| t.span())) + .or_else(|| item.sig.abi.as_ref().map(|t| t.span())) + .or_else(|| item.sig.variadic.as_ref().map(|t| t.span())) + { + let msg = "Invalid pallet::type_value, unexpected token"; + return Err(syn::Error::new(span, msg)) + } + + if !item.sig.inputs.is_empty() { + let msg = "Invalid pallet::type_value, unexpected argument"; + return Err(syn::Error::new(item.sig.inputs[0].span(), msg)) + } + + let vis = item.vis.clone(); + let ident = item.sig.ident.clone(); + let block = item.block.clone(); + let type_ = match item.sig.output.clone() { + syn::ReturnType::Type(_, type_) => type_, + syn::ReturnType::Default => { + let msg = "Invalid pallet::type_value, expected return type"; + return Err(syn::Error::new(item.sig.span(), msg)) + }, + }; + + let mut instances = vec![]; + if let Some(usage) = helper::check_type_value_gen(&item.sig.generics, item.sig.span())? 
{ + instances.push(usage); + } + + let is_generic = item.sig.generics.type_params().count() > 0; + let where_clause = item.sig.generics.where_clause.clone(); + + Ok(TypeValueDef { + attr_span, + index, + is_generic, + vis, + ident, + block, + type_, + instances, + where_clause, + docs, + }) + } +} diff --git a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs new file mode 100644 index 000000000..2bf0a1b6c --- /dev/null +++ b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs @@ -0,0 +1,62 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::helper; +use syn::spanned::Spanned; + +/// The definition of the pallet validate unsigned implementation. +pub struct ValidateUnsignedDef { + /// The index of validate unsigned item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with config. 
+ pub instances: Vec, +} + +impl ValidateUnsignedDef { + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::validate_unsigned, expected item impl"; + return Err(syn::Error::new(item.span(), msg)) + }; + + if item.trait_.is_none() { + let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ + Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)) + } + + if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { + if last.ident != "ValidateUnsigned" { + let msg = "Invalid pallet::validate_unsigned, expected trait ValidateUnsigned"; + return Err(syn::Error::new(last.span(), msg)) + } + } else { + let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ + Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)) + } + + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; + + Ok(ValidateUnsignedDef { index, instances }) + } +} diff --git a/support/procedural-fork/src/pallet_error.rs b/support/procedural-fork/src/pallet_error.rs new file mode 100644 index 000000000..693a1e982 --- /dev/null +++ b/support/procedural-fork/src/pallet_error.rs @@ -0,0 +1,178 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use quote::ToTokens; + +// Derive `PalletError` +pub fn derive_pallet_error(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let syn::DeriveInput { ident: name, generics, data, .. } = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; + + let frame_support = match generate_access_from_frame_or_crate("frame-support") { + Ok(c) => c, + Err(e) => return e.into_compile_error().into(), + }; + let frame_support = &frame_support; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + let max_encoded_size = match data { + syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { + syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) | + syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }) => { + let maybe_field_tys = fields + .iter() + .map(|f| generate_field_types(f, &frame_support)) + .collect::>>(); + let field_tys = match maybe_field_tys { + Ok(tys) => tys.into_iter().flatten(), + Err(e) => return e.into_compile_error().into(), + }; + quote::quote! { + 0_usize + #( + .saturating_add(< + #field_tys as #frame_support::traits::PalletError + >::MAX_ENCODED_SIZE) + )* + } + }, + syn::Fields::Unit => quote::quote!(0), + }, + syn::Data::Enum(syn::DataEnum { variants, .. }) => { + let field_tys = variants + .iter() + .map(|variant| generate_variant_field_types(variant, &frame_support)) + .collect::>>, syn::Error>>(); + + let field_tys = match field_tys { + Ok(tys) => tys.into_iter().flatten().collect::>(), + Err(e) => return e.to_compile_error().into(), + }; + + // We start with `1`, because the discriminant of an enum is stored as u8 + if field_tys.is_empty() { + quote::quote!(1) + } else { + let variant_sizes = field_tys.into_iter().map(|variant_field_tys| { + quote::quote! 
{ + 1_usize + #(.saturating_add(< + #variant_field_tys as #frame_support::traits::PalletError + >::MAX_ENCODED_SIZE))* + } + }); + + quote::quote! {{ + let mut size = 1_usize; + let mut tmp = 0_usize; + #( + tmp = #variant_sizes; + size = if tmp > size { tmp } else { size }; + tmp = 0_usize; + )* + size + }} + } + }, + syn::Data::Union(syn::DataUnion { union_token, .. }) => { + let msg = "Cannot derive `PalletError` for union; please implement it directly"; + return syn::Error::new(union_token.span, msg).into_compile_error().into() + }, + }; + + quote::quote!( + const _: () = { + impl #impl_generics #frame_support::traits::PalletError + for #name #ty_generics #where_clause + { + const MAX_ENCODED_SIZE: usize = #max_encoded_size; + } + }; + ) + .into() +} + +fn generate_field_types( + field: &syn::Field, + scrate: &syn::Path, +) -> syn::Result> { + let attrs = &field.attrs; + + for attr in attrs { + if attr.path().is_ident("codec") { + let mut res = None; + + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("skip") { + res = Some(None); + } else if meta.path.is_ident("compact") { + let field_ty = &field.ty; + res = Some(Some(quote::quote!(#scrate::__private::codec::Compact<#field_ty>))); + } else if meta.path.is_ident("compact") { + res = Some(Some(meta.value()?.parse()?)); + } + + Ok(()) + })?; + + if let Some(v) = res { + return Ok(v) + } + } + } + + Ok(Some(field.ty.to_token_stream())) +} + +fn generate_variant_field_types( + variant: &syn::Variant, + scrate: &syn::Path, +) -> syn::Result>> { + let attrs = &variant.attrs; + + for attr in attrs { + if attr.path().is_ident("codec") { + let mut skip = false; + + // We ignore the error intentionally as this isn't `codec(skip)` when + // `parse_nested_meta` fails. + let _ = attr.parse_nested_meta(|meta| { + skip = meta.path.is_ident("skip"); + Ok(()) + }); + + if skip { + return Ok(None) + } + } + } + + match &variant.fields { + syn::Fields::Named(syn::FieldsNamed { named: fields, .. 
}) | + syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }) => { + let field_tys = fields + .iter() + .map(|field| generate_field_types(field, scrate)) + .collect::>>()?; + Ok(Some(field_tys.into_iter().flatten().collect())) + }, + syn::Fields::Unit => Ok(None), + } +} diff --git a/support/procedural-fork/src/runtime/expand/mod.rs b/support/procedural-fork/src/runtime/expand/mod.rs new file mode 100644 index 000000000..93c88fce9 --- /dev/null +++ b/support/procedural-fork/src/runtime/expand/mod.rs @@ -0,0 +1,320 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::parse::runtime_types::RuntimeType; +use crate::{ + construct_runtime::{ + check_pallet_number, decl_all_pallets, decl_integrity_test, decl_pallet_runtime_setup, + decl_static_assertions, expand, + }, + runtime::{ + parse::{ + AllPalletsDeclaration, ExplicitAllPalletsDeclaration, ImplicitAllPalletsDeclaration, + }, + Def, + }, +}; +use cfg_expr::Predicate; +use frame_support_procedural_tools::{ + generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, +}; +use proc_macro2::TokenStream as TokenStream2; +use quote::quote; +use std::collections::HashSet; +use syn::{Ident, Result}; + +/// The fixed name of the system pallet. 
+const SYSTEM_PALLET_NAME: &str = "System"; + +pub fn expand(def: Def, legacy_ordering: bool) -> TokenStream2 { + let input = def.input; + + let (check_pallet_number_res, res) = match def.pallets { + AllPalletsDeclaration::Implicit(ref decl) => ( + check_pallet_number(input.clone(), decl.pallet_count), + construct_runtime_implicit_to_explicit(input.into(), decl.clone(), legacy_ordering), + ), + AllPalletsDeclaration::Explicit(ref decl) => ( + check_pallet_number(input, decl.pallets.len()), + construct_runtime_final_expansion( + def.runtime_struct.ident.clone(), + decl.clone(), + def.runtime_types.clone(), + legacy_ordering, + ), + ), + }; + + let res = res.unwrap_or_else(|e| e.to_compile_error()); + + // We want to provide better error messages to the user and thus, handle the error here + // separately. If there is an error, we print the error and still generate all of the code to + // get in overall less errors for the user. + let res = if let Err(error) = check_pallet_number_res { + let error = error.to_compile_error(); + + quote! { + #error + + #res + } + } else { + res + }; + + let res = expander::Expander::new("construct_runtime") + .dry(std::env::var("FRAME_EXPAND").is_err()) + .verbose(true) + .write_to_out_dir(res) + .expect("Does not fail because of IO in OUT_DIR; qed"); + + res.into() +} + +fn construct_runtime_implicit_to_explicit( + input: TokenStream2, + definition: ImplicitAllPalletsDeclaration, + legacy_ordering: bool, +) -> Result { + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let attr = if legacy_ordering { quote!((legacy_ordering)) } else { quote!() }; + let mut expansion = quote::quote!( + #[frame_support::runtime #attr] + #input + ); + for pallet in definition.pallet_decls.iter() { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! 
{ + macro = [{ #pallet_path::tt_default_parts_v2 }] + frame_support = [{ #frame_support }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name = #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) +} + +fn construct_runtime_final_expansion( + name: Ident, + definition: ExplicitAllPalletsDeclaration, + runtime_types: Vec, + legacy_ordering: bool, +) -> Result { + let ExplicitAllPalletsDeclaration { mut pallets, name: pallets_name } = definition; + + if !legacy_ordering { + // Ensure that order of hooks is based on the pallet index + pallets.sort_by_key(|p| p.index); + } + + let system_pallet = + pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { + syn::Error::new( + pallets_name.span(), + "`System` pallet declaration is missing. \ + Please add this line: `pub type System = frame_system;`", + ) + })?; + if !system_pallet.cfg_pattern.is_empty() { + return Err(syn::Error::new( + system_pallet.name.span(), + "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", + )) + } + + let features = pallets + .iter() + .filter_map(|decl| { + (!decl.cfg_pattern.is_empty()).then(|| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) + }) + .flatten() + .collect::>(); + + let hidden_crate_name = "construct_runtime"; + let scrate = generate_crate_access(hidden_crate_name, "frame-support"); + let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let block = quote!(<#name as #frame_system::Config>::Block); + let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); + + let mut dispatch = None; + let mut outer_event = None; + let mut outer_error = None; + let mut outer_origin = None; + 
let mut freeze_reason = None; + let mut hold_reason = None; + let mut slash_reason = None; + let mut lock_id = None; + let mut task = None; + + for runtime_type in runtime_types.iter() { + match runtime_type { + RuntimeType::RuntimeCall(_) => { + dispatch = + Some(expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate)); + }, + RuntimeType::RuntimeEvent(_) => { + outer_event = Some(expand::expand_outer_enum( + &name, + &pallets, + &scrate, + expand::OuterEnumType::Event, + )?); + }, + RuntimeType::RuntimeError(_) => { + outer_error = Some(expand::expand_outer_enum( + &name, + &pallets, + &scrate, + expand::OuterEnumType::Error, + )?); + }, + RuntimeType::RuntimeOrigin(_) => { + outer_origin = + Some(expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?); + }, + RuntimeType::RuntimeFreezeReason(_) => { + freeze_reason = Some(expand::expand_outer_freeze_reason(&pallets, &scrate)); + }, + RuntimeType::RuntimeHoldReason(_) => { + hold_reason = Some(expand::expand_outer_hold_reason(&pallets, &scrate)); + }, + RuntimeType::RuntimeSlashReason(_) => { + slash_reason = Some(expand::expand_outer_slash_reason(&pallets, &scrate)); + }, + RuntimeType::RuntimeLockId(_) => { + lock_id = Some(expand::expand_outer_lock_id(&pallets, &scrate)); + }, + RuntimeType::RuntimeTask(_) => { + task = Some(expand::expand_outer_task(&name, &pallets, &scrate)); + }, + } + } + + let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); + let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); + + let metadata = expand::expand_runtime_metadata( + &name, + &pallets, + &scrate, + &unchecked_extrinsic, + &system_pallet.path, + ); + let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); + let inherent = + expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); + let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); + let integrity_test = 
decl_integrity_test(&scrate); + let static_assertions = decl_static_assertions(&name, &pallets, &scrate); + + let res = quote!( + #scrate_decl + + // Prevent UncheckedExtrinsic to print unused warning. + const _: () = { + #[allow(unused)] + type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + }; + + #[derive( + Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + pub struct #name; + impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { + type RuntimeBlock = #block; + } + + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. + // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `runtime` may be used without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro runtime). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. + // `InternalConstructRuntime` is implemented by the `runtime` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `runtime` + // is used. 
+ + #[doc(hidden)] + trait InternalConstructRuntime { + #[inline(always)] + fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + Default::default() + } + } + #[doc(hidden)] + impl InternalConstructRuntime for &#name {} + + #outer_event + + #outer_error + + #outer_origin + + #all_pallets + + #pallet_to_index + + #dispatch + + #task + + #metadata + + #outer_config + + #inherent + + #validate_unsigned + + #freeze_reason + + #hold_reason + + #lock_id + + #slash_reason + + #integrity_test + + #static_assertions + ); + + Ok(res) +} diff --git a/support/procedural-fork/src/runtime/mod.rs b/support/procedural-fork/src/runtime/mod.rs new file mode 100644 index 000000000..aaae579eb --- /dev/null +++ b/support/procedural-fork/src/runtime/mod.rs @@ -0,0 +1,236 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Implementation of `runtime`. +//! +//! `runtime` implementation is recursive and can generate code which will call itself +//! in order to get all the pallet parts for each pallet. +//! +//! Pallets can define their parts: +//! - Implicitly: `pub type System = frame_system;` +//! - Explicitly: `pub type System = frame_system + Pallet + Call;` +//! +//! The `runtime` transitions from the implicit definition to the explicit one. +//! 
From the explicit state, Substrate expands the pallets with additional information +//! that is to be included in the runtime metadata. +//! +//! Pallets must provide the `tt_default_parts_v2` macro for these transitions. +//! These are automatically implemented by the `#[pallet::pallet]` macro. +//! +//! This macro also generates the following enums for ease of decoding if the respective type +//! is defined inside `#[runtime::derive]`: +//! - `enum RuntimeCall`: This type contains the information needed to decode extrinsics. +//! - `enum RuntimeEvent`: This type contains the information needed to decode events. +//! - `enum RuntimeError`: While this cannot be used directly to decode `sp_runtime::DispatchError` +//! from the chain, it contains the information needed to decode the +//! `sp_runtime::DispatchError::Module`. +//! +//! # State Transitions +//! +//! ```ignore +//! +----------+ +//! | Implicit | +//! +----------+ +//! | +//! v +//! +----------+ +//! | Explicit | +//! +----------+ +//! ``` +//! +//! The `runtime` macro transforms the implicit declaration of each pallet +//! `System: frame_system` to an explicit one `System: frame_system + Pallet + Call` using the +//! `tt_default_parts_v2` macro. +//! +//! The `tt_default_parts_v2` macro exposes a plus separated list of pallet parts. For example, the +//! `Event` part is exposed only if the pallet implements an event via `#[pallet::event]` macro. +//! The tokens generated by this macro are `+ Pallet + Call` for our example. +//! +//! The `match_and_insert` macro takes in 3 arguments: +//! - target: This is the `TokenStream` that contains the `runtime` macro. +//! - pattern: The pattern to match against in the target stream. +//! - tokens: The tokens to added after the pattern match. +//! +//! The `runtime` macro uses the `tt_call` to get the default pallet parts via +//! the `tt_default_parts_v2` macro defined by each pallet. The pallet parts are then returned as +//! 
input to the `match_and_replace` macro. +//! The `match_and_replace` then will modify the `runtime` to expand the implicit +//! definition to the explicit one. +//! +//! For example, +//! +//! ```ignore +//! #[frame_support::runtime] +//! mod runtime { +//! //... +//! +//! #[runtime::pallet_index(0)] +//! pub type System = frame_system; // Implicit definition of parts +//! +//! #[runtime::pallet_index(1)] +//! pub type Balances = pallet_balances; // Implicit definition of parts +//! } +//! ``` +//! This call has some implicit pallet parts, thus it will expand to: +//! ```ignore +//! frame_support::__private::tt_call! { +//! macro = [{ pallet_balances::tt_default_parts_v2 }] +//! ~~> frame_support::match_and_insert! { +//! target = [{ +//! frame_support::__private::tt_call! { +//! macro = [{ frame_system::tt_default_parts_v2 }] +//! ~~> frame_support::match_and_insert! { +//! target = [{ +//! #[frame_support::runtime] +//! mod runtime { +//! //... +//! +//! #[runtime::pallet_index(0)] +//! pub type System = frame_system; +//! +//! #[runtime::pallet_index(1)] +//! pub type Balances = pallet_balances; +//! } +//! }] +//! pattern = [{ System = frame_system }] +//! } +//! } +//! }] +//! pattern = [{ Balances = pallet_balances }] +//! } +//! } +//! ``` +//! `tt_default_parts_v2` must be defined. It returns the pallet parts inside some tokens, and +//! then `tt_call` will pipe the returned pallet parts into the input of `match_and_insert`. +//! Thus `match_and_insert` will initially receive the following inputs: +//! ```ignore +//! frame_support::match_and_insert! { +//! target = [{ +//! frame_support::match_and_insert! { +//! target = [{ +//! #[frame_support::runtime] +//! mod runtime { +//! //... +//! +//! #[runtime::pallet_index(0)] +//! pub type System = frame_system; +//! +//! #[runtime::pallet_index(1)] +//! pub type Balances = pallet_balances; +//! } +//! }] +//! pattern = [{ System = frame_system }] +//! tokens = [{ ::{+ Pallet + Call} }] +//! } +//! }] +//! 
pattern = [{ Balances = pallet_balances }] +//! tokens = [{ ::{+ Pallet + Call} }] +//! } +//! ``` +//! After dealing with `pallet_balances`, the inner `match_and_insert` will expand to: +//! ```ignore +//! frame_support::match_and_insert! { +//! target = [{ +//! #[frame_support::runtime] +//! mod runtime { +//! //... +//! +//! #[runtime::pallet_index(0)] +//! pub type System = frame_system; // Implicit definition of parts +//! +//! #[runtime::pallet_index(1)] +//! pub type Balances = pallet_balances + Pallet + Call; // Explicit definition of parts +//! } +//! }] +//! pattern = [{ System = frame_system }] +//! tokens = [{ ::{+ Pallet + Call} }] +//! } +//! ``` +//! +//! Which will then finally expand to the following: +//! ```ignore +//! #[frame_support::runtime] +//! mod runtime { +//! //... +//! +//! #[runtime::pallet_index(0)] +//! pub type System = frame_system + Pallet + Call; +//! +//! #[runtime::pallet_index(1)] +//! pub type Balances = pallet_balances + Pallet + Call; +//! } +//! ``` +//! +//! This call has no implicit pallet parts, thus it will expand to the runtime construction: +//! ```ignore +//! pub struct Runtime { ... } +//! pub struct Call { ... } +//! impl Call ... +//! pub enum Origin { ... } +//! ... +//! ``` +//! +//! Visualizing the entire flow of `#[frame_support::runtime]`, it would look like the following: +//! +//! ```ignore +//! +----------------------+ +------------------------+ +-------------------+ +//! | | | (defined in pallet) | | | +//! | runtime | --> | tt_default_parts_v2! | --> | match_and_insert! | +//! | w/ no pallet parts | | | | | +//! +----------------------+ +------------------------+ +-------------------+ +//! +//! +----------------------+ +//! | | +//! --> | runtime | +//! | w/ pallet parts | +//! +----------------------+ +//! 
``` + +#![cfg(feature = "experimental")] + +pub use parse::Def; +use proc_macro::TokenStream; +use syn::spanned::Spanned; + +mod expand; +mod parse; + +mod keyword { + syn::custom_keyword!(legacy_ordering); +} + +pub fn runtime(attr: TokenStream, tokens: TokenStream) -> TokenStream { + let mut legacy_ordering = false; + if !attr.is_empty() { + if let Ok(_) = syn::parse::(attr.clone()) { + legacy_ordering = true; + } else { + let msg = "Invalid runtime macro call: unexpected attribute. Macro call must be \ + bare, such as `#[frame_support::runtime]` or `#[runtime]`, or must specify the \ + `legacy_ordering` attribute, such as `#[frame_support::runtime(legacy_ordering)]` or \ + #[runtime(legacy_ordering)]."; + let span = proc_macro2::TokenStream::from(attr).span(); + return syn::Error::new(span, msg).to_compile_error().into() + } + } + + let item = syn::parse_macro_input!(tokens as syn::ItemMod); + match parse::Def::try_from(item) { + Ok(def) => expand::expand(def, legacy_ordering).into(), + Err(e) => e.to_compile_error().into(), + } +} diff --git a/support/procedural-fork/src/runtime/parse/helper.rs b/support/procedural-fork/src/runtime/parse/helper.rs new file mode 100644 index 000000000..f05395f9b --- /dev/null +++ b/support/procedural-fork/src/runtime/parse/helper.rs @@ -0,0 +1,37 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use crate::pallet::parse::helper::MutItemAttrs; +use quote::ToTokens; + +pub(crate) fn take_first_item_runtime_attr( + item: &mut impl MutItemAttrs, +) -> syn::Result> +where + Attr: syn::parse::Parse, +{ + let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; + + if let Some(index) = attrs.iter().position(|attr| { + attr.path().segments.first().map_or(false, |segment| segment.ident == "runtime") + }) { + let runtime_attr = attrs.remove(index); + Ok(Some(syn::parse2(runtime_attr.into_token_stream())?)) + } else { + Ok(None) + } +} diff --git a/support/procedural-fork/src/runtime/parse/mod.rs b/support/procedural-fork/src/runtime/parse/mod.rs new file mode 100644 index 000000000..893cb4726 --- /dev/null +++ b/support/procedural-fork/src/runtime/parse/mod.rs @@ -0,0 +1,266 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +pub mod helper; +pub mod pallet; +pub mod pallet_decl; +pub mod runtime_struct; +pub mod runtime_types; + +use crate::construct_runtime::parse::Pallet; +use pallet_decl::PalletDeclaration; +use proc_macro2::TokenStream as TokenStream2; +use quote::ToTokens; +use std::collections::HashMap; +use syn::{spanned::Spanned, Ident, Token}; + +use frame_support_procedural_tools::syn_ext as ext; +use runtime_types::RuntimeType; + +mod keyword { + use syn::custom_keyword; + + custom_keyword!(runtime); + custom_keyword!(derive); + custom_keyword!(pallet_index); + custom_keyword!(disable_call); + custom_keyword!(disable_unsigned); +} + +enum RuntimeAttr { + Runtime(proc_macro2::Span), + Derive(proc_macro2::Span, Vec), + PalletIndex(proc_macro2::Span, u8), + DisableCall(proc_macro2::Span), + DisableUnsigned(proc_macro2::Span), +} + +impl RuntimeAttr { + fn span(&self) -> proc_macro2::Span { + match self { + Self::Runtime(span) => *span, + Self::Derive(span, _) => *span, + Self::PalletIndex(span, _) => *span, + Self::DisableCall(span) => *span, + Self::DisableUnsigned(span) => *span, + } + } +} + +impl syn::parse::Parse for RuntimeAttr { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::runtime) { + Ok(RuntimeAttr::Runtime(content.parse::()?.span())) + } else if lookahead.peek(keyword::derive) { + let _ = content.parse::(); + let derive_content; + syn::parenthesized!(derive_content in content); + let runtime_types = + derive_content.parse::>()?; + let runtime_types = runtime_types.inner.into_iter().collect(); + Ok(RuntimeAttr::Derive(derive_content.span(), runtime_types)) + } else if lookahead.peek(keyword::pallet_index) { + let _ = content.parse::(); + let pallet_index_content; + syn::parenthesized!(pallet_index_content in content); + let pallet_index = 
pallet_index_content.parse::()?; + if !pallet_index.suffix().is_empty() { + let msg = "Number literal must not have a suffix"; + return Err(syn::Error::new(pallet_index.span(), msg)) + } + Ok(RuntimeAttr::PalletIndex(pallet_index.span(), pallet_index.base10_parse()?)) + } else if lookahead.peek(keyword::disable_call) { + Ok(RuntimeAttr::DisableCall(content.parse::()?.span())) + } else if lookahead.peek(keyword::disable_unsigned) { + Ok(RuntimeAttr::DisableUnsigned(content.parse::()?.span())) + } else { + Err(lookahead.error()) + } + } +} + +#[derive(Debug, Clone)] +pub enum AllPalletsDeclaration { + Implicit(ImplicitAllPalletsDeclaration), + Explicit(ExplicitAllPalletsDeclaration), +} + +/// Declaration of a runtime with some pallet with implicit declaration of parts. +#[derive(Debug, Clone)] +pub struct ImplicitAllPalletsDeclaration { + pub name: Ident, + pub pallet_decls: Vec, + pub pallet_count: usize, +} + +/// Declaration of a runtime with all pallet having explicit declaration of parts. +#[derive(Debug, Clone)] +pub struct ExplicitAllPalletsDeclaration { + pub name: Ident, + pub pallets: Vec, +} + +pub struct Def { + pub input: TokenStream2, + pub item: syn::ItemMod, + pub runtime_struct: runtime_struct::RuntimeStructDef, + pub pallets: AllPalletsDeclaration, + pub runtime_types: Vec, +} + +impl Def { + pub fn try_from(mut item: syn::ItemMod) -> syn::Result { + let input: TokenStream2 = item.to_token_stream().into(); + let item_span = item.span(); + let items = &mut item + .content + .as_mut() + .ok_or_else(|| { + let msg = "Invalid runtime definition, expected mod to be inlined."; + syn::Error::new(item_span, msg) + })? 
+ .1; + + let mut runtime_struct = None; + let mut runtime_types = None; + + let mut indices = HashMap::new(); + let mut names = HashMap::new(); + + let mut pallet_decls = vec![]; + let mut pallets = vec![]; + + for item in items.iter_mut() { + let mut pallet_item = None; + let mut pallet_index = 0; + + let mut disable_call = false; + let mut disable_unsigned = false; + + while let Some(runtime_attr) = + helper::take_first_item_runtime_attr::(item)? + { + match runtime_attr { + RuntimeAttr::Runtime(span) if runtime_struct.is_none() => { + let p = runtime_struct::RuntimeStructDef::try_from(span, item)?; + runtime_struct = Some(p); + }, + RuntimeAttr::Derive(_, types) if runtime_types.is_none() => { + runtime_types = Some(types); + }, + RuntimeAttr::PalletIndex(span, index) => { + pallet_index = index; + pallet_item = if let syn::Item::Type(item) = item { + Some(item.clone()) + } else { + let msg = "Invalid runtime::pallet_index, expected type definition"; + return Err(syn::Error::new(span, msg)) + }; + }, + RuntimeAttr::DisableCall(_) => disable_call = true, + RuntimeAttr::DisableUnsigned(_) => disable_unsigned = true, + attr => { + let msg = "Invalid duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)) + }, + } + } + + if let Some(pallet_item) = pallet_item { + match *pallet_item.ty.clone() { + syn::Type::Path(ref path) => { + let pallet_decl = + PalletDeclaration::try_from(item.span(), &pallet_item, path)?; + + if let Some(used_pallet) = + names.insert(pallet_decl.name.clone(), pallet_decl.name.span()) + { + let msg = "Two pallets with the same name!"; + + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet_decl.name.span(), &msg)); + return Err(err) + } + + pallet_decls.push(pallet_decl); + }, + syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. 
}) => { + let pallet = Pallet::try_from( + item.span(), + &pallet_item, + pallet_index, + disable_call, + disable_unsigned, + &bounds, + )?; + + if let Some(used_pallet) = indices.insert(pallet.index, pallet.name.clone()) + { + let msg = format!( + "Pallet indices are conflicting: Both pallets {} and {} are at index {}", + used_pallet, pallet.name, pallet.index, + ); + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err) + } + + pallets.push(pallet); + }, + _ => continue, + } + } + } + + let name = item.ident.clone(); + let decl_count = pallet_decls.len(); + let pallets = if decl_count > 0 { + AllPalletsDeclaration::Implicit(ImplicitAllPalletsDeclaration { + name, + pallet_decls, + pallet_count: decl_count.saturating_add(pallets.len()), + }) + } else { + AllPalletsDeclaration::Explicit(ExplicitAllPalletsDeclaration { name, pallets }) + }; + + let def = Def { + input, + item, + runtime_struct: runtime_struct.ok_or_else(|| { + syn::Error::new(item_span, + "Missing Runtime. Please add a struct inside the module and annotate it with `#[runtime::runtime]`" + ) + })?, + pallets, + runtime_types: runtime_types.ok_or_else(|| { + syn::Error::new(item_span, + "Missing Runtime Types. Please annotate the runtime struct with `#[runtime::derive]`" + ) + })?, + }; + + Ok(def) + } +} diff --git a/support/procedural-fork/src/runtime/parse/pallet.rs b/support/procedural-fork/src/runtime/parse/pallet.rs new file mode 100644 index 000000000..d2f1857fb --- /dev/null +++ b/support/procedural-fork/src/runtime/parse/pallet.rs @@ -0,0 +1,99 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::construct_runtime::parse::{Pallet, PalletPart, PalletPartKeyword, PalletPath}; +use quote::ToTokens; +use syn::{punctuated::Punctuated, spanned::Spanned, token, Error, Ident, PathArguments}; + +impl Pallet { + pub fn try_from( + attr_span: proc_macro2::Span, + item: &syn::ItemType, + pallet_index: u8, + disable_call: bool, + disable_unsigned: bool, + bounds: &Punctuated, + ) -> syn::Result { + let name = item.ident.clone(); + + let mut pallet_path = None; + let mut pallet_parts = vec![]; + + for (index, bound) in bounds.into_iter().enumerate() { + if let syn::TypeParamBound::Trait(syn::TraitBound { path, .. }) = bound { + if index == 0 { + pallet_path = Some(PalletPath { inner: path.clone() }); + } else { + let pallet_part = syn::parse2::(bound.into_token_stream())?; + pallet_parts.push(pallet_part); + } + } else { + return Err(Error::new( + attr_span, + "Invalid pallet declaration, expected a path or a trait object", + )) + }; + } + + let mut path = pallet_path.ok_or(Error::new( + attr_span, + "Invalid pallet declaration, expected a path or a trait object", + ))?; + + let mut instance = None; + if let Some(segment) = path.inner.segments.iter_mut().find(|seg| !seg.arguments.is_empty()) + { + if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { + args, .. 
+ }) = segment.arguments.clone() + { + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { + instance = + Some(Ident::new(&arg_path.to_token_stream().to_string(), arg_path.span())); + segment.arguments = PathArguments::None; + } + } + } + + pallet_parts = pallet_parts + .into_iter() + .filter(|part| { + if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { + false + } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = + (disable_unsigned, &part.keyword) + { + false + } else { + true + } + }) + .collect(); + + let cfg_pattern = vec![]; + + Ok(Pallet { + is_expanded: true, + name, + index: pallet_index, + path, + instance, + cfg_pattern, + pallet_parts, + }) + } +} diff --git a/support/procedural-fork/src/runtime/parse/pallet_decl.rs b/support/procedural-fork/src/runtime/parse/pallet_decl.rs new file mode 100644 index 000000000..437a163cf --- /dev/null +++ b/support/procedural-fork/src/runtime/parse/pallet_decl.rs @@ -0,0 +1,60 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use quote::ToTokens; +use syn::{spanned::Spanned, Attribute, Ident, PathArguments}; + +/// The declaration of a pallet. +#[derive(Debug, Clone)] +pub struct PalletDeclaration { + /// The name of the pallet, e.g.`System` in `System: frame_system`. 
+ pub name: Ident, + /// Optional attributes tagged right above a pallet declaration. + pub attrs: Vec, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: syn::Path, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, +} + +impl PalletDeclaration { + pub fn try_from( + _attr_span: proc_macro2::Span, + item: &syn::ItemType, + path: &syn::TypePath, + ) -> syn::Result { + let name = item.ident.clone(); + + let mut path = path.path.clone(); + + let mut instance = None; + if let Some(segment) = path.segments.iter_mut().find(|seg| !seg.arguments.is_empty()) { + if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { + args, .. + }) = segment.arguments.clone() + { + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { + instance = + Some(Ident::new(&arg_path.to_token_stream().to_string(), arg_path.span())); + segment.arguments = PathArguments::None; + } + } + } + + Ok(Self { name, path, instance, attrs: item.attrs.clone() }) + } +} diff --git a/support/procedural-fork/src/runtime/parse/runtime_struct.rs b/support/procedural-fork/src/runtime/parse/runtime_struct.rs new file mode 100644 index 000000000..8fa746ee8 --- /dev/null +++ b/support/procedural-fork/src/runtime/parse/runtime_struct.rs @@ -0,0 +1,35 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +use syn::spanned::Spanned; +pub struct RuntimeStructDef { + pub ident: syn::Ident, + pub attr_span: proc_macro2::Span, +} + +impl RuntimeStructDef { + pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Struct(item) = item { + item + } else { + let msg = "Invalid runtime::runtime, expected struct definition"; + return Err(syn::Error::new(item.span(), msg)) + }; + + Ok(Self { ident: item.ident.clone(), attr_span }) + } +} diff --git a/support/procedural-fork/src/runtime/parse/runtime_types.rs b/support/procedural-fork/src/runtime/parse/runtime_types.rs new file mode 100644 index 000000000..a4480e2a1 --- /dev/null +++ b/support/procedural-fork/src/runtime/parse/runtime_types.rs @@ -0,0 +1,76 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use syn::{ + parse::{Parse, ParseStream}, + Result, +}; + +mod keyword { + use syn::custom_keyword; + + custom_keyword!(RuntimeCall); + custom_keyword!(RuntimeEvent); + custom_keyword!(RuntimeError); + custom_keyword!(RuntimeOrigin); + custom_keyword!(RuntimeFreezeReason); + custom_keyword!(RuntimeHoldReason); + custom_keyword!(RuntimeSlashReason); + custom_keyword!(RuntimeLockId); + custom_keyword!(RuntimeTask); +} + +#[derive(Debug, Clone, PartialEq)] +pub enum RuntimeType { + RuntimeCall(keyword::RuntimeCall), + RuntimeEvent(keyword::RuntimeEvent), + RuntimeError(keyword::RuntimeError), + RuntimeOrigin(keyword::RuntimeOrigin), + RuntimeFreezeReason(keyword::RuntimeFreezeReason), + RuntimeHoldReason(keyword::RuntimeHoldReason), + RuntimeSlashReason(keyword::RuntimeSlashReason), + RuntimeLockId(keyword::RuntimeLockId), + RuntimeTask(keyword::RuntimeTask), +} + +impl Parse for RuntimeType { + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(keyword::RuntimeCall) { + Ok(Self::RuntimeCall(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeEvent) { + Ok(Self::RuntimeEvent(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeError) { + Ok(Self::RuntimeError(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeOrigin) { + Ok(Self::RuntimeOrigin(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeFreezeReason) { + Ok(Self::RuntimeFreezeReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeHoldReason) { + Ok(Self::RuntimeHoldReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeSlashReason) { + Ok(Self::RuntimeSlashReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeLockId) { + Ok(Self::RuntimeLockId(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeTask) { + Ok(Self::RuntimeTask(input.parse()?)) + } else { + Err(lookahead.error()) + } + } +} diff --git a/support/procedural-fork/src/storage_alias.rs 
b/support/procedural-fork/src/storage_alias.rs new file mode 100644 index 000000000..06f62768f --- /dev/null +++ b/support/procedural-fork/src/storage_alias.rs @@ -0,0 +1,676 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Implementation of the `storage_alias` attribute macro. + +use crate::{counter_prefix, pallet::parse::helper}; +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use proc_macro2::{Span, TokenStream}; +use quote::{quote, ToTokens}; +use syn::{ + parenthesized, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned, + token, + visit::Visit, + Attribute, Error, Ident, Result, Token, Type, TypeParam, Visibility, WhereClause, +}; + +/// Extension trait for [`Type`]. 
+trait TypeExt { + fn get_ident(&self) -> Option<&Ident>; + fn contains_ident(&self, ident: &Ident) -> bool; +} + +impl TypeExt for Type { + fn get_ident(&self) -> Option<&Ident> { + match self { + Type::Path(p) => match &p.qself { + Some(qself) => qself.ty.get_ident(), + None => p.path.get_ident(), + }, + _ => None, + } + } + + fn contains_ident(&self, ident: &Ident) -> bool { + struct ContainsIdent<'a> { + ident: &'a Ident, + found: bool, + } + impl<'a, 'ast> Visit<'ast> for ContainsIdent<'a> { + fn visit_ident(&mut self, i: &'ast Ident) { + if i == self.ident { + self.found = true; + } + } + } + + let mut visitor = ContainsIdent { ident, found: false }; + syn::visit::visit_type(&mut visitor, self); + visitor.found + } +} + +/// Represents generics which only support [`TypeParam`] separated by commas. +struct SimpleGenerics { + lt_token: Token![<], + params: Punctuated, + gt_token: Token![>], +} + +impl SimpleGenerics { + /// Returns the generics for types declarations etc. + fn type_generics(&self) -> impl Iterator { + self.params.iter().map(|p| &p.ident) + } + + /// Returns the generics for the `impl` block. + fn impl_generics(&self) -> impl Iterator { + self.params.iter() + } +} + +impl Parse for SimpleGenerics { + fn parse(input: ParseStream<'_>) -> Result { + Ok(Self { + lt_token: input.parse()?, + params: Punctuated::parse_separated_nonempty(input)?, + gt_token: input.parse()?, + }) + } +} + +impl ToTokens for SimpleGenerics { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lt_token.to_tokens(tokens); + self.params.to_tokens(tokens); + self.gt_token.to_tokens(tokens); + } +} + +mod storage_types { + syn::custom_keyword!(StorageValue); + syn::custom_keyword!(StorageMap); + syn::custom_keyword!(CountedStorageMap); + syn::custom_keyword!(StorageDoubleMap); + syn::custom_keyword!(StorageNMap); +} + +/// The types of prefixes the storage alias macro supports. +mod prefix_types { + // Use the verbatim/unmodified input name as the prefix. 
+ syn::custom_keyword!(verbatim); + // The input type is a pallet and its pallet name should be used as the prefix. + syn::custom_keyword!(pallet_name); + // The input type implements `Get<'static str>` and this `str` should be used as the prefix. + syn::custom_keyword!(dynamic); +} + +/// The supported storage types +enum StorageType { + Value { + _kw: storage_types::StorageValue, + _lt_token: Token![<], + prefix: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + Map { + _kw: storage_types::StorageMap, + _lt_token: Token![<], + prefix: Type, + _hasher_comma: Token![,], + hasher_ty: Type, + _key_comma: Token![,], + key_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + CountedMap { + _kw: storage_types::CountedStorageMap, + _lt_token: Token![<], + prefix: Type, + _hasher_comma: Token![,], + hasher_ty: Type, + _key_comma: Token![,], + key_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + DoubleMap { + _kw: storage_types::StorageDoubleMap, + _lt_token: Token![<], + prefix: Type, + _hasher1_comma: Token![,], + hasher1_ty: Type, + _key1_comma: Token![,], + key1_ty: Type, + _hasher2_comma: Token![,], + hasher2_ty: Type, + _key2_comma: Token![,], + key2_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + NMap { + _kw: storage_types::StorageNMap, + _lt_token: Token![<], + prefix: Type, + _paren_comma: Token![,], + _paren_token: token::Paren, + key_types: Punctuated, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, +} + +impl StorageType { + /// Generate the actual type 
declaration. + fn generate_type_declaration( + &self, + crate_: &syn::Path, + storage_instance: &StorageInstance, + storage_name: &Ident, + storage_generics: Option<&SimpleGenerics>, + visibility: &Visibility, + attributes: &[Attribute], + ) -> TokenStream { + let storage_instance_generics = &storage_instance.generics; + let storage_instance = &storage_instance.name; + let attributes = attributes.iter(); + let storage_generics = storage_generics.map(|g| { + let generics = g.type_generics(); + + quote!( < #( #generics ),* > ) + }); + + match self { + Self::Value { value_ty, query_type, .. } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageValue< + #storage_instance #storage_instance_generics, + #value_ty + #query_type + >; + } + }, + Self::CountedMap { value_ty, query_type, hasher_ty, key_ty, .. } | + Self::Map { value_ty, query_type, hasher_ty, key_ty, .. } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + let map_type = Ident::new( + match self { + Self::Map { .. } => "StorageMap", + _ => "CountedStorageMap", + }, + Span::call_site(), + ); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::#map_type< + #storage_instance #storage_instance_generics, + #hasher_ty, + #key_ty, + #value_ty + #query_type + >; + } + }, + Self::DoubleMap { + value_ty, + query_type, + hasher1_ty, + key1_ty, + hasher2_ty, + key2_ty, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageDoubleMap< + #storage_instance #storage_instance_generics, + #hasher1_ty, + #key1_ty, + #hasher2_ty, + #key2_ty, + #value_ty + #query_type + >; + } + }, + Self::NMap { value_ty, query_type, key_types, .. 
} => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + let key_types = key_types.iter(); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageNMap< + #storage_instance #storage_instance_generics, + ( #( #key_types ),* ), + #value_ty + #query_type + >; + } + }, + } + } + + /// The prefix for this storage type. + fn prefix(&self) -> &Type { + match self { + Self::Value { prefix, .. } | + Self::Map { prefix, .. } | + Self::CountedMap { prefix, .. } | + Self::NMap { prefix, .. } | + Self::DoubleMap { prefix, .. } => prefix, + } + } +} + +impl Parse for StorageType { + fn parse(input: ParseStream<'_>) -> Result { + let lookahead = input.lookahead1(); + + let parse_query_type = |input: ParseStream<'_>| -> Result> { + if input.peek(Token![,]) && !input.peek2(Token![>]) { + Ok(Some((input.parse()?, input.parse()?))) + } else { + Ok(None) + } + }; + + if lookahead.peek(storage_types::StorageValue) { + Ok(Self::Value { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageMap) { + Ok(Self::Map { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher_comma: input.parse()?, + hasher_ty: input.parse()?, + _key_comma: input.parse()?, + key_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::CountedStorageMap) { + Ok(Self::CountedMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher_comma: input.parse()?, + hasher_ty: 
input.parse()?, + _key_comma: input.parse()?, + key_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageDoubleMap) { + Ok(Self::DoubleMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher1_comma: input.parse()?, + hasher1_ty: input.parse()?, + _key1_comma: input.parse()?, + key1_ty: input.parse()?, + _hasher2_comma: input.parse()?, + hasher2_ty: input.parse()?, + _key2_comma: input.parse()?, + key2_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageNMap) { + let content; + Ok(Self::NMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _paren_comma: input.parse()?, + _paren_token: parenthesized!(content in input), + key_types: Punctuated::parse_terminated(&content)?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else { + Err(lookahead.error()) + } + } +} + +/// The input expected by this macro. 
+struct Input { + attributes: Vec, + visibility: Visibility, + _type: Token![type], + storage_name: Ident, + storage_generics: Option, + where_clause: Option, + _equal: Token![=], + storage_type: StorageType, + _semicolon: Token![;], +} + +impl Parse for Input { + fn parse(input: ParseStream<'_>) -> Result { + let attributes = input.call(Attribute::parse_outer)?; + let visibility = input.parse()?; + let _type = input.parse()?; + let storage_name = input.parse()?; + + let lookahead = input.lookahead1(); + let storage_generics = if lookahead.peek(Token![<]) { + Some(input.parse()?) + } else if lookahead.peek(Token![=]) { + None + } else { + return Err(lookahead.error()) + }; + + let lookahead = input.lookahead1(); + let where_clause = if lookahead.peek(Token![where]) { + Some(input.parse()?) + } else if lookahead.peek(Token![=]) { + None + } else { + return Err(lookahead.error()) + }; + + let _equal = input.parse()?; + + let storage_type = input.parse()?; + + let _semicolon = input.parse()?; + + Ok(Self { + attributes, + visibility, + _type, + storage_name, + storage_generics, + _equal, + storage_type, + where_clause, + _semicolon, + }) + } +} + +/// Defines which type of prefix the storage alias is using. +#[derive(Clone, Copy)] +enum PrefixType { + /// An appropriate prefix will be determined automatically. + /// + /// If generics are passed, this is assumed to be a pallet and the pallet name should be used. + /// Otherwise use the verbatim passed name as prefix. + Compatibility, + /// The provided ident/name will be used as the prefix. + Verbatim, + /// The provided type will be used to determine the prefix. This type must + /// implement `PalletInfoAccess` which specifies the proper name. This + /// name is then used as the prefix. + PalletName, + /// Uses the provided type implementing `Get<'static str>` to determine the prefix. + Dynamic, +} + +/// Implementation of the `storage_alias` attribute macro. 
+pub fn storage_alias(attributes: TokenStream, input: TokenStream) -> Result { + let input = syn::parse2::(input)?; + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + + let prefix_type = if attributes.is_empty() { + PrefixType::Compatibility + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::Verbatim + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::PalletName + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::Dynamic + } else { + return Err(Error::new(attributes.span(), "Unknown attributes")) + }; + + let storage_instance = generate_storage_instance( + &crate_, + &input.storage_name, + input.storage_generics.as_ref(), + input.where_clause.as_ref(), + input.storage_type.prefix(), + &input.visibility, + matches!(input.storage_type, StorageType::CountedMap { .. }), + prefix_type, + )?; + + let definition = input.storage_type.generate_type_declaration( + &crate_, + &storage_instance, + &input.storage_name, + input.storage_generics.as_ref(), + &input.visibility, + &input.attributes, + ); + + let storage_instance_code = storage_instance.code; + + Ok(quote! { + #storage_instance_code + + #definition + }) +} + +/// The storage instance to use for the storage alias. +struct StorageInstance { + name: Ident, + generics: TokenStream, + code: TokenStream, +} + +/// Generate the [`StorageInstance`] for the storage alias. 
+fn generate_storage_instance( + crate_: &syn::Path, + storage_name: &Ident, + storage_generics: Option<&SimpleGenerics>, + storage_where_clause: Option<&WhereClause>, + prefix: &Type, + visibility: &Visibility, + is_counted_map: bool, + prefix_type: PrefixType, +) -> Result { + if let Type::Infer(_) = prefix { + return Err(Error::new(prefix.span(), "`_` is not allowed as prefix by `storage_alias`.")) + } + + let impl_generics_used_by_prefix = storage_generics + .as_ref() + .map(|g| { + g.impl_generics() + .filter(|g| prefix.contains_ident(&g.ident)) + .collect::>() + }) + .unwrap_or_default(); + + let (pallet_prefix, impl_generics, type_generics) = match prefix_type { + PrefixType::Compatibility => + if !impl_generics_used_by_prefix.is_empty() { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! { + < #prefix as #crate_::traits::PalletInfoAccess>::name() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + } else if let Some(prefix) = prefix.get_ident() { + let prefix_str = prefix.to_string(); + + (quote!(#prefix_str), quote!(), quote!()) + } else { + return Err(Error::new_spanned( + prefix, + "If there are no generics, the prefix is only allowed to be an identifier.", + )) + }, + PrefixType::Verbatim => { + let prefix_str = match prefix.get_ident() { + Some(p) => p.to_string(), + None => + return Err(Error::new_spanned( + prefix, + "Prefix type `verbatim` requires that the prefix is an ident.", + )), + }; + + (quote!(#prefix_str), quote!(), quote!()) + }, + PrefixType::PalletName => { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! 
{ + <#prefix as #crate_::traits::PalletInfoAccess>::name() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + }, + PrefixType::Dynamic => { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! { + <#prefix as #crate_::traits::Get<_>>::get() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + }, + }; + + let where_clause = storage_where_clause.map(|w| quote!(#w)).unwrap_or_default(); + + let name_str = format!("{}_Storage_Instance", storage_name); + let name = Ident::new(&name_str, Span::call_site()); + let storage_name_str = storage_name.to_string(); + + let counter_code = is_counted_map.then(|| { + let counter_name = Ident::new(&counter_prefix(&name_str), Span::call_site()); + let counter_storage_name_str = counter_prefix(&storage_name_str); + let storage_prefix_hash = helper::two128_str(&counter_storage_name_str); + + quote! { + #visibility struct #counter_name< #impl_generics >( + ::core::marker::PhantomData<(#type_generics)> + ) #where_clause; + + impl<#impl_generics> #crate_::traits::StorageInstance + for #counter_name< #type_generics > #where_clause + { + fn pallet_prefix() -> &'static str { + #pallet_prefix + } + + const STORAGE_PREFIX: &'static str = #counter_storage_name_str; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + + impl<#impl_generics> #crate_::storage::types::CountedStorageMapInstance + for #name< #type_generics > #where_clause + { + type CounterPrefix = #counter_name < #type_generics >; + } + } + }); + + let storage_prefix_hash = helper::two128_str(&storage_name_str); + + // Implement `StorageInstance` trait. + let code = quote! 
{ + #[allow(non_camel_case_types)] + #visibility struct #name< #impl_generics >( + ::core::marker::PhantomData<(#type_generics)> + ) #where_clause; + + impl<#impl_generics> #crate_::traits::StorageInstance + for #name< #type_generics > #where_clause + { + fn pallet_prefix() -> &'static str { + #pallet_prefix + } + + const STORAGE_PREFIX: &'static str = #storage_name_str; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + + #counter_code + }; + + Ok(StorageInstance { name, code, generics: quote!( < #type_generics > ) }) +} diff --git a/support/procedural-fork/src/transactional.rs b/support/procedural-fork/src/transactional.rs new file mode 100644 index 000000000..e9d4f84b7 --- /dev/null +++ b/support/procedural-fork/src/transactional.rs @@ -0,0 +1,60 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use proc_macro::TokenStream; +use quote::quote; +use syn::{ItemFn, Result}; + +pub fn transactional(_attr: TokenStream, input: TokenStream) -> Result { + let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let output = quote! 
{ + #(#attrs)* + #vis #sig { + use #crate_::storage::{with_transaction, TransactionOutcome}; + with_transaction(|| { + let r = (|| { #block })(); + if r.is_ok() { + TransactionOutcome::Commit(r) + } else { + TransactionOutcome::Rollback(r) + } + }) + } + }; + + Ok(output.into()) +} + +pub fn require_transactional(_attr: TokenStream, input: TokenStream) -> Result { + let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let output = quote! { + #(#attrs)* + #vis #sig { + if !#crate_::storage::transactional::is_transactional() { + return Err(#crate_::sp_runtime::TransactionalError::NoLayer.into()); + } + #block + } + }; + + Ok(output.into()) +} diff --git a/support/procedural-fork/src/tt_macro.rs b/support/procedural-fork/src/tt_macro.rs new file mode 100644 index 000000000..d37127421 --- /dev/null +++ b/support/procedural-fork/src/tt_macro.rs @@ -0,0 +1,105 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! 
Implementation of the `create_tt_return_macro` macro + +use crate::COUNTER; +use proc_macro2::{Ident, TokenStream}; +use quote::format_ident; + +struct CreateTtReturnMacroDef { + name: Ident, + args: Vec<(Ident, TokenStream)>, +} + +impl syn::parse::Parse for CreateTtReturnMacroDef { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let name = input.parse()?; + let _ = input.parse::()?; + + let mut args = Vec::new(); + while !input.is_empty() { + let mut value; + let key: Ident = input.parse()?; + let _ = input.parse::()?; + let _: syn::token::Bracket = syn::bracketed!(value in input); + let _: syn::token::Brace = syn::braced!(value in value); + let value: TokenStream = value.parse()?; + + args.push((key, value)) + } + + Ok(Self { name, args }) + } +} + +/// A proc macro that accepts a name and any number of key-value pairs, to be used to create a +/// declarative macro that follows tt-call conventions and simply calls +/// [`tt_call::tt_return`], accepting an optional `frame-support` argument and returning +/// the key-value pairs that were supplied to the proc macro. +/// +/// # Example +/// ```ignore +/// __create_tt_macro! { +/// my_tt_macro, +/// foo = [{ bar }] +/// } +/// +/// // Creates the following declarative macro: +/// +/// macro_rules! my_tt_macro { +/// { +/// $caller:tt +/// $(your_tt_return = [{ $my_tt_return:path }])? +/// } => { +/// $my_tt_return! { +/// $caller +/// foo = [{ bar }] +/// } +/// } +/// } +/// ``` +pub fn create_tt_return_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let CreateTtReturnMacroDef { name, args } = + syn::parse_macro_input!(input as CreateTtReturnMacroDef); + + let (keys, values): (Vec<_>, Vec<_>) = args.into_iter().unzip(); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let unique_name = format_ident!("{}_{}", name, count); + + let decl_macro = quote::quote! { + #[macro_export] + #[doc(hidden)] + macro_rules! 
#unique_name { + { + $caller:tt + $(your_tt_return = [{ $my_tt_macro:path }])? + } => { + $my_tt_return! { + $caller + #( + #keys = [{ #values }] + )* + } + } + } + + pub use #unique_name as #name; + }; + + decl_macro.into() +} From dd5c84b4e6b0a7757115cef8cafbbc91cc296df6 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:02:43 -0400 Subject: [PATCH 066/213] fix warnings --- support/procedural-fork/src/lib.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index 08ce0a73c..efc3ee6a7 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -19,12 +19,7 @@ mod storage_alias; mod transactional; mod tt_macro; -use frame_support_procedural_tools::generate_access_from_frame_or_crate; -use macro_magic::{import_tokens_attr, import_tokens_attr_verbatim}; -use proc_macro::TokenStream; -use quote::{quote, ToTokens}; use std::{cell::RefCell, str::FromStr}; -use syn::{parse_macro_input, Error, ItemImpl, ItemMod, TraitItemType}; pub(crate) const INHERENT_INSTANCE_NAME: &str = "__InherentHiddenInstance"; From 15fedec861fc0ee1a1b330b4120498f743e2e194 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:19:44 -0400 Subject: [PATCH 067/213] publicly export everything --- support/procedural-fork/src/lib.rs | 92 ++++++++++++++++++++++++++++++ 1 file changed, 92 insertions(+) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index efc3ee6a7..5292c7834 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -60,3 +60,95 @@ fn get_cargo_env_var(version_env: &str) -> std::result::Result String { format!("CounterFor{}", prefix) } + +pub mod exports { + pub mod benchmark { + pub use crate::benchmark::*; + } + + pub mod crate_version { + pub use crate::crate_version::*; + } + + pub mod derive_impl { + pub use crate::derive_impl::*; + } + + pub mod dummy_part_checker { + pub 
use crate::dummy_part_checker::*; + } + + pub mod dynamic_params { + pub use crate::dynamic_params::*; + } + + pub mod key_prefix { + pub use crate::key_prefix::*; + } + + pub mod match_and_insert { + pub use crate::match_and_insert::*; + } + + pub mod pallet_error { + pub use crate::pallet_error::*; + } + + pub mod storage_alias { + pub use crate::storage_alias::*; + } + + pub mod transactional { + pub use crate::transactional::*; + } + + pub mod tt_macro { + pub use crate::tt_macro::*; + } + + pub mod construct_runtime { + pub use crate::construct_runtime::*; + + pub mod parse { + pub use crate::construct_runtime::parse::*; + } + + pub mod expand { + pub use crate::construct_runtime::expand::*; + } + } + + pub mod no_bound { + pub mod clone { + pub use crate::no_bound::clone::*; + } + + pub mod debug { + pub use crate::no_bound::debug::*; + } + + pub mod default { + pub use crate::no_bound::default::*; + } + + pub mod ord { + pub use crate::no_bound::ord::*; + } + + pub mod partial_eq { + pub use crate::no_bound::partial_eq::*; + } + + pub mod partial_ord { + pub use crate::no_bound::partial_ord::*; + } + } + + pub mod pallet { + pub use crate::pallet::*; + + pub mod parse { + pub use crate::pallet::parse::*; + } + } +} From 472337733ddc930ed004cedb2b76ff10ff489f30 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:27:26 -0400 Subject: [PATCH 068/213] add docs --- support/procedural-fork/src/lib.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index 5292c7834..cce0c65fa 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -1,3 +1,13 @@ +//! This crate is a fork of the `frame-support-procedural` crate from +//! `substrate/frame/support/procedural` in `polkadot-sdk`. The purpose of this fork is to +//! re-export all parsing code from the original crate to make it accessible to other crates, +//! 
since the original crate is a `proc-macro` crate and therefore cannot have any non-macro +//! public exports. If Parity ever decides to move the parsing code to a separate crate, this +//! fork will no longer need to exist. +//! +//! Tags will be created for each major version of `polkadot-sdk` that `subtensor` relies on, +//! on an as-needed, ad-hoc basis, and versions will matched the corresponding `polkadot-sdk` +//! version/tag name. #![recursion_limit = "512"] #![deny(rustdoc::broken_intra_doc_links)] From aa57e9c32e221439b3f1c709dca662a84ec012dc Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:29:01 -0400 Subject: [PATCH 069/213] tweak --- support/procedural-fork/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index cce0c65fa..2ac076636 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -3,7 +3,8 @@ //! re-export all parsing code from the original crate to make it accessible to other crates, //! since the original crate is a `proc-macro` crate and therefore cannot have any non-macro //! public exports. If Parity ever decides to move the parsing code to a separate crate, this -//! fork will no longer need to exist. +//! fork will no longer need to exist, but right now this is the only reliable way to get +//! access to the core parsing logic of substrate. //! //! Tags will be created for each major version of `polkadot-sdk` that `subtensor` relies on, //! 
on an as-needed, ad-hoc basis, and versions will matched the corresponding `polkadot-sdk` From 6122efa8e968bcea7452b425a6ac9bde86b885b4 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:42:39 -0400 Subject: [PATCH 070/213] new lint using procedural-fork --- support/linting/src/pallet_index.rs | 209 ++++++---------------------- 1 file changed, 44 insertions(+), 165 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index b373a04b2..80e5e234d 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -1,12 +1,6 @@ use super::*; -use quote::ToTokens; -use syn::braced; -use syn::parse::{Parse, ParseStream}; -use syn::punctuated::Punctuated; -use syn::spanned::Spanned; -use syn::token::Colon; -use syn::visit::Visit; -use syn::{File, Ident, ItemMacro, Path, Token, Visibility}; +use procedural_fork::exports::construct_runtime::parse::RuntimeDeclaration; +use syn::{visit::Visit, File}; pub struct RequireExplicitPalletIndex; @@ -29,133 +23,42 @@ struct ConstructRuntimeVisitor { errors: Vec, } -impl<'ast> Visit<'ast> for ConstructRuntimeVisitor { - fn visit_item_macro(&mut self, node: &'ast ItemMacro) { +impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { + fn visit_item_macro(&mut self, node: &'ast syn::ItemMacro) { if node.mac.path.is_ident("construct_runtime") { - // Token stream parsing logic let tokens = node.mac.tokens.clone(); - println!("Parsing construct_runtime! 
tokens: {}", tokens.to_string()); - let result = syn::parse2::(tokens); - if let Ok(runtime_entries) = result { - for entry in runtime_entries.entries { - // Check if the entry is missing an explicit index - if entry.index.is_none() { - self.errors.push(syn::Error::new( - entry.pallet_name.span(), - format!( - "Pallet `{}` does not have an explicit index in construct_runtime!", - entry.pallet_name.to_token_stream().to_string().trim() - ), - )); + // Attempt to parse the construct_runtime invocation. + match syn::parse2::(tokens) { + Ok(runtime_decl) => { + if let RuntimeDeclaration::Explicit(runtime) = runtime_decl { + for pallet in runtime.pallets { + if pallet.index.is_none() { + self.errors.push(syn::Error::new( + pallet.name.span(), + format!( + "Pallet `{}` does not have an explicit index in construct_runtime!", + pallet.name.to_token_stream() + ), + )); + } + } } } - } else { - // Log error - println!("Failed to parse construct_runtime! block: {:?}", result); - self.errors.push(result.unwrap_err()); + Err(e) => self.errors.push(e), } } - // Continue visiting the rest of the file syn::visit::visit_item_macro(self, node); } } -#[derive(Debug)] -struct ConstructRuntimeEntries { - entries: Punctuated, -} - -impl Parse for ConstructRuntimeEntries { - fn parse(input: ParseStream) -> syn::Result { - let entries = input.parse_terminated(PalletEntry::parse, Token![,])?; - Ok(ConstructRuntimeEntries { entries }) - } -} - -#[derive(Debug)] -struct PalletEntry { - visibility: Option, - pallet_name: Path, - components: Option, - index: Option, -} - -impl Parse for PalletEntry { - fn parse(input: ParseStream) -> syn::Result { - // Optionally parse visibility (e.g., `pub`) - let visibility: Option = input.parse().ok(); - - // Parse the pallet name (handling complex paths with generics and nested components) - let pallet_name = parse_complex_pallet_path(input)?; - - // Optionally parse the components in `{ Pallet, Call, Storage }` - let components = if 
input.peek(syn::token::Brace) { - let content; - braced!(content in input); - Some(content.parse::()?) - } else { - None - }; - - // Optionally parse the index if it's present - let index = if input.peek(Colon) { - input.parse::()?; - Some(input.parse::()?) - } else { - None - }; - - Ok(PalletEntry { - visibility, - pallet_name, - components, - index, - }) - } -} - -fn parse_complex_pallet_path(input: ParseStream) -> syn::Result { - // Parse the base path (e.g., `pallet_collective`) - let path = input.parse::()?; - - // If there are generics like `::`, handle them - if input.peek(syn::token::Lt) { - let _generics: syn::AngleBracketedGenericArguments = input.parse()?; - } - - // Now handle nested components like `{ Pallet, Call, Storage }` - if input.peek(syn::token::Brace) { - let content; - braced!(content in input); - let components: Punctuated = - content.parse_terminated(Ident::parse, Token![,])?; - println!("Parsed components: {:?}", components); - } - - Ok(path) -} - -#[derive(Debug)] -struct PalletComponents { - components: Punctuated, -} - -impl Parse for PalletComponents { - fn parse(input: ParseStream) -> syn::Result { - Ok(PalletComponents { - components: input.parse_terminated(Ident::parse, Token![,])?, - }) - } -} - #[cfg(test)] mod tests { use super::*; - fn lint_macro(input: &str) -> Result { - let item_macro: ItemMacro = syn::parse_str(input).expect("should only use on a macro"); + fn lint_macro(input: &str) -> Result<()> { + let item_macro: syn::ItemMacro = syn::parse_str(input).expect("should only use on a macro"); let mut visitor = ConstructRuntimeVisitor::default(); visitor.visit_item_macro(&item_macro); if !visitor.errors.is_empty() { @@ -208,52 +111,6 @@ mod tests { lint_macro(input).unwrap(); } - #[test] - fn test_with_generic_and_index() { - let input = r#" - construct_runtime!( - PalletA, - pallet_collective::::{ Pallet, Call, Storage }: 1 - ); - "#; - lint_macro(input).unwrap(); - } - - #[test] - fn test_with_nested_and_missing_index() { - 
let input = r#" - construct_runtime!( - PalletA, - pallet_collective::::{ Pallet, Call, Storage } - ); - "#; - lint_macro(input).unwrap_err(); - } - - #[test] - fn test_complex_construct_runtime_enum_should_fail() { - // This test should fail because there are no explicit indices for the pallets - let input = r#" - construct_runtime! { - pub enum Test { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, - Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, - TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, - Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, - SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, - SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event}, - Utility: pallet_utility::{Pallet, Call, Storage, Event}, - Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, - Preimage: pallet_preimage::{Pallet, Call, Storage, Event}, - } - } - "#; - - // This should fail because there are no explicit indices - lint_macro(input).unwrap_err(); - } - #[test] fn test_complex_construct_runtime_struct() { let input = r#" @@ -286,4 +143,26 @@ mod tests { lint_macro(input).unwrap(); } + + #[test] + fn test_complex_construct_runtime_enum_should_fail() { + let input = r#" + construct_runtime! 
{ + pub enum Test { + System: frame_system::{Pallet, Call, Config, Storage, Event}, + Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, + Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, + TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, + Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, + SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event}, + Utility: pallet_utility::{Pallet, Call, Storage, Event}, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, + Preimage: pallet_preimage::{Pallet, Call, Storage, Event}, + } + } + "#; + + lint_macro(input).unwrap_err(); + } } From 523d469eb143355677e98b5055cecd42d83e87f1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:56:15 -0400 Subject: [PATCH 071/213] passing :tada: :tada: :boom: !!! --- support/linting/src/pallet_index.rs | 58 +++++++++++++---------------- 1 file changed, 26 insertions(+), 32 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 80e5e234d..fe84aaea2 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -1,5 +1,6 @@ use super::*; use procedural_fork::exports::construct_runtime::parse::RuntimeDeclaration; +use quote::ToTokens; use syn::{visit::Visit, File}; pub struct RequireExplicitPalletIndex; @@ -33,7 +34,7 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { Ok(runtime_decl) => { if let RuntimeDeclaration::Explicit(runtime) = runtime_decl { for pallet in runtime.pallets { - if pallet.index.is_none() { + if pallet.index == 0 { self.errors.push(syn::Error::new( pallet.name.span(), format!( @@ -57,7 +58,7 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { mod tests { use super::*; - fn lint_macro(input: &str) -> Result<()> { + fn lint_macro(input: &str) 
-> Result { let item_macro: syn::ItemMacro = syn::parse_str(input).expect("should only use on a macro"); let mut visitor = ConstructRuntimeVisitor::default(); visitor.visit_item_macro(&item_macro); @@ -67,50 +68,43 @@ mod tests { Ok(()) } + // Corrected test cases + #[test] fn test_no_pallet_index() { + // Updated with valid `construct_runtime!` syntax let input = r#" - construct_runtime!( - PalletA, - PalletB - ); + construct_runtime! { + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + PalletA, + PalletB + } + } "#; lint_macro(input).unwrap_err(); } - #[test] - fn test_with_pallet_index() { - let input = r#" - construct_runtime!( - PalletA: 0, - PalletB: 1 - ); - "#; - lint_macro(input).unwrap(); - } - #[test] fn test_mixed_pallet_index() { let input = r#" - construct_runtime!( - PalletA, - PalletB: 1 - ); + construct_runtime! { + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + PalletA, + PalletB: 1 + } + } "#; lint_macro(input).unwrap_err(); } - #[test] - fn test_with_visibility_and_index() { - let input = r#" - construct_runtime!( - pub PalletA: 0, - PalletB: 1 - ); - "#; - lint_macro(input).unwrap(); - } - #[test] fn test_complex_construct_runtime_struct() { let input = r#" From 160c7b21c8dc6e9b47132aed95fcb30162da4bcb Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:57:21 -0400 Subject: [PATCH 072/213] more test examples --- support/linting/src/pallet_index.rs | 85 ++++++++++++++++++++++++++++- 1 file changed, 84 insertions(+), 1 deletion(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index fe84aaea2..084f3be6c 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -72,7 +72,6 @@ mod tests { #[test] fn test_no_pallet_index() { - // Updated with valid `construct_runtime!` syntax let input = r#" construct_runtime! 
{ pub enum Test where @@ -159,4 +158,88 @@ mod tests { lint_macro(input).unwrap_err(); } + + #[test] + fn test_with_multiple_instances() { + let input = r#" + construct_runtime! { + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + Instance1: pallet_collective::::{Pallet, Call, Storage} = 1, + Instance2: pallet_collective::::{Pallet, Call, Storage} = 2, + Balances: pallet_balances = 3 + } + } + "#; + lint_macro(input).unwrap(); + } + + #[test] + fn test_missing_pallet_parts() { + let input = r#" + construct_runtime! { + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + PalletA = 0, + PalletB + } + } + "#; + lint_macro(input).unwrap_err(); + } + + #[test] + fn test_with_expanded_pallet() { + let input = r#" + construct_runtime! { + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + ExpandedPallet: pallet_balances expanded::{Pallet, Call, Storage} = 1, + RegularPallet: pallet_sudo = 2 + } + } + "#; + lint_macro(input).unwrap(); + } + + #[test] + fn test_with_no_pallets() { + let input = r#" + construct_runtime! { + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + } + } + "#; + lint_macro(input).unwrap(); + } + + #[test] + fn test_with_pallet_alias() { + let input = r#" + construct_runtime! { + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + MyAlias: pallet_balances = 1, + OtherAlias: pallet_timestamp = 2 + } + } + "#; + lint_macro(input).unwrap(); + } } From 6b46371525a80fe47c74be7efd33b7ad9e6460bc Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 16:59:19 -0400 Subject: [PATCH 073/213] refactor to use quote! 
directly --- support/linting/src/pallet_index.rs | 141 ++++++++++++++-------------- 1 file changed, 69 insertions(+), 72 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 084f3be6c..31e9c0af3 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -57,9 +57,10 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { #[cfg(test)] mod tests { use super::*; + use quote::quote; - fn lint_macro(input: &str) -> Result { - let item_macro: syn::ItemMacro = syn::parse_str(input).expect("should only use on a macro"); + fn lint_macro(tokens: proc_macro2::TokenStream) -> Result { + let item_macro: syn::ItemMacro = syn::parse2(tokens).expect("should only use on a macro"); let mut visitor = ConstructRuntimeVisitor::default(); visitor.visit_item_macro(&item_macro); if !visitor.errors.is_empty() { @@ -68,11 +69,9 @@ mod tests { Ok(()) } - // Corrected test cases - #[test] fn test_no_pallet_index() { - let input = r#" + let tokens = quote! { construct_runtime! { pub enum Test where Block = Block, @@ -83,13 +82,13 @@ mod tests { PalletB } } - "#; - lint_macro(input).unwrap_err(); + }; + lint_macro(tokens).unwrap_err(); } #[test] fn test_mixed_pallet_index() { - let input = r#" + let tokens = quote! { construct_runtime! { pub enum Test where Block = Block, @@ -100,68 +99,66 @@ mod tests { PalletB: 1 } } - "#; - lint_macro(input).unwrap_err(); + }; + lint_macro(tokens).unwrap_err(); } #[test] fn test_complex_construct_runtime_struct() { - let input = r#" - construct_runtime! 
{ - pub struct Runtime { - System : frame_system = 0, - RandomnessCollectiveFlip : pallet_insecure_randomness_collective_flip = 1, - Timestamp : pallet_timestamp = 2, - Aura : pallet_aura = 3, - Grandpa : pallet_grandpa = 4, - Balances : pallet_balances = 5, - TransactionPayment : pallet_transaction_payment = 6, - SubtensorModule : pallet_subtensor = 7, - Triumvirate : pallet_collective::::{ Pallet, Call, Storage, Origin, Event, Config } = 8, - TriumvirateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 9, - SenateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 10, - Utility : pallet_utility = 11, - Sudo : pallet_sudo = 12, - Multisig : pallet_multisig = 13, - Preimage : pallet_preimage = 14, - Scheduler : pallet_scheduler = 15, - Proxy : pallet_proxy = 16, - Registry : pallet_registry = 17, - Commitments : pallet_commitments = 18, - AdminUtils : pallet_admin_utils = 19, - SafeMode : pallet_safe_mode = 20 - } - } - "#; - - lint_macro(input).unwrap(); + let tokens = quote! { + construct_runtime! 
{ + pub struct Runtime { + System : frame_system = 0, + RandomnessCollectiveFlip : pallet_insecure_randomness_collective_flip = 1, + Timestamp : pallet_timestamp = 2, + Aura : pallet_aura = 3, + Grandpa : pallet_grandpa = 4, + Balances : pallet_balances = 5, + TransactionPayment : pallet_transaction_payment = 6, + SubtensorModule : pallet_subtensor = 7, + Triumvirate : pallet_collective::::{ Pallet, Call, Storage, Origin, Event, Config } = 8, + TriumvirateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 9, + SenateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 10, + Utility : pallet_utility = 11, + Sudo : pallet_sudo = 12, + Multisig : pallet_multisig = 13, + Preimage : pallet_preimage = 14, + Scheduler : pallet_scheduler = 15, + Proxy : pallet_proxy = 16, + Registry : pallet_registry = 17, + Commitments : pallet_commitments = 18, + AdminUtils : pallet_admin_utils = 19, + SafeMode : pallet_safe_mode = 20 + } + } + }; + lint_macro(tokens).unwrap(); } #[test] fn test_complex_construct_runtime_enum_should_fail() { - let input = r#" - construct_runtime! { - pub enum Test { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, - Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, - TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, - Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, - SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, - SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event}, - Utility: pallet_utility::{Pallet, Call, Storage, Event}, - Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, - Preimage: pallet_preimage::{Pallet, Call, Storage, Event}, - } - } - "#; - - lint_macro(input).unwrap_err(); + let tokens = quote! { + construct_runtime! 
{ + pub enum Test { + System: frame_system::{Pallet, Call, Config, Storage, Event}, + Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, + Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, + TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, + Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, + SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event}, + Utility: pallet_utility::{Pallet, Call, Storage, Event}, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, + Preimage: pallet_preimage::{Pallet, Call, Storage, Event}, + } + } + }; + lint_macro(tokens).unwrap_err(); } #[test] fn test_with_multiple_instances() { - let input = r#" + let tokens = quote! { construct_runtime! { pub enum Test where Block = Block, @@ -173,13 +170,13 @@ mod tests { Balances: pallet_balances = 3 } } - "#; - lint_macro(input).unwrap(); + }; + lint_macro(tokens).unwrap(); } #[test] fn test_missing_pallet_parts() { - let input = r#" + let tokens = quote! { construct_runtime! { pub enum Test where Block = Block, @@ -190,13 +187,13 @@ mod tests { PalletB } } - "#; - lint_macro(input).unwrap_err(); + }; + lint_macro(tokens).unwrap_err(); } #[test] fn test_with_expanded_pallet() { - let input = r#" + let tokens = quote! { construct_runtime! { pub enum Test where Block = Block, @@ -207,13 +204,13 @@ mod tests { RegularPallet: pallet_sudo = 2 } } - "#; - lint_macro(input).unwrap(); + }; + lint_macro(tokens).unwrap(); } #[test] fn test_with_no_pallets() { - let input = r#" + let tokens = quote! { construct_runtime! { pub enum Test where Block = Block, @@ -222,13 +219,13 @@ mod tests { { } } - "#; - lint_macro(input).unwrap(); + }; + lint_macro(tokens).unwrap(); } #[test] fn test_with_pallet_alias() { - let input = r#" + let tokens = quote! { construct_runtime! 
{ pub enum Test where Block = Block, @@ -239,7 +236,7 @@ mod tests { OtherAlias: pallet_timestamp = 2 } } - "#; - lint_macro(input).unwrap(); + }; + lint_macro(tokens).unwrap(); } } From ff1cb0de0140616976813bed541115a699ea7ead Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 17:02:08 -0400 Subject: [PATCH 074/213] use explicit unwrap for testing purposes --- support/linting/src/pallet_index.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 31e9c0af3..a97f1a95a 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -60,7 +60,7 @@ mod tests { use quote::quote; fn lint_macro(tokens: proc_macro2::TokenStream) -> Result { - let item_macro: syn::ItemMacro = syn::parse2(tokens).expect("should only use on a macro"); + let item_macro: syn::ItemMacro = syn::parse2(tokens).unwrap(); let mut visitor = ConstructRuntimeVisitor::default(); visitor.visit_item_macro(&item_macro); if !visitor.errors.is_empty() { From ed529c7e7e636e030623bb09f344803e09a1dbe0 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 17:14:52 -0400 Subject: [PATCH 075/213] handle implicit --- support/linting/src/pallet_index.rs | 227 +++++++++++----------------- 1 file changed, 92 insertions(+), 135 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index a97f1a95a..9f9b49094 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -8,7 +8,6 @@ pub struct RequireExplicitPalletIndex; impl Lint for RequireExplicitPalletIndex { fn lint(source: &File) -> Result { let mut visitor = ConstructRuntimeVisitor::default(); - visitor.visit_file(source); if !visitor.errors.is_empty() { @@ -32,18 +31,35 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { // Attempt to parse the construct_runtime invocation. 
match syn::parse2::(tokens) { Ok(runtime_decl) => { - if let RuntimeDeclaration::Explicit(runtime) = runtime_decl { - for pallet in runtime.pallets { - if pallet.index == 0 { - self.errors.push(syn::Error::new( - pallet.name.span(), - format!( - "Pallet `{}` does not have an explicit index in construct_runtime!", - pallet.name.to_token_stream() - ), - )); + match runtime_decl { + RuntimeDeclaration::Explicit(runtime) => { + for pallet in runtime.pallets { + if pallet.index == 0 { + self.errors.push(syn::Error::new( + pallet.name.span(), + format!( + "Pallet `{}` does not have an explicit index in construct_runtime!", + pallet.name.to_token_stream() + ), + )); + } + } + } + RuntimeDeclaration::Implicit(runtime) => { + for pallet in runtime.pallets { + // Check if the index is missing (implicit declaration) + if pallet.index.is_none() { + self.errors.push(syn::Error::new( + pallet.name.span(), + format!( + "Pallet `{}` does not have an explicit index in the implicit construct_runtime!", + pallet.name.to_token_stream() + ), + )); + } } } + _ => {} } } Err(e) => self.errors.push(e), @@ -59,8 +75,8 @@ mod tests { use super::*; use quote::quote; - fn lint_macro(tokens: proc_macro2::TokenStream) -> Result { - let item_macro: syn::ItemMacro = syn::parse2(tokens).unwrap(); + fn lint_macro(input: proc_macro2::TokenStream) -> Result { + let item_macro: syn::ItemMacro = syn::parse2(input).unwrap(); let mut visitor = ConstructRuntimeVisitor::default(); visitor.visit_item_macro(&item_macro); if !visitor.errors.is_empty() { @@ -69,9 +85,12 @@ mod tests { Ok(()) } + // Corrected test cases + #[test] fn test_no_pallet_index() { - let tokens = quote! { + // Updated with valid `construct_runtime!` syntax + let input = quote! { construct_runtime! { pub enum Test where Block = Block, @@ -83,12 +102,12 @@ mod tests { } } }; - lint_macro(tokens).unwrap_err(); + lint_macro(input).unwrap_err(); } #[test] fn test_mixed_pallet_index() { - let tokens = quote! { + let input = quote! 
{ construct_runtime! { pub enum Test where Block = Block, @@ -100,143 +119,81 @@ mod tests { } } }; - lint_macro(tokens).unwrap_err(); + lint_macro(input).unwrap_err(); } #[test] fn test_complex_construct_runtime_struct() { - let tokens = quote! { - construct_runtime! { - pub struct Runtime { - System : frame_system = 0, - RandomnessCollectiveFlip : pallet_insecure_randomness_collective_flip = 1, - Timestamp : pallet_timestamp = 2, - Aura : pallet_aura = 3, - Grandpa : pallet_grandpa = 4, - Balances : pallet_balances = 5, - TransactionPayment : pallet_transaction_payment = 6, - SubtensorModule : pallet_subtensor = 7, - Triumvirate : pallet_collective::::{ Pallet, Call, Storage, Origin, Event, Config } = 8, - TriumvirateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 9, - SenateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 10, - Utility : pallet_utility = 11, - Sudo : pallet_sudo = 12, - Multisig : pallet_multisig = 13, - Preimage : pallet_preimage = 14, - Scheduler : pallet_scheduler = 15, - Proxy : pallet_proxy = 16, - Registry : pallet_registry = 17, - Commitments : pallet_commitments = 18, - AdminUtils : pallet_admin_utils = 19, - SafeMode : pallet_safe_mode = 20 - } + let input = quote! { + construct_runtime! 
{ + pub struct Runtime { + System : frame_system = 0, + RandomnessCollectiveFlip : pallet_insecure_randomness_collective_flip = 1, + Timestamp : pallet_timestamp = 2, + Aura : pallet_aura = 3, + Grandpa : pallet_grandpa = 4, + Balances : pallet_balances = 5, + TransactionPayment : pallet_transaction_payment = 6, + SubtensorModule : pallet_subtensor = 7, + Triumvirate : pallet_collective::::{ Pallet, Call, Storage, Origin, Event, Config } = 8, + TriumvirateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 9, + SenateMembers : pallet_membership::::{ Pallet, Call, Storage, Event, Config } = 10, + Utility : pallet_utility = 11, + Sudo : pallet_sudo = 12, + Multisig : pallet_multisig = 13, + Preimage : pallet_preimage = 14, + Scheduler : pallet_scheduler = 15, + Proxy : pallet_proxy = 16, + Registry : pallet_registry = 17, + Commitments : pallet_commitments = 18, + AdminUtils : pallet_admin_utils = 19, + SafeMode : pallet_safe_mode = 20 } + } }; - lint_macro(tokens).unwrap(); - } - #[test] - fn test_complex_construct_runtime_enum_should_fail() { - let tokens = quote! { - construct_runtime! 
{ - pub enum Test { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, - Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, - TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, - Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, - SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, - SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event}, - Utility: pallet_utility::{Pallet, Call, Storage, Event}, - Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, - Preimage: pallet_preimage::{Pallet, Call, Storage, Event}, - } - } - }; - lint_macro(tokens).unwrap_err(); + lint_macro(input).unwrap(); } #[test] - fn test_with_multiple_instances() { - let tokens = quote! { - construct_runtime! { - pub enum Test where - Block = Block, - NodeBlock = Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - Instance1: pallet_collective::::{Pallet, Call, Storage} = 1, - Instance2: pallet_collective::::{Pallet, Call, Storage} = 2, - Balances: pallet_balances = 3 - } - } - }; - lint_macro(tokens).unwrap(); - } - - #[test] - fn test_missing_pallet_parts() { - let tokens = quote! { - construct_runtime! { - pub enum Test where - Block = Block, - NodeBlock = Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - PalletA = 0, - PalletB - } + fn test_complex_construct_runtime_enum_should_fail() { + let input = quote! { + construct_runtime! 
{ + pub enum Test { + System: frame_system::{Pallet, Call, Config, Storage, Event}, + Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, + Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, + TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, + Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, + SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event}, + Utility: pallet_utility::{Pallet, Call, Storage, Event}, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, + Preimage: pallet_preimage::{Pallet, Call, Storage, Event}, } + } }; - lint_macro(tokens).unwrap_err(); - } - #[test] - fn test_with_expanded_pallet() { - let tokens = quote! { - construct_runtime! { - pub enum Test where - Block = Block, - NodeBlock = Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - ExpandedPallet: pallet_balances expanded::{Pallet, Call, Storage} = 1, - RegularPallet: pallet_sudo = 2 - } - } - }; - lint_macro(tokens).unwrap(); + lint_macro(input).unwrap_err(); } + // New test for implicit construct_runtime #[test] - fn test_with_no_pallets() { - let tokens = quote! { - construct_runtime! { - pub enum Test where - Block = Block, - NodeBlock = Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - } + fn test_implicit_construct_runtime_should_fail() { + let input = quote! { + construct_runtime! { + pub struct Runtime { + System: frame_system = 0, + RandomnessCollectiveFlip: pallet_insecure_randomness_collective_flip = 1, + Timestamp: pallet_timestamp, + Aura: pallet_aura, + Grandpa: pallet_grandpa, + Balances: pallet_balances, + TransactionPayment: pallet_transaction_payment } + } }; - lint_macro(tokens).unwrap(); - } - #[test] - fn test_with_pallet_alias() { - let tokens = quote! { - construct_runtime! 
{ - pub enum Test where - Block = Block, - NodeBlock = Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - MyAlias: pallet_balances = 1, - OtherAlias: pallet_timestamp = 2 - } - } - }; - lint_macro(tokens).unwrap(); + lint_macro(input).unwrap_err(); } } From edbaf387e457396c689359ccf6e07d58913f6e4f Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 17:22:27 -0400 Subject: [PATCH 076/213] fully handling all construct_runtime variants --- support/linting/src/pallet_index.rs | 68 ++++++++++++++++++++++++----- 1 file changed, 56 insertions(+), 12 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 9f9b49094..00f3c5ea3 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -33,17 +33,10 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { Ok(runtime_decl) => { match runtime_decl { RuntimeDeclaration::Explicit(runtime) => { - for pallet in runtime.pallets { - if pallet.index == 0 { - self.errors.push(syn::Error::new( - pallet.name.span(), - format!( - "Pallet `{}` does not have an explicit index in construct_runtime!", - pallet.name.to_token_stream() - ), - )); - } - } + self.check_pallets_for_index(&runtime.pallets); + } + RuntimeDeclaration::ExplicitExpanded(runtime) => { + self.check_pallets_for_index(&runtime.pallets); } RuntimeDeclaration::Implicit(runtime) => { for pallet in runtime.pallets { @@ -59,7 +52,6 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { } } } - _ => {} } } Err(e) => self.errors.push(e), @@ -70,6 +62,25 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { } } +impl ConstructRuntimeVisitor { + fn check_pallets_for_index( + &mut self, + pallets: &[procedural_fork::exports::construct_runtime::parse::Pallet], + ) { + for pallet in pallets { + if pallet.index == 0 { + self.errors.push(syn::Error::new( + pallet.name.span(), + format!( + "Pallet `{}` does not have an explicit index in 
construct_runtime!", + pallet.name.to_token_stream() + ), + )); + } + } + } +} + #[cfg(test)] mod tests { use super::*; @@ -196,4 +207,37 @@ mod tests { lint_macro(input).unwrap_err(); } + + // Test for explicit expanded case that should pass + #[test] + fn test_explicit_expanded_runtime_with_correct_index_should_pass() { + let input = quote! { + construct_runtime! { + pub struct Runtime { + System : frame_system = 0, + Balances : pallet_balances = 1, + ExpandedPallet: pallet_collective::{ Pallet, Call, Config, Storage, Event } = 2 + } + } + }; + + lint_macro(input).unwrap(); + } + + // Test for explicit expanded case that should fail + #[test] + fn test_explicit_expanded_runtime_with_missing_index_should_fail() { + let input = quote! { + construct_runtime! { + pub struct Runtime { + System : frame_system = 0, + Balances : pallet_balances = 1, + ExpandedPallet: pallet_collective::{ Pallet, Call, Config, Storage, Event }, + FaultyPallet: pallet_sudo + } + } + }; + + lint_macro(input).unwrap_err(); + } } From b19854e159795a4a4b0bba24bbc85fe3cbc8494a Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 17:29:16 -0400 Subject: [PATCH 077/213] fully working, need to fix existing construct_runtime!s now --- support/linting/src/pallet_index.rs | 60 ++++++++++++++++++----------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 00f3c5ea3..1513bc8bf 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -25,35 +25,38 @@ struct ConstructRuntimeVisitor { impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { fn visit_item_macro(&mut self, node: &'ast syn::ItemMacro) { - if node.mac.path.is_ident("construct_runtime") { + let is_construct_runtime = node + .mac + .path + .segments + .last() + .map_or(false, |segment| segment.ident == "construct_runtime"); + + if is_construct_runtime { let tokens = node.mac.tokens.clone(); - // 
Attempt to parse the construct_runtime invocation. match syn::parse2::(tokens) { - Ok(runtime_decl) => { - match runtime_decl { - RuntimeDeclaration::Explicit(runtime) => { - self.check_pallets_for_index(&runtime.pallets); - } - RuntimeDeclaration::ExplicitExpanded(runtime) => { - self.check_pallets_for_index(&runtime.pallets); - } - RuntimeDeclaration::Implicit(runtime) => { - for pallet in runtime.pallets { - // Check if the index is missing (implicit declaration) - if pallet.index.is_none() { - self.errors.push(syn::Error::new( + Ok(runtime_decl) => match runtime_decl { + RuntimeDeclaration::Explicit(runtime) => { + self.check_pallets_for_index(&runtime.pallets); + } + RuntimeDeclaration::ExplicitExpanded(runtime) => { + self.check_pallets_for_index(&runtime.pallets); + } + RuntimeDeclaration::Implicit(runtime) => { + for pallet in runtime.pallets { + if pallet.index.is_none() { + self.errors.push(syn::Error::new( pallet.name.span(), format!( "Pallet `{}` does not have an explicit index in the implicit construct_runtime!", pallet.name.to_token_stream() ), )); - } } } } - } + }, Err(e) => self.errors.push(e), } } @@ -96,11 +99,8 @@ mod tests { Ok(()) } - // Corrected test cases - #[test] fn test_no_pallet_index() { - // Updated with valid `construct_runtime!` syntax let input = quote! { construct_runtime! { pub enum Test where @@ -188,7 +188,6 @@ mod tests { lint_macro(input).unwrap_err(); } - // New test for implicit construct_runtime #[test] fn test_implicit_construct_runtime_should_fail() { let input = quote! { @@ -208,7 +207,6 @@ mod tests { lint_macro(input).unwrap_err(); } - // Test for explicit expanded case that should pass #[test] fn test_explicit_expanded_runtime_with_correct_index_should_pass() { let input = quote! { @@ -224,7 +222,6 @@ mod tests { lint_macro(input).unwrap(); } - // Test for explicit expanded case that should fail #[test] fn test_explicit_expanded_runtime_with_missing_index_should_fail() { let input = quote! 
{ @@ -240,4 +237,21 @@ mod tests { lint_macro(input).unwrap_err(); } + + #[test] + fn test_fully_qualified_construct_runtime() { + let input = quote! { + frame_support::construct_runtime! { + pub enum Test { + System: frame_system, + Balances: pallet_balances, + AdminUtils: pallet_admin_utils, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event, Error}, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, + } + } + }; + + lint_macro(input).unwrap(); + } } From 6dd376e54dc31c96e7ead89887da4e132778ed9d Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Wed, 18 Sep 2024 18:03:29 -0400 Subject: [PATCH 078/213] always use explicit pallet indexing :cool: --- pallets/admin-utils/tests/mock.rs | 10 +++++----- pallets/collective/src/tests.rs | 10 +++++----- pallets/commitments/src/mock.rs | 4 ++-- pallets/commitments/src/tests.rs | 6 +++--- pallets/registry/src/mock.rs | 4 ++-- pallets/subtensor/tests/mock.rs | 20 ++++++++++---------- 6 files changed, 27 insertions(+), 27 deletions(-) diff --git a/pallets/admin-utils/tests/mock.rs b/pallets/admin-utils/tests/mock.rs index 342ed01cd..dca08ab72 100644 --- a/pallets/admin-utils/tests/mock.rs +++ b/pallets/admin-utils/tests/mock.rs @@ -23,11 +23,11 @@ type Block = frame_system::mocking::MockBlock; frame_support::construct_runtime!( pub enum Test { - System: frame_system, - Balances: pallet_balances, - AdminUtils: pallet_admin_utils, - SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event, Error}, - Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, + System: frame_system = 1, + Balances: pallet_balances = 2, + AdminUtils: pallet_admin_utils = 3, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event, Error} = 4, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event} = 5, } ); diff --git a/pallets/collective/src/tests.rs b/pallets/collective/src/tests.rs index 91fca58d4..4cc5f1bad 100644 --- a/pallets/collective/src/tests.rs +++ b/pallets/collective/src/tests.rs @@ 
-36,11 +36,11 @@ pub type UncheckedExtrinsic = sp_runtime::generic::UncheckedExtrinsic}, - Collective: pallet_collective::::{Pallet, Call, Event, Origin, Config}, - CollectiveMajority: pallet_collective::::{Pallet, Call, Event, Origin, Config}, - DefaultCollective: pallet_collective::{Pallet, Call, Event, Origin, Config}, - Democracy: mock_democracy::{Pallet, Call, Event}, + System: frame_system::{Pallet, Call, Event} = 1, + Collective: pallet_collective::::{Pallet, Call, Event, Origin, Config} = 2, + CollectiveMajority: pallet_collective::::{Pallet, Call, Event, Origin, Config} = 3, + DefaultCollective: pallet_collective::{Pallet, Call, Event, Origin, Config} = 4, + Democracy: mock_democracy::{Pallet, Call, Event} = 5, } ); mod mock_democracy { diff --git a/pallets/commitments/src/mock.rs b/pallets/commitments/src/mock.rs index 47df72d5d..8866e1c0d 100644 --- a/pallets/commitments/src/mock.rs +++ b/pallets/commitments/src/mock.rs @@ -12,8 +12,8 @@ type Block = frame_system::mocking::MockBlock; frame_support::construct_runtime!( pub enum Test { - System: frame_system, - Commitments: pallet_commitments, + System: frame_system = 1, + Commitments: pallet_commitments = 2, } ); diff --git a/pallets/commitments/src/tests.rs b/pallets/commitments/src/tests.rs index 7449003f4..82f6c97a1 100644 --- a/pallets/commitments/src/tests.rs +++ b/pallets/commitments/src/tests.rs @@ -16,9 +16,9 @@ pub type UncheckedExtrinsic = sp_runtime::generic::UncheckedExtrinsic; frame_support::construct_runtime!( pub enum Test { - System: frame_system, - TemplateModule: pallet_template, + System: frame_system = 1, + TemplateModule: pallet_template = 2, } ); diff --git a/pallets/subtensor/tests/mock.rs b/pallets/subtensor/tests/mock.rs index aa93c3531..6f3b44383 100644 --- a/pallets/subtensor/tests/mock.rs +++ b/pallets/subtensor/tests/mock.rs @@ -24,16 +24,16 @@ type Block = frame_system::mocking::MockBlock; frame_support::construct_runtime!( pub enum Test { - System: frame_system::{Pallet, 
Call, Config, Storage, Event}, - Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, - Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, - TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, - Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config}, - SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config}, - SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event}, - Utility: pallet_utility::{Pallet, Call, Storage, Event}, - Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, - Preimage: pallet_preimage::{Pallet, Call, Storage, Event}, + System: frame_system::{Pallet, Call, Config, Storage, Event} = 1, + Balances: pallet_balances::{Pallet, Call, Config, Storage, Event} = 2, + Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config} = 3, + TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 4, + Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config} = 5, + SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 6, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event} = 7, + Utility: pallet_utility::{Pallet, Call, Storage, Event} = 8, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event} = 9, + Preimage: pallet_preimage::{Pallet, Call, Storage, Event} = 10, } ); From a1bd6eb8f1b5a88b16f413fc08eb6aca9bbd68a0 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 00:56:02 -0400 Subject: [PATCH 079/213] nearly --- support/linting/src/pallet_index.rs | 47 +++++++++++++++++++++-------- 1 file changed, 35 insertions(+), 12 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 1513bc8bf..06264af6f 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -47,12 +47,12 @@ impl<'ast> syn::visit::Visit<'ast> for 
ConstructRuntimeVisitor { for pallet in runtime.pallets { if pallet.index.is_none() { self.errors.push(syn::Error::new( - pallet.name.span(), - format!( - "Pallet `{}` does not have an explicit index in the implicit construct_runtime!", - pallet.name.to_token_stream() - ), - )); + pallet.name.span(), + format!( + "Pallet `{}` does not have an explicit index in the implicit construct_runtime!", + pallet.name.to_token_stream() + ), + )); } } } @@ -71,6 +71,7 @@ impl ConstructRuntimeVisitor { pallets: &[procedural_fork::exports::construct_runtime::parse::Pallet], ) { for pallet in pallets { + // For explicit and expanded, ensure index is explicitly provided (not zero) if pallet.index == 0 { self.errors.push(syn::Error::new( pallet.name.span(), @@ -239,19 +240,41 @@ mod tests { } #[test] - fn test_fully_qualified_construct_runtime() { + fn test_fully_qualified_construct_runtime_should_pass() { let input = quote! { frame_support::construct_runtime! { pub enum Test { - System: frame_system, - Balances: pallet_balances, - AdminUtils: pallet_admin_utils, - SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event, Error}, - Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event}, + System: frame_system = 1, + Balances: pallet_balances = 2, + AdminUtils: pallet_admin_utils = 3, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event, Error} = 4, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event} = 5, } } }; lint_macro(input).unwrap(); } + + #[test] + fn test_mixed_pallets_should_fail() { + let input = quote! { + frame_support::construct_runtime! 
{ + pub enum Test { + System: frame_system::{Pallet, Call, Config, Storage, Event} = 1, + Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, + Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config} = 3, + TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 4, + Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config} = 5, + SenateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 6, + SubtensorModule: pallet_subtensor::{Pallet, Call, Storage, Event} = 7, + Utility: pallet_utility::{Pallet, Call, Storage, Event} = 8, + Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event} = 9, + Preimage: pallet_preimage::{Pallet, Call, Storage, Event} = 10, + } + } + }; + + lint_macro(input).unwrap_err(); + } } From b39ef20286b64f0927264140fb31956143351690 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 01:01:43 -0400 Subject: [PATCH 080/213] track original source text --- support/linting/src/pallet_index.rs | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 06264af6f..bd6f829ac 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -7,7 +7,10 @@ pub struct RequireExplicitPalletIndex; impl Lint for RequireExplicitPalletIndex { fn lint(source: &File) -> Result { - let mut visitor = ConstructRuntimeVisitor::default(); + let mut visitor = ConstructRuntimeVisitor { + original_tokens: source.to_token_stream().to_string(), + errors: Vec::new(), + }; visitor.visit_file(source); if !visitor.errors.is_empty() { @@ -18,8 +21,8 @@ impl Lint for RequireExplicitPalletIndex { } } -#[derive(Default)] struct ConstructRuntimeVisitor { + original_tokens: String, errors: Vec, } @@ -44,6 +47,7 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { self.check_pallets_for_index(&runtime.pallets); } 
RuntimeDeclaration::Implicit(runtime) => { + // Only implicit runtime allows `None` for index for pallet in runtime.pallets { if pallet.index.is_none() { self.errors.push(syn::Error::new( @@ -71,7 +75,7 @@ impl ConstructRuntimeVisitor { pallets: &[procedural_fork::exports::construct_runtime::parse::Pallet], ) { for pallet in pallets { - // For explicit and expanded, ensure index is explicitly provided (not zero) + // Check for explicit index and detect missing indices if pallet.index == 0 { self.errors.push(syn::Error::new( pallet.name.span(), @@ -92,7 +96,10 @@ mod tests { fn lint_macro(input: proc_macro2::TokenStream) -> Result { let item_macro: syn::ItemMacro = syn::parse2(input).unwrap(); - let mut visitor = ConstructRuntimeVisitor::default(); + let mut visitor = ConstructRuntimeVisitor { + original_tokens: item_macro.to_token_stream().to_string(), + errors: Vec::new(), + }; visitor.visit_item_macro(&item_macro); if !visitor.errors.is_empty() { return Err(visitor.errors); From d78c2260ad75080229ceae9b464dccb5fe82ca56 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 01:06:10 -0400 Subject: [PATCH 081/213] 100% working --- support/linting/src/pallet_index.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index bd6f829ac..0d5c4ad8b 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -76,7 +76,14 @@ impl ConstructRuntimeVisitor { ) { for pallet in pallets { // Check for explicit index and detect missing indices - if pallet.index == 0 { + if !self + .original_tokens + .contains(format!(" = {}", pallet.index).as_str()) + { + // ^ HACK: FRAME's parsing code does not allow us to differentiate between an + // automatically generated index and an explicitly provided index so we fall + // back to the original source code here. e.g. 
if index is 1, we will search + // for " = 1" in the original source code to determine if it was explicitly provided. self.errors.push(syn::Error::new( pallet.name.span(), format!( From b98ef6e3ac4bbaa0869dce64ed614f2c9ff5ccc0 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 01:28:08 -0400 Subject: [PATCH 082/213] fixed --- support/linting/src/pallet_index.rs | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index 0d5c4ad8b..b74e5a62c 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -1,4 +1,5 @@ use super::*; +use proc_macro2::TokenStream as TokenStream2; use procedural_fork::exports::construct_runtime::parse::RuntimeDeclaration; use quote::ToTokens; use syn::{visit::Visit, File}; @@ -7,10 +8,7 @@ pub struct RequireExplicitPalletIndex; impl Lint for RequireExplicitPalletIndex { fn lint(source: &File) -> Result { - let mut visitor = ConstructRuntimeVisitor { - original_tokens: source.to_token_stream().to_string(), - errors: Vec::new(), - }; + let mut visitor = ConstructRuntimeVisitor::new(source.to_token_stream()); visitor.visit_file(source); if !visitor.errors.is_empty() { @@ -70,6 +68,17 @@ impl<'ast> syn::visit::Visit<'ast> for ConstructRuntimeVisitor { } impl ConstructRuntimeVisitor { + fn new(original_tokens: impl Into) -> Self { + ConstructRuntimeVisitor { + original_tokens: { + let mut st = original_tokens.into().to_string(); + st.retain(|c| !c.is_whitespace()); + st + }, + errors: Vec::new(), + } + } + fn check_pallets_for_index( &mut self, pallets: &[procedural_fork::exports::construct_runtime::parse::Pallet], @@ -78,7 +87,7 @@ impl ConstructRuntimeVisitor { // Check for explicit index and detect missing indices if !self .original_tokens - .contains(format!(" = {}", pallet.index).as_str()) + .contains(format!("={},", pallet.index).as_str()) { // ^ HACK: FRAME's parsing code does not 
allow us to differentiate between an // automatically generated index and an explicitly provided index so we fall @@ -103,10 +112,7 @@ mod tests { fn lint_macro(input: proc_macro2::TokenStream) -> Result { let item_macro: syn::ItemMacro = syn::parse2(input).unwrap(); - let mut visitor = ConstructRuntimeVisitor { - original_tokens: item_macro.to_token_stream().to_string(), - errors: Vec::new(), - }; + let mut visitor = ConstructRuntimeVisitor::new(item_macro.to_token_stream()); visitor.visit_item_macro(&item_macro); if !visitor.errors.is_empty() { return Err(visitor.errors); From 5f457d8c55419817c646b8d1c201da6c95f5a7e1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 01:28:35 -0400 Subject: [PATCH 083/213] intentionally fail CI to make sure this is caught automatically --- pallets/subtensor/tests/mock.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pallets/subtensor/tests/mock.rs b/pallets/subtensor/tests/mock.rs index 6f3b44383..9555833de 100644 --- a/pallets/subtensor/tests/mock.rs +++ b/pallets/subtensor/tests/mock.rs @@ -25,7 +25,7 @@ frame_support::construct_runtime!( pub enum Test { System: frame_system::{Pallet, Call, Config, Storage, Event} = 1, - Balances: pallet_balances::{Pallet, Call, Config, Storage, Event} = 2, + Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config} = 3, TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 4, Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config} = 5, From a05aa8185dd6cbc5a17e6d66a495366f07a742f1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 01:52:29 -0400 Subject: [PATCH 084/213] fix procedural-fork tests --- support/procedural-fork/src/derive_impl.rs | 386 +++-- .../procedural-fork/src/pallet/parse/tasks.rs | 1489 +++++++++-------- .../src/pallet/parse/tests/mod.rs | 146 +- .../src/pallet/parse/tests/tasks.rs | 372 
++-- 4 files changed, 1222 insertions(+), 1171 deletions(-) diff --git a/support/procedural-fork/src/derive_impl.rs b/support/procedural-fork/src/derive_impl.rs index 54755f116..f064c8521 100644 --- a/support/procedural-fork/src/derive_impl.rs +++ b/support/procedural-fork/src/derive_impl.rs @@ -23,65 +23,67 @@ use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens}; use std::collections::HashSet; use syn::{ - parse2, parse_quote, spanned::Spanned, token, Ident, ImplItem, ItemImpl, Path, Result, Token, + parse2, parse_quote, spanned::Spanned, token, Ident, ImplItem, ItemImpl, Path, Result, Token, }; mod keyword { - syn::custom_keyword!(inject_runtime_type); - syn::custom_keyword!(no_aggregated_types); + syn::custom_keyword!(inject_runtime_type); + syn::custom_keyword!(no_aggregated_types); } #[derive(derive_syn_parse::Parse, PartialEq, Eq)] pub enum PalletAttrType { - #[peek(keyword::inject_runtime_type, name = "inject_runtime_type")] - RuntimeType(keyword::inject_runtime_type), + #[peek(keyword::inject_runtime_type, name = "inject_runtime_type")] + RuntimeType(keyword::inject_runtime_type), } #[derive(derive_syn_parse::Parse)] pub struct PalletAttr { - _pound: Token![#], - #[bracket] - _bracket: token::Bracket, - #[inside(_bracket)] - typ: PalletAttrType, + _pound: Token![#], + #[bracket] + _bracket: token::Bracket, + #[inside(_bracket)] + typ: PalletAttrType, } fn is_runtime_type(item: &syn::ImplItemType) -> bool { - item.attrs.iter().any(|attr| { - if let Ok(PalletAttr { typ: PalletAttrType::RuntimeType(_), .. }) = - parse2::(attr.into_token_stream()) - { - return true - } - false - }) + item.attrs.iter().any(|attr| { + if let Ok(PalletAttr { + typ: PalletAttrType::RuntimeType(_), + .. 
+ }) = parse2::(attr.into_token_stream()) + { + return true; + } + false + }) } #[derive(Parse, Debug)] pub struct DeriveImplAttrArgs { - pub default_impl_path: Path, - _as: Option, - #[parse_if(_as.is_some())] - pub disambiguation_path: Option, - _comma: Option, - #[parse_if(_comma.is_some())] - pub no_aggregated_types: Option, + pub default_impl_path: Path, + _as: Option, + #[parse_if(_as.is_some())] + pub disambiguation_path: Option, + _comma: Option, + #[parse_if(_comma.is_some())] + pub no_aggregated_types: Option, } impl ForeignPath for DeriveImplAttrArgs { - fn foreign_path(&self) -> &Path { - &self.default_impl_path - } + fn foreign_path(&self) -> &Path { + &self.default_impl_path + } } impl ToTokens for DeriveImplAttrArgs { - fn to_tokens(&self, tokens: &mut TokenStream2) { - tokens.extend(self.default_impl_path.to_token_stream()); - tokens.extend(self._as.to_token_stream()); - tokens.extend(self.disambiguation_path.to_token_stream()); - tokens.extend(self._comma.to_token_stream()); - tokens.extend(self.no_aggregated_types.to_token_stream()); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + tokens.extend(self.default_impl_path.to_token_stream()); + tokens.extend(self._as.to_token_stream()); + tokens.extend(self.disambiguation_path.to_token_stream()); + tokens.extend(self._comma.to_token_stream()); + tokens.extend(self.no_aggregated_types.to_token_stream()); + } } /// Gets the [`Ident`] representation of the given [`ImplItem`], if one exists. Otherwise @@ -90,13 +92,13 @@ impl ToTokens for DeriveImplAttrArgs { /// Used by [`combine_impls`] to determine whether we can compare [`ImplItem`]s by [`Ident`] /// or not. 
fn impl_item_ident(impl_item: &ImplItem) -> Option<&Ident> { - match impl_item { - ImplItem::Const(item) => Some(&item.ident), - ImplItem::Fn(item) => Some(&item.sig.ident), - ImplItem::Type(item) => Some(&item.ident), - ImplItem::Macro(item) => item.mac.path.get_ident(), - _ => None, - } + match impl_item { + ImplItem::Const(item) => Some(&item.ident), + ImplItem::Fn(item) => Some(&item.sig.ident), + ImplItem::Type(item) => Some(&item.ident), + ImplItem::Macro(item) => item.mac.path.get_ident(), + _ => None, + } } /// The real meat behind `derive_impl`. Takes in a `local_impl`, which is the impl for which we @@ -112,64 +114,68 @@ fn impl_item_ident(impl_item: &ImplItem) -> Option<&Ident> { /// into `local_impl`. Items that lack an ident and also exist verbatim in `local_impl` are not /// copied over. fn combine_impls( - local_impl: ItemImpl, - foreign_impl: ItemImpl, - default_impl_path: Path, - disambiguation_path: Path, - inject_runtime_types: bool, + local_impl: ItemImpl, + foreign_impl: ItemImpl, + default_impl_path: Path, + disambiguation_path: Path, + inject_runtime_types: bool, ) -> ItemImpl { - let (existing_local_keys, existing_unsupported_items): (HashSet, HashSet) = - local_impl - .items - .iter() - .cloned() - .partition(|impl_item| impl_item_ident(impl_item).is_some()); - let existing_local_keys: HashSet = existing_local_keys - .into_iter() - .filter_map(|item| impl_item_ident(&item).cloned()) - .collect(); - let mut final_impl = local_impl; - let extended_items = foreign_impl.items.into_iter().filter_map(|item| { - if let Some(ident) = impl_item_ident(&item) { - if existing_local_keys.contains(&ident) { - // do not copy colliding items that have an ident - return None - } - if let ImplItem::Type(typ) = item.clone() { - let cfg_attrs = typ - .attrs - .iter() - .filter(|attr| attr.path().get_ident().map_or(false, |ident| ident == "cfg")) - .map(|attr| attr.to_token_stream()); - if is_runtime_type(&typ) { - let item: ImplItem = if inject_runtime_types { 
- parse_quote! { - #( #cfg_attrs )* - type #ident = #ident; - } - } else { - item - }; - return Some(item) - } - // modify and insert uncolliding type items - let modified_item: ImplItem = parse_quote! { - #( #cfg_attrs )* - type #ident = <#default_impl_path as #disambiguation_path>::#ident; - }; - return Some(modified_item) - } - // copy uncolliding non-type items that have an ident - Some(item) - } else { - // do not copy colliding items that lack an ident - (!existing_unsupported_items.contains(&item)) - // copy uncolliding items without an ident verbatim - .then_some(item) - } - }); - final_impl.items.extend(extended_items); - final_impl + let (existing_local_keys, existing_unsupported_items): (HashSet, HashSet) = + local_impl + .items + .iter() + .cloned() + .partition(|impl_item| impl_item_ident(impl_item).is_some()); + let existing_local_keys: HashSet = existing_local_keys + .into_iter() + .filter_map(|item| impl_item_ident(&item).cloned()) + .collect(); + let mut final_impl = local_impl; + let extended_items = foreign_impl.items.into_iter().filter_map(|item| { + if let Some(ident) = impl_item_ident(&item) { + if existing_local_keys.contains(&ident) { + // do not copy colliding items that have an ident + return None; + } + if let ImplItem::Type(typ) = item.clone() { + let cfg_attrs = typ + .attrs + .iter() + .filter(|attr| { + attr.path() + .get_ident() + .map_or(false, |ident| ident == "cfg") + }) + .map(|attr| attr.to_token_stream()); + if is_runtime_type(&typ) { + let item: ImplItem = if inject_runtime_types { + parse_quote! { + #( #cfg_attrs )* + type #ident = #ident; + } + } else { + item + }; + return Some(item); + } + // modify and insert uncolliding type items + let modified_item: ImplItem = parse_quote! 
{ + #( #cfg_attrs )* + type #ident = <#default_impl_path as #disambiguation_path>::#ident; + }; + return Some(modified_item); + } + // copy uncolliding non-type items that have an ident + Some(item) + } else { + // do not copy colliding items that lack an ident + (!existing_unsupported_items.contains(&item)) + // copy uncolliding items without an ident verbatim + .then_some(item) + } + }); + final_impl.items.extend(extended_items); + final_impl } /// Computes the disambiguation path for the `derive_impl` attribute macro. @@ -178,25 +184,26 @@ fn combine_impls( /// disambiguation is used as is. If not, we infer the disambiguation path from the /// `foreign_impl_path` and the computed scope. fn compute_disambiguation_path( - disambiguation_path: Option, - foreign_impl: ItemImpl, - default_impl_path: Path, + disambiguation_path: Option, + foreign_impl: ItemImpl, + default_impl_path: Path, ) -> Result { - match (disambiguation_path, foreign_impl.clone().trait_) { - (Some(disambiguation_path), _) => Ok(disambiguation_path), - (None, Some((_, foreign_impl_path, _))) => - if default_impl_path.segments.len() > 1 { - let scope = default_impl_path.segments.first(); - Ok(parse_quote!(#scope :: #foreign_impl_path)) - } else { - Ok(foreign_impl_path) - }, - _ => Err(syn::Error::new( - default_impl_path.span(), - "Impl statement must have a defined type being implemented \ + match (disambiguation_path, foreign_impl.clone().trait_) { + (Some(disambiguation_path), _) => Ok(disambiguation_path), + (None, Some((_, foreign_impl_path, _))) => { + if default_impl_path.segments.len() > 1 { + let scope = default_impl_path.segments.first(); + Ok(parse_quote!(#scope :: #foreign_impl_path)) + } else { + Ok(foreign_impl_path) + } + } + _ => Err(syn::Error::new( + default_impl_path.span(), + "Impl statement must have a defined type being implemented \ for a defined type such as `impl A for B`", - )), - } + )), + } } /// Internal implementation behind 
[`#[derive_impl(..)]`](`macro@crate::derive_impl`). @@ -211,93 +218,100 @@ fn compute_disambiguation_path( /// `disambiguation_path`: the module path of the external trait we will use to qualify /// defaults imported from the external `impl` statement pub fn derive_impl( - default_impl_path: TokenStream2, - foreign_tokens: TokenStream2, - local_tokens: TokenStream2, - disambiguation_path: Option, - no_aggregated_types: Option, + default_impl_path: TokenStream2, + foreign_tokens: TokenStream2, + local_tokens: TokenStream2, + disambiguation_path: Option, + no_aggregated_types: Option, ) -> Result { - let local_impl = parse2::(local_tokens)?; - let foreign_impl = parse2::(foreign_tokens)?; - let default_impl_path = parse2::(default_impl_path)?; + let local_impl = parse2::(local_tokens)?; + let foreign_impl = parse2::(foreign_tokens)?; + let default_impl_path = parse2::(default_impl_path)?; - let disambiguation_path = compute_disambiguation_path( - disambiguation_path, - foreign_impl.clone(), - default_impl_path.clone(), - )?; + let disambiguation_path = compute_disambiguation_path( + disambiguation_path, + foreign_impl.clone(), + default_impl_path.clone(), + )?; - // generate the combined impl - let combined_impl = combine_impls( - local_impl, - foreign_impl, - default_impl_path, - disambiguation_path, - no_aggregated_types.is_none(), - ); + // generate the combined impl + let combined_impl = combine_impls( + local_impl, + foreign_impl, + default_impl_path, + disambiguation_path, + no_aggregated_types.is_none(), + ); - Ok(quote!(#combined_impl)) + Ok(quote!(#combined_impl)) } #[test] fn test_derive_impl_attr_args_parsing() { - parse2::(quote!( - some::path::TestDefaultConfig as some::path::DefaultConfig - )) - .unwrap(); - parse2::(quote!( - frame_system::prelude::testing::TestDefaultConfig as DefaultConfig - )) - .unwrap(); - parse2::(quote!(Something as some::path::DefaultConfig)).unwrap(); - parse2::(quote!(Something as DefaultConfig)).unwrap(); - 
parse2::(quote!(DefaultConfig)).unwrap(); - assert!(parse2::(quote!()).is_err()); - assert!(parse2::(quote!(Config Config)).is_err()); + parse2::(quote!( + some::path::TestDefaultConfig as some::path::DefaultConfig + )) + .unwrap(); + parse2::(quote!( + frame_system::prelude::testing::TestDefaultConfig as DefaultConfig + )) + .unwrap(); + parse2::(quote!(Something as some::path::DefaultConfig)).unwrap(); + parse2::(quote!(Something as DefaultConfig)).unwrap(); + parse2::(quote!(DefaultConfig)).unwrap(); + assert!(parse2::(quote!()).is_err()); + assert!(parse2::(quote!(Config Config)).is_err()); } #[test] fn test_runtime_type_with_doc() { - trait TestTrait { - type Test; - } - #[allow(unused)] - struct TestStruct; - let p = parse2::(quote!( - impl TestTrait for TestStruct { - /// Some doc - #[inject_runtime_type] - type Test = u32; - } - )) - .unwrap(); - for item in p.items { - if let ImplItem::Type(typ) = item { - assert_eq!(is_runtime_type(&typ), true); - } - } + #[allow(unused)] + trait TestTrait { + type Test; + } + #[allow(unused)] + struct TestStruct; + let p = parse2::(quote!( + impl TestTrait for TestStruct { + /// Some doc + #[inject_runtime_type] + type Test = u32; + } + )) + .unwrap(); + for item in p.items { + if let ImplItem::Type(typ) = item { + assert_eq!(is_runtime_type(&typ), true); + } + } } #[test] fn test_disambiguation_path() { - let foreign_impl: ItemImpl = parse_quote!(impl SomeTrait for SomeType {}); - let default_impl_path: Path = parse_quote!(SomeScope::SomeType); + let foreign_impl: ItemImpl = parse_quote!(impl SomeTrait for SomeType {}); + let default_impl_path: Path = parse_quote!(SomeScope::SomeType); - // disambiguation path is specified - let disambiguation_path = compute_disambiguation_path( - Some(parse_quote!(SomeScope::SomePath)), - foreign_impl.clone(), - default_impl_path.clone(), - ); - assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeScope::SomePath)); + // disambiguation path is specified + let disambiguation_path = 
compute_disambiguation_path( + Some(parse_quote!(SomeScope::SomePath)), + foreign_impl.clone(), + default_impl_path.clone(), + ); + assert_eq!( + disambiguation_path.unwrap(), + parse_quote!(SomeScope::SomePath) + ); - // disambiguation path is not specified and the default_impl_path has more than one segment - let disambiguation_path = - compute_disambiguation_path(None, foreign_impl.clone(), default_impl_path.clone()); - assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeScope::SomeTrait)); + // disambiguation path is not specified and the default_impl_path has more than one segment + let disambiguation_path = + compute_disambiguation_path(None, foreign_impl.clone(), default_impl_path.clone()); + assert_eq!( + disambiguation_path.unwrap(), + parse_quote!(SomeScope::SomeTrait) + ); - // disambiguation path is not specified and the default_impl_path has only one segment - let disambiguation_path = - compute_disambiguation_path(None, foreign_impl.clone(), parse_quote!(SomeType)); - assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeTrait)); + // disambiguation path is not specified and the default_impl_path has only one segment + let disambiguation_path = + compute_disambiguation_path(None, foreign_impl.clone(), parse_quote!(SomeType)); + assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeTrait)); } diff --git a/support/procedural-fork/src/pallet/parse/tasks.rs b/support/procedural-fork/src/pallet/parse/tasks.rs index 6405bb415..f1728f824 100644 --- a/support/procedural-fork/src/pallet/parse/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tasks.rs @@ -30,96 +30,103 @@ use frame_support_procedural_tools::generate_access_from_frame_or_crate; use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens}; use syn::{ - parse::ParseStream, - parse2, - spanned::Spanned, - token::{Bracket, Paren, PathSep, Pound}, - Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, - PathArguments, Result, 
TypePath, + parse::ParseStream, + parse2, + spanned::Spanned, + token::{Bracket, Paren, PathSep, Pound}, + Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, + PathArguments, Result, TypePath, }; pub mod keywords { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(tasks_experimental); - custom_keyword!(task_enum); - custom_keyword!(task_list); - custom_keyword!(task_condition); - custom_keyword!(task_index); - custom_keyword!(task_weight); - custom_keyword!(pallet); + custom_keyword!(tasks_experimental); + custom_keyword!(task_enum); + custom_keyword!(task_list); + custom_keyword!(task_condition); + custom_keyword!(task_index); + custom_keyword!(task_weight); + custom_keyword!(pallet); } /// Represents the `#[pallet::tasks_experimental]` attribute and its attached item. Also includes /// metadata about the linked [`TaskEnumDef`] if applicable. #[derive(Clone, Debug)] pub struct TasksDef { - pub tasks_attr: Option, - pub tasks: Vec, - pub item_impl: ItemImpl, - /// Path to `frame_support` - pub scrate: Path, - pub enum_ident: Ident, - pub enum_arguments: PathArguments, + pub tasks_attr: Option, + pub tasks: Vec, + pub item_impl: ItemImpl, + /// Path to `frame_support` + pub scrate: Path, + pub enum_ident: Ident, + pub enum_arguments: PathArguments, } impl syn::parse::Parse for TasksDef { - fn parse(input: ParseStream) -> Result { - let item_impl: ItemImpl = input.parse()?; - let (tasks_attrs, normal_attrs) = partition_tasks_attrs(&item_impl); - let tasks_attr = match tasks_attrs.first() { - Some(attr) => Some(parse2::(attr.to_token_stream())?), - None => None, - }; - if let Some(extra_tasks_attr) = tasks_attrs.get(1) { - return Err(Error::new( - extra_tasks_attr.span(), - "unexpected extra `#[pallet::tasks_experimental]` attribute", - )) - } - let tasks: Vec = if tasks_attr.is_some() { - item_impl - .items - .clone() - .into_iter() - .filter(|impl_item| matches!(impl_item, ImplItem::Fn(_))) - .map(|item| 
parse2::(item.to_token_stream())) - .collect::>()? - } else { - Vec::new() - }; - let mut task_indices = HashSet::::new(); - for task in tasks.iter() { - let task_index = &task.index_attr.meta.index; - if !task_indices.insert(task_index.clone()) { - return Err(Error::new( - task_index.span(), - format!("duplicate task index `{}`", task_index), - )) - } - } - let mut item_impl = item_impl; - item_impl.attrs = normal_attrs; - - // we require the path on the impl to be a TypePath - let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; - let segments = enum_path.path.segments.iter().collect::>(); - let (Some(last_seg), None) = (segments.get(0), segments.get(1)) else { - return Err(Error::new( - enum_path.span(), - "if specified manually, the task enum must be defined locally in this \ + fn parse(input: ParseStream) -> Result { + let item_impl: ItemImpl = input.parse()?; + let (tasks_attrs, normal_attrs) = partition_tasks_attrs(&item_impl); + let tasks_attr = match tasks_attrs.first() { + Some(attr) => Some(parse2::(attr.to_token_stream())?), + None => None, + }; + if let Some(extra_tasks_attr) = tasks_attrs.get(1) { + return Err(Error::new( + extra_tasks_attr.span(), + "unexpected extra `#[pallet::tasks_experimental]` attribute", + )); + } + let tasks: Vec = if tasks_attr.is_some() { + item_impl + .items + .clone() + .into_iter() + .filter(|impl_item| matches!(impl_item, ImplItem::Fn(_))) + .map(|item| parse2::(item.to_token_stream())) + .collect::>()? 
+ } else { + Vec::new() + }; + let mut task_indices = HashSet::::new(); + for task in tasks.iter() { + let task_index = &task.index_attr.meta.index; + if !task_indices.insert(task_index.clone()) { + return Err(Error::new( + task_index.span(), + format!("duplicate task index `{}`", task_index), + )); + } + } + let mut item_impl = item_impl; + item_impl.attrs = normal_attrs; + + // we require the path on the impl to be a TypePath + let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; + let segments = enum_path.path.segments.iter().collect::>(); + let (Some(last_seg), None) = (segments.get(0), segments.get(1)) else { + return Err(Error::new( + enum_path.span(), + "if specified manually, the task enum must be defined locally in this \ pallet and cannot be a re-export", - )) - }; - let enum_ident = last_seg.ident.clone(); - let enum_arguments = last_seg.arguments.clone(); - - // We do this here because it would be improper to do something fallible like this at - // the expansion phase. Fallible stuff should happen during parsing. - let scrate = generate_access_from_frame_or_crate("frame-support")?; - - Ok(TasksDef { tasks_attr, item_impl, tasks, scrate, enum_ident, enum_arguments }) - } + )); + }; + let enum_ident = last_seg.ident.clone(); + let enum_arguments = last_seg.arguments.clone(); + + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; + + Ok(TasksDef { + tasks_attr, + item_impl, + tasks, + scrate, + enum_ident, + enum_arguments, + }) + } } /// Parsing for a `#[pallet::tasks_experimental]` attr. @@ -148,821 +155,851 @@ pub type PalletTaskEnumAttr = PalletTaskAttr; /// attached `#[pallet::task_enum]` attribute. 
#[derive(Clone, Debug)] pub struct TaskEnumDef { - pub attr: Option, - pub item_enum: ItemEnum, - pub scrate: Path, - pub type_use_generics: TokenStream2, + pub attr: Option, + pub item_enum: ItemEnum, + pub scrate: Path, + pub type_use_generics: TokenStream2, } impl syn::parse::Parse for TaskEnumDef { - fn parse(input: ParseStream) -> Result { - let mut item_enum = input.parse::()?; - let attr = extract_pallet_attr(&mut item_enum)?; - let attr = match attr { - Some(attr) => Some(parse2(attr)?), - None => None, - }; + fn parse(input: ParseStream) -> Result { + let mut item_enum = input.parse::()?; + let attr = extract_pallet_attr(&mut item_enum)?; + let attr = match attr { + Some(attr) => Some(parse2(attr)?), + None => None, + }; - // We do this here because it would be improper to do something fallible like this at - // the expansion phase. Fallible stuff should happen during parsing. - let scrate = generate_access_from_frame_or_crate("frame-support")?; + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; - let type_use_generics = quote!(T); + let type_use_generics = quote!(T); - Ok(TaskEnumDef { attr, item_enum, scrate, type_use_generics }) - } + Ok(TaskEnumDef { + attr, + item_enum, + scrate, + type_use_generics, + }) + } } /// Represents an individual tasks within a [`TasksDef`]. 
#[derive(Debug, Clone)] pub struct TaskDef { - pub index_attr: TaskIndexAttr, - pub condition_attr: TaskConditionAttr, - pub list_attr: TaskListAttr, - pub weight_attr: TaskWeightAttr, - pub normal_attrs: Vec, - pub item: ImplItemFn, - pub arg_names: Vec, + pub index_attr: TaskIndexAttr, + pub condition_attr: TaskConditionAttr, + pub list_attr: TaskListAttr, + pub weight_attr: TaskWeightAttr, + pub normal_attrs: Vec, + pub item: ImplItemFn, + pub arg_names: Vec, } impl syn::parse::Parse for TaskDef { - fn parse(input: ParseStream) -> Result { - let item = input.parse::()?; - // we only want to activate TaskAttrType parsing errors for tasks-related attributes, - // so we filter them here - let (task_attrs, normal_attrs) = partition_task_attrs(&item); - - let task_attrs: Vec = task_attrs - .into_iter() - .map(|attr| parse2(attr.to_token_stream())) - .collect::>()?; - - let Some(index_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_index(..)]` attribute", - )) - }; - - let Some(condition_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_condition(..)]` attribute", - )) - }; - - let Some(list_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_list(..)]` attribute", - )) - }; - - let Some(weight_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskWeight(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_weight(..)]` attribute", - )) - }; - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) - .collect::>() 
- .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_condition(..)]` attribute", - )) - } - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) - .collect::>() - .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_list(..)]` attribute", - )) - } - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) - .collect::>() - .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_index(..)]` attribute", - )) - } - - let mut arg_names = vec![]; - for input in item.sig.inputs.iter() { - match input { - syn::FnArg::Typed(pat_type) => match &*pat_type.pat { - syn::Pat::Ident(ident) => arg_names.push(ident.ident.clone()), - _ => return Err(Error::new(input.span(), "unexpected pattern type")), - }, - _ => return Err(Error::new(input.span(), "unexpected function argument type")), - } - } - - let index_attr = index_attr.try_into().expect("we check the type above; QED"); - let condition_attr = condition_attr.try_into().expect("we check the type above; QED"); - let list_attr = list_attr.try_into().expect("we check the type above; QED"); - let weight_attr = weight_attr.try_into().expect("we check the type above; QED"); - - Ok(TaskDef { - index_attr, - condition_attr, - list_attr, - weight_attr, - normal_attrs, - item, - arg_names, - }) - } + fn parse(input: ParseStream) -> Result { + let item = input.parse::()?; + // we only want to activate TaskAttrType parsing errors for tasks-related attributes, + // so we filter them here + let (task_attrs, normal_attrs) = partition_task_attrs(&item); + + let task_attrs: Vec = task_attrs + .into_iter() + .map(|attr| parse2(attr.to_token_stream())) + .collect::>()?; + + let Some(index_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .cloned() + else { + return 
Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_index(..)]` attribute", + )); + }; + + let Some(condition_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_condition(..)]` attribute", + )); + }; + + let Some(list_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_list(..)]` attribute", + )); + }; + + let Some(weight_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskWeight(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_weight(..)]` attribute", + )); + }; + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_condition(..)]` attribute", + )); + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_list(..)]` attribute", + )); + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_index(..)]` attribute", + )); + } + + let mut arg_names = vec![]; + for input in item.sig.inputs.iter() { + match input { + syn::FnArg::Typed(pat_type) => match &*pat_type.pat { + syn::Pat::Ident(ident) => arg_names.push(ident.ident.clone()), + _ => return Err(Error::new(input.span(), "unexpected pattern type")), + }, + _ => { + return Err(Error::new( + input.span(), + "unexpected function 
argument type", + )) + } + } + } + + let index_attr = index_attr.try_into().expect("we check the type above; QED"); + let condition_attr = condition_attr + .try_into() + .expect("we check the type above; QED"); + let list_attr = list_attr.try_into().expect("we check the type above; QED"); + let weight_attr = weight_attr + .try_into() + .expect("we check the type above; QED"); + + Ok(TaskDef { + index_attr, + condition_attr, + list_attr, + weight_attr, + normal_attrs, + item, + arg_names, + }) + } } /// The contents of a [`TasksDef`]-related attribute. #[derive(Parse, Debug, Clone)] pub enum TaskAttrMeta { - #[peek(keywords::task_list, name = "#[pallet::task_list(..)]")] - TaskList(TaskListAttrMeta), - #[peek(keywords::task_index, name = "#[pallet::task_index(..)")] - TaskIndex(TaskIndexAttrMeta), - #[peek(keywords::task_condition, name = "#[pallet::task_condition(..)")] - TaskCondition(TaskConditionAttrMeta), - #[peek(keywords::task_weight, name = "#[pallet::task_weight(..)")] - TaskWeight(TaskWeightAttrMeta), + #[peek(keywords::task_list, name = "#[pallet::task_list(..)]")] + TaskList(TaskListAttrMeta), + #[peek(keywords::task_index, name = "#[pallet::task_index(..)")] + TaskIndex(TaskIndexAttrMeta), + #[peek(keywords::task_condition, name = "#[pallet::task_condition(..)")] + TaskCondition(TaskConditionAttrMeta), + #[peek(keywords::task_weight, name = "#[pallet::task_weight(..)")] + TaskWeight(TaskWeightAttrMeta), } /// The contents of a `#[pallet::task_list]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskListAttrMeta { - pub task_list: keywords::task_list, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_list: keywords::task_list, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task_index]` attribute. 
#[derive(Parse, Debug, Clone)] pub struct TaskIndexAttrMeta { - pub task_index: keywords::task_index, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub index: LitInt, + pub task_index: keywords::task_index, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub index: LitInt, } /// The contents of a `#[pallet::task_condition]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskConditionAttrMeta { - pub task_condition: keywords::task_condition, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_condition: keywords::task_condition, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task_weight]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskWeightAttrMeta { - pub task_weight: keywords::task_weight, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_weight: keywords::task_weight, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task]` attribute. 
#[derive(Parse, Debug, Clone)] pub struct PalletTaskAttr { - pub pound: Pound, - #[bracket] - _bracket: Bracket, - #[inside(_bracket)] - pub pallet: keywords::pallet, - #[inside(_bracket)] - pub colons: PathSep, - #[inside(_bracket)] - pub meta: T, + pub pound: Pound, + #[bracket] + _bracket: Bracket, + #[inside(_bracket)] + pub pallet: keywords::pallet, + #[inside(_bracket)] + pub colons: PathSep, + #[inside(_bracket)] + pub meta: T, } impl ToTokens for TaskListAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_list = self.task_list; - let expr = &self.expr; - tokens.extend(quote!(#task_list(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_list = self.task_list; + let expr = &self.expr; + tokens.extend(quote!(#task_list(#expr))); + } } impl ToTokens for TaskConditionAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_condition = self.task_condition; - let expr = &self.expr; - tokens.extend(quote!(#task_condition(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_condition = self.task_condition; + let expr = &self.expr; + tokens.extend(quote!(#task_condition(#expr))); + } } impl ToTokens for TaskWeightAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_weight = self.task_weight; - let expr = &self.expr; - tokens.extend(quote!(#task_weight(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_weight = self.task_weight; + let expr = &self.expr; + tokens.extend(quote!(#task_weight(#expr))); + } } impl ToTokens for TaskIndexAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_index = self.task_index; - let index = &self.index; - tokens.extend(quote!(#task_index(#index))) - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_index = self.task_index; + let index = &self.index; + tokens.extend(quote!(#task_index(#index))) + } } impl ToTokens for TaskAttrMeta { - fn to_tokens(&self, tokens: &mut 
TokenStream2) { - match self { - TaskAttrMeta::TaskList(list) => tokens.extend(list.to_token_stream()), - TaskAttrMeta::TaskIndex(index) => tokens.extend(index.to_token_stream()), - TaskAttrMeta::TaskCondition(condition) => tokens.extend(condition.to_token_stream()), - TaskAttrMeta::TaskWeight(weight) => tokens.extend(weight.to_token_stream()), - } - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + TaskAttrMeta::TaskList(list) => tokens.extend(list.to_token_stream()), + TaskAttrMeta::TaskIndex(index) => tokens.extend(index.to_token_stream()), + TaskAttrMeta::TaskCondition(condition) => tokens.extend(condition.to_token_stream()), + TaskAttrMeta::TaskWeight(weight) => tokens.extend(weight.to_token_stream()), + } + } } impl ToTokens for PalletTaskAttr { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let pound = self.pound; - let pallet = self.pallet; - let colons = self.colons; - let meta = &self.meta; - tokens.extend(quote!(#pound[#pallet #colons #meta])); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let pound = self.pound; + let pallet = self.pallet; + let colons = self.colons; + let meta = &self.meta; + tokens.extend(quote!(#pound[#pallet #colons #meta])); + } } impl TryFrom> for TaskIndexAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => - return Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskIndexAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => { + return Err(Error::new( + value.span(), + format!( + "`{:?}` cannot 
be converted to a `TaskIndexAttr`", + value.meta + ), + )) + } + } + } } impl TryFrom> for TaskConditionAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => - return Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskConditionAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => { + return Err(Error::new( + value.span(), + format!( + "`{:?}` cannot be converted to a `TaskConditionAttr`", + value.meta + ), + )) + } + } + } } impl TryFrom> for TaskWeightAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => - return Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskWeightAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => { + return Err(Error::new( + value.span(), + format!( + "`{:?}` cannot be converted to a `TaskWeightAttr`", + value.meta + ), + )) + } + } + } } impl TryFrom> for TaskListAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; 
- match value.meta { - TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => - return Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => { + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), + )) + } + } + } } fn extract_pallet_attr(item_enum: &mut ItemEnum) -> Result> { - let mut duplicate = None; - let mut attr = None; - item_enum.attrs = item_enum - .attrs - .iter() - .filter(|found_attr| { - let segs = found_attr - .path() - .segments - .iter() - .map(|seg| seg.ident.clone()) - .collect::>(); - let (Some(seg1), Some(_), None) = (segs.get(0), segs.get(1), segs.get(2)) else { - return true - }; - if seg1 != "pallet" { - return true - } - if attr.is_some() { - duplicate = Some(found_attr.span()); - } - attr = Some(found_attr.to_token_stream()); - false - }) - .cloned() - .collect(); - if let Some(span) = duplicate { - return Err(Error::new(span, "only one `#[pallet::_]` attribute is supported on this item")) - } - Ok(attr) + let mut duplicate = None; + let mut attr = None; + item_enum.attrs = item_enum + .attrs + .iter() + .filter(|found_attr| { + let segs = found_attr + .path() + .segments + .iter() + .map(|seg| seg.ident.clone()) + .collect::>(); + let (Some(seg1), Some(_), None) = (segs.get(0), segs.get(1), segs.get(2)) else { + return true; + }; + if seg1 != "pallet" { + return true; + } + if attr.is_some() { + duplicate = Some(found_attr.span()); + } + attr = Some(found_attr.to_token_stream()); + false + }) + .cloned() + .collect(); + if let Some(span) = duplicate { + return Err(Error::new( + span, + "only one `#[pallet::_]` 
attribute is supported on this item", + )); + } + Ok(attr) } fn partition_tasks_attrs(item_impl: &ItemImpl) -> (Vec, Vec) { - item_impl.attrs.clone().into_iter().partition(|attr| { - let mut path_segs = attr.path().segments.iter(); - let (Some(prefix), Some(suffix), None) = - (path_segs.next(), path_segs.next(), path_segs.next()) - else { - return false - }; - prefix.ident == "pallet" && suffix.ident == "tasks_experimental" - }) + item_impl.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix), None) = + (path_segs.next(), path_segs.next(), path_segs.next()) + else { + return false; + }; + prefix.ident == "pallet" && suffix.ident == "tasks_experimental" + }) } fn partition_task_attrs(item: &ImplItemFn) -> (Vec, Vec) { - item.attrs.clone().into_iter().partition(|attr| { - let mut path_segs = attr.path().segments.iter(); - let (Some(prefix), Some(suffix)) = (path_segs.next(), path_segs.next()) else { - return false - }; - // N.B: the `PartialEq` impl between `Ident` and `&str` is more efficient than - // parsing and makes no stack or heap allocations - prefix.ident == "pallet" && - (suffix.ident == "tasks_experimental" || - suffix.ident == "task_list" || - suffix.ident == "task_condition" || - suffix.ident == "task_weight" || - suffix.ident == "task_index") - }) + item.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix)) = (path_segs.next(), path_segs.next()) else { + return false; + }; + // N.B: the `PartialEq` impl between `Ident` and `&str` is more efficient than + // parsing and makes no stack or heap allocations + prefix.ident == "pallet" + && (suffix.ident == "tasks_experimental" + || suffix.ident == "task_list" + || suffix.ident == "task_condition" + || suffix.ident == "task_weight" + || suffix.ident == "task_index") + }) } #[test] fn test_parse_task_list_() { - 
parse2::(quote!(#[pallet::task_list(Something::iter())])).unwrap(); - parse2::(quote!(#[pallet::task_list(Numbers::::iter_keys())])).unwrap(); - parse2::(quote!(#[pallet::task_list(iter())])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_list()])), - "expected an expression" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_list])), - "expected parentheses" - ); + parse2::(quote!(#[pallet::task_list(Something::iter())])).unwrap(); + parse2::(quote!(#[pallet::task_list(Numbers::::iter_keys())])).unwrap(); + parse2::(quote!(#[pallet::task_list(iter())])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list()])), + "expected an expression" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list])), + "expected parentheses" + ); } #[test] fn test_parse_task_index() { - parse2::(quote!(#[pallet::task_index(3)])).unwrap(); - parse2::(quote!(#[pallet::task_index(0)])).unwrap(); - parse2::(quote!(#[pallet::task_index(17)])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index])), - "expected parentheses" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index("hey")])), - "expected integer literal" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index(0.3)])), - "expected integer literal" - ); + parse2::(quote!(#[pallet::task_index(3)])).unwrap(); + parse2::(quote!(#[pallet::task_index(0)])).unwrap(); + parse2::(quote!(#[pallet::task_index(17)])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index])), + "expected parentheses" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index("hey")])), + "expected integer literal" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index(0.3)])), + "expected integer literal" + ); } #[test] fn test_parse_task_condition() { - parse2::(quote!(#[pallet::task_condition(|x| x.is_some())])).unwrap(); - 
parse2::(quote!(#[pallet::task_condition(|_x| some_expr())])).unwrap(); - parse2::(quote!(#[pallet::task_condition(|| some_expr())])).unwrap(); - parse2::(quote!(#[pallet::task_condition(some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|x| x.is_some())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|_x| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(some_expr())])).unwrap(); } #[test] fn test_parse_tasks_attr() { - parse2::(quote!(#[pallet::tasks_experimental])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::taskss])), - "expected `tasks_experimental`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::tasks_])), - "expected `tasks_experimental`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pal::tasks])), - "expected `pallet`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::tasks_experimental()])), - "unexpected token" - ); + parse2::(quote!(#[pallet::tasks_experimental])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::taskss])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pal::tasks])), + "expected `pallet`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_experimental()])), + "unexpected token" + ); } #[test] fn test_parse_tasks_def_basic() { - simulate_manifest_dir("../../examples/basic", || { - let parsed = parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - /// Add a pair of numbers into the totals and remove them. 
- #[pallet::task_list(Numbers::::iter_keys())] - #[pallet::task_condition(|i| Numbers::::contains_key(i))] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn add_number_into_total(i: u32) -> DispatchResult { - let v = Numbers::::take(i).ok_or(Error::::NotFound)?; - Total::::mutate(|(total_keys, total_values)| { - *total_keys += i; - *total_values += v; - }); - Ok(()) - } - } - }) - .unwrap(); - assert_eq!(parsed.tasks.len(), 1); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + let parsed = parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Add a pair of numbers into the totals and remove them. + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn add_number_into_total(i: u32) -> DispatchResult { + let v = Numbers::::take(i).ok_or(Error::::NotFound)?; + Total::::mutate(|(total_keys, total_values)| { + *total_keys += i; + *total_values += v; + }); + Ok(()) + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 1); + }); } #[test] fn test_parse_tasks_def_basic_increment_decrement() { - simulate_manifest_dir("../../examples/basic", || { - let parsed = parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - /// Get the value and check if it can be incremented - #[pallet::task_index(0)] - #[pallet::task_condition(|| { - let value = Value::::get().unwrap(); - value < 255 - })] - #[pallet::task_list(Vec::>::new())] - #[pallet::task_weight(0)] - fn increment() -> DispatchResult { - let value = Value::::get().unwrap_or_default(); - if value >= 255 { - Err(Error::::ValueOverflow.into()) - } else { - let new_val = value.checked_add(1).ok_or(Error::::ValueOverflow)?; - Value::::put(new_val); - Pallet::::deposit_event(Event::Incremented { new_val }); - Ok(()) - } - } - - // Get the value and check if it can be decremented - #[pallet::task_index(1)] - #[pallet::task_condition(|| { - let value = Value::::get().unwrap(); - value > 0 - })] - #[pallet::task_list(Vec::>::new())] - #[pallet::task_weight(0)] - fn decrement() -> DispatchResult { - let value = Value::::get().unwrap_or_default(); - if value == 0 { - Err(Error::::ValueUnderflow.into()) - } else { - let new_val = value.checked_sub(1).ok_or(Error::::ValueUnderflow)?; - Value::::put(new_val); - Pallet::::deposit_event(Event::Decremented { new_val }); - Ok(()) - } - } - } - }) - .unwrap(); - assert_eq!(parsed.tasks.len(), 2); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + let parsed = parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Get the value and check if it can be incremented + #[pallet::task_index(0)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value < 255 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn increment() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value >= 255 { + Err(Error::::ValueOverflow.into()) + } else { + let new_val = value.checked_add(1).ok_or(Error::::ValueOverflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Incremented { new_val }); + Ok(()) + } + } + + // Get the value and check if it can be decremented + #[pallet::task_index(1)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value > 0 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn decrement() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value == 0 { + Err(Error::::ValueUnderflow.into()) + } else { + let new_val = value.checked_sub(1).ok_or(Error::::ValueUnderflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Decremented { new_val }); + Ok(()) + } + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 2); + }); } #[test] fn test_parse_tasks_def_duplicate_index() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_list(Something::iter())] - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - - #[pallet::task_list(Numbers::::iter_keys())] - #[pallet::task_condition(|i| Numbers::::contains_key(i))] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn bar(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - "duplicate task index `0`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn bar(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + "duplicate task index `0`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_list() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_list\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_list\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_condition() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_list(Something::iter())] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_condition\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_index() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_list(Something::iter())] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_index\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_index\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_weight() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_weight\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_weight\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_list_attr() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_list(SomethingElse::iter())] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_list\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_list(SomethingElse::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_list\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_condition_attr() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_condition(|i| i % 4 == 0)] - #[pallet::task_index(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_condition\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_condition(|i| i % 4 == 0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_index_attr() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_index(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_index\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_index\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_extra_tasks_attribute() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - #[pallet::tasks_experimental] - impl, I: 'static> Pallet {} - }), - r"unexpected extra `#\[pallet::tasks_experimental\]` attribute" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + #[pallet::tasks_experimental] + impl, I: 'static> Pallet {} + }), + r"unexpected extra `#\[pallet::tasks_experimental\]` attribute" + ); + }); } #[test] fn test_parse_task_enum_def_basic() { - simulate_manifest_dir("../../examples/basic", || { - parse2::(quote! { - #[pallet::task_enum] - pub enum Task { - Increment, - Decrement, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_non_task_name() { - simulate_manifest_dir("../../examples/basic", || { - parse2::(quote! 
{ - #[pallet::task_enum] - pub enum Something { - Foo - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Something { + Foo + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_missing_attr_allowed() { - simulate_manifest_dir("../../examples/basic", || { - parse2::(quote! { - pub enum Task { - Increment, - Decrement, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + parse2::(quote! { + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_missing_attr_alternate_name_allowed() { - simulate_manifest_dir("../../examples/basic", || { - parse2::(quote! { - pub enum Foo { - Red, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + parse2::(quote! { + pub enum Foo { + Red, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_wrong_attr() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::something] - pub enum Task { - Increment, - Decrement, - } - }), - "expected `task_enum`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::something] + pub enum Task { + Increment, + Decrement, + } + }), + "expected `task_enum`" + ); + }); } #[test] fn test_parse_task_enum_def_wrong_item() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::task_enum] - pub struct Something; - }), - "expected `enum`" - ); - }); + simulate_manifest_dir("../../pallets/subtensor", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::task_enum] + pub struct Something; + }), + "expected `enum`" + ); + }); } diff --git a/support/procedural-fork/src/pallet/parse/tests/mod.rs b/support/procedural-fork/src/pallet/parse/tests/mod.rs index a3661f307..7206a20bf 100644 --- a/support/procedural-fork/src/pallet/parse/tests/mod.rs +++ b/support/procedural-fork/src/pallet/parse/tests/mod.rs @@ -20,7 +20,7 @@ use syn::parse_quote; #[doc(hidden)] pub mod __private { - pub use regex; + pub use regex; } /// Allows you to assert that the input expression resolves to an error whose string @@ -63,22 +63,22 @@ pub mod __private { /// enough that it will work with any error with a reasonable [`core::fmt::Display`] impl. #[macro_export] macro_rules! assert_parse_error_matches { - ($expr:expr, $reg:literal) => { - match $expr { - Ok(_) => panic!("Expected an `Error(..)`, but got Ok(..)"), - Err(e) => { - let error_message = e.to_string(); - let re = $crate::pallet::parse::tests::__private::regex::Regex::new($reg) - .expect("Invalid regex pattern"); - assert!( - re.is_match(&error_message), - "Error message \"{}\" does not match the pattern \"{}\"", - error_message, - $reg - ); - }, - } - }; + ($expr:expr, $reg:literal) => { + match $expr { + Ok(_) => panic!("Expected an `Error(..)`, but got Ok(..)"), + Err(e) => { + let error_message = e.to_string(); + let re = $crate::pallet::parse::tests::__private::regex::Regex::new($reg) + .expect("Invalid regex pattern"); + assert!( + re.is_match(&error_message), + "Error message \"{}\" does not match the pattern \"{}\"", + error_message, + $reg + ); + } + } + }; } /// Allows you to assert that an entire pallet parses successfully. A custom syntax is used for @@ -88,7 +88,7 @@ macro_rules! assert_parse_error_matches { /// /// ```ignore /// assert_pallet_parses! { -/// #[manifest_dir("../../examples/basic")] +/// #[manifest_dir("../../pallets/subtensor")] /// #[frame_support::pallet] /// pub mod pallet { /// #[pallet::config] @@ -142,7 +142,7 @@ macro_rules! 
assert_pallet_parses { /// /// ``` /// assert_pallet_parse_error! { -/// #[manifest_dir("../../examples/basic")] +/// #[manifest_dir("../../pallets/subtensor")] /// #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] /// #[frame_support::pallet] /// pub mod pallet { @@ -183,82 +183,82 @@ macro_rules! assert_pallet_parse_error { /// This function uses a [`Mutex`] to avoid a race condition created when multiple tests try to /// modify and then restore the `CARGO_MANIFEST_DIR` ENV var in an overlapping way. pub fn simulate_manifest_dir, F: FnOnce() + std::panic::UnwindSafe>( - path: P, - closure: F, + path: P, + closure: F, ) { - use std::{env::*, path::*}; + use std::{env::*, path::*}; - /// Ensures that only one thread can modify/restore the `CARGO_MANIFEST_DIR` ENV var at a time, - /// avoiding a race condition because `cargo test` runs tests in parallel. - /// - /// Although this forces all tests that use [`simulate_manifest_dir`] to run sequentially with - /// respect to each other, this is still several orders of magnitude faster than using UI - /// tests, even if they are run in parallel. - static MANIFEST_DIR_LOCK: Mutex<()> = Mutex::new(()); + /// Ensures that only one thread can modify/restore the `CARGO_MANIFEST_DIR` ENV var at a time, + /// avoiding a race condition because `cargo test` runs tests in parallel. + /// + /// Although this forces all tests that use [`simulate_manifest_dir`] to run sequentially with + /// respect to each other, this is still several orders of magnitude faster than using UI + /// tests, even if they are run in parallel. 
+ static MANIFEST_DIR_LOCK: Mutex<()> = Mutex::new(()); - // avoid race condition when swapping out `CARGO_MANIFEST_DIR` - let guard = MANIFEST_DIR_LOCK.lock().unwrap(); + // avoid race condition when swapping out `CARGO_MANIFEST_DIR` + let guard = MANIFEST_DIR_LOCK.lock().unwrap(); - // obtain the current/original `CARGO_MANIFEST_DIR` - let orig = PathBuf::from( - var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`"), - ); + // obtain the current/original `CARGO_MANIFEST_DIR` + let orig = PathBuf::from( + var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`"), + ); - // set `CARGO_MANIFEST_DIR` to the provided path, relative to current working dir - set_var("CARGO_MANIFEST_DIR", orig.join(path.as_ref())); + // set `CARGO_MANIFEST_DIR` to the provided path, relative to current working dir + set_var("CARGO_MANIFEST_DIR", orig.join(path.as_ref())); - // safely run closure catching any panics - let result = panic::catch_unwind(closure); + // safely run closure catching any panics + let result = panic::catch_unwind(closure); - // restore original `CARGO_MANIFEST_DIR` before unwinding - set_var("CARGO_MANIFEST_DIR", &orig); + // restore original `CARGO_MANIFEST_DIR` before unwinding + set_var("CARGO_MANIFEST_DIR", &orig); - // unlock the mutex so we don't poison it if there is a panic - drop(guard); + // unlock the mutex so we don't poison it if there is a panic + drop(guard); - // unwind any panics originally encountered when running closure - result.unwrap(); + // unwind any panics originally encountered when running closure + result.unwrap(); } mod tasks; #[test] fn test_parse_minimal_pallet() { - assert_pallet_parses! { - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::config] - pub trait Config: frame_system::Config {} + assert_pallet_parses! 
{ + #[manifest_dir("../../pallets/subtensor")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} - #[pallet::pallet] - pub struct Pallet(_); - } - }; + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_missing_pallet() { - assert_pallet_parse_error! { - #[manifest_dir("../../examples/basic")] - #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::config] - pub trait Config: frame_system::Config {} - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../pallets/subtensor")] + #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} + } + } } #[test] fn test_parse_pallet_missing_config() { - assert_pallet_parse_error! { - #[manifest_dir("../../examples/basic")] - #[error_regex("Missing `\\#\\[pallet::config\\]`")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../pallets/subtensor")] + #[error_regex("Missing `\\#\\[pallet::config\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::pallet] + pub struct Pallet(_); + } + } } diff --git a/support/procedural-fork/src/pallet/parse/tests/tasks.rs b/support/procedural-fork/src/pallet/parse/tests/tasks.rs index 9f1436284..22a757c8f 100644 --- a/support/procedural-fork/src/pallet/parse/tests/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tests/tasks.rs @@ -19,222 +19,222 @@ use syn::parse_quote; #[test] fn test_parse_pallet_with_task_enum_missing_impl() { - assert_pallet_parse_error! 
{ - #[manifest_dir("../../examples/basic")] - #[error_regex("Missing `\\#\\[pallet::tasks_experimental\\]` impl")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::task_enum] - pub enum Task { - Something, - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../pallets/subtensor")] + #[error_regex("Missing `\\#\\[pallet::tasks_experimental\\]` impl")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum Task { + Something, + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_with_task_enum_wrong_attribute() { - assert_pallet_parse_error! { - #[manifest_dir("../../examples/basic")] - #[error_regex("expected one of")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::wrong_attribute] - pub enum Task { - Something, - } - - #[pallet::task_list] - impl frame_support::traits::Task for Task - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../pallets/subtensor")] + #[error_regex("expected one of")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::wrong_attribute] + pub enum Task { + Something, + } + + #[pallet::task_list] + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_missing_task_enum() { - assert_pallet_parses! 
{ - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::tasks_experimental] - #[cfg(test)] // aha, this means it's being eaten - impl frame_support::traits::Task for Task - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../pallets/subtensor")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::tasks_experimental] + #[cfg(test)] // aha, this means it's being eaten + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_task_list_in_wrong_place() { - assert_pallet_parse_error! { - #[manifest_dir("../../examples/basic")] - #[error_regex("can only be used on items within an `impl` statement.")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - #[pallet::task_list] - pub fn something() { - println!("hey"); - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../pallets/subtensor")] + #[error_regex("can only be used on items within an `impl` statement.")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::task_list] + pub fn something() { + println!("hey"); + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_manual_tasks_impl_without_manual_tasks_enum() { - assert_pallet_parse_error! 
{ - #[manifest_dir("../../examples/basic")] - #[error_regex(".*attribute must be attached to your.*")] - #[frame_support::pallet] - pub mod pallet { - - impl frame_support::traits::Task for Task - where - T: TypeInfo, - { - type Enumeration = sp_std::vec::IntoIter>; - - fn iter() -> Self::Enumeration { - sp_std::vec![Task::increment, Task::decrement].into_iter() - } - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../pallets/subtensor")] + #[error_regex(".*attribute must be attached to your.*")] + #[frame_support::pallet] + pub mod pallet { + + impl frame_support::traits::Task for Task + where + T: TypeInfo, + { + type Enumeration = sp_std::vec::IntoIter>; + + fn iter() -> Self::Enumeration { + sp_std::vec![Task::increment, Task::decrement].into_iter() + } + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_manual_task_enum_non_manual_impl() { - assert_pallet_parses! { - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - #[pallet::tasks_experimental] - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../pallets/subtensor")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_non_manual_task_enum_manual_impl() { - assert_pallet_parses! 
{ - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::task_enum] - pub enum MyCustomTaskEnum { - Something, - } - - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../pallets/subtensor")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_manual_task_enum_manual_impl() { - assert_pallet_parses! { - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../pallets/subtensor")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_manual_task_enum_mismatch_ident() { - assert_pallet_parses! 
{ - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - pub enum WrongIdent { - Something, - } - - #[pallet::tasks_experimental] - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../pallets/subtensor")] + #[frame_support::pallet] + pub mod pallet { + pub enum WrongIdent { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } From 2c2247f752592bff53b5f8fa4f9ec2536b4d0f02 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 01:53:11 -0400 Subject: [PATCH 085/213] cargo fmt --- support/procedural-fork/src/benchmark.rs | 2117 +++++++++-------- .../src/construct_runtime/expand/call.rs | 393 +-- .../expand/composite_helper.rs | 132 +- .../src/construct_runtime/expand/config.rs | 208 +- .../construct_runtime/expand/freeze_reason.rs | 90 +- .../construct_runtime/expand/hold_reason.rs | 90 +- .../src/construct_runtime/expand/inherent.rs | 459 ++-- .../src/construct_runtime/expand/lock_id.rs | 72 +- .../src/construct_runtime/expand/metadata.rs | 399 ++-- .../src/construct_runtime/expand/origin.rs | 846 +++---- .../construct_runtime/expand/outer_enums.rs | 379 +-- .../construct_runtime/expand/slash_reason.rs | 72 +- .../src/construct_runtime/expand/task.rs | 212 +- .../src/construct_runtime/expand/unsigned.rs | 113 +- .../src/construct_runtime/mod.rs | 1010 ++++---- .../src/construct_runtime/parse.rs | 1257 +++++----- support/procedural-fork/src/crate_version.rs | 36 +- .../procedural-fork/src/dummy_part_checker.rs | 98 +- support/procedural-fork/src/dynamic_params.rs | 422 ++-- support/procedural-fork/src/key_prefix.rs | 142 +- 
.../procedural-fork/src/match_and_insert.rs | 244 +- support/procedural-fork/src/no_bound/clone.rs | 162 +- support/procedural-fork/src/no_bound/debug.rs | 186 +- .../procedural-fork/src/no_bound/default.rs | 139 +- support/procedural-fork/src/no_bound/ord.rs | 96 +- .../src/no_bound/partial_eq.rs | 214 +- .../src/no_bound/partial_ord.rs | 119 +- .../procedural-fork/src/pallet/expand/call.rs | 874 +++---- .../src/pallet/expand/composite.rs | 20 +- .../src/pallet/expand/config.rs | 120 +- .../src/pallet/expand/constants.rs | 172 +- .../src/pallet/expand/doc_only.rs | 152 +- .../src/pallet/expand/documentation.rs | 189 +- .../src/pallet/expand/error.rs | 298 +-- .../src/pallet/expand/event.rs | 303 +-- .../src/pallet/expand/genesis_build.rs | 50 +- .../src/pallet/expand/genesis_config.rs | 239 +- .../src/pallet/expand/hooks.rs | 588 ++--- .../src/pallet/expand/inherent.rs | 59 +- .../src/pallet/expand/instances.rs | 32 +- .../procedural-fork/src/pallet/expand/mod.rs | 124 +- .../src/pallet/expand/origin.rs | 59 +- .../src/pallet/expand/pallet_struct.rs | 524 ++-- .../src/pallet/expand/storage.rs | 1415 +++++------ .../src/pallet/expand/tasks.rs | 308 +-- .../src/pallet/expand/tt_default_parts.rs | 394 +-- .../src/pallet/expand/type_value.rs | 90 +- .../src/pallet/expand/validate_unsigned.rs | 60 +- .../src/pallet/expand/warnings.rs | 111 +- support/procedural-fork/src/pallet/mod.rs | 38 +- .../procedural-fork/src/pallet/parse/call.rs | 791 +++--- .../src/pallet/parse/composite.rs | 330 +-- .../src/pallet/parse/config.rs | 983 ++++---- .../procedural-fork/src/pallet/parse/error.rs | 148 +- .../procedural-fork/src/pallet/parse/event.rs | 206 +- .../src/pallet/parse/extra_constants.rs | 240 +- .../src/pallet/parse/genesis_build.rs | 69 +- .../src/pallet/parse/genesis_config.rs | 81 +- .../src/pallet/parse/helper.rs | 933 ++++---- .../procedural-fork/src/pallet/parse/hooks.rs | 104 +- .../src/pallet/parse/inherent.rs | 68 +- .../procedural-fork/src/pallet/parse/mod.rs | 
1160 ++++----- .../src/pallet/parse/origin.rs | 80 +- .../src/pallet/parse/pallet_struct.rs | 220 +- .../src/pallet/parse/storage.rs | 1739 +++++++------- .../src/pallet/parse/type_value.rs | 176 +- .../src/pallet/parse/validate_unsigned.rs | 64 +- support/procedural-fork/src/pallet_error.rs | 307 +-- .../procedural-fork/src/runtime/expand/mod.rs | 574 ++--- support/procedural-fork/src/runtime/mod.rs | 32 +- .../src/runtime/parse/helper.rs | 29 +- .../procedural-fork/src/runtime/parse/mod.rs | 345 +-- .../src/runtime/parse/pallet.rs | 146 +- .../src/runtime/parse/pallet_decl.rs | 77 +- .../src/runtime/parse/runtime_struct.rs | 25 +- .../src/runtime/parse/runtime_types.rs | 90 +- support/procedural-fork/src/storage_alias.rs | 1211 +++++----- support/procedural-fork/src/transactional.rs | 76 +- support/procedural-fork/src/tt_macro.rs | 82 +- 79 files changed, 13576 insertions(+), 12736 deletions(-) diff --git a/support/procedural-fork/src/benchmark.rs b/support/procedural-fork/src/benchmark.rs index 0a62c3f92..376200d6e 100644 --- a/support/procedural-fork/src/benchmark.rs +++ b/support/procedural-fork/src/benchmark.rs @@ -23,332 +23,369 @@ use proc_macro::TokenStream; use proc_macro2::{Ident, Span, TokenStream as TokenStream2}; use quote::{quote, ToTokens}; use syn::{ - parse::{Nothing, ParseStream}, - parse_quote, - punctuated::Punctuated, - spanned::Spanned, - token::{Comma, Gt, Lt, PathSep}, - Attribute, Error, Expr, ExprBlock, ExprCall, ExprPath, FnArg, Item, ItemFn, ItemMod, Pat, Path, - PathArguments, PathSegment, Result, ReturnType, Signature, Stmt, Token, Type, TypePath, - Visibility, WhereClause, + parse::{Nothing, ParseStream}, + parse_quote, + punctuated::Punctuated, + spanned::Spanned, + token::{Comma, Gt, Lt, PathSep}, + Attribute, Error, Expr, ExprBlock, ExprCall, ExprPath, FnArg, Item, ItemFn, ItemMod, Pat, Path, + PathArguments, PathSegment, Result, ReturnType, Signature, Stmt, Token, Type, TypePath, + Visibility, WhereClause, }; mod keywords { - use 
syn::custom_keyword; - - custom_keyword!(benchmark); - custom_keyword!(benchmarks); - custom_keyword!(block); - custom_keyword!(extra); - custom_keyword!(pov_mode); - custom_keyword!(extrinsic_call); - custom_keyword!(skip_meta); - custom_keyword!(BenchmarkError); - custom_keyword!(Result); - custom_keyword!(MaxEncodedLen); - custom_keyword!(Measured); - custom_keyword!(Ignored); - - pub const BENCHMARK_TOKEN: &str = stringify!(benchmark); - pub const BENCHMARKS_TOKEN: &str = stringify!(benchmarks); + use syn::custom_keyword; + + custom_keyword!(benchmark); + custom_keyword!(benchmarks); + custom_keyword!(block); + custom_keyword!(extra); + custom_keyword!(pov_mode); + custom_keyword!(extrinsic_call); + custom_keyword!(skip_meta); + custom_keyword!(BenchmarkError); + custom_keyword!(Result); + custom_keyword!(MaxEncodedLen); + custom_keyword!(Measured); + custom_keyword!(Ignored); + + pub const BENCHMARK_TOKEN: &str = stringify!(benchmark); + pub const BENCHMARKS_TOKEN: &str = stringify!(benchmarks); } /// This represents the raw parsed data for a param definition such as `x: Linear<10, 20>`. #[derive(Clone)] struct ParamDef { - name: String, - _typ: Type, - start: syn::GenericArgument, - end: syn::GenericArgument, + name: String, + _typ: Type, + start: syn::GenericArgument, + end: syn::GenericArgument, } /// Allows easy parsing of the `<10, 20>` component of `x: Linear<10, 20>`. #[derive(Parse)] struct RangeArgs { - _lt_token: Lt, - start: syn::GenericArgument, - _comma: Comma, - end: syn::GenericArgument, - _trailing_comma: Option, - _gt_token: Gt, + _lt_token: Lt, + start: syn::GenericArgument, + _comma: Comma, + end: syn::GenericArgument, + _trailing_comma: Option, + _gt_token: Gt, } #[derive(Clone, Debug)] struct BenchmarkAttrs { - skip_meta: bool, - extra: bool, - pov_mode: Option, + skip_meta: bool, + extra: bool, + pov_mode: Option, } /// Represents a single benchmark option enum BenchmarkAttr { - Extra, - SkipMeta, - /// How the PoV should be measured. 
- PoV(PovModeAttr), + Extra, + SkipMeta, + /// How the PoV should be measured. + PoV(PovModeAttr), } impl syn::parse::Parse for PovModeAttr { - fn parse(input: ParseStream) -> Result { - let _pov: keywords::pov_mode = input.parse()?; - let _eq: Token![=] = input.parse()?; - let root = PovEstimationMode::parse(input)?; - - let mut maybe_content = None; - let _ = || -> Result<()> { - let content; - syn::braced!(content in input); - maybe_content = Some(content); - Ok(()) - }(); - - let per_key = match maybe_content { - Some(content) => { - let per_key = Punctuated::::parse_terminated(&content)?; - per_key.into_iter().collect() - }, - None => Vec::new(), - }; - - Ok(Self { root, per_key }) - } + fn parse(input: ParseStream) -> Result { + let _pov: keywords::pov_mode = input.parse()?; + let _eq: Token![=] = input.parse()?; + let root = PovEstimationMode::parse(input)?; + + let mut maybe_content = None; + let _ = || -> Result<()> { + let content; + syn::braced!(content in input); + maybe_content = Some(content); + Ok(()) + }(); + + let per_key = match maybe_content { + Some(content) => { + let per_key = Punctuated::::parse_terminated(&content)?; + per_key.into_iter().collect() + } + None => Vec::new(), + }; + + Ok(Self { root, per_key }) + } } impl syn::parse::Parse for BenchmarkAttr { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - if lookahead.peek(keywords::extra) { - let _extra: keywords::extra = input.parse()?; - Ok(BenchmarkAttr::Extra) - } else if lookahead.peek(keywords::skip_meta) { - let _skip_meta: keywords::skip_meta = input.parse()?; - Ok(BenchmarkAttr::SkipMeta) - } else if lookahead.peek(keywords::pov_mode) { - PovModeAttr::parse(input).map(BenchmarkAttr::PoV) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::extra) { + let _extra: keywords::extra = input.parse()?; + Ok(BenchmarkAttr::Extra) + } else if 
lookahead.peek(keywords::skip_meta) { + let _skip_meta: keywords::skip_meta = input.parse()?; + Ok(BenchmarkAttr::SkipMeta) + } else if lookahead.peek(keywords::pov_mode) { + PovModeAttr::parse(input).map(BenchmarkAttr::PoV) + } else { + Err(lookahead.error()) + } + } } /// A `#[pov_mode = .. { .. }]` attribute. #[derive(Debug, Clone)] struct PovModeAttr { - /// The root mode for this benchmarks. - root: PovEstimationMode, - /// The pov-mode for a specific key. This overwrites `root` for this key. - per_key: Vec, + /// The root mode for this benchmarks. + root: PovEstimationMode, + /// The pov-mode for a specific key. This overwrites `root` for this key. + per_key: Vec, } /// A single key-value pair inside the `{}` of a `#[pov_mode = .. { .. }]` attribute. #[derive(Debug, Clone, derive_syn_parse::Parse)] struct PovModeKeyAttr { - /// A specific storage key for which to set the PoV mode. - key: Path, - _underscore: Token![:], - /// The PoV mode for this key. - mode: PovEstimationMode, + /// A specific storage key for which to set the PoV mode. + key: Path, + _underscore: Token![:], + /// The PoV mode for this key. + mode: PovEstimationMode, } /// How the PoV should be estimated. #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum PovEstimationMode { - /// Use the maximal encoded length as provided by [`codec::MaxEncodedLen`]. - MaxEncodedLen, - /// Measure the accessed value size in the pallet benchmarking and add some trie overhead. - Measured, - /// Do not estimate the PoV size for this storage item or benchmark. - Ignored, + /// Use the maximal encoded length as provided by [`codec::MaxEncodedLen`]. + MaxEncodedLen, + /// Measure the accessed value size in the pallet benchmarking and add some trie overhead. + Measured, + /// Do not estimate the PoV size for this storage item or benchmark. 
+ Ignored, } impl syn::parse::Parse for PovEstimationMode { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - if lookahead.peek(keywords::MaxEncodedLen) { - let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; - return Ok(PovEstimationMode::MaxEncodedLen) - } else if lookahead.peek(keywords::Measured) { - let _measured: keywords::Measured = input.parse()?; - return Ok(PovEstimationMode::Measured) - } else if lookahead.peek(keywords::Ignored) { - let _ignored: keywords::Ignored = input.parse()?; - return Ok(PovEstimationMode::Ignored) - } else { - return Err(lookahead.error()) - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::MaxEncodedLen) { + let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; + return Ok(PovEstimationMode::MaxEncodedLen); + } else if lookahead.peek(keywords::Measured) { + let _measured: keywords::Measured = input.parse()?; + return Ok(PovEstimationMode::Measured); + } else if lookahead.peek(keywords::Ignored) { + let _ignored: keywords::Ignored = input.parse()?; + return Ok(PovEstimationMode::Ignored); + } else { + return Err(lookahead.error()); + } + } } impl ToString for PovEstimationMode { - fn to_string(&self) -> String { - match self { - PovEstimationMode::MaxEncodedLen => "MaxEncodedLen".into(), - PovEstimationMode::Measured => "Measured".into(), - PovEstimationMode::Ignored => "Ignored".into(), - } - } + fn to_string(&self) -> String { + match self { + PovEstimationMode::MaxEncodedLen => "MaxEncodedLen".into(), + PovEstimationMode::Measured => "Measured".into(), + PovEstimationMode::Ignored => "Ignored".into(), + } + } } impl quote::ToTokens for PovEstimationMode { - fn to_tokens(&self, tokens: &mut TokenStream2) { - match self { - PovEstimationMode::MaxEncodedLen => tokens.extend(quote!(MaxEncodedLen)), - PovEstimationMode::Measured => tokens.extend(quote!(Measured)), - PovEstimationMode::Ignored => 
tokens.extend(quote!(Ignored)), - } - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + PovEstimationMode::MaxEncodedLen => tokens.extend(quote!(MaxEncodedLen)), + PovEstimationMode::Measured => tokens.extend(quote!(Measured)), + PovEstimationMode::Ignored => tokens.extend(quote!(Ignored)), + } + } } impl syn::parse::Parse for BenchmarkAttrs { - fn parse(input: ParseStream) -> syn::Result { - let mut extra = false; - let mut skip_meta = false; - let mut pov_mode = None; - let args = Punctuated::::parse_terminated(&input)?; - - for arg in args.into_iter() { - match arg { - BenchmarkAttr::Extra => { - if extra { - return Err(input.error("`extra` can only be specified once")) - } - extra = true; - }, - BenchmarkAttr::SkipMeta => { - if skip_meta { - return Err(input.error("`skip_meta` can only be specified once")) - } - skip_meta = true; - }, - BenchmarkAttr::PoV(mode) => { - if pov_mode.is_some() { - return Err(input.error("`pov_mode` can only be specified once")) - } - pov_mode = Some(mode); - }, - } - } - Ok(BenchmarkAttrs { extra, skip_meta, pov_mode }) - } + fn parse(input: ParseStream) -> syn::Result { + let mut extra = false; + let mut skip_meta = false; + let mut pov_mode = None; + let args = Punctuated::::parse_terminated(&input)?; + + for arg in args.into_iter() { + match arg { + BenchmarkAttr::Extra => { + if extra { + return Err(input.error("`extra` can only be specified once")); + } + extra = true; + } + BenchmarkAttr::SkipMeta => { + if skip_meta { + return Err(input.error("`skip_meta` can only be specified once")); + } + skip_meta = true; + } + BenchmarkAttr::PoV(mode) => { + if pov_mode.is_some() { + return Err(input.error("`pov_mode` can only be specified once")); + } + pov_mode = Some(mode); + } + } + } + Ok(BenchmarkAttrs { + extra, + skip_meta, + pov_mode, + }) + } } /// Represents the parsed extrinsic call for a benchmark #[derive(Clone)] enum BenchmarkCallDef { - ExtrinsicCall { origin: Expr, expr_call: ExprCall, attr_span: 
Span }, // #[extrinsic_call] - Block { block: ExprBlock, attr_span: Span }, // #[block] + ExtrinsicCall { + origin: Expr, + expr_call: ExprCall, + attr_span: Span, + }, // #[extrinsic_call] + Block { + block: ExprBlock, + attr_span: Span, + }, // #[block] } impl BenchmarkCallDef { - /// Returns the `span()` for attribute - fn attr_span(&self) -> Span { - match self { - BenchmarkCallDef::ExtrinsicCall { origin: _, expr_call: _, attr_span } => *attr_span, - BenchmarkCallDef::Block { block: _, attr_span } => *attr_span, - } - } + /// Returns the `span()` for attribute + fn attr_span(&self) -> Span { + match self { + BenchmarkCallDef::ExtrinsicCall { + origin: _, + expr_call: _, + attr_span, + } => *attr_span, + BenchmarkCallDef::Block { + block: _, + attr_span, + } => *attr_span, + } + } } /// Represents a parsed `#[benchmark]` or `#[instance_benchmark]` item. #[derive(Clone)] struct BenchmarkDef { - params: Vec, - setup_stmts: Vec, - call_def: BenchmarkCallDef, - verify_stmts: Vec, - last_stmt: Option, - fn_sig: Signature, - fn_vis: Visibility, - fn_attrs: Vec, + params: Vec, + setup_stmts: Vec, + call_def: BenchmarkCallDef, + verify_stmts: Vec, + last_stmt: Option, + fn_sig: Signature, + fn_vis: Visibility, + fn_attrs: Vec, } /// used to parse something compatible with `Result` #[derive(Parse)] struct ResultDef { - _result_kw: keywords::Result, - _lt: Token![<], - unit: Type, - _comma: Comma, - e_type: TypePath, - _gt: Token![>], + _result_kw: keywords::Result, + _lt: Token![<], + unit: Type, + _comma: Comma, + e_type: TypePath, + _gt: Token![>], } /// Ensures that `ReturnType` is a `Result<(), BenchmarkError>`, if specified fn ensure_valid_return_type(item_fn: &ItemFn) -> Result<()> { - if let ReturnType::Type(_, typ) = &item_fn.sig.output { - let non_unit = |span| return Err(Error::new(span, "expected `()`")); - let Type::Path(TypePath { path, qself: _ }) = &**typ else { - return Err(Error::new( + if let ReturnType::Type(_, typ) = &item_fn.sig.output { + let 
non_unit = |span| return Err(Error::new(span, "expected `()`")); + let Type::Path(TypePath { path, qself: _ }) = &**typ else { + return Err(Error::new( typ.span(), "Only `Result<(), BenchmarkError>` or a blank return type is allowed on benchmark function definitions", - )) - }; - let seg = path - .segments - .last() - .expect("to be parsed as a TypePath, it must have at least one segment; qed"); - let res: ResultDef = syn::parse2(seg.to_token_stream())?; - // ensure T in Result is () - let Type::Tuple(tup) = res.unit else { return non_unit(res.unit.span()) }; - if !tup.elems.is_empty() { - return non_unit(tup.span()) - } - let TypePath { path, qself: _ } = res.e_type; - let seg = path - .segments - .last() - .expect("to be parsed as a TypePath, it must have at least one segment; qed"); - syn::parse2::(seg.to_token_stream())?; - } - Ok(()) + )); + }; + let seg = path + .segments + .last() + .expect("to be parsed as a TypePath, it must have at least one segment; qed"); + let res: ResultDef = syn::parse2(seg.to_token_stream())?; + // ensure T in Result is () + let Type::Tuple(tup) = res.unit else { + return non_unit(res.unit.span()); + }; + if !tup.elems.is_empty() { + return non_unit(tup.span()); + } + let TypePath { path, qself: _ } = res.e_type; + let seg = path + .segments + .last() + .expect("to be parsed as a TypePath, it must have at least one segment; qed"); + syn::parse2::(seg.to_token_stream())?; + } + Ok(()) } /// Parses params such as `x: Linear<0, 1>` fn parse_params(item_fn: &ItemFn) -> Result> { - let mut params: Vec = Vec::new(); - for arg in &item_fn.sig.inputs { - let invalid_param = |span| { - return Err(Error::new( - span, - "Invalid benchmark function param. 
A valid example would be `x: Linear<5, 10>`.", - )) - }; - - let FnArg::Typed(arg) = arg else { return invalid_param(arg.span()) }; - let Pat::Ident(ident) = &*arg.pat else { return invalid_param(arg.span()) }; - - // check param name - let var_span = ident.span(); - let invalid_param_name = || { - return Err(Error::new( + let mut params: Vec = Vec::new(); + for arg in &item_fn.sig.inputs { + let invalid_param = |span| { + return Err(Error::new( + span, + "Invalid benchmark function param. A valid example would be `x: Linear<5, 10>`.", + )); + }; + + let FnArg::Typed(arg) = arg else { + return invalid_param(arg.span()); + }; + let Pat::Ident(ident) = &*arg.pat else { + return invalid_param(arg.span()); + }; + + // check param name + let var_span = ident.span(); + let invalid_param_name = || { + return Err(Error::new( var_span, "Benchmark parameter names must consist of a single lowercase letter (a-z) and no other characters.", )); - }; - let name = ident.ident.to_token_stream().to_string(); - if name.len() > 1 { - return invalid_param_name() - }; - let Some(name_char) = name.chars().next() else { return invalid_param_name() }; - if !name_char.is_alphabetic() || !name_char.is_lowercase() { - return invalid_param_name() - } - - // parse type - let typ = &*arg.ty; - let Type::Path(tpath) = typ else { return invalid_param(typ.span()) }; - let Some(segment) = tpath.path.segments.last() else { return invalid_param(typ.span()) }; - let args = segment.arguments.to_token_stream().into(); - let Ok(args) = syn::parse::(args) else { return invalid_param(typ.span()) }; - - params.push(ParamDef { name, _typ: typ.clone(), start: args.start, end: args.end }); - } - Ok(params) + }; + let name = ident.ident.to_token_stream().to_string(); + if name.len() > 1 { + return invalid_param_name(); + }; + let Some(name_char) = name.chars().next() else { + return invalid_param_name(); + }; + if !name_char.is_alphabetic() || !name_char.is_lowercase() { + return invalid_param_name(); + } + + // 
parse type + let typ = &*arg.ty; + let Type::Path(tpath) = typ else { + return invalid_param(typ.span()); + }; + let Some(segment) = tpath.path.segments.last() else { + return invalid_param(typ.span()); + }; + let args = segment.arguments.to_token_stream().into(); + let Ok(args) = syn::parse::(args) else { + return invalid_param(typ.span()); + }; + + params.push(ParamDef { + name, + _typ: typ.clone(), + start: args.start, + end: args.end, + }); + } + Ok(params) } /// Used in several places where the `#[extrinsic_call]` or `#[body]` annotation is missing fn missing_call(item_fn: &ItemFn) -> Result { - return Err(Error::new( + return Err(Error::new( item_fn.block.brace_token.span.join(), "No valid #[extrinsic_call] or #[block] annotation could be found in benchmark function body." )); @@ -358,8 +395,8 @@ fn missing_call(item_fn: &ItemFn) -> Result { /// returns them. Also handles parsing errors for invalid / extra call defs. AKA this is /// general handling for `#[extrinsic_call]` and `#[block]` fn parse_call_def(item_fn: &ItemFn) -> Result<(usize, BenchmarkCallDef)> { - // #[extrinsic_call] / #[block] handling - let call_defs = item_fn.block.stmts.iter().enumerate().filter_map(|(i, child)| { + // #[extrinsic_call] / #[block] handling + let call_defs = item_fn.block.stmts.iter().enumerate().filter_map(|(i, child)| { if let Stmt::Expr(Expr::Call(expr_call), _semi) = child { // #[extrinsic_call] case expr_call.attrs.iter().enumerate().find_map(|(k, attr)| { @@ -393,810 +430,850 @@ fn parse_call_def(item_fn: &ItemFn) -> Result<(usize, BenchmarkCallDef)> { None } }).collect::>>()?; - Ok(match &call_defs[..] { - [(i, call_def)] => (*i, call_def.clone()), // = 1 - [] => return missing_call(item_fn), - _ => - return Err(Error::new( - call_defs[1].1.attr_span(), - "Only one #[extrinsic_call] or #[block] attribute is allowed per benchmark.", - )), - }) + Ok(match &call_defs[..] 
{ + [(i, call_def)] => (*i, call_def.clone()), // = 1 + [] => return missing_call(item_fn), + _ => { + return Err(Error::new( + call_defs[1].1.attr_span(), + "Only one #[extrinsic_call] or #[block] attribute is allowed per benchmark.", + )) + } + }) } impl BenchmarkDef { - /// Constructs a [`BenchmarkDef`] by traversing an existing [`ItemFn`] node. - pub fn from(item_fn: &ItemFn) -> Result { - let params = parse_params(item_fn)?; - ensure_valid_return_type(item_fn)?; - let (i, call_def) = parse_call_def(&item_fn)?; - - let (verify_stmts, last_stmt) = match item_fn.sig.output { - ReturnType::Default => - // no return type, last_stmt should be None - (Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len()]), None), - ReturnType::Type(_, _) => { - // defined return type, last_stmt should be Result<(), BenchmarkError> - // compatible and should not be included in verify_stmts - if i + 1 >= item_fn.block.stmts.len() { - return Err(Error::new( - item_fn.block.span(), - "Benchmark `#[block]` or `#[extrinsic_call]` item cannot be the \ + /// Constructs a [`BenchmarkDef`] by traversing an existing [`ItemFn`] node. + pub fn from(item_fn: &ItemFn) -> Result { + let params = parse_params(item_fn)?; + ensure_valid_return_type(item_fn)?; + let (i, call_def) = parse_call_def(&item_fn)?; + + let (verify_stmts, last_stmt) = match item_fn.sig.output { + ReturnType::Default => + // no return type, last_stmt should be None + { + ( + Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len()]), + None, + ) + } + ReturnType::Type(_, _) => { + // defined return type, last_stmt should be Result<(), BenchmarkError> + // compatible and should not be included in verify_stmts + if i + 1 >= item_fn.block.stmts.len() { + return Err(Error::new( + item_fn.block.span(), + "Benchmark `#[block]` or `#[extrinsic_call]` item cannot be the \ last statement of your benchmark function definition if you have \ defined a return type. 
You should return something compatible \ with Result<(), BenchmarkError> (i.e. `Ok(())`) as the last statement \ or change your signature to a blank return type.", - )) - } - let Some(stmt) = item_fn.block.stmts.last() else { return missing_call(item_fn) }; - ( - Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len() - 1]), - Some(stmt.clone()), - ) - }, - }; - - Ok(BenchmarkDef { - params, - setup_stmts: Vec::from(&item_fn.block.stmts[0..i]), - call_def, - verify_stmts, - last_stmt, - fn_sig: item_fn.sig.clone(), - fn_vis: item_fn.vis.clone(), - fn_attrs: item_fn.attrs.clone(), - }) - } + )); + } + let Some(stmt) = item_fn.block.stmts.last() else { + return missing_call(item_fn); + }; + ( + Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len() - 1]), + Some(stmt.clone()), + ) + } + }; + + Ok(BenchmarkDef { + params, + setup_stmts: Vec::from(&item_fn.block.stmts[0..i]), + call_def, + verify_stmts, + last_stmt, + fn_sig: item_fn.sig.clone(), + fn_vis: item_fn.vis.clone(), + fn_attrs: item_fn.attrs.clone(), + }) + } } /// Parses and expands a `#[benchmarks]` or `#[instance_benchmarks]` invocation pub fn benchmarks( - attrs: TokenStream, - tokens: TokenStream, - instance: bool, + attrs: TokenStream, + tokens: TokenStream, + instance: bool, ) -> syn::Result { - let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; - // gather module info - let module: ItemMod = syn::parse(tokens)?; - let mod_span = module.span(); - let where_clause = match syn::parse::(attrs.clone()) { - Ok(_) => quote!(), - Err(_) => syn::parse::(attrs)?.predicates.to_token_stream(), - }; - let mod_vis = module.vis; - let mod_name = module.ident; - - // consume #[benchmarks] attribute by excluding it from mod_attrs - let mod_attrs: Vec<&Attribute> = module - .attrs - .iter() - .filter(|attr| !attr.path().is_ident(keywords::BENCHMARKS_TOKEN)) - .collect(); - - let mut benchmark_names: Vec = Vec::new(); - let mut extra_benchmark_names: Vec = Vec::new(); - let mut 
skip_meta_benchmark_names: Vec = Vec::new(); - // Map benchmarks to PoV modes. - let mut pov_modes = Vec::new(); - - let (_brace, mut content) = - module.content.ok_or(syn::Error::new(mod_span, "Module cannot be empty!"))?; - - // find all function defs marked with #[benchmark] - let benchmark_fn_metas = content.iter_mut().filter_map(|stmt| { - // parse as a function def first - let Item::Fn(func) = stmt else { return None }; - - // find #[benchmark] attribute on function def - let benchmark_attr = - func.attrs.iter().find(|attr| attr.path().is_ident(keywords::BENCHMARK_TOKEN))?; - - Some((benchmark_attr.clone(), func.clone(), stmt)) - }); - - // parse individual benchmark defs and args - for (benchmark_attr, func, stmt) in benchmark_fn_metas { - // parse benchmark def - let benchmark_def = BenchmarkDef::from(&func)?; - - // record benchmark name - let name = &func.sig.ident; - benchmark_names.push(name.clone()); - - // Check if we need to parse any args - if benchmark_attr.meta.require_path_only().is_err() { - // parse any args provided to #[benchmark] - let benchmark_attrs: BenchmarkAttrs = benchmark_attr.parse_args()?; - - // record name sets - if benchmark_attrs.extra { - extra_benchmark_names.push(name.clone()); - } else if benchmark_attrs.skip_meta { - skip_meta_benchmark_names.push(name.clone()); - } - - if let Some(mode) = benchmark_attrs.pov_mode { - let mut modes = Vec::new(); - // We cannot expand strings here since it is no-std, but syn does not expand bytes. 
- let name = name.to_string(); - let m = mode.root.to_string(); - modes.push(quote!(("ALL".as_bytes().to_vec(), #m.as_bytes().to_vec()))); - - for attr in mode.per_key.iter() { - // syn always puts spaces in quoted paths: - let key = attr.key.clone().into_token_stream().to_string().replace(" ", ""); - let mode = attr.mode.to_string(); - modes.push(quote!((#key.as_bytes().to_vec(), #mode.as_bytes().to_vec()))); - } - - pov_modes.push( - quote!((#name.as_bytes().to_vec(), #krate::__private::vec![#(#modes),*])), - ); - } - } - - // expand benchmark - let expanded = expand_benchmark(benchmark_def, name, instance, where_clause.clone()); - - // replace original function def with expanded code - *stmt = Item::Verbatim(expanded); - } - - // generics - let type_use_generics = match instance { - false => quote!(T), - true => quote!(T, I), - }; - let type_impl_generics = match instance { - false => quote!(T: Config), - true => quote!(T: Config, I: 'static), - }; - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - - // benchmark name variables - let benchmark_names_str: Vec = benchmark_names.iter().map(|n| n.to_string()).collect(); - let extra_benchmark_names_str: Vec = - extra_benchmark_names.iter().map(|n| n.to_string()).collect(); - let skip_meta_benchmark_names_str: Vec = - skip_meta_benchmark_names.iter().map(|n| n.to_string()).collect(); - let mut selected_benchmark_mappings: Vec = Vec::new(); - let mut benchmarks_by_name_mappings: Vec = Vec::new(); - let test_idents: Vec = benchmark_names_str - .iter() - .map(|n| Ident::new(format!("test_benchmark_{}", n).as_str(), Span::call_site())) - .collect(); - for i in 0..benchmark_names.len() { - let name_ident = &benchmark_names[i]; - let name_str = &benchmark_names_str[i]; - let test_ident = &test_idents[i]; - selected_benchmark_mappings.push(quote!(#name_str => SelectedBenchmark::#name_ident)); - benchmarks_by_name_mappings.push(quote!(#name_str => Self::#test_ident())) - } - - let 
impl_test_function = content - .iter_mut() - .find_map(|item| { - let Item::Macro(item_macro) = item else { - return None; - }; - - if !item_macro - .mac - .path - .segments - .iter() - .any(|s| s.ident == "impl_benchmark_test_suite") - { - return None; - } - - let tokens = item_macro.mac.tokens.clone(); - *item = Item::Verbatim(quote! {}); - - Some(quote! { - impl_test_function!( - (#( {} #benchmark_names )*) - (#( #extra_benchmark_names )*) - (#( #skip_meta_benchmark_names )*) - #tokens - ); - }) - }) - .unwrap_or(quote! {}); - - // emit final quoted tokens - let res = quote! { - #(#mod_attrs) - * - #mod_vis mod #mod_name { - #(#content) - * - - #[allow(non_camel_case_types)] - enum SelectedBenchmark { - #(#benchmark_names), - * - } - - impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> for SelectedBenchmark where #where_clause { - fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { - match self { - #( - Self::#benchmark_names => { - <#benchmark_names as #krate::BenchmarkingSetup<#type_use_generics>>::components(&#benchmark_names) - } - ) - * - } - } - - fn instance( - &self, - components: &[(#krate::BenchmarkParameter, u32)], - verify: bool, - ) -> Result< - #krate::__private::Box Result<(), #krate::BenchmarkError>>, - #krate::BenchmarkError, - > { - match self { - #( - Self::#benchmark_names => { - <#benchmark_names as #krate::BenchmarkingSetup< - #type_use_generics - >>::instance(&#benchmark_names, components, verify) - } - ) - * - } - } - } - #[cfg(any(feature = "runtime-benchmarks", test))] - impl<#type_impl_generics> #krate::Benchmarking for Pallet<#type_use_generics> - where T: #frame_system::Config, #where_clause - { - fn benchmarks( - extra: bool, - ) -> #krate::__private::Vec<#krate::BenchmarkMetadata> { - let mut all_names = #krate::__private::vec![ - #(#benchmark_names_str), - * - ]; - if !extra { - let extra = [ - #(#extra_benchmark_names_str), - * - ]; - all_names.retain(|x| 
!extra.contains(x)); - } - let pov_modes: - #krate::__private::Vec<( - #krate::__private::Vec, - #krate::__private::Vec<( - #krate::__private::Vec, - #krate::__private::Vec - )>, - )> = #krate::__private::vec![ - #( #pov_modes ),* - ]; - all_names.into_iter().map(|benchmark| { - let selected_benchmark = match benchmark { - #(#selected_benchmark_mappings), - *, - _ => panic!("all benchmarks should be selectable") - }; - let components = >::components(&selected_benchmark); - let name = benchmark.as_bytes().to_vec(); - let modes = pov_modes.iter().find(|p| p.0 == name).map(|p| p.1.clone()); - - #krate::BenchmarkMetadata { - name: benchmark.as_bytes().to_vec(), - components, - pov_modes: modes.unwrap_or_default(), - } - }).collect::<#krate::__private::Vec<_>>() - } - - fn run_benchmark( - extrinsic: &[u8], - c: &[(#krate::BenchmarkParameter, u32)], - whitelist: &[#krate::__private::TrackedStorageKey], - verify: bool, - internal_repeats: u32, - ) -> Result<#krate::__private::Vec<#krate::BenchmarkResult>, #krate::BenchmarkError> { - let extrinsic = #krate::__private::str::from_utf8(extrinsic).map_err(|_| "`extrinsic` is not a valid utf-8 string!")?; - let selected_benchmark = match extrinsic { - #(#selected_benchmark_mappings), - *, - _ => return Err("Could not find extrinsic.".into()), - }; - let mut whitelist = whitelist.to_vec(); - let whitelisted_caller_key = <#frame_system::Account< - T, - > as #krate::__private::storage::StorageMap<_, _,>>::hashed_key_for( - #krate::whitelisted_caller::() - ); - whitelist.push(whitelisted_caller_key.into()); - let transactional_layer_key = #krate::__private::TrackedStorageKey::new( - #krate::__private::storage::transactional::TRANSACTION_LEVEL_KEY.into(), - ); - whitelist.push(transactional_layer_key); - // Whitelist the `:extrinsic_index`. 
- let extrinsic_index = #krate::__private::TrackedStorageKey::new( - #krate::__private::well_known_keys::EXTRINSIC_INDEX.into() - ); - whitelist.push(extrinsic_index); - // Whitelist the `:intrablock_entropy`. - let intrablock_entropy = #krate::__private::TrackedStorageKey::new( - #krate::__private::well_known_keys::INTRABLOCK_ENTROPY.into() - ); - whitelist.push(intrablock_entropy); - - #krate::benchmarking::set_whitelist(whitelist.clone()); - let mut results: #krate::__private::Vec<#krate::BenchmarkResult> = #krate::__private::Vec::new(); - - // Always do at least one internal repeat... - for _ in 0 .. internal_repeats.max(1) { - // Always reset the state after the benchmark. - #krate::__private::defer!(#krate::benchmarking::wipe_db()); - - // Set up the externalities environment for the setup we want to - // benchmark. - let closure_to_benchmark = < - SelectedBenchmark as #krate::BenchmarkingSetup<#type_use_generics> - >::instance(&selected_benchmark, c, verify)?; - - // Set the block number to at least 1 so events are deposited. - if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { - #frame_system::Pallet::::set_block_number(1u32.into()); - } - - // Commit the externalities to the database, flushing the DB cache. - // This will enable worst case scenario for reading from the database. - #krate::benchmarking::commit_db(); - - // Access all whitelisted keys to get them into the proof recorder since the - // recorder does now have a whitelist. - for key in &whitelist { - #krate::__private::storage::unhashed::get_raw(&key.key); - } - - // Reset the read/write counter so we don't count operations in the setup process. - #krate::benchmarking::reset_read_write_count(); - - // Time the extrinsic logic. 
- #krate::__private::log::trace!( - target: "benchmark", - "Start Benchmark: {} ({:?})", - extrinsic, - c - ); - - let start_pov = #krate::benchmarking::proof_size(); - let start_extrinsic = #krate::benchmarking::current_time(); - - closure_to_benchmark()?; - - let finish_extrinsic = #krate::benchmarking::current_time(); - let end_pov = #krate::benchmarking::proof_size(); - - // Calculate the diff caused by the benchmark. - let elapsed_extrinsic = finish_extrinsic.saturating_sub(start_extrinsic); - let diff_pov = match (start_pov, end_pov) { - (Some(start), Some(end)) => end.saturating_sub(start), - _ => Default::default(), - }; - - // Commit the changes to get proper write count - #krate::benchmarking::commit_db(); - #krate::__private::log::trace!( - target: "benchmark", - "End Benchmark: {} ns", elapsed_extrinsic - ); - let read_write_count = #krate::benchmarking::read_write_count(); - #krate::__private::log::trace!( - target: "benchmark", - "Read/Write Count {:?}", read_write_count - ); - - // Time the storage root recalculation. 
- let start_storage_root = #krate::benchmarking::current_time(); - #krate::__private::storage_root(#krate::__private::StateVersion::V1); - let finish_storage_root = #krate::benchmarking::current_time(); - let elapsed_storage_root = finish_storage_root - start_storage_root; - - let skip_meta = [ #(#skip_meta_benchmark_names_str),* ]; - let read_and_written_keys = if skip_meta.contains(&extrinsic) { - #krate::__private::vec![(b"Skipped Metadata".to_vec(), 0, 0, false)] - } else { - #krate::benchmarking::get_read_and_written_keys() - }; - - results.push(#krate::BenchmarkResult { - components: c.to_vec(), - extrinsic_time: elapsed_extrinsic, - storage_root_time: elapsed_storage_root, - reads: read_write_count.0, - repeat_reads: read_write_count.1, - writes: read_write_count.2, - repeat_writes: read_write_count.3, - proof_size: diff_pov, - keys: read_and_written_keys, - }); - } - - return Ok(results); - } - } - - #[cfg(test)] - impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { - /// Test a particular benchmark by name. - /// - /// This isn't called `test_benchmark_by_name` just in case some end-user eventually - /// writes a benchmark, itself called `by_name`; the function would be shadowed in - /// that case. - /// - /// This is generally intended to be used by child test modules such as those created - /// by the `impl_benchmark_test_suite` macro. However, it is not an error if a pallet - /// author chooses not to implement benchmarks. 
- #[allow(unused)] - fn test_bench_by_name(name: &[u8]) -> Result<(), #krate::BenchmarkError> { - let name = #krate::__private::str::from_utf8(name) - .map_err(|_| -> #krate::BenchmarkError { "`name` is not a valid utf8 string!".into() })?; - match name { - #(#benchmarks_by_name_mappings), - *, - _ => Err("Could not find test for requested benchmark.".into()), - } - } - } - - #impl_test_function - } - #mod_vis use #mod_name::*; - }; - Ok(res.into()) + let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; + // gather module info + let module: ItemMod = syn::parse(tokens)?; + let mod_span = module.span(); + let where_clause = match syn::parse::(attrs.clone()) { + Ok(_) => quote!(), + Err(_) => syn::parse::(attrs)? + .predicates + .to_token_stream(), + }; + let mod_vis = module.vis; + let mod_name = module.ident; + + // consume #[benchmarks] attribute by excluding it from mod_attrs + let mod_attrs: Vec<&Attribute> = module + .attrs + .iter() + .filter(|attr| !attr.path().is_ident(keywords::BENCHMARKS_TOKEN)) + .collect(); + + let mut benchmark_names: Vec = Vec::new(); + let mut extra_benchmark_names: Vec = Vec::new(); + let mut skip_meta_benchmark_names: Vec = Vec::new(); + // Map benchmarks to PoV modes. 
+ let mut pov_modes = Vec::new(); + + let (_brace, mut content) = module + .content + .ok_or(syn::Error::new(mod_span, "Module cannot be empty!"))?; + + // find all function defs marked with #[benchmark] + let benchmark_fn_metas = content.iter_mut().filter_map(|stmt| { + // parse as a function def first + let Item::Fn(func) = stmt else { return None }; + + // find #[benchmark] attribute on function def + let benchmark_attr = func + .attrs + .iter() + .find(|attr| attr.path().is_ident(keywords::BENCHMARK_TOKEN))?; + + Some((benchmark_attr.clone(), func.clone(), stmt)) + }); + + // parse individual benchmark defs and args + for (benchmark_attr, func, stmt) in benchmark_fn_metas { + // parse benchmark def + let benchmark_def = BenchmarkDef::from(&func)?; + + // record benchmark name + let name = &func.sig.ident; + benchmark_names.push(name.clone()); + + // Check if we need to parse any args + if benchmark_attr.meta.require_path_only().is_err() { + // parse any args provided to #[benchmark] + let benchmark_attrs: BenchmarkAttrs = benchmark_attr.parse_args()?; + + // record name sets + if benchmark_attrs.extra { + extra_benchmark_names.push(name.clone()); + } else if benchmark_attrs.skip_meta { + skip_meta_benchmark_names.push(name.clone()); + } + + if let Some(mode) = benchmark_attrs.pov_mode { + let mut modes = Vec::new(); + // We cannot expand strings here since it is no-std, but syn does not expand bytes. 
+ let name = name.to_string(); + let m = mode.root.to_string(); + modes.push(quote!(("ALL".as_bytes().to_vec(), #m.as_bytes().to_vec()))); + + for attr in mode.per_key.iter() { + // syn always puts spaces in quoted paths: + let key = attr + .key + .clone() + .into_token_stream() + .to_string() + .replace(" ", ""); + let mode = attr.mode.to_string(); + modes.push(quote!((#key.as_bytes().to_vec(), #mode.as_bytes().to_vec()))); + } + + pov_modes.push( + quote!((#name.as_bytes().to_vec(), #krate::__private::vec![#(#modes),*])), + ); + } + } + + // expand benchmark + let expanded = expand_benchmark(benchmark_def, name, instance, where_clause.clone()); + + // replace original function def with expanded code + *stmt = Item::Verbatim(expanded); + } + + // generics + let type_use_generics = match instance { + false => quote!(T), + true => quote!(T, I), + }; + let type_impl_generics = match instance { + false => quote!(T: Config), + true => quote!(T: Config, I: 'static), + }; + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + + // benchmark name variables + let benchmark_names_str: Vec = benchmark_names.iter().map(|n| n.to_string()).collect(); + let extra_benchmark_names_str: Vec = extra_benchmark_names + .iter() + .map(|n| n.to_string()) + .collect(); + let skip_meta_benchmark_names_str: Vec = skip_meta_benchmark_names + .iter() + .map(|n| n.to_string()) + .collect(); + let mut selected_benchmark_mappings: Vec = Vec::new(); + let mut benchmarks_by_name_mappings: Vec = Vec::new(); + let test_idents: Vec = benchmark_names_str + .iter() + .map(|n| Ident::new(format!("test_benchmark_{}", n).as_str(), Span::call_site())) + .collect(); + for i in 0..benchmark_names.len() { + let name_ident = &benchmark_names[i]; + let name_str = &benchmark_names_str[i]; + let test_ident = &test_idents[i]; + selected_benchmark_mappings.push(quote!(#name_str => SelectedBenchmark::#name_ident)); + benchmarks_by_name_mappings.push(quote!(#name_str => Self::#test_ident())) 
+ } + + let impl_test_function = content + .iter_mut() + .find_map(|item| { + let Item::Macro(item_macro) = item else { + return None; + }; + + if !item_macro + .mac + .path + .segments + .iter() + .any(|s| s.ident == "impl_benchmark_test_suite") + { + return None; + } + + let tokens = item_macro.mac.tokens.clone(); + *item = Item::Verbatim(quote! {}); + + Some(quote! { + impl_test_function!( + (#( {} #benchmark_names )*) + (#( #extra_benchmark_names )*) + (#( #skip_meta_benchmark_names )*) + #tokens + ); + }) + }) + .unwrap_or(quote! {}); + + // emit final quoted tokens + let res = quote! { + #(#mod_attrs) + * + #mod_vis mod #mod_name { + #(#content) + * + + #[allow(non_camel_case_types)] + enum SelectedBenchmark { + #(#benchmark_names), + * + } + + impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> for SelectedBenchmark where #where_clause { + fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { + match self { + #( + Self::#benchmark_names => { + <#benchmark_names as #krate::BenchmarkingSetup<#type_use_generics>>::components(&#benchmark_names) + } + ) + * + } + } + + fn instance( + &self, + components: &[(#krate::BenchmarkParameter, u32)], + verify: bool, + ) -> Result< + #krate::__private::Box Result<(), #krate::BenchmarkError>>, + #krate::BenchmarkError, + > { + match self { + #( + Self::#benchmark_names => { + <#benchmark_names as #krate::BenchmarkingSetup< + #type_use_generics + >>::instance(&#benchmark_names, components, verify) + } + ) + * + } + } + } + #[cfg(any(feature = "runtime-benchmarks", test))] + impl<#type_impl_generics> #krate::Benchmarking for Pallet<#type_use_generics> + where T: #frame_system::Config, #where_clause + { + fn benchmarks( + extra: bool, + ) -> #krate::__private::Vec<#krate::BenchmarkMetadata> { + let mut all_names = #krate::__private::vec![ + #(#benchmark_names_str), + * + ]; + if !extra { + let extra = [ + #(#extra_benchmark_names_str), + * + ]; + all_names.retain(|x| 
!extra.contains(x)); + } + let pov_modes: + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec + )>, + )> = #krate::__private::vec![ + #( #pov_modes ),* + ]; + all_names.into_iter().map(|benchmark| { + let selected_benchmark = match benchmark { + #(#selected_benchmark_mappings), + *, + _ => panic!("all benchmarks should be selectable") + }; + let components = >::components(&selected_benchmark); + let name = benchmark.as_bytes().to_vec(); + let modes = pov_modes.iter().find(|p| p.0 == name).map(|p| p.1.clone()); + + #krate::BenchmarkMetadata { + name: benchmark.as_bytes().to_vec(), + components, + pov_modes: modes.unwrap_or_default(), + } + }).collect::<#krate::__private::Vec<_>>() + } + + fn run_benchmark( + extrinsic: &[u8], + c: &[(#krate::BenchmarkParameter, u32)], + whitelist: &[#krate::__private::TrackedStorageKey], + verify: bool, + internal_repeats: u32, + ) -> Result<#krate::__private::Vec<#krate::BenchmarkResult>, #krate::BenchmarkError> { + let extrinsic = #krate::__private::str::from_utf8(extrinsic).map_err(|_| "`extrinsic` is not a valid utf-8 string!")?; + let selected_benchmark = match extrinsic { + #(#selected_benchmark_mappings), + *, + _ => return Err("Could not find extrinsic.".into()), + }; + let mut whitelist = whitelist.to_vec(); + let whitelisted_caller_key = <#frame_system::Account< + T, + > as #krate::__private::storage::StorageMap<_, _,>>::hashed_key_for( + #krate::whitelisted_caller::() + ); + whitelist.push(whitelisted_caller_key.into()); + let transactional_layer_key = #krate::__private::TrackedStorageKey::new( + #krate::__private::storage::transactional::TRANSACTION_LEVEL_KEY.into(), + ); + whitelist.push(transactional_layer_key); + // Whitelist the `:extrinsic_index`. 
+ let extrinsic_index = #krate::__private::TrackedStorageKey::new( + #krate::__private::well_known_keys::EXTRINSIC_INDEX.into() + ); + whitelist.push(extrinsic_index); + // Whitelist the `:intrablock_entropy`. + let intrablock_entropy = #krate::__private::TrackedStorageKey::new( + #krate::__private::well_known_keys::INTRABLOCK_ENTROPY.into() + ); + whitelist.push(intrablock_entropy); + + #krate::benchmarking::set_whitelist(whitelist.clone()); + let mut results: #krate::__private::Vec<#krate::BenchmarkResult> = #krate::__private::Vec::new(); + + // Always do at least one internal repeat... + for _ in 0 .. internal_repeats.max(1) { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); + + // Set up the externalities environment for the setup we want to + // benchmark. + let closure_to_benchmark = < + SelectedBenchmark as #krate::BenchmarkingSetup<#type_use_generics> + >::instance(&selected_benchmark, c, verify)?; + + // Set the block number to at least 1 so events are deposited. + if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + + // Commit the externalities to the database, flushing the DB cache. + // This will enable worst case scenario for reading from the database. + #krate::benchmarking::commit_db(); + + // Access all whitelisted keys to get them into the proof recorder since the + // recorder does now have a whitelist. + for key in &whitelist { + #krate::__private::storage::unhashed::get_raw(&key.key); + } + + // Reset the read/write counter so we don't count operations in the setup process. + #krate::benchmarking::reset_read_write_count(); + + // Time the extrinsic logic. 
+ #krate::__private::log::trace!( + target: "benchmark", + "Start Benchmark: {} ({:?})", + extrinsic, + c + ); + + let start_pov = #krate::benchmarking::proof_size(); + let start_extrinsic = #krate::benchmarking::current_time(); + + closure_to_benchmark()?; + + let finish_extrinsic = #krate::benchmarking::current_time(); + let end_pov = #krate::benchmarking::proof_size(); + + // Calculate the diff caused by the benchmark. + let elapsed_extrinsic = finish_extrinsic.saturating_sub(start_extrinsic); + let diff_pov = match (start_pov, end_pov) { + (Some(start), Some(end)) => end.saturating_sub(start), + _ => Default::default(), + }; + + // Commit the changes to get proper write count + #krate::benchmarking::commit_db(); + #krate::__private::log::trace!( + target: "benchmark", + "End Benchmark: {} ns", elapsed_extrinsic + ); + let read_write_count = #krate::benchmarking::read_write_count(); + #krate::__private::log::trace!( + target: "benchmark", + "Read/Write Count {:?}", read_write_count + ); + + // Time the storage root recalculation. 
+ let start_storage_root = #krate::benchmarking::current_time(); + #krate::__private::storage_root(#krate::__private::StateVersion::V1); + let finish_storage_root = #krate::benchmarking::current_time(); + let elapsed_storage_root = finish_storage_root - start_storage_root; + + let skip_meta = [ #(#skip_meta_benchmark_names_str),* ]; + let read_and_written_keys = if skip_meta.contains(&extrinsic) { + #krate::__private::vec![(b"Skipped Metadata".to_vec(), 0, 0, false)] + } else { + #krate::benchmarking::get_read_and_written_keys() + }; + + results.push(#krate::BenchmarkResult { + components: c.to_vec(), + extrinsic_time: elapsed_extrinsic, + storage_root_time: elapsed_storage_root, + reads: read_write_count.0, + repeat_reads: read_write_count.1, + writes: read_write_count.2, + repeat_writes: read_write_count.3, + proof_size: diff_pov, + keys: read_and_written_keys, + }); + } + + return Ok(results); + } + } + + #[cfg(test)] + impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { + /// Test a particular benchmark by name. + /// + /// This isn't called `test_benchmark_by_name` just in case some end-user eventually + /// writes a benchmark, itself called `by_name`; the function would be shadowed in + /// that case. + /// + /// This is generally intended to be used by child test modules such as those created + /// by the `impl_benchmark_test_suite` macro. However, it is not an error if a pallet + /// author chooses not to implement benchmarks. 
+ #[allow(unused)] + fn test_bench_by_name(name: &[u8]) -> Result<(), #krate::BenchmarkError> { + let name = #krate::__private::str::from_utf8(name) + .map_err(|_| -> #krate::BenchmarkError { "`name` is not a valid utf8 string!".into() })?; + match name { + #(#benchmarks_by_name_mappings), + *, + _ => Err("Could not find test for requested benchmark.".into()), + } + } + } + + #impl_test_function + } + #mod_vis use #mod_name::*; + }; + Ok(res.into()) } /// Prepares a [`Vec`] to be interpolated by [`quote!`] by creating easily-iterable /// arrays formatted in such a way that they can be interpolated directly. struct UnrolledParams { - param_ranges: Vec, - param_names: Vec, + param_ranges: Vec, + param_names: Vec, } impl UnrolledParams { - /// Constructs an [`UnrolledParams`] from a [`Vec`] - fn from(params: &Vec) -> UnrolledParams { - let param_ranges: Vec = params - .iter() - .map(|p| { - let name = Ident::new(&p.name, Span::call_site()); - let start = &p.start; - let end = &p.end; - quote!(#name, #start, #end) - }) - .collect(); - let param_names: Vec = params - .iter() - .map(|p| { - let name = Ident::new(&p.name, Span::call_site()); - quote!(#name) - }) - .collect(); - UnrolledParams { param_ranges, param_names } - } + /// Constructs an [`UnrolledParams`] from a [`Vec`] + fn from(params: &Vec) -> UnrolledParams { + let param_ranges: Vec = params + .iter() + .map(|p| { + let name = Ident::new(&p.name, Span::call_site()); + let start = &p.start; + let end = &p.end; + quote!(#name, #start, #end) + }) + .collect(); + let param_names: Vec = params + .iter() + .map(|p| { + let name = Ident::new(&p.name, Span::call_site()); + quote!(#name) + }) + .collect(); + UnrolledParams { + param_ranges, + param_names, + } + } } /// Performs expansion of an already-parsed [`BenchmarkDef`]. 
fn expand_benchmark( - benchmark_def: BenchmarkDef, - name: &Ident, - is_instance: bool, - where_clause: TokenStream2, + benchmark_def: BenchmarkDef, + name: &Ident, + is_instance: bool, + where_clause: TokenStream2, ) -> TokenStream2 { - // set up variables needed during quoting - let krate = match generate_access_from_frame_or_crate("frame-benchmarking") { - Ok(ident) => ident, - Err(err) => return err.to_compile_error().into(), - }; - let frame_system = match generate_access_from_frame_or_crate("frame-system") { - Ok(path) => path, - Err(err) => return err.to_compile_error().into(), - }; - let codec = quote!(#krate::__private::codec); - let traits = quote!(#krate::__private::traits); - let setup_stmts = benchmark_def.setup_stmts; - let verify_stmts = benchmark_def.verify_stmts; - let last_stmt = benchmark_def.last_stmt; - let test_ident = - Ident::new(format!("test_benchmark_{}", name.to_string()).as_str(), Span::call_site()); - - // unroll params (prepare for quoting) - let unrolled = UnrolledParams::from(&benchmark_def.params); - let param_names = unrolled.param_names; - let param_ranges = unrolled.param_ranges; - - let type_use_generics = match is_instance { - false => quote!(T), - true => quote!(T, I), - }; - - let type_impl_generics = match is_instance { - false => quote!(T: Config), - true => quote!(T: Config, I: 'static), - }; - - // used in the benchmarking impls - let (pre_call, post_call, fn_call_body) = match &benchmark_def.call_def { - BenchmarkCallDef::ExtrinsicCall { origin, expr_call, attr_span: _ } => { - let mut expr_call = expr_call.clone(); - - // remove first arg from expr_call - let mut final_args = Punctuated::::new(); - let args: Vec<&Expr> = expr_call.args.iter().collect(); - for arg in &args[1..] { - final_args.push((*(*arg)).clone()); - } - expr_call.args = final_args; - - let origin = match origin { - Expr::Cast(t) => { - let ty = t.ty.clone(); - quote! { - <::RuntimeOrigin as From<#ty>>::from(#origin); - } - }, - _ => quote! 
{ - #origin.into(); - }, - }; - - // determine call name (handles `_` and normal call syntax) - let expr_span = expr_call.span(); - let call_err = || { - syn::Error::new(expr_span, "Extrinsic call must be a function call or `_`") - .to_compile_error() - }; - let call_name = match *expr_call.func { - Expr::Path(expr_path) => { - // normal function call - let Some(segment) = expr_path.path.segments.last() else { return call_err() }; - segment.ident.to_string() - }, - Expr::Infer(_) => { - // `_` style - // replace `_` with fn name - name.to_string() - }, - _ => return call_err(), - }; - - // modify extrinsic call to be prefixed with "new_call_variant" - let call_name = format!("new_call_variant_{}", call_name); - let mut punct: Punctuated = Punctuated::new(); - punct.push(PathSegment { - arguments: PathArguments::None, - ident: Ident::new(call_name.as_str(), Span::call_site()), - }); - *expr_call.func = Expr::Path(ExprPath { - attrs: vec![], - qself: None, - path: Path { leading_colon: None, segments: punct }, - }); - let pre_call = quote! { - let __call = Call::<#type_use_generics>::#expr_call; - let __benchmarked_call_encoded = #codec::Encode::encode(&__call); - }; - let post_call = quote! { - let __call_decoded = as #codec::Decode> - ::decode(&mut &__benchmarked_call_encoded[..]) - .expect("call is encoded above, encoding must be correct"); - let __origin = #origin; - as #traits::UnfilteredDispatchable>::dispatch_bypass_filter( - __call_decoded, - __origin, - ) - }; - ( - // (pre_call, post_call, fn_call_body): - pre_call.clone(), - quote!(#post_call?;), - quote! 
{ - #pre_call - #post_call.unwrap(); - }, - ) - }, - BenchmarkCallDef::Block { block, attr_span: _ } => - (quote!(), quote!(#block), quote!(#block)), - }; - - let vis = benchmark_def.fn_vis; - - // remove #[benchmark] attribute - let fn_attrs = benchmark_def - .fn_attrs - .iter() - .filter(|attr| !attr.path().is_ident(keywords::BENCHMARK_TOKEN)); - - // modify signature generics, ident, and inputs, e.g: - // before: `fn bench(u: Linear<1, 100>) -> Result<(), BenchmarkError>` - // after: `fn _bench , I: 'static>(u: u32, verify: bool) -> Result<(), - // BenchmarkError>` - let mut sig = benchmark_def.fn_sig; - sig.generics = parse_quote!(<#type_impl_generics>); - if !where_clause.is_empty() { - sig.generics.where_clause = parse_quote!(where #where_clause); - } - sig.ident = - Ident::new(format!("_{}", name.to_token_stream().to_string()).as_str(), Span::call_site()); - let mut fn_param_inputs: Vec = - param_names.iter().map(|name| quote!(#name: u32)).collect(); - fn_param_inputs.push(quote!(verify: bool)); - sig.inputs = parse_quote!(#(#fn_param_inputs),*); - - // used in instance() impl - let impl_last_stmt = match &last_stmt { - Some(stmt) => quote!(#stmt), - None => quote!(Ok(())), - }; - let fn_attrs_clone = fn_attrs.clone(); - - let fn_def = quote! { - #( - #fn_attrs_clone - )* - #vis #sig { - #( - #setup_stmts - )* - #fn_call_body - if verify { - #( - #verify_stmts - )* - } - #last_stmt - } - }; - - // generate final quoted tokens - let res = quote! { - // benchmark function definition - #fn_def - - #[allow(non_camel_case_types)] - #( - #fn_attrs - )* - struct #name; - - #[allow(unused_variables)] - impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> - for #name where #where_clause { - fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { - #krate::__private::vec! 
[ - #( - (#krate::BenchmarkParameter::#param_ranges) - ),* - ] - } - - fn instance( - &self, - components: &[(#krate::BenchmarkParameter, u32)], - verify: bool - ) -> Result<#krate::__private::Box Result<(), #krate::BenchmarkError>>, #krate::BenchmarkError> { - #( - // prepare instance #param_names - let #param_names = components.iter() - .find(|&c| c.0 == #krate::BenchmarkParameter::#param_names) - .ok_or("Could not find component during benchmark preparation.")? - .1; - )* - - // benchmark setup code - #( - #setup_stmts - )* - #pre_call - Ok(#krate::__private::Box::new(move || -> Result<(), #krate::BenchmarkError> { - #post_call - if verify { - #( - #verify_stmts - )* - } - #impl_last_stmt - })) - } - } - - #[cfg(test)] - impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { - #[allow(unused)] - fn #test_ident() -> Result<(), #krate::BenchmarkError> { - let selected_benchmark = SelectedBenchmark::#name; - let components = < - SelectedBenchmark as #krate::BenchmarkingSetup - >::components(&selected_benchmark); - let execute_benchmark = | - c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> - | -> Result<(), #krate::BenchmarkError> { - // Always reset the state after the benchmark. - #krate::__private::defer!(#krate::benchmarking::wipe_db()); - - // Set up the benchmark, return execution + verification function. - let closure_to_verify = < - SelectedBenchmark as #krate::BenchmarkingSetup - >::instance(&selected_benchmark, &c, true)?; - - // Set the block number to at least 1 so events are deposited. 
- if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { - #frame_system::Pallet::::set_block_number(1u32.into()); - } - - // Run execution + verification - closure_to_verify() - }; - - if components.is_empty() { - execute_benchmark(Default::default())?; - } else { - let num_values: u32 = if let Ok(ev) = std::env::var("VALUES_PER_COMPONENT") { - ev.parse().map_err(|_| { - #krate::BenchmarkError::Stop( - "Could not parse env var `VALUES_PER_COMPONENT` as u32." - ) - })? - } else { - 6 - }; - - if num_values < 2 { - return Err("`VALUES_PER_COMPONENT` must be at least 2".into()); - } - - for (name, low, high) in components.clone().into_iter() { - // Test the lowest, highest (if its different from the lowest) - // and up to num_values-2 more equidistant values in between. - // For 0..10 and num_values=6 this would mean: [0, 2, 4, 6, 8, 10] - if high < low { - return Err("The start of a `ParamRange` must be less than or equal to the end".into()); - } - - let mut values = #krate::__private::vec![low]; - let diff = (high - low).min(num_values - 1); - let slope = (high - low) as f32 / diff as f32; - - for i in 1..=diff { - let value = ((low as f32 + slope * i as f32) as u32) - .clamp(low, high); - values.push(value); - } - - for component_value in values { - // Select the max value for all the other components. 
- let c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> = components - .iter() - .map(|(n, _, h)| - if *n == name { - (*n, component_value) - } else { - (*n, *h) - } - ) - .collect(); - - execute_benchmark(c)?; - } - } - } - return Ok(()); - } - } - }; - res + // set up variables needed during quoting + let krate = match generate_access_from_frame_or_crate("frame-benchmarking") { + Ok(ident) => ident, + Err(err) => return err.to_compile_error().into(), + }; + let frame_system = match generate_access_from_frame_or_crate("frame-system") { + Ok(path) => path, + Err(err) => return err.to_compile_error().into(), + }; + let codec = quote!(#krate::__private::codec); + let traits = quote!(#krate::__private::traits); + let setup_stmts = benchmark_def.setup_stmts; + let verify_stmts = benchmark_def.verify_stmts; + let last_stmt = benchmark_def.last_stmt; + let test_ident = Ident::new( + format!("test_benchmark_{}", name.to_string()).as_str(), + Span::call_site(), + ); + + // unroll params (prepare for quoting) + let unrolled = UnrolledParams::from(&benchmark_def.params); + let param_names = unrolled.param_names; + let param_ranges = unrolled.param_ranges; + + let type_use_generics = match is_instance { + false => quote!(T), + true => quote!(T, I), + }; + + let type_impl_generics = match is_instance { + false => quote!(T: Config), + true => quote!(T: Config, I: 'static), + }; + + // used in the benchmarking impls + let (pre_call, post_call, fn_call_body) = match &benchmark_def.call_def { + BenchmarkCallDef::ExtrinsicCall { + origin, + expr_call, + attr_span: _, + } => { + let mut expr_call = expr_call.clone(); + + // remove first arg from expr_call + let mut final_args = Punctuated::::new(); + let args: Vec<&Expr> = expr_call.args.iter().collect(); + for arg in &args[1..] { + final_args.push((*(*arg)).clone()); + } + expr_call.args = final_args; + + let origin = match origin { + Expr::Cast(t) => { + let ty = t.ty.clone(); + quote! 
{ + <::RuntimeOrigin as From<#ty>>::from(#origin); + } + } + _ => quote! { + #origin.into(); + }, + }; + + // determine call name (handles `_` and normal call syntax) + let expr_span = expr_call.span(); + let call_err = || { + syn::Error::new(expr_span, "Extrinsic call must be a function call or `_`") + .to_compile_error() + }; + let call_name = match *expr_call.func { + Expr::Path(expr_path) => { + // normal function call + let Some(segment) = expr_path.path.segments.last() else { + return call_err(); + }; + segment.ident.to_string() + } + Expr::Infer(_) => { + // `_` style + // replace `_` with fn name + name.to_string() + } + _ => return call_err(), + }; + + // modify extrinsic call to be prefixed with "new_call_variant" + let call_name = format!("new_call_variant_{}", call_name); + let mut punct: Punctuated = Punctuated::new(); + punct.push(PathSegment { + arguments: PathArguments::None, + ident: Ident::new(call_name.as_str(), Span::call_site()), + }); + *expr_call.func = Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: Path { + leading_colon: None, + segments: punct, + }, + }); + let pre_call = quote! { + let __call = Call::<#type_use_generics>::#expr_call; + let __benchmarked_call_encoded = #codec::Encode::encode(&__call); + }; + let post_call = quote! { + let __call_decoded = as #codec::Decode> + ::decode(&mut &__benchmarked_call_encoded[..]) + .expect("call is encoded above, encoding must be correct"); + let __origin = #origin; + as #traits::UnfilteredDispatchable>::dispatch_bypass_filter( + __call_decoded, + __origin, + ) + }; + ( + // (pre_call, post_call, fn_call_body): + pre_call.clone(), + quote!(#post_call?;), + quote! 
{ + #pre_call + #post_call.unwrap(); + }, + ) + } + BenchmarkCallDef::Block { + block, + attr_span: _, + } => (quote!(), quote!(#block), quote!(#block)), + }; + + let vis = benchmark_def.fn_vis; + + // remove #[benchmark] attribute + let fn_attrs = benchmark_def + .fn_attrs + .iter() + .filter(|attr| !attr.path().is_ident(keywords::BENCHMARK_TOKEN)); + + // modify signature generics, ident, and inputs, e.g: + // before: `fn bench(u: Linear<1, 100>) -> Result<(), BenchmarkError>` + // after: `fn _bench , I: 'static>(u: u32, verify: bool) -> Result<(), + // BenchmarkError>` + let mut sig = benchmark_def.fn_sig; + sig.generics = parse_quote!(<#type_impl_generics>); + if !where_clause.is_empty() { + sig.generics.where_clause = parse_quote!(where #where_clause); + } + sig.ident = Ident::new( + format!("_{}", name.to_token_stream().to_string()).as_str(), + Span::call_site(), + ); + let mut fn_param_inputs: Vec = + param_names.iter().map(|name| quote!(#name: u32)).collect(); + fn_param_inputs.push(quote!(verify: bool)); + sig.inputs = parse_quote!(#(#fn_param_inputs),*); + + // used in instance() impl + let impl_last_stmt = match &last_stmt { + Some(stmt) => quote!(#stmt), + None => quote!(Ok(())), + }; + let fn_attrs_clone = fn_attrs.clone(); + + let fn_def = quote! { + #( + #fn_attrs_clone + )* + #vis #sig { + #( + #setup_stmts + )* + #fn_call_body + if verify { + #( + #verify_stmts + )* + } + #last_stmt + } + }; + + // generate final quoted tokens + let res = quote! { + // benchmark function definition + #fn_def + + #[allow(non_camel_case_types)] + #( + #fn_attrs + )* + struct #name; + + #[allow(unused_variables)] + impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> + for #name where #where_clause { + fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { + #krate::__private::vec! 
[ + #( + (#krate::BenchmarkParameter::#param_ranges) + ),* + ] + } + + fn instance( + &self, + components: &[(#krate::BenchmarkParameter, u32)], + verify: bool + ) -> Result<#krate::__private::Box Result<(), #krate::BenchmarkError>>, #krate::BenchmarkError> { + #( + // prepare instance #param_names + let #param_names = components.iter() + .find(|&c| c.0 == #krate::BenchmarkParameter::#param_names) + .ok_or("Could not find component during benchmark preparation.")? + .1; + )* + + // benchmark setup code + #( + #setup_stmts + )* + #pre_call + Ok(#krate::__private::Box::new(move || -> Result<(), #krate::BenchmarkError> { + #post_call + if verify { + #( + #verify_stmts + )* + } + #impl_last_stmt + })) + } + } + + #[cfg(test)] + impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { + #[allow(unused)] + fn #test_ident() -> Result<(), #krate::BenchmarkError> { + let selected_benchmark = SelectedBenchmark::#name; + let components = < + SelectedBenchmark as #krate::BenchmarkingSetup + >::components(&selected_benchmark); + let execute_benchmark = | + c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> + | -> Result<(), #krate::BenchmarkError> { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); + + // Set up the benchmark, return execution + verification function. + let closure_to_verify = < + SelectedBenchmark as #krate::BenchmarkingSetup + >::instance(&selected_benchmark, &c, true)?; + + // Set the block number to at least 1 so events are deposited. 
+ if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + + // Run execution + verification + closure_to_verify() + }; + + if components.is_empty() { + execute_benchmark(Default::default())?; + } else { + let num_values: u32 = if let Ok(ev) = std::env::var("VALUES_PER_COMPONENT") { + ev.parse().map_err(|_| { + #krate::BenchmarkError::Stop( + "Could not parse env var `VALUES_PER_COMPONENT` as u32." + ) + })? + } else { + 6 + }; + + if num_values < 2 { + return Err("`VALUES_PER_COMPONENT` must be at least 2".into()); + } + + for (name, low, high) in components.clone().into_iter() { + // Test the lowest, highest (if its different from the lowest) + // and up to num_values-2 more equidistant values in between. + // For 0..10 and num_values=6 this would mean: [0, 2, 4, 6, 8, 10] + if high < low { + return Err("The start of a `ParamRange` must be less than or equal to the end".into()); + } + + let mut values = #krate::__private::vec![low]; + let diff = (high - low).min(num_values - 1); + let slope = (high - low) as f32 / diff as f32; + + for i in 1..=diff { + let value = ((low as f32 + slope * i as f32) as u32) + .clamp(low, high); + values.push(value); + } + + for component_value in values { + // Select the max value for all the other components. 
+ let c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> = components + .iter() + .map(|(n, _, h)| + if *n == name { + (*n, component_value) + } else { + (*n, *h) + } + ) + .collect(); + + execute_benchmark(c)?; + } + } + } + return Ok(()); + } + } + }; + res } diff --git a/support/procedural-fork/src/construct_runtime/expand/call.rs b/support/procedural-fork/src/construct_runtime/expand/call.rs index b0041ccc0..7e8c2e856 100644 --- a/support/procedural-fork/src/construct_runtime/expand/call.rs +++ b/support/procedural-fork/src/construct_runtime/expand/call.rs @@ -22,202 +22,205 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_dispatch( - runtime: &Ident, - system_pallet: &Pallet, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + system_pallet: &Pallet, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut variant_defs = TokenStream::new(); - let mut variant_patterns = Vec::new(); - let mut query_call_part_macros = Vec::new(); - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let system_path = &system_pallet.path; - - let pallets_with_call = pallet_decls.iter().filter(|decl| decl.exists_part("Call")); - - for pallet_declaration in pallets_with_call { - let name = &pallet_declaration.name; - let path = &pallet_declaration.path; - let index = pallet_declaration.index; - let attr = - pallet_declaration.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - variant_defs.extend(quote! { - #attr - #[codec(index = #index)] - #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ), - }); - variant_patterns.push(quote!(RuntimeCall::#name(call))); - pallet_names.push(name); - pallet_attrs.push(attr); - query_call_part_macros.push(quote! 
{ - #path::__substrate_call_check::is_call_part_defined!(#name); - }); - } - - quote! { - #( #query_call_part_macros )* - - #[derive( - Clone, PartialEq, Eq, - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeCall { - #variant_defs - } - #[cfg(test)] - impl RuntimeCall { - /// Return a list of the module names together with their size in memory. - pub const fn sizes() -> &'static [( &'static str, usize )] { - use #scrate::dispatch::Callable; - use core::mem::size_of; - &[#( - #pallet_attrs - ( - stringify!(#pallet_names), - size_of::< <#pallet_names as Callable<#runtime>>::RuntimeCall >(), - ), - )*] - } - - /// Panics with diagnostic information if the size is greater than the given `limit`. - pub fn assert_size_under(limit: usize) { - let size = core::mem::size_of::(); - let call_oversize = size > limit; - if call_oversize { - println!("Size of `Call` is {} bytes (provided limit is {} bytes)", size, limit); - let mut sizes = Self::sizes().to_vec(); - sizes.sort_by_key(|x| -(x.1 as isize)); - for (i, &(name, size)) in sizes.iter().enumerate().take(5) { - println!("Offender #{}: {} at {} bytes", i + 1, name, size); - } - if let Some((_, next_size)) = sizes.get(5) { - println!("{} others of size {} bytes or less", sizes.len() - 5, next_size); - } - panic!( - "Size of `Call` is more than limit; use `Box` on complex parameter types to reduce the + let mut variant_defs = TokenStream::new(); + let mut variant_patterns = Vec::new(); + let mut query_call_part_macros = Vec::new(); + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let system_path = &system_pallet.path; + + let pallets_with_call = pallet_decls.iter().filter(|decl| decl.exists_part("Call")); + + for pallet_declaration in pallets_with_call { + let name = &pallet_declaration.name; + let path = &pallet_declaration.path; + let index = pallet_declaration.index; + 
let attr = + pallet_declaration + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + variant_defs.extend(quote! { + #attr + #[codec(index = #index)] + #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ), + }); + variant_patterns.push(quote!(RuntimeCall::#name(call))); + pallet_names.push(name); + pallet_attrs.push(attr); + query_call_part_macros.push(quote! { + #path::__substrate_call_check::is_call_part_defined!(#name); + }); + } + + quote! { + #( #query_call_part_macros )* + + #[derive( + Clone, PartialEq, Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeCall { + #variant_defs + } + #[cfg(test)] + impl RuntimeCall { + /// Return a list of the module names together with their size in memory. + pub const fn sizes() -> &'static [( &'static str, usize )] { + use #scrate::dispatch::Callable; + use core::mem::size_of; + &[#( + #pallet_attrs + ( + stringify!(#pallet_names), + size_of::< <#pallet_names as Callable<#runtime>>::RuntimeCall >(), + ), + )*] + } + + /// Panics with diagnostic information if the size is greater than the given `limit`. 
+ pub fn assert_size_under(limit: usize) { + let size = core::mem::size_of::(); + let call_oversize = size > limit; + if call_oversize { + println!("Size of `Call` is {} bytes (provided limit is {} bytes)", size, limit); + let mut sizes = Self::sizes().to_vec(); + sizes.sort_by_key(|x| -(x.1 as isize)); + for (i, &(name, size)) in sizes.iter().enumerate().take(5) { + println!("Offender #{}: {} at {} bytes", i + 1, name, size); + } + if let Some((_, next_size)) = sizes.get(5) { + println!("{} others of size {} bytes or less", sizes.len() - 5, next_size); + } + panic!( + "Size of `Call` is more than limit; use `Box` on complex parameter types to reduce the size of `Call`. If the limit is too strong, maybe consider providing a higher limit." - ); - } - } - } - impl #scrate::dispatch::GetDispatchInfo for RuntimeCall { - fn get_dispatch_info(&self) -> #scrate::dispatch::DispatchInfo { - match self { - #( - #pallet_attrs - #variant_patterns => call.get_dispatch_info(), - )* - } - } - } - - impl #scrate::dispatch::CheckIfFeeless for RuntimeCall { - type Origin = #system_path::pallet_prelude::OriginFor<#runtime>; - fn is_feeless(&self, origin: &Self::Origin) -> bool { - match self { - #( - #pallet_attrs - #variant_patterns => call.is_feeless(origin), - )* - } - } - } - - impl #scrate::traits::GetCallMetadata for RuntimeCall { - fn get_call_metadata(&self) -> #scrate::traits::CallMetadata { - use #scrate::traits::GetCallName; - match self { - #( - #pallet_attrs - #variant_patterns => { - let function_name = call.get_call_name(); - let pallet_name = stringify!(#pallet_names); - #scrate::traits::CallMetadata { function_name, pallet_name } - } - )* - } - } - - fn get_module_names() -> &'static [&'static str] { - &[#( - #pallet_attrs - stringify!(#pallet_names), - )*] - } - - fn get_call_names(module: &str) -> &'static [&'static str] { - use #scrate::{dispatch::Callable, traits::GetCallName}; - match module { - #( - #pallet_attrs - stringify!(#pallet_names) => - <<#pallet_names 
as Callable<#runtime>>::RuntimeCall - as GetCallName>::get_call_names(), - )* - _ => unreachable!(), - } - } - } - impl #scrate::__private::Dispatchable for RuntimeCall { - type RuntimeOrigin = RuntimeOrigin; - type Config = RuntimeCall; - type Info = #scrate::dispatch::DispatchInfo; - type PostInfo = #scrate::dispatch::PostDispatchInfo; - fn dispatch(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { - if !::filter_call(&origin, &self) { - return ::core::result::Result::Err( - #system_path::Error::<#runtime>::CallFiltered.into() - ); - } - - #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(self, origin) - } - } - impl #scrate::traits::UnfilteredDispatchable for RuntimeCall { - type RuntimeOrigin = RuntimeOrigin; - fn dispatch_bypass_filter(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { - match self { - #( - #pallet_attrs - #variant_patterns => - #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(call, origin), - )* - } - } - } - - #( - #pallet_attrs - impl #scrate::traits::IsSubType<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { - #[allow(unreachable_patterns)] - fn is_sub_type(&self) -> Option<&#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> { - match self { - #variant_patterns => Some(call), - // May be unreachable - _ => None, - } - } - } - - #pallet_attrs - impl From<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { - fn from(call: #scrate::dispatch::CallableCallFor<#pallet_names, #runtime>) -> Self { - #variant_patterns - } - } - )* - } + ); + } + } + } + impl #scrate::dispatch::GetDispatchInfo for RuntimeCall { + fn get_dispatch_info(&self) -> #scrate::dispatch::DispatchInfo { + match self { + #( + #pallet_attrs + #variant_patterns => call.get_dispatch_info(), + )* + } + } + } + + impl #scrate::dispatch::CheckIfFeeless for RuntimeCall { + type Origin = 
#system_path::pallet_prelude::OriginFor<#runtime>; + fn is_feeless(&self, origin: &Self::Origin) -> bool { + match self { + #( + #pallet_attrs + #variant_patterns => call.is_feeless(origin), + )* + } + } + } + + impl #scrate::traits::GetCallMetadata for RuntimeCall { + fn get_call_metadata(&self) -> #scrate::traits::CallMetadata { + use #scrate::traits::GetCallName; + match self { + #( + #pallet_attrs + #variant_patterns => { + let function_name = call.get_call_name(); + let pallet_name = stringify!(#pallet_names); + #scrate::traits::CallMetadata { function_name, pallet_name } + } + )* + } + } + + fn get_module_names() -> &'static [&'static str] { + &[#( + #pallet_attrs + stringify!(#pallet_names), + )*] + } + + fn get_call_names(module: &str) -> &'static [&'static str] { + use #scrate::{dispatch::Callable, traits::GetCallName}; + match module { + #( + #pallet_attrs + stringify!(#pallet_names) => + <<#pallet_names as Callable<#runtime>>::RuntimeCall + as GetCallName>::get_call_names(), + )* + _ => unreachable!(), + } + } + } + impl #scrate::__private::Dispatchable for RuntimeCall { + type RuntimeOrigin = RuntimeOrigin; + type Config = RuntimeCall; + type Info = #scrate::dispatch::DispatchInfo; + type PostInfo = #scrate::dispatch::PostDispatchInfo; + fn dispatch(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { + if !::filter_call(&origin, &self) { + return ::core::result::Result::Err( + #system_path::Error::<#runtime>::CallFiltered.into() + ); + } + + #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(self, origin) + } + } + impl #scrate::traits::UnfilteredDispatchable for RuntimeCall { + type RuntimeOrigin = RuntimeOrigin; + fn dispatch_bypass_filter(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { + match self { + #( + #pallet_attrs + #variant_patterns => + #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(call, origin), + )* + } + } + } + + #( + #pallet_attrs + impl 
#scrate::traits::IsSubType<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { + #[allow(unreachable_patterns)] + fn is_sub_type(&self) -> Option<&#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> { + match self { + #variant_patterns => Some(call), + // May be unreachable + _ => None, + } + } + } + + #pallet_attrs + impl From<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { + fn from(call: #scrate::dispatch::CallableCallFor<#pallet_names, #runtime>) -> Self { + #variant_patterns + } + } + )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs b/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs index 101a476fb..be6b2f085 100644 --- a/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs +++ b/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs @@ -20,82 +20,82 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; pub(crate) fn expand_conversion_fn( - composite_name: &str, - path: &PalletPath, - instance: Option<&Ident>, - variant_name: &Ident, + composite_name: &str, + path: &PalletPath, + instance: Option<&Ident>, + variant_name: &Ident, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); - let runtime_composite_name = quote::format_ident!("Runtime{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); + let runtime_composite_name = quote::format_ident!("Runtime{}", composite_name); - if let Some(inst) = instance { - quote! { - impl From<#path::#composite_name<#path::#inst>> for #runtime_composite_name { - fn from(hr: #path::#composite_name<#path::#inst>) -> Self { - #runtime_composite_name::#variant_name(hr) - } - } - } - } else { - quote! 
{ - impl From<#path::#composite_name> for #runtime_composite_name { - fn from(hr: #path::#composite_name) -> Self { - #runtime_composite_name::#variant_name(hr) - } - } - } - } + if let Some(inst) = instance { + quote! { + impl From<#path::#composite_name<#path::#inst>> for #runtime_composite_name { + fn from(hr: #path::#composite_name<#path::#inst>) -> Self { + #runtime_composite_name::#variant_name(hr) + } + } + } + } else { + quote! { + impl From<#path::#composite_name> for #runtime_composite_name { + fn from(hr: #path::#composite_name) -> Self { + #runtime_composite_name::#variant_name(hr) + } + } + } + } } pub(crate) fn expand_variant( - composite_name: &str, - index: u8, - path: &PalletPath, - instance: Option<&Ident>, - variant_name: &Ident, + composite_name: &str, + index: u8, + path: &PalletPath, + instance: Option<&Ident>, + variant_name: &Ident, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); - if let Some(inst) = instance { - quote! { - #[codec(index = #index)] - #variant_name(#path::#composite_name<#path::#inst>), - } - } else { - quote! { - #[codec(index = #index)] - #variant_name(#path::#composite_name), - } - } + if let Some(inst) = instance { + quote! { + #[codec(index = #index)] + #variant_name(#path::#composite_name<#path::#inst>), + } + } else { + quote! { + #[codec(index = #index)] + #variant_name(#path::#composite_name), + } + } } pub(crate) fn expand_variant_count( - composite_name: &str, - path: &PalletPath, - instance: Option<&Ident>, + composite_name: &str, + path: &PalletPath, + instance: Option<&Ident>, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); - if let Some(inst) = instance { - quote! { - #path::#composite_name::<#path::#inst>::VARIANT_COUNT - } - } else { - // Wrapped `<`..`>` means: use default type parameter for enum. 
- // - // This is used for pallets without instance support or pallets with instance support when - // we don't specify instance: - // - // ``` - // pub struct Pallet{..} - // - // #[pallet::composite_enum] - // pub enum HoldReason {..} - // - // Pallet1: pallet_x, // <- default type parameter - // ``` - quote! { - <#path::#composite_name>::VARIANT_COUNT - } - } + if let Some(inst) = instance { + quote! { + #path::#composite_name::<#path::#inst>::VARIANT_COUNT + } + } else { + // Wrapped `<`..`>` means: use default type parameter for enum. + // + // This is used for pallets without instance support or pallets with instance support when + // we don't specify instance: + // + // ``` + // pub struct Pallet{..} + // + // #[pallet::composite_enum] + // pub enum HoldReason {..} + // + // Pallet1: pallet_x, // <- default type parameter + // ``` + quote! { + <#path::#composite_name>::VARIANT_COUNT + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/config.rs b/support/procedural-fork/src/construct_runtime/expand/config.rs index dbbe6ba6e..ff715e584 100644 --- a/support/procedural-fork/src/construct_runtime/expand/config.rs +++ b/support/procedural-fork/src/construct_runtime/expand/config.rs @@ -23,125 +23,135 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_config( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut types = TokenStream::new(); - let mut fields = TokenStream::new(); - let mut genesis_build_calls = TokenStream::new(); - let mut query_genesis_config_part_macros = Vec::new(); + let mut types = TokenStream::new(); + let mut fields = TokenStream::new(); + let mut genesis_build_calls = TokenStream::new(); + let mut query_genesis_config_part_macros = Vec::new(); - for decl in pallet_decls { - if let Some(pallet_entry) = decl.find_part("Config") { - let path = &decl.path; - let pallet_name = &decl.name; - let 
path_str = path.into_token_stream().to_string(); - let config = format_ident!("{}Config", pallet_name); - let field_name = - &Ident::new(&pallet_name.to_string().to_snake_case(), decl.name.span()); - let part_is_generic = !pallet_entry.generics.params.is_empty(); - let attr = &decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + for decl in pallet_decls { + if let Some(pallet_entry) = decl.find_part("Config") { + let path = &decl.path; + let pallet_name = &decl.name; + let path_str = path.into_token_stream().to_string(); + let config = format_ident!("{}Config", pallet_name); + let field_name = + &Ident::new(&pallet_name.to_string().to_snake_case(), decl.name.span()); + let part_is_generic = !pallet_entry.generics.params.is_empty(); + let attr = &decl + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - types.extend(expand_config_types(attr, runtime, decl, &config, part_is_generic)); - fields.extend(quote!(#attr pub #field_name: #config,)); - genesis_build_calls - .extend(expand_config_build_storage_call(scrate, &config, attr, field_name)); - query_genesis_config_part_macros.push(quote! { + types.extend(expand_config_types( + attr, + runtime, + decl, + &config, + part_is_generic, + )); + fields.extend(quote!(#attr pub #field_name: #config,)); + genesis_build_calls.extend(expand_config_build_storage_call( + scrate, &config, attr, field_name, + )); + query_genesis_config_part_macros.push(quote! 
{ #path::__substrate_genesis_config_check::is_genesis_config_defined!(#pallet_name); #[cfg(feature = "std")] #path::__substrate_genesis_config_check::is_std_enabled_for_genesis!(#pallet_name, #path_str); }); - } - } + } + } - quote! { - #( #query_genesis_config_part_macros )* + quote! { + #( #query_genesis_config_part_macros )* - #types + #types - use #scrate::__private::serde as __genesis_config_serde_import__; - #[derive(#scrate::__private::serde::Serialize, #scrate::__private::serde::Deserialize, Default)] - #[serde(rename_all = "camelCase")] - #[serde(deny_unknown_fields)] - #[serde(crate = "__genesis_config_serde_import__")] - pub struct RuntimeGenesisConfig { - #fields - } + use #scrate::__private::serde as __genesis_config_serde_import__; + #[derive(#scrate::__private::serde::Serialize, #scrate::__private::serde::Deserialize, Default)] + #[serde(rename_all = "camelCase")] + #[serde(deny_unknown_fields)] + #[serde(crate = "__genesis_config_serde_import__")] + pub struct RuntimeGenesisConfig { + #fields + } - #[cfg(any(feature = "std", test))] - impl #scrate::sp_runtime::BuildStorage for RuntimeGenesisConfig { - fn assimilate_storage( - &self, - storage: &mut #scrate::sp_runtime::Storage, - ) -> std::result::Result<(), String> { - #scrate::__private::BasicExternalities::execute_with_storage(storage, || { - ::build(&self); - Ok(()) - }) - } - } + #[cfg(any(feature = "std", test))] + impl #scrate::sp_runtime::BuildStorage for RuntimeGenesisConfig { + fn assimilate_storage( + &self, + storage: &mut #scrate::sp_runtime::Storage, + ) -> std::result::Result<(), String> { + #scrate::__private::BasicExternalities::execute_with_storage(storage, || { + ::build(&self); + Ok(()) + }) + } + } - impl #scrate::traits::BuildGenesisConfig for RuntimeGenesisConfig { - fn build(&self) { - #genesis_build_calls - ::on_genesis(); - } - } + impl #scrate::traits::BuildGenesisConfig for RuntimeGenesisConfig { + fn build(&self) { + #genesis_build_calls + ::on_genesis(); + } + } - /// 
Test the `Default` derive impl of the `RuntimeGenesisConfig`. - #[cfg(test)] - #[test] - fn test_genesis_config_builds() { - #scrate::__private::sp_io::TestExternalities::default().execute_with(|| { - ::build( - &RuntimeGenesisConfig::default() - ); - }); - } - } + /// Test the `Default` derive impl of the `RuntimeGenesisConfig`. + #[cfg(test)] + #[test] + fn test_genesis_config_builds() { + #scrate::__private::sp_io::TestExternalities::default().execute_with(|| { + ::build( + &RuntimeGenesisConfig::default() + ); + }); + } + } } fn expand_config_types( - attr: &TokenStream, - runtime: &Ident, - decl: &Pallet, - config: &Ident, - part_is_generic: bool, + attr: &TokenStream, + runtime: &Ident, + decl: &Pallet, + config: &Ident, + part_is_generic: bool, ) -> TokenStream { - let path = &decl.path; + let path = &decl.path; - match (decl.instance.as_ref(), part_is_generic) { - (Some(inst), true) => quote! { - #attr - pub type #config = #path::GenesisConfig<#runtime, #path::#inst>; - }, - (None, true) => quote! { - #attr - pub type #config = #path::GenesisConfig<#runtime>; - }, - (_, false) => quote! { - #attr - pub type #config = #path::GenesisConfig; - }, - } + match (decl.instance.as_ref(), part_is_generic) { + (Some(inst), true) => quote! { + #attr + pub type #config = #path::GenesisConfig<#runtime, #path::#inst>; + }, + (None, true) => quote! { + #attr + pub type #config = #path::GenesisConfig<#runtime>; + }, + (_, false) => quote! { + #attr + pub type #config = #path::GenesisConfig; + }, + } } fn expand_config_build_storage_call( - scrate: &TokenStream, - pallet_genesis_config: &Ident, - attr: &TokenStream, - field_name: &Ident, + scrate: &TokenStream, + pallet_genesis_config: &Ident, + attr: &TokenStream, + field_name: &Ident, ) -> TokenStream { - quote! { - #attr - <#pallet_genesis_config as #scrate::traits::BuildGenesisConfig>::build(&self.#field_name); - } + quote! 
{ + #attr + <#pallet_genesis_config as #scrate::traits::BuildGenesisConfig>::build(&self.#field_name); + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs index f12f99526..131c919ef 100644 --- a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs @@ -21,55 +21,55 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_freeze_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut freeze_reason_variants = Vec::new(); - let mut freeze_reason_variants_count = Vec::new(); - for decl in pallet_decls { - if let Some(_) = decl.find_part("FreezeReason") { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut freeze_reason_variants = Vec::new(); + let mut freeze_reason_variants_count = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("FreezeReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "FreezeReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "FreezeReason", + path, + instance, + variant_name, + )); - freeze_reason_variants.push(composite_helper::expand_variant( - "FreezeReason", - index, - path, - instance, - variant_name, - )); + freeze_reason_variants.push(composite_helper::expand_variant( + "FreezeReason", + index, + path, + instance, + variant_name, + )); - freeze_reason_variants_count.push(composite_helper::expand_variant_count( - "FreezeReason", - path, - instance, - )); - } - } + 
freeze_reason_variants_count.push(composite_helper::expand_variant_count( + "FreezeReason", + path, + instance, + )); + } + } - quote! { - /// A reason for placing a freeze on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeFreezeReason { - #( #freeze_reason_variants )* - } + quote! { + /// A reason for placing a freeze on funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeFreezeReason { + #( #freeze_reason_variants )* + } - impl #scrate::traits::VariantCount for RuntimeFreezeReason { - const VARIANT_COUNT: u32 = 0 #( + #freeze_reason_variants_count )*; - } + impl #scrate::traits::VariantCount for RuntimeFreezeReason { + const VARIANT_COUNT: u32 = 0 #( + #freeze_reason_variants_count )*; + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs index cdab92712..58870a321 100644 --- a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs @@ -21,55 +21,55 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_hold_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut hold_reason_variants = Vec::new(); - let mut hold_reason_variants_count = Vec::new(); - for decl in pallet_decls { - if let Some(_) = decl.find_part("HoldReason") { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let 
instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut hold_reason_variants = Vec::new(); + let mut hold_reason_variants_count = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("HoldReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "HoldReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "HoldReason", + path, + instance, + variant_name, + )); - hold_reason_variants.push(composite_helper::expand_variant( - "HoldReason", - index, - path, - instance, - variant_name, - )); + hold_reason_variants.push(composite_helper::expand_variant( + "HoldReason", + index, + path, + instance, + variant_name, + )); - hold_reason_variants_count.push(composite_helper::expand_variant_count( - "HoldReason", - path, - instance, - )); - } - } + hold_reason_variants_count.push(composite_helper::expand_variant_count( + "HoldReason", + path, + instance, + )); + } + } - quote! { - /// A reason for placing a hold on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeHoldReason { - #( #hold_reason_variants )* - } + quote! { + /// A reason for placing a hold on funds. 
+ #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeHoldReason { + #( #hold_reason_variants )* + } - impl #scrate::traits::VariantCount for RuntimeHoldReason { - const VARIANT_COUNT: u32 = 0 #( + #hold_reason_variants_count )*; - } + impl #scrate::traits::VariantCount for RuntimeHoldReason { + const VARIANT_COUNT: u32 = 0 #( + #hold_reason_variants_count )*; + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/inherent.rs b/support/procedural-fork/src/construct_runtime/expand/inherent.rs index da483fa6c..b58d540fe 100644 --- a/support/procedural-fork/src/construct_runtime/expand/inherent.rs +++ b/support/procedural-fork/src/construct_runtime/expand/inherent.rs @@ -22,233 +22,236 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_inherent( - runtime: &Ident, - block: &TokenStream, - unchecked_extrinsic: &TokenStream, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + block: &TokenStream, + unchecked_extrinsic: &TokenStream, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let mut query_inherent_part_macros = Vec::new(); - - for pallet_decl in pallet_decls { - if pallet_decl.exists_part("Inherent") { - let name = &pallet_decl.name; - let path = &pallet_decl.path; - let attr = pallet_decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - pallet_names.push(name); - pallet_attrs.push(attr); - query_inherent_part_macros.push(quote! 
{ - #path::__substrate_inherent_check::is_inherent_part_defined!(#name); - }); - } - } - - quote! { - #( #query_inherent_part_macros )* - - trait InherentDataExt { - fn create_extrinsics(&self) -> - #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>; - fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult; - } - - impl InherentDataExt for #scrate::inherent::InherentData { - fn create_extrinsics(&self) -> - #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> - { - use #scrate::inherent::ProvideInherent; - - let mut inherents = #scrate::__private::sp_std::vec::Vec::new(); - - #( - #pallet_attrs - if let Some(inherent) = #pallet_names::create_inherent(self) { - let inherent = <#unchecked_extrinsic as #scrate::sp_runtime::traits::Extrinsic>::new( - inherent.into(), - None, - ).expect("Runtime UncheckedExtrinsic is not Opaque, so it has to return \ - `Some`; qed"); - - inherents.push(inherent); - } - )* - - inherents - } - - fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult { - use #scrate::inherent::{ProvideInherent, IsFatalError}; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - use #scrate::sp_runtime::traits::Block as _; - use #scrate::__private::{sp_inherents::Error, log}; - - let mut result = #scrate::inherent::CheckInherentsResult::new(); - - // This handle assume we abort on the first fatal error. 
- fn handle_put_error_result(res: Result<(), Error>) { - const LOG_TARGET: &str = "runtime::inherent"; - match res { - Ok(()) => (), - Err(Error::InherentDataExists(id)) => - log::debug!( - target: LOG_TARGET, - "Some error already reported for inherent {:?}, new non fatal \ - error is ignored", - id - ), - Err(Error::FatalErrorReported) => - log::error!( - target: LOG_TARGET, - "Fatal error already reported, unexpected considering there is \ - only one fatal error", - ), - Err(_) => - log::error!( - target: LOG_TARGET, - "Unexpected error from `put_error` operation", - ), - } - } - - for xt in block.extrinsics() { - // Inherents are before any other extrinsics. - // And signed extrinsics are not inherents. - if #scrate::sp_runtime::traits::Extrinsic::is_signed(xt).unwrap_or(false) { - break - } - - let mut is_inherent = false; - - #( - #pallet_attrs - { - let call = <#unchecked_extrinsic as ExtrinsicCall>::call(xt); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - if #pallet_names::is_inherent(call) { - is_inherent = true; - if let Err(e) = #pallet_names::check_inherent(call, self) { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - } - } - } - } - )* - - // Inherents are before any other extrinsics. - // No module marked it as inherent thus it is not. - if !is_inherent { - break - } - } - - #( - #pallet_attrs - match #pallet_names::is_inherent_required(self) { - Ok(Some(e)) => { - let found = block.extrinsics().iter().any(|xt| { - let is_signed = #scrate::sp_runtime::traits::Extrinsic::is_signed(xt) - .unwrap_or(false); - - if !is_signed { - let call = < - #unchecked_extrinsic as ExtrinsicCall - >::call(xt); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - #pallet_names::is_inherent(&call) - } else { - false - } - } else { - // Signed extrinsics are not inherents. 
- false - } - }); - - if !found { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - } - }, - Ok(None) => (), - Err(e) => { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - }, - } - )* - - result - } - } - - impl #scrate::traits::IsInherent<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> for #runtime { - fn is_inherent(ext: &<#block as #scrate::sp_runtime::traits::Block>::Extrinsic) -> bool { - use #scrate::inherent::ProvideInherent; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - - if #scrate::sp_runtime::traits::Extrinsic::is_signed(ext).unwrap_or(false) { - // Signed extrinsics are never inherents. - return false - } - - #( - #pallet_attrs - { - let call = <#unchecked_extrinsic as ExtrinsicCall>::call(ext); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - if <#pallet_names as ProvideInherent>::is_inherent(&call) { - return true; - } - } - } - )* - false - } - } - - impl #scrate::traits::EnsureInherentsAreFirst<#block> for #runtime { - fn ensure_inherents_are_first(block: &#block) -> Result { - use #scrate::inherent::ProvideInherent; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - use #scrate::sp_runtime::traits::Block as _; - - let mut num_inherents = 0u32; - - for (i, xt) in block.extrinsics().iter().enumerate() { - if >::is_inherent(xt) { - if num_inherents != i as u32 { - return Err(i as u32); - } - - num_inherents += 1; // Safe since we are in an `enumerate` loop. 
- } - } - - Ok(num_inherents) - } - } - } + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let mut query_inherent_part_macros = Vec::new(); + + for pallet_decl in pallet_decls { + if pallet_decl.exists_part("Inherent") { + let name = &pallet_decl.name; + let path = &pallet_decl.path; + let attr = pallet_decl + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + pallet_names.push(name); + pallet_attrs.push(attr); + query_inherent_part_macros.push(quote! { + #path::__substrate_inherent_check::is_inherent_part_defined!(#name); + }); + } + } + + quote! { + #( #query_inherent_part_macros )* + + trait InherentDataExt { + fn create_extrinsics(&self) -> + #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>; + fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult; + } + + impl InherentDataExt for #scrate::inherent::InherentData { + fn create_extrinsics(&self) -> + #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> + { + use #scrate::inherent::ProvideInherent; + + let mut inherents = #scrate::__private::sp_std::vec::Vec::new(); + + #( + #pallet_attrs + if let Some(inherent) = #pallet_names::create_inherent(self) { + let inherent = <#unchecked_extrinsic as #scrate::sp_runtime::traits::Extrinsic>::new( + inherent.into(), + None, + ).expect("Runtime UncheckedExtrinsic is not Opaque, so it has to return \ + `Some`; qed"); + + inherents.push(inherent); + } + )* + + inherents + } + + fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult { + use #scrate::inherent::{ProvideInherent, IsFatalError}; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + use #scrate::sp_runtime::traits::Block as _; + use 
#scrate::__private::{sp_inherents::Error, log}; + + let mut result = #scrate::inherent::CheckInherentsResult::new(); + + // This handle assume we abort on the first fatal error. + fn handle_put_error_result(res: Result<(), Error>) { + const LOG_TARGET: &str = "runtime::inherent"; + match res { + Ok(()) => (), + Err(Error::InherentDataExists(id)) => + log::debug!( + target: LOG_TARGET, + "Some error already reported for inherent {:?}, new non fatal \ + error is ignored", + id + ), + Err(Error::FatalErrorReported) => + log::error!( + target: LOG_TARGET, + "Fatal error already reported, unexpected considering there is \ + only one fatal error", + ), + Err(_) => + log::error!( + target: LOG_TARGET, + "Unexpected error from `put_error` operation", + ), + } + } + + for xt in block.extrinsics() { + // Inherents are before any other extrinsics. + // And signed extrinsics are not inherents. + if #scrate::sp_runtime::traits::Extrinsic::is_signed(xt).unwrap_or(false) { + break + } + + let mut is_inherent = false; + + #( + #pallet_attrs + { + let call = <#unchecked_extrinsic as ExtrinsicCall>::call(xt); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + if #pallet_names::is_inherent(call) { + is_inherent = true; + if let Err(e) = #pallet_names::check_inherent(call, self) { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + } + } + } + } + )* + + // Inherents are before any other extrinsics. + // No module marked it as inherent thus it is not. 
+ if !is_inherent { + break + } + } + + #( + #pallet_attrs + match #pallet_names::is_inherent_required(self) { + Ok(Some(e)) => { + let found = block.extrinsics().iter().any(|xt| { + let is_signed = #scrate::sp_runtime::traits::Extrinsic::is_signed(xt) + .unwrap_or(false); + + if !is_signed { + let call = < + #unchecked_extrinsic as ExtrinsicCall + >::call(xt); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + #pallet_names::is_inherent(&call) + } else { + false + } + } else { + // Signed extrinsics are not inherents. + false + } + }); + + if !found { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + } + }, + Ok(None) => (), + Err(e) => { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + }, + } + )* + + result + } + } + + impl #scrate::traits::IsInherent<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> for #runtime { + fn is_inherent(ext: &<#block as #scrate::sp_runtime::traits::Block>::Extrinsic) -> bool { + use #scrate::inherent::ProvideInherent; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + + if #scrate::sp_runtime::traits::Extrinsic::is_signed(ext).unwrap_or(false) { + // Signed extrinsics are never inherents. 
+ return false + } + + #( + #pallet_attrs + { + let call = <#unchecked_extrinsic as ExtrinsicCall>::call(ext); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + if <#pallet_names as ProvideInherent>::is_inherent(&call) { + return true; + } + } + } + )* + false + } + } + + impl #scrate::traits::EnsureInherentsAreFirst<#block> for #runtime { + fn ensure_inherents_are_first(block: &#block) -> Result { + use #scrate::inherent::ProvideInherent; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + use #scrate::sp_runtime::traits::Block as _; + + let mut num_inherents = 0u32; + + for (i, xt) in block.extrinsics().iter().enumerate() { + if >::is_inherent(xt) { + if num_inherents != i as u32 { + return Err(i as u32); + } + + num_inherents += 1; // Safe since we are in an `enumerate` loop. + } + } + + Ok(num_inherents) + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs index e67c0da00..67c2fb933 100644 --- a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs +++ b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs @@ -21,44 +21,44 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_lock_id(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut lock_id_variants = Vec::new(); - for decl in pallet_decls { - if let Some(_) = decl.find_part("LockId") { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut lock_id_variants = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("LockId") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "LockId", - path, - instance, - variant_name, - )); + 
conversion_fns.push(composite_helper::expand_conversion_fn( + "LockId", + path, + instance, + variant_name, + )); - lock_id_variants.push(composite_helper::expand_variant( - "LockId", - index, - path, - instance, - variant_name, - )); - } - } + lock_id_variants.push(composite_helper::expand_variant( + "LockId", + index, + path, + instance, + variant_name, + )); + } + } - quote! { - /// An identifier for each lock placed on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeLockId { - #( #lock_id_variants )* - } + quote! { + /// An identifier for each lock placed on funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeLockId { + #( #lock_id_variants )* + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/metadata.rs b/support/procedural-fork/src/construct_runtime/expand/metadata.rs index 0e76f9a92..f98c719ca 100644 --- a/support/procedural-fork/src/construct_runtime/expand/metadata.rs +++ b/support/procedural-fork/src/construct_runtime/expand/metadata.rs @@ -22,237 +22,240 @@ use std::str::FromStr; use syn::Ident; pub fn expand_runtime_metadata( - runtime: &Ident, - pallet_declarations: &[Pallet], - scrate: &TokenStream, - extrinsic: &TokenStream, - system_path: &PalletPath, + runtime: &Ident, + pallet_declarations: &[Pallet], + scrate: &TokenStream, + extrinsic: &TokenStream, + system_path: &PalletPath, ) -> TokenStream { - let pallets = pallet_declarations - .iter() - .filter_map(|pallet_declaration| { - pallet_declaration.find_part("Pallet").map(|_| { - let 
filtered_names: Vec<_> = pallet_declaration - .pallet_parts() - .iter() - .filter(|part| part.name() != "Pallet") - .map(|part| part.name()) - .collect(); - (pallet_declaration, filtered_names) - }) - }) - .map(|(decl, filtered_names)| { - let name = &decl.name; - let index = &decl.index; - let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); - let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); - let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); - let constants = expand_pallet_metadata_constants(runtime, decl); - let errors = expand_pallet_metadata_errors(runtime, decl); - let docs = expand_pallet_metadata_docs(runtime, decl); - let attr = decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + let pallets = pallet_declarations + .iter() + .filter_map(|pallet_declaration| { + pallet_declaration.find_part("Pallet").map(|_| { + let filtered_names: Vec<_> = pallet_declaration + .pallet_parts() + .iter() + .filter(|part| part.name() != "Pallet") + .map(|part| part.name()) + .collect(); + (pallet_declaration, filtered_names) + }) + }) + .map(|(decl, filtered_names)| { + let name = &decl.name; + let index = &decl.index; + let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); + let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); + let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); + let constants = expand_pallet_metadata_constants(runtime, decl); + let errors = expand_pallet_metadata_errors(runtime, decl); + let docs = expand_pallet_metadata_docs(runtime, decl); + let attr = decl + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was 
successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - quote! { - #attr - #scrate::__private::metadata_ir::PalletMetadataIR { - name: stringify!(#name), - index: #index, - storage: #storage, - calls: #calls, - event: #event, - constants: #constants, - error: #errors, - docs: #docs, - } - } - }) - .collect::>(); + quote! { + #attr + #scrate::__private::metadata_ir::PalletMetadataIR { + name: stringify!(#name), + index: #index, + storage: #storage, + calls: #calls, + event: #event, + constants: #constants, + error: #errors, + docs: #docs, + } + } + }) + .collect::>(); - quote! { - impl #runtime { - fn metadata_ir() -> #scrate::__private::metadata_ir::MetadataIR { - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. - // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. - // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` - // is called. - // - // `Deref` needs a reference for resolving the function call. - let rt = #runtime; + quote! { + impl #runtime { + fn metadata_ir() -> #scrate::__private::metadata_ir::MetadataIR { + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. 
+ // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. + // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` + // is called. + // + // `Deref` needs a reference for resolving the function call. + let rt = #runtime; - let ty = #scrate::__private::scale_info::meta_type::<#extrinsic>(); - let address_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureAddress - >(); - let call_ty = #scrate::__private::scale_info::meta_type::< - <#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::Call - >(); - let signature_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::Signature - >(); - let extra_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureExtra - >(); + let ty = #scrate::__private::scale_info::meta_type::<#extrinsic>(); + let address_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as 
#scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureAddress + >(); + let call_ty = #scrate::__private::scale_info::meta_type::< + <#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::Call + >(); + let signature_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::Signature + >(); + let extra_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureExtra + >(); - #scrate::__private::metadata_ir::MetadataIR { - pallets: #scrate::__private::sp_std::vec![ #(#pallets),* ], - extrinsic: #scrate::__private::metadata_ir::ExtrinsicMetadataIR { - ty, - version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - address_ty, - call_ty, - signature_ty, - extra_ty, - signed_extensions: < - < - #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata - >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension - >::metadata() - .into_iter() - .map(|meta| #scrate::__private::metadata_ir::SignedExtensionMetadataIR { - identifier: meta.identifier, - ty: meta.ty, - additional_signed: meta.additional_signed, - }) - .collect(), - }, - ty: #scrate::__private::scale_info::meta_type::<#runtime>(), - apis: (&rt).runtime_metadata(), - outer_enums: #scrate::__private::metadata_ir::OuterEnumsIR { - call_enum_ty: #scrate::__private::scale_info::meta_type::< - <#runtime as #system_path::Config>::RuntimeCall - >(), - event_enum_ty: #scrate::__private::scale_info::meta_type::(), - error_enum_ty: #scrate::__private::scale_info::meta_type::(), - } - } - } + #scrate::__private::metadata_ir::MetadataIR { + pallets: #scrate::__private::sp_std::vec![ #(#pallets),* ], + extrinsic: #scrate::__private::metadata_ir::ExtrinsicMetadataIR { + ty, + version: 
<#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + address_ty, + call_ty, + signature_ty, + extra_ty, + signed_extensions: < + < + #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata + >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension + >::metadata() + .into_iter() + .map(|meta| #scrate::__private::metadata_ir::SignedExtensionMetadataIR { + identifier: meta.identifier, + ty: meta.ty, + additional_signed: meta.additional_signed, + }) + .collect(), + }, + ty: #scrate::__private::scale_info::meta_type::<#runtime>(), + apis: (&rt).runtime_metadata(), + outer_enums: #scrate::__private::metadata_ir::OuterEnumsIR { + call_enum_ty: #scrate::__private::scale_info::meta_type::< + <#runtime as #system_path::Config>::RuntimeCall + >(), + event_enum_ty: #scrate::__private::scale_info::meta_type::(), + error_enum_ty: #scrate::__private::scale_info::meta_type::(), + } + } + } - pub fn metadata() -> #scrate::__private::metadata::RuntimeMetadataPrefixed { - // Note: this always returns the V14 version. The runtime API function - // must be deprecated. - #scrate::__private::metadata_ir::into_v14(#runtime::metadata_ir()) - } + pub fn metadata() -> #scrate::__private::metadata::RuntimeMetadataPrefixed { + // Note: this always returns the V14 version. The runtime API function + // must be deprecated. 
+ #scrate::__private::metadata_ir::into_v14(#runtime::metadata_ir()) + } - pub fn metadata_at_version(version: u32) -> Option<#scrate::__private::OpaqueMetadata> { - #scrate::__private::metadata_ir::into_version(#runtime::metadata_ir(), version).map(|prefixed| { - #scrate::__private::OpaqueMetadata::new(prefixed.into()) - }) - } + pub fn metadata_at_version(version: u32) -> Option<#scrate::__private::OpaqueMetadata> { + #scrate::__private::metadata_ir::into_version(#runtime::metadata_ir(), version).map(|prefixed| { + #scrate::__private::OpaqueMetadata::new(prefixed.into()) + }) + } - pub fn metadata_versions() -> #scrate::__private::sp_std::vec::Vec { - #scrate::__private::metadata_ir::supported_versions() - } - } - } + pub fn metadata_versions() -> #scrate::__private::sp_std::vec::Vec { + #scrate::__private::metadata_ir::supported_versions() + } + } + } } fn expand_pallet_metadata_storage( - filtered_names: &[&'static str], - runtime: &Ident, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Storage") { - let instance = decl.instance.as_ref().into_iter(); - let path = &decl.path; + if filtered_names.contains(&"Storage") { + let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; - quote! { - Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) - } - } else { - quote!(None) - } + quote! { + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_calls( - filtered_names: &[&'static str], - runtime: &Ident, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Call") { - let instance = decl.instance.as_ref().into_iter(); - let path = &decl.path; + if filtered_names.contains(&"Call") { + let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; - quote! 
{ - Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) - } - } else { - quote!(None) - } + quote! { + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_events( - filtered_names: &[&'static str], - runtime: &Ident, - scrate: &TokenStream, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + scrate: &TokenStream, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Event") { - let path = &decl.path; - let part_is_generic = !decl - .find_part("Event") - .expect("Event part exists; qed") - .generics - .params - .is_empty(); - let pallet_event = match (decl.instance.as_ref(), part_is_generic) { - (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), - (Some(inst), false) => quote!(#path::Event::<#path::#inst>), - (None, true) => quote!(#path::Event::<#runtime>), - (None, false) => quote!(#path::Event), - }; + if filtered_names.contains(&"Event") { + let path = &decl.path; + let part_is_generic = !decl + .find_part("Event") + .expect("Event part exists; qed") + .generics + .params + .is_empty(); + let pallet_event = match (decl.instance.as_ref(), part_is_generic) { + (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), + (Some(inst), false) => quote!(#path::Event::<#path::#inst>), + (None, true) => quote!(#path::Event::<#runtime>), + (None, false) => quote!(#path::Event), + }; - quote! { - Some( - #scrate::__private::metadata_ir::PalletEventMetadataIR { - ty: #scrate::__private::scale_info::meta_type::<#pallet_event>() - } - ) - } - } else { - quote!(None) - } + quote! 
{ + Some( + #scrate::__private::metadata_ir::PalletEventMetadataIR { + ty: #scrate::__private::scale_info::meta_type::<#pallet_event>() + } + ) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_constants(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() - } + quote! { + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() + } } fn expand_pallet_metadata_errors(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() - } + quote! { + #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() + } } fn expand_pallet_metadata_docs(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_documentation_metadata() - } + quote! 
{ + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_documentation_metadata() + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/origin.rs b/support/procedural-fork/src/construct_runtime/expand/origin.rs index 83049919d..2d50777bf 100644 --- a/support/procedural-fork/src/construct_runtime/expand/origin.rs +++ b/support/procedural-fork/src/construct_runtime/expand/origin.rs @@ -22,434 +22,448 @@ use std::str::FromStr; use syn::{Generics, Ident}; pub fn expand_outer_origin( - runtime: &Ident, - system_pallet: &Pallet, - pallets: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + system_pallet: &Pallet, + pallets: &[Pallet], + scrate: &TokenStream, ) -> syn::Result { - let mut caller_variants = TokenStream::new(); - let mut pallet_conversions = TokenStream::new(); - let mut query_origin_part_macros = Vec::new(); - - for pallet_decl in pallets.iter().filter(|pallet| pallet.name != SYSTEM_PALLET_NAME) { - if let Some(pallet_entry) = pallet_decl.find_part("Origin") { - let instance = pallet_decl.instance.as_ref(); - let index = pallet_decl.index; - let generics = &pallet_entry.generics; - let name = &pallet_decl.name; - let path = &pallet_decl.path; - - if instance.is_some() && generics.params.is_empty() { - let msg = format!( - "Instantiable pallet with no generic `Origin` cannot \ + let mut caller_variants = TokenStream::new(); + let mut pallet_conversions = TokenStream::new(); + let mut query_origin_part_macros = Vec::new(); + + for pallet_decl in pallets + .iter() + .filter(|pallet| pallet.name != SYSTEM_PALLET_NAME) + { + if let Some(pallet_entry) = pallet_decl.find_part("Origin") { + let instance = pallet_decl.instance.as_ref(); + let index = pallet_decl.index; + let generics = &pallet_entry.generics; + let name = &pallet_decl.name; + let path = &pallet_decl.path; + + if instance.is_some() && generics.params.is_empty() { + let msg = format!( + "Instantiable pallet with no generic `Origin` cannot \ be constructed: pallet `{}` must 
have generic `Origin`", - name - ); - return Err(syn::Error::new(name.span(), msg)) - } - - caller_variants.extend(expand_origin_caller_variant( - runtime, - pallet_decl, - index, - instance, - generics, - )); - pallet_conversions.extend(expand_origin_pallet_conversions( - scrate, - runtime, - pallet_decl, - instance, - generics, - )); - query_origin_part_macros.push(quote! { - #path::__substrate_origin_check::is_origin_part_defined!(#name); - }); - } - } - - let system_path = &system_pallet.path; - - let system_index = system_pallet.index; - - let system_path_name = system_path.module_name(); - - let doc_string = get_intra_doc_string( - "Origin is always created with the base filter configured in", - &system_path_name, - ); - - let doc_string_none_origin = - get_intra_doc_string("Create with system none origin and", &system_path_name); - - let doc_string_root_origin = - get_intra_doc_string("Create with system root origin and", &system_path_name); - - let doc_string_signed_origin = - get_intra_doc_string("Create with system signed origin and", &system_path_name); - - let doc_string_runtime_origin = - get_intra_doc_string("Convert to runtime origin, using as filter:", &system_path_name); - - let doc_string_runtime_origin_with_caller = get_intra_doc_string( - "Convert to runtime origin with caller being system signed or none and use filter", - &system_path_name, - ); - - Ok(quote! { - #( #query_origin_part_macros )* - - /// The runtime origin type representing the origin of a call. 
- /// - #[doc = #doc_string] - #[derive(Clone)] - pub struct RuntimeOrigin { - pub caller: OriginCaller, - filter: #scrate::__private::sp_std::rc::Rc::RuntimeCall) -> bool>>, - } - - #[cfg(not(feature = "std"))] - impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { - fn fmt( - &self, - fmt: &mut #scrate::__private::sp_std::fmt::Formatter, - ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { - fmt.write_str("") - } - } - - #[cfg(feature = "std")] - impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { - fn fmt( - &self, - fmt: &mut #scrate::__private::sp_std::fmt::Formatter, - ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { - fmt.debug_struct("Origin") - .field("caller", &self.caller) - .field("filter", &"[function ptr]") - .finish() - } - } - - impl #scrate::traits::OriginTrait for RuntimeOrigin { - type Call = <#runtime as #system_path::Config>::RuntimeCall; - type PalletsOrigin = OriginCaller; - type AccountId = <#runtime as #system_path::Config>::AccountId; - - fn add_filter(&mut self, filter: impl Fn(&Self::Call) -> bool + 'static) { - let f = self.filter.clone(); - - self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(move |call| { - f(call) && filter(call) - })); - } - - fn reset_filter(&mut self) { - let filter = < - <#runtime as #system_path::Config>::BaseCallFilter - as #scrate::traits::Contains<<#runtime as #system_path::Config>::RuntimeCall> - >::contains; - - self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(filter)); - } - - fn set_caller_from(&mut self, other: impl Into) { - self.caller = other.into().caller; - } - - fn filter_call(&self, call: &Self::Call) -> bool { - match self.caller { - // Root bypasses all filters - OriginCaller::system(#system_path::Origin::<#runtime>::Root) => true, - _ => (self.filter)(call), - } - } - - fn caller(&self) -> &Self::PalletsOrigin { - &self.caller - } - - fn into_caller(self) -> 
Self::PalletsOrigin { - self.caller - } - - fn try_with_caller( - mut self, - f: impl FnOnce(Self::PalletsOrigin) -> Result, - ) -> Result { - match f(self.caller) { - Ok(r) => Ok(r), - Err(caller) => { self.caller = caller; Err(self) } - } - } - - fn none() -> Self { - #system_path::RawOrigin::None.into() - } - - fn root() -> Self { - #system_path::RawOrigin::Root.into() - } - - fn signed(by: Self::AccountId) -> Self { - #system_path::RawOrigin::Signed(by).into() - } - } - - #[derive( - Clone, PartialEq, Eq, #scrate::__private::RuntimeDebug, #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, #scrate::__private::scale_info::TypeInfo, #scrate::__private::codec::MaxEncodedLen, - )] - #[allow(non_camel_case_types)] - pub enum OriginCaller { - #[codec(index = #system_index)] - system(#system_path::Origin<#runtime>), - #caller_variants - #[allow(dead_code)] - Void(#scrate::__private::Void) - } - - // For backwards compatibility and ease of accessing these functions. - #[allow(dead_code)] - impl RuntimeOrigin { - #[doc = #doc_string_none_origin] - pub fn none() -> Self { - ::none() - } - - #[doc = #doc_string_root_origin] - pub fn root() -> Self { - ::root() - } - - #[doc = #doc_string_signed_origin] - pub fn signed(by: <#runtime as #system_path::Config>::AccountId) -> Self { - ::signed(by) - } - } - - impl From<#system_path::Origin<#runtime>> for OriginCaller { - fn from(x: #system_path::Origin<#runtime>) -> Self { - OriginCaller::system(x) - } - } - - impl #scrate::traits::CallerTrait<<#runtime as #system_path::Config>::AccountId> for OriginCaller { - fn into_system(self) -> Option<#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { - match self { - OriginCaller::system(x) => Some(x), - _ => None, - } - } - fn as_system_ref(&self) -> Option<&#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { - match &self { - OriginCaller::system(o) => Some(o), - _ => None, - } - } - } - - impl TryFrom for 
#system_path::Origin<#runtime> { - type Error = OriginCaller; - fn try_from(x: OriginCaller) - -> #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, OriginCaller> - { - if let OriginCaller::system(l) = x { - Ok(l) - } else { - Err(x) - } - } - } - - impl From<#system_path::Origin<#runtime>> for RuntimeOrigin { - - #[doc = #doc_string_runtime_origin] - fn from(x: #system_path::Origin<#runtime>) -> Self { - let o: OriginCaller = x.into(); - o.into() - } - } - - impl From for RuntimeOrigin { - fn from(x: OriginCaller) -> Self { - let mut o = RuntimeOrigin { - caller: x, - filter: #scrate::__private::sp_std::rc::Rc::new(Box::new(|_| true)), - }; - - #scrate::traits::OriginTrait::reset_filter(&mut o); - - o - } - } - - impl From for #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { - /// NOTE: converting to pallet origin loses the origin filter information. - fn from(val: RuntimeOrigin) -> Self { - if let OriginCaller::system(l) = val.caller { - Ok(l) - } else { - Err(val) - } - } - } - impl From::AccountId>> for RuntimeOrigin { - #[doc = #doc_string_runtime_origin_with_caller] - fn from(x: Option<<#runtime as #system_path::Config>::AccountId>) -> Self { - <#system_path::Origin<#runtime>>::from(x).into() - } - } - - #pallet_conversions - }) + name + ); + return Err(syn::Error::new(name.span(), msg)); + } + + caller_variants.extend(expand_origin_caller_variant( + runtime, + pallet_decl, + index, + instance, + generics, + )); + pallet_conversions.extend(expand_origin_pallet_conversions( + scrate, + runtime, + pallet_decl, + instance, + generics, + )); + query_origin_part_macros.push(quote! 
{ + #path::__substrate_origin_check::is_origin_part_defined!(#name); + }); + } + } + + let system_path = &system_pallet.path; + + let system_index = system_pallet.index; + + let system_path_name = system_path.module_name(); + + let doc_string = get_intra_doc_string( + "Origin is always created with the base filter configured in", + &system_path_name, + ); + + let doc_string_none_origin = + get_intra_doc_string("Create with system none origin and", &system_path_name); + + let doc_string_root_origin = + get_intra_doc_string("Create with system root origin and", &system_path_name); + + let doc_string_signed_origin = + get_intra_doc_string("Create with system signed origin and", &system_path_name); + + let doc_string_runtime_origin = get_intra_doc_string( + "Convert to runtime origin, using as filter:", + &system_path_name, + ); + + let doc_string_runtime_origin_with_caller = get_intra_doc_string( + "Convert to runtime origin with caller being system signed or none and use filter", + &system_path_name, + ); + + Ok(quote! { + #( #query_origin_part_macros )* + + /// The runtime origin type representing the origin of a call. 
+ /// + #[doc = #doc_string] + #[derive(Clone)] + pub struct RuntimeOrigin { + pub caller: OriginCaller, + filter: #scrate::__private::sp_std::rc::Rc::RuntimeCall) -> bool>>, + } + + #[cfg(not(feature = "std"))] + impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + fn fmt( + &self, + fmt: &mut #scrate::__private::sp_std::fmt::Formatter, + ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt.write_str("") + } + } + + #[cfg(feature = "std")] + impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + fn fmt( + &self, + fmt: &mut #scrate::__private::sp_std::fmt::Formatter, + ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt.debug_struct("Origin") + .field("caller", &self.caller) + .field("filter", &"[function ptr]") + .finish() + } + } + + impl #scrate::traits::OriginTrait for RuntimeOrigin { + type Call = <#runtime as #system_path::Config>::RuntimeCall; + type PalletsOrigin = OriginCaller; + type AccountId = <#runtime as #system_path::Config>::AccountId; + + fn add_filter(&mut self, filter: impl Fn(&Self::Call) -> bool + 'static) { + let f = self.filter.clone(); + + self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(move |call| { + f(call) && filter(call) + })); + } + + fn reset_filter(&mut self) { + let filter = < + <#runtime as #system_path::Config>::BaseCallFilter + as #scrate::traits::Contains<<#runtime as #system_path::Config>::RuntimeCall> + >::contains; + + self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(filter)); + } + + fn set_caller_from(&mut self, other: impl Into) { + self.caller = other.into().caller; + } + + fn filter_call(&self, call: &Self::Call) -> bool { + match self.caller { + // Root bypasses all filters + OriginCaller::system(#system_path::Origin::<#runtime>::Root) => true, + _ => (self.filter)(call), + } + } + + fn caller(&self) -> &Self::PalletsOrigin { + &self.caller + } + + fn into_caller(self) -> 
Self::PalletsOrigin { + self.caller + } + + fn try_with_caller( + mut self, + f: impl FnOnce(Self::PalletsOrigin) -> Result, + ) -> Result { + match f(self.caller) { + Ok(r) => Ok(r), + Err(caller) => { self.caller = caller; Err(self) } + } + } + + fn none() -> Self { + #system_path::RawOrigin::None.into() + } + + fn root() -> Self { + #system_path::RawOrigin::Root.into() + } + + fn signed(by: Self::AccountId) -> Self { + #system_path::RawOrigin::Signed(by).into() + } + } + + #[derive( + Clone, PartialEq, Eq, #scrate::__private::RuntimeDebug, #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, #scrate::__private::scale_info::TypeInfo, #scrate::__private::codec::MaxEncodedLen, + )] + #[allow(non_camel_case_types)] + pub enum OriginCaller { + #[codec(index = #system_index)] + system(#system_path::Origin<#runtime>), + #caller_variants + #[allow(dead_code)] + Void(#scrate::__private::Void) + } + + // For backwards compatibility and ease of accessing these functions. + #[allow(dead_code)] + impl RuntimeOrigin { + #[doc = #doc_string_none_origin] + pub fn none() -> Self { + ::none() + } + + #[doc = #doc_string_root_origin] + pub fn root() -> Self { + ::root() + } + + #[doc = #doc_string_signed_origin] + pub fn signed(by: <#runtime as #system_path::Config>::AccountId) -> Self { + ::signed(by) + } + } + + impl From<#system_path::Origin<#runtime>> for OriginCaller { + fn from(x: #system_path::Origin<#runtime>) -> Self { + OriginCaller::system(x) + } + } + + impl #scrate::traits::CallerTrait<<#runtime as #system_path::Config>::AccountId> for OriginCaller { + fn into_system(self) -> Option<#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { + match self { + OriginCaller::system(x) => Some(x), + _ => None, + } + } + fn as_system_ref(&self) -> Option<&#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { + match &self { + OriginCaller::system(o) => Some(o), + _ => None, + } + } + } + + impl TryFrom for 
#system_path::Origin<#runtime> { + type Error = OriginCaller; + fn try_from(x: OriginCaller) + -> #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, OriginCaller> + { + if let OriginCaller::system(l) = x { + Ok(l) + } else { + Err(x) + } + } + } + + impl From<#system_path::Origin<#runtime>> for RuntimeOrigin { + + #[doc = #doc_string_runtime_origin] + fn from(x: #system_path::Origin<#runtime>) -> Self { + let o: OriginCaller = x.into(); + o.into() + } + } + + impl From for RuntimeOrigin { + fn from(x: OriginCaller) -> Self { + let mut o = RuntimeOrigin { + caller: x, + filter: #scrate::__private::sp_std::rc::Rc::new(Box::new(|_| true)), + }; + + #scrate::traits::OriginTrait::reset_filter(&mut o); + + o + } + } + + impl From for #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { + /// NOTE: converting to pallet origin loses the origin filter information. + fn from(val: RuntimeOrigin) -> Self { + if let OriginCaller::system(l) = val.caller { + Ok(l) + } else { + Err(val) + } + } + } + impl From::AccountId>> for RuntimeOrigin { + #[doc = #doc_string_runtime_origin_with_caller] + fn from(x: Option<<#runtime as #system_path::Config>::AccountId>) -> Self { + <#system_path::Origin<#runtime>>::from(x).into() + } + } + + #pallet_conversions + }) } fn expand_origin_caller_variant( - runtime: &Ident, - pallet: &Pallet, - index: u8, - instance: Option<&Ident>, - generics: &Generics, + runtime: &Ident, + pallet: &Pallet, + index: u8, + instance: Option<&Ident>, + generics: &Generics, ) -> TokenStream { - let part_is_generic = !generics.params.is_empty(); - let variant_name = &pallet.name; - let path = &pallet.path; - let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); - - match instance { - Some(inst) if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#runtime, #path::#inst>), - }, - Some(inst) => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#path::#inst>), - }, - None if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#runtime>), - }, - None => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin), - }, - } + let part_is_generic = !generics.params.is_empty(); + let variant_name = &pallet.name; + let path = &pallet.path; + let attr = pallet + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + match instance { + Some(inst) if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#runtime, #path::#inst>), + }, + Some(inst) => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#path::#inst>), + }, + None if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#runtime>), + }, + None => quote! 
{ + #attr + #[codec(index = #index)] + #variant_name(#path::Origin), + }, + } } fn expand_origin_pallet_conversions( - scrate: &TokenStream, - runtime: &Ident, - pallet: &Pallet, - instance: Option<&Ident>, - generics: &Generics, + scrate: &TokenStream, + runtime: &Ident, + pallet: &Pallet, + instance: Option<&Ident>, + generics: &Generics, ) -> TokenStream { - let path = &pallet.path; - let variant_name = &pallet.name; - - let part_is_generic = !generics.params.is_empty(); - let pallet_origin = match instance { - Some(inst) if part_is_generic => quote!(#path::Origin<#runtime, #path::#inst>), - Some(inst) => quote!(#path::Origin<#path::#inst>), - None if part_is_generic => quote!(#path::Origin<#runtime>), - None => quote!(#path::Origin), - }; - - let doc_string = get_intra_doc_string(" Convert to runtime origin using", &path.module_name()); - let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - quote! { - #attr - impl From<#pallet_origin> for OriginCaller { - fn from(x: #pallet_origin) -> Self { - OriginCaller::#variant_name(x) - } - } - - #attr - impl From<#pallet_origin> for RuntimeOrigin { - #[doc = #doc_string] - fn from(x: #pallet_origin) -> Self { - let x: OriginCaller = x.into(); - x.into() - } - } - - #attr - impl From for #scrate::__private::sp_std::result::Result<#pallet_origin, RuntimeOrigin> { - /// NOTE: converting to pallet origin loses the origin filter information. 
- fn from(val: RuntimeOrigin) -> Self { - if let OriginCaller::#variant_name(l) = val.caller { - Ok(l) - } else { - Err(val) - } - } - } - - #attr - impl TryFrom for #pallet_origin { - type Error = OriginCaller; - fn try_from( - x: OriginCaller, - ) -> #scrate::__private::sp_std::result::Result<#pallet_origin, OriginCaller> { - if let OriginCaller::#variant_name(l) = x { - Ok(l) - } else { - Err(x) - } - } - } - - #attr - impl<'a> TryFrom<&'a OriginCaller> for &'a #pallet_origin { - type Error = (); - fn try_from( - x: &'a OriginCaller, - ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { - if let OriginCaller::#variant_name(l) = x { - Ok(&l) - } else { - Err(()) - } - } - } - - #attr - impl<'a> TryFrom<&'a RuntimeOrigin> for &'a #pallet_origin { - type Error = (); - fn try_from( - x: &'a RuntimeOrigin, - ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { - if let OriginCaller::#variant_name(l) = &x.caller { - Ok(&l) - } else { - Err(()) - } - } - } - } + let path = &pallet.path; + let variant_name = &pallet.name; + + let part_is_generic = !generics.params.is_empty(); + let pallet_origin = match instance { + Some(inst) if part_is_generic => quote!(#path::Origin<#runtime, #path::#inst>), + Some(inst) => quote!(#path::Origin<#path::#inst>), + None if part_is_generic => quote!(#path::Origin<#runtime>), + None => quote!(#path::Origin), + }; + + let doc_string = get_intra_doc_string(" Convert to runtime origin using", &path.module_name()); + let attr = pallet + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + quote! 
{ + #attr + impl From<#pallet_origin> for OriginCaller { + fn from(x: #pallet_origin) -> Self { + OriginCaller::#variant_name(x) + } + } + + #attr + impl From<#pallet_origin> for RuntimeOrigin { + #[doc = #doc_string] + fn from(x: #pallet_origin) -> Self { + let x: OriginCaller = x.into(); + x.into() + } + } + + #attr + impl From for #scrate::__private::sp_std::result::Result<#pallet_origin, RuntimeOrigin> { + /// NOTE: converting to pallet origin loses the origin filter information. + fn from(val: RuntimeOrigin) -> Self { + if let OriginCaller::#variant_name(l) = val.caller { + Ok(l) + } else { + Err(val) + } + } + } + + #attr + impl TryFrom for #pallet_origin { + type Error = OriginCaller; + fn try_from( + x: OriginCaller, + ) -> #scrate::__private::sp_std::result::Result<#pallet_origin, OriginCaller> { + if let OriginCaller::#variant_name(l) = x { + Ok(l) + } else { + Err(x) + } + } + } + + #attr + impl<'a> TryFrom<&'a OriginCaller> for &'a #pallet_origin { + type Error = (); + fn try_from( + x: &'a OriginCaller, + ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + if let OriginCaller::#variant_name(l) = x { + Ok(&l) + } else { + Err(()) + } + } + } + + #attr + impl<'a> TryFrom<&'a RuntimeOrigin> for &'a #pallet_origin { + type Error = (); + fn try_from( + x: &'a RuntimeOrigin, + ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + if let OriginCaller::#variant_name(l) = &x.caller { + Ok(&l) + } else { + Err(()) + } + } + } + } } // Get the actual documentation using the doc information and system path name fn get_intra_doc_string(doc_info: &str, system_path_name: &String) -> String { - format!(" {} [`{}::Config::BaseCallFilter`].", doc_info, system_path_name) + format!( + " {} [`{}::Config::BaseCallFilter`].", + doc_info, system_path_name + ) } diff --git a/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs b/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs index 
80b242ccb..28e39c7a2 100644 --- a/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs +++ b/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs @@ -24,37 +24,37 @@ use syn::{Generics, Ident}; /// Represents the types supported for creating an outer enum. #[derive(Clone, Copy, PartialEq)] pub enum OuterEnumType { - /// Collects the Event enums from all pallets. - Event, - /// Collects the Error enums from all pallets. - Error, + /// Collects the Event enums from all pallets. + Event, + /// Collects the Error enums from all pallets. + Error, } impl OuterEnumType { - /// The name of the structure this enum represents. - fn struct_name(&self) -> &str { - match self { - OuterEnumType::Event => "RuntimeEvent", - OuterEnumType::Error => "RuntimeError", - } - } + /// The name of the structure this enum represents. + fn struct_name(&self) -> &str { + match self { + OuterEnumType::Event => "RuntimeEvent", + OuterEnumType::Error => "RuntimeError", + } + } - /// The name of the variant (ie `Event` or `Error`). - fn variant_name(&self) -> &str { - match self { - OuterEnumType::Event => "Event", - OuterEnumType::Error => "Error", - } - } + /// The name of the variant (ie `Event` or `Error`). + fn variant_name(&self) -> &str { + match self { + OuterEnumType::Event => "Event", + OuterEnumType::Error => "Error", + } + } } impl ToTokens for OuterEnumType { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - OuterEnumType::Event => quote!(Event).to_tokens(tokens), - OuterEnumType::Error => quote!(Error).to_tokens(tokens), - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + OuterEnumType::Event => quote!(Event).to_tokens(tokens), + OuterEnumType::Error => quote!(Error).to_tokens(tokens), + } + } } /// Create an outer enum that encapsulates all pallets as variants. @@ -84,196 +84,207 @@ impl ToTokens for OuterEnumType { /// /// Notice that the pallet index is preserved using the `#[codec(index = ..)]` attribute. 
pub fn expand_outer_enum( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, - enum_ty: OuterEnumType, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, + enum_ty: OuterEnumType, ) -> syn::Result { - // Stores all pallet variants. - let mut enum_variants = TokenStream::new(); - // Generates the enum conversion between the `Runtime` outer enum and the pallet's enum. - let mut enum_conversions = TokenStream::new(); - // Specific for events to query via `is_event_part_defined!`. - let mut query_enum_part_macros = Vec::new(); + // Stores all pallet variants. + let mut enum_variants = TokenStream::new(); + // Generates the enum conversion between the `Runtime` outer enum and the pallet's enum. + let mut enum_conversions = TokenStream::new(); + // Specific for events to query via `is_event_part_defined!`. + let mut query_enum_part_macros = Vec::new(); - let enum_name_str = enum_ty.variant_name(); - let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); + let enum_name_str = enum_ty.variant_name(); + let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); - for pallet_decl in pallet_decls { - let Some(pallet_entry) = pallet_decl.find_part(enum_name_str) else { continue }; + for pallet_decl in pallet_decls { + let Some(pallet_entry) = pallet_decl.find_part(enum_name_str) else { + continue; + }; - let path = &pallet_decl.path; - let pallet_name = &pallet_decl.name; - let index = pallet_decl.index; - let instance = pallet_decl.instance.as_ref(); - let generics = &pallet_entry.generics; + let path = &pallet_decl.path; + let pallet_name = &pallet_decl.name; + let index = pallet_decl.index; + let instance = pallet_decl.instance.as_ref(); + let generics = &pallet_entry.generics; - if instance.is_some() && generics.params.is_empty() { - let msg = format!( - "Instantiable pallet with no generic `{}` cannot \ + if instance.is_some() && generics.params.is_empty() { + let msg = format!( + "Instantiable 
pallet with no generic `{}` cannot \ be constructed: pallet `{}` must have generic `{}`", - enum_name_str, pallet_name, enum_name_str, - ); - return Err(syn::Error::new(pallet_name.span(), msg)) - } + enum_name_str, pallet_name, enum_name_str, + ); + return Err(syn::Error::new(pallet_name.span(), msg)); + } - let part_is_generic = !generics.params.is_empty(); - let pallet_enum = match (instance, part_is_generic) { - (Some(inst), true) => quote!(#path::#enum_ty::<#runtime, #path::#inst>), - (Some(inst), false) => quote!(#path::#enum_ty::<#path::#inst>), - (None, true) => quote!(#path::#enum_ty::<#runtime>), - (None, false) => quote!(#path::#enum_ty), - }; + let part_is_generic = !generics.params.is_empty(); + let pallet_enum = match (instance, part_is_generic) { + (Some(inst), true) => quote!(#path::#enum_ty::<#runtime, #path::#inst>), + (Some(inst), false) => quote!(#path::#enum_ty::<#path::#inst>), + (None, true) => quote!(#path::#enum_ty::<#runtime>), + (None, false) => quote!(#path::#enum_ty), + }; - enum_variants.extend(expand_enum_variant( - runtime, - pallet_decl, - index, - instance, - generics, - enum_ty, - )); - enum_conversions.extend(expand_enum_conversion( - pallet_decl, - &pallet_enum, - &enum_name_ident, - )); + enum_variants.extend(expand_enum_variant( + runtime, + pallet_decl, + index, + instance, + generics, + enum_ty, + )); + enum_conversions.extend(expand_enum_conversion( + pallet_decl, + &pallet_enum, + &enum_name_ident, + )); - if enum_ty == OuterEnumType::Event { - query_enum_part_macros.push(quote! { - #path::__substrate_event_check::is_event_part_defined!(#pallet_name); - }); - } - } + if enum_ty == OuterEnumType::Event { + query_enum_part_macros.push(quote! { + #path::__substrate_event_check::is_event_part_defined!(#pallet_name); + }); + } + } - // Derives specific for the event. - let event_custom_derives = - if enum_ty == OuterEnumType::Event { quote!(Clone, PartialEq, Eq,) } else { quote!() }; + // Derives specific for the event. 
+ let event_custom_derives = if enum_ty == OuterEnumType::Event { + quote!(Clone, PartialEq, Eq,) + } else { + quote!() + }; - // Implementation specific for errors. - let error_custom_impl = generate_error_impl(scrate, enum_ty); + // Implementation specific for errors. + let error_custom_impl = generate_error_impl(scrate, enum_ty); - Ok(quote! { - #( #query_enum_part_macros )* + Ok(quote! { + #( #query_enum_part_macros )* - #[derive( - #event_custom_derives - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - #[allow(non_camel_case_types)] - pub enum #enum_name_ident { - #enum_variants - } + #[derive( + #event_custom_derives + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + #[allow(non_camel_case_types)] + pub enum #enum_name_ident { + #enum_variants + } - #enum_conversions + #enum_conversions - #error_custom_impl - }) + #error_custom_impl + }) } fn expand_enum_variant( - runtime: &Ident, - pallet: &Pallet, - index: u8, - instance: Option<&Ident>, - generics: &Generics, - enum_ty: OuterEnumType, + runtime: &Ident, + pallet: &Pallet, + index: u8, + instance: Option<&Ident>, + generics: &Generics, + enum_ty: OuterEnumType, ) -> TokenStream { - let path = &pallet.path; - let variant_name = &pallet.name; - let part_is_generic = !generics.params.is_empty(); - let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); + let path = &pallet.path; + let variant_name = &pallet.name; + let part_is_generic = !generics.params.is_empty(); + let attr = pallet + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - match instance { - Some(inst) if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#runtime, #path::#inst>), - }, - Some(inst) => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#path::#inst>), - }, - None if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#runtime>), - }, - None => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty), - }, - } + match instance { + Some(inst) if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#runtime, #path::#inst>), + }, + Some(inst) => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#path::#inst>), + }, + None if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#runtime>), + }, + None => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty), + }, + } } fn expand_enum_conversion( - pallet: &Pallet, - pallet_enum: &TokenStream, - enum_name_ident: &Ident, + pallet: &Pallet, + pallet_enum: &TokenStream, + enum_name_ident: &Ident, ) -> TokenStream { - let variant_name = &pallet.name; - let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); + let variant_name = &pallet.name; + let attr = pallet + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - quote! { - #attr - impl From<#pallet_enum> for #enum_name_ident { - fn from(x: #pallet_enum) -> Self { - #enum_name_ident - ::#variant_name(x) - } - } - #attr - impl TryInto<#pallet_enum> for #enum_name_ident { - type Error = (); + quote! { + #attr + impl From<#pallet_enum> for #enum_name_ident { + fn from(x: #pallet_enum) -> Self { + #enum_name_ident + ::#variant_name(x) + } + } + #attr + impl TryInto<#pallet_enum> for #enum_name_ident { + type Error = (); - fn try_into(self) -> ::core::result::Result<#pallet_enum, Self::Error> { - match self { - Self::#variant_name(evt) => Ok(evt), - _ => Err(()), - } - } - } - } + fn try_into(self) -> ::core::result::Result<#pallet_enum, Self::Error> { + match self { + Self::#variant_name(evt) => Ok(evt), + _ => Err(()), + } + } + } + } } fn generate_error_impl(scrate: &TokenStream, enum_ty: OuterEnumType) -> TokenStream { - // Implementation is specific to `Error`s. - if enum_ty == OuterEnumType::Event { - return quote! {} - } + // Implementation is specific to `Error`s. + if enum_ty == OuterEnumType::Event { + return quote! {}; + } - let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); + let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); - quote! { - impl #enum_name_ident { - /// Optionally convert the `DispatchError` into the `RuntimeError`. - /// - /// Returns `Some` if the error matches the `DispatchError::Module` variant, otherwise `None`. - pub fn from_dispatch_error(err: #scrate::sp_runtime::DispatchError) -> Option { - let #scrate::sp_runtime::DispatchError::Module(module_error) = err else { return None }; + quote! 
{ + impl #enum_name_ident { + /// Optionally convert the `DispatchError` into the `RuntimeError`. + /// + /// Returns `Some` if the error matches the `DispatchError::Module` variant, otherwise `None`. + pub fn from_dispatch_error(err: #scrate::sp_runtime::DispatchError) -> Option { + let #scrate::sp_runtime::DispatchError::Module(module_error) = err else { return None }; - let bytes = #scrate::__private::codec::Encode::encode(&module_error); - #scrate::__private::codec::Decode::decode(&mut &bytes[..]).ok() - } - } - } + let bytes = #scrate::__private::codec::Encode::encode(&module_error); + #scrate::__private::codec::Decode::decode(&mut &bytes[..]).ok() + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs index 892b842b1..0695d8102 100644 --- a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs @@ -21,44 +21,44 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_slash_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut slash_reason_variants = Vec::new(); - for decl in pallet_decls { - if let Some(_) = decl.find_part("SlashReason") { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut slash_reason_variants = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("SlashReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "SlashReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "SlashReason", + path, + instance, + variant_name, + )); 
- slash_reason_variants.push(composite_helper::expand_variant( - "SlashReason", - index, - path, - instance, - variant_name, - )); - } - } + slash_reason_variants.push(composite_helper::expand_variant( + "SlashReason", + index, + path, + instance, + variant_name, + )); + } + } - quote! { - /// A reason for slashing funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeSlashReason { - #( #slash_reason_variants )* - } + quote! { + /// A reason for slashing funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeSlashReason { + #( #slash_reason_variants )* + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/task.rs b/support/procedural-fork/src/construct_runtime/expand/task.rs index 6531c0e9e..94a5f52bb 100644 --- a/support/procedural-fork/src/construct_runtime/expand/task.rs +++ b/support/procedural-fork/src/construct_runtime/expand/task.rs @@ -21,111 +21,111 @@ use quote::quote; /// Expands aggregate `RuntimeTask` enum. pub fn expand_outer_task( - runtime_name: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream2, + runtime_name: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let mut from_impls = Vec::new(); - let mut task_variants = Vec::new(); - let mut variant_names = Vec::new(); - let mut task_paths = Vec::new(); - for decl in pallet_decls { - if decl.find_part("Task").is_none() { - continue - } - - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - - from_impls.push(quote! 
{ - impl From<#path::Task<#runtime_name>> for RuntimeTask { - fn from(hr: #path::Task<#runtime_name>) -> Self { - RuntimeTask::#variant_name(hr) - } - } - - impl TryInto<#path::Task<#runtime_name>> for RuntimeTask { - type Error = (); - - fn try_into(self) -> Result<#path::Task<#runtime_name>, Self::Error> { - match self { - RuntimeTask::#variant_name(hr) => Ok(hr), - _ => Err(()), - } - } - } - }); - - task_variants.push(quote! { - #[codec(index = #index)] - #variant_name(#path::Task<#runtime_name>), - }); - - variant_names.push(quote!(#variant_name)); - - task_paths.push(quote!(#path::Task)); - } - - let prelude = quote!(#scrate::traits::tasks::__private); - - const INCOMPLETE_MATCH_QED: &'static str = - "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; - - let output = quote! { - /// An aggregation of all `Task` enums across all pallets included in the current runtime. - #[derive( - Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeTask { - #( #task_variants )* - } - - #[automatically_derived] - impl #scrate::traits::Task for RuntimeTask { - type Enumeration = #prelude::IntoIter; - - fn is_valid(&self) -> bool { - match self { - #(RuntimeTask::#variant_names(val) => val.is_valid(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn run(&self) -> Result<(), #scrate::traits::tasks::__private::DispatchError> { - match self { - #(RuntimeTask::#variant_names(val) => val.run(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn weight(&self) -> #scrate::pallet_prelude::Weight { - match self { - #(RuntimeTask::#variant_names(val) => val.weight(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn task_index(&self) -> u32 { - match self { - #(RuntimeTask::#variant_names(val) => val.task_index(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn 
iter() -> Self::Enumeration { - let mut all_tasks = Vec::new(); - #(all_tasks.extend(#task_paths::iter().map(RuntimeTask::from).collect::>());)* - all_tasks.into_iter() - } - } - - #( #from_impls )* - }; - - output + let mut from_impls = Vec::new(); + let mut task_variants = Vec::new(); + let mut variant_names = Vec::new(); + let mut task_paths = Vec::new(); + for decl in pallet_decls { + if decl.find_part("Task").is_none() { + continue; + } + + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + + from_impls.push(quote! { + impl From<#path::Task<#runtime_name>> for RuntimeTask { + fn from(hr: #path::Task<#runtime_name>) -> Self { + RuntimeTask::#variant_name(hr) + } + } + + impl TryInto<#path::Task<#runtime_name>> for RuntimeTask { + type Error = (); + + fn try_into(self) -> Result<#path::Task<#runtime_name>, Self::Error> { + match self { + RuntimeTask::#variant_name(hr) => Ok(hr), + _ => Err(()), + } + } + } + }); + + task_variants.push(quote! { + #[codec(index = #index)] + #variant_name(#path::Task<#runtime_name>), + }); + + variant_names.push(quote!(#variant_name)); + + task_paths.push(quote!(#path::Task)); + } + + let prelude = quote!(#scrate::traits::tasks::__private); + + const INCOMPLETE_MATCH_QED: &'static str = + "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; + + let output = quote! { + /// An aggregation of all `Task` enums across all pallets included in the current runtime. 
+ #[derive( + Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeTask { + #( #task_variants )* + } + + #[automatically_derived] + impl #scrate::traits::Task for RuntimeTask { + type Enumeration = #prelude::IntoIter; + + fn is_valid(&self) -> bool { + match self { + #(RuntimeTask::#variant_names(val) => val.is_valid(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn run(&self) -> Result<(), #scrate::traits::tasks::__private::DispatchError> { + match self { + #(RuntimeTask::#variant_names(val) => val.run(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn weight(&self) -> #scrate::pallet_prelude::Weight { + match self { + #(RuntimeTask::#variant_names(val) => val.weight(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn task_index(&self) -> u32 { + match self { + #(RuntimeTask::#variant_names(val) => val.task_index(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn iter() -> Self::Enumeration { + let mut all_tasks = Vec::new(); + #(all_tasks.extend(#task_paths::iter().map(RuntimeTask::from).collect::>());)* + all_tasks.into_iter() + } + } + + #( #from_impls )* + }; + + output } diff --git a/support/procedural-fork/src/construct_runtime/expand/unsigned.rs b/support/procedural-fork/src/construct_runtime/expand/unsigned.rs index 33aadba0d..109f7081c 100644 --- a/support/procedural-fork/src/construct_runtime/expand/unsigned.rs +++ b/support/procedural-fork/src/construct_runtime/expand/unsigned.rs @@ -22,68 +22,71 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_validate_unsigned( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let mut query_validate_unsigned_part_macros = 
Vec::new(); + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let mut query_validate_unsigned_part_macros = Vec::new(); - for pallet_decl in pallet_decls { - if pallet_decl.exists_part("ValidateUnsigned") { - let name = &pallet_decl.name; - let path = &pallet_decl.path; - let attr = pallet_decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + for pallet_decl in pallet_decls { + if pallet_decl.exists_part("ValidateUnsigned") { + let name = &pallet_decl.name; + let path = &pallet_decl.path; + let attr = pallet_decl + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - pallet_names.push(name); - pallet_attrs.push(attr); - query_validate_unsigned_part_macros.push(quote! { + pallet_names.push(name); + pallet_attrs.push(attr); + query_validate_unsigned_part_macros.push(quote! { #path::__substrate_validate_unsigned_check::is_validate_unsigned_part_defined!(#name); }); - } - } + } + } - quote! { - #( #query_validate_unsigned_part_macros )* + quote! { + #( #query_validate_unsigned_part_macros )* - impl #scrate::unsigned::ValidateUnsigned for #runtime { - type Call = RuntimeCall; + impl #scrate::unsigned::ValidateUnsigned for #runtime { + type Call = RuntimeCall; - fn pre_dispatch(call: &Self::Call) -> Result<(), #scrate::unsigned::TransactionValidityError> { - #[allow(unreachable_patterns)] - match call { - #( - #pallet_attrs - RuntimeCall::#pallet_names(inner_call) => #pallet_names::pre_dispatch(inner_call), - )* - // pre-dispatch should not stop inherent extrinsics, validation should prevent - // including arbitrary (non-inherent) extrinsics to blocks. 
- _ => Ok(()), - } - } + fn pre_dispatch(call: &Self::Call) -> Result<(), #scrate::unsigned::TransactionValidityError> { + #[allow(unreachable_patterns)] + match call { + #( + #pallet_attrs + RuntimeCall::#pallet_names(inner_call) => #pallet_names::pre_dispatch(inner_call), + )* + // pre-dispatch should not stop inherent extrinsics, validation should prevent + // including arbitrary (non-inherent) extrinsics to blocks. + _ => Ok(()), + } + } - fn validate_unsigned( - #[allow(unused_variables)] - source: #scrate::unsigned::TransactionSource, - call: &Self::Call, - ) -> #scrate::unsigned::TransactionValidity { - #[allow(unreachable_patterns)] - match call { - #( - #pallet_attrs - RuntimeCall::#pallet_names(inner_call) => #pallet_names::validate_unsigned(source, inner_call), - )* - _ => #scrate::unsigned::UnknownTransaction::NoUnsignedValidator.into(), - } - } - } - } + fn validate_unsigned( + #[allow(unused_variables)] + source: #scrate::unsigned::TransactionSource, + call: &Self::Call, + ) -> #scrate::unsigned::TransactionValidity { + #[allow(unreachable_patterns)] + match call { + #( + #pallet_attrs + RuntimeCall::#pallet_names(inner_call) => #pallet_names::validate_unsigned(source, inner_call), + )* + _ => #scrate::unsigned::UnknownTransaction::NoUnsignedValidator.into(), + } + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/mod.rs b/support/procedural-fork/src/construct_runtime/mod.rs index b083abbb2..de688b3d6 100644 --- a/support/procedural-fork/src/construct_runtime/mod.rs +++ b/support/procedural-fork/src/construct_runtime/mod.rs @@ -214,7 +214,7 @@ pub(crate) mod parse; use crate::pallet::parse::helper::two128_str; use cfg_expr::Predicate; use frame_support_procedural_tools::{ - generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, + generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, }; use itertools::Itertools; use parse::{ExplicitRuntimeDeclaration, 
ImplicitRuntimeDeclaration, Pallet, RuntimeDeclaration}; @@ -230,48 +230,48 @@ const SYSTEM_PALLET_NAME: &str = "System"; /// Implementation of `construct_runtime` macro. Either expand to some code which will call /// `construct_runtime` again, or expand to the final runtime definition. pub fn construct_runtime(input: TokenStream) -> TokenStream { - let input_copy = input.clone(); - let definition = syn::parse_macro_input!(input as RuntimeDeclaration); - - let (check_pallet_number_res, res) = match definition { - RuntimeDeclaration::Implicit(implicit_def) => ( - check_pallet_number(input_copy.clone().into(), implicit_def.pallets.len()), - construct_runtime_implicit_to_explicit(input_copy.into(), implicit_def), - ), - RuntimeDeclaration::Explicit(explicit_decl) => ( - check_pallet_number(input_copy.clone().into(), explicit_decl.pallets.len()), - construct_runtime_explicit_to_explicit_expanded(input_copy.into(), explicit_decl), - ), - RuntimeDeclaration::ExplicitExpanded(explicit_decl) => ( - check_pallet_number(input_copy.into(), explicit_decl.pallets.len()), - construct_runtime_final_expansion(explicit_decl), - ), - }; - - let res = res.unwrap_or_else(|e| e.to_compile_error()); - - // We want to provide better error messages to the user and thus, handle the error here - // separately. If there is an error, we print the error and still generate all of the code to - // get in overall less errors for the user. - let res = if let Err(error) = check_pallet_number_res { - let error = error.to_compile_error(); - - quote! 
{ - #error - - #res - } - } else { - res - }; - - let res = expander::Expander::new("construct_runtime") - .dry(std::env::var("EXPAND_MACROS").is_err()) - .verbose(true) - .write_to_out_dir(res) - .expect("Does not fail because of IO in OUT_DIR; qed"); - - res.into() + let input_copy = input.clone(); + let definition = syn::parse_macro_input!(input as RuntimeDeclaration); + + let (check_pallet_number_res, res) = match definition { + RuntimeDeclaration::Implicit(implicit_def) => ( + check_pallet_number(input_copy.clone().into(), implicit_def.pallets.len()), + construct_runtime_implicit_to_explicit(input_copy.into(), implicit_def), + ), + RuntimeDeclaration::Explicit(explicit_decl) => ( + check_pallet_number(input_copy.clone().into(), explicit_decl.pallets.len()), + construct_runtime_explicit_to_explicit_expanded(input_copy.into(), explicit_decl), + ), + RuntimeDeclaration::ExplicitExpanded(explicit_decl) => ( + check_pallet_number(input_copy.into(), explicit_decl.pallets.len()), + construct_runtime_final_expansion(explicit_decl), + ), + }; + + let res = res.unwrap_or_else(|e| e.to_compile_error()); + + // We want to provide better error messages to the user and thus, handle the error here + // separately. If there is an error, we print the error and still generate all of the code to + // get in overall less errors for the user. + let res = if let Err(error) = check_pallet_number_res { + let error = error.to_compile_error(); + + quote! { + #error + + #res + } + } else { + res + }; + + let res = expander::Expander::new("construct_runtime") + .dry(std::env::var("EXPAND_MACROS").is_err()) + .verbose(true) + .write_to_out_dir(res) + .expect("Does not fail because of IO in OUT_DIR; qed"); + + res.into() } /// All pallets that have implicit pallet parts (ie `System: frame_system`) are @@ -282,30 +282,37 @@ pub fn construct_runtime(input: TokenStream) -> TokenStream { /// /// For more details, please refer to the root documentation. 
fn construct_runtime_implicit_to_explicit( - input: TokenStream2, - definition: ImplicitRuntimeDeclaration, + input: TokenStream2, + definition: ImplicitRuntimeDeclaration, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let mut expansion = quote::quote!( - #frame_support::construct_runtime! { #input } - ); - for pallet in definition.pallets.iter().filter(|pallet| pallet.pallet_parts.is_none()) { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(::<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_default_parts }] - your_tt_return = [{ #frame_support::__private::tt_return }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name: #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let mut expansion = quote::quote!( + #frame_support::construct_runtime! { #input } + ); + for pallet in definition + .pallets + .iter() + .filter(|pallet| pallet.pallet_parts.is_none()) + { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet + .instance + .as_ref() + .map(|instance| quote::quote!(::<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_default_parts }] + your_tt_return = [{ #frame_support::__private::tt_return }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name: #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } /// All pallets that have @@ -318,264 +325,283 @@ fn construct_runtime_implicit_to_explicit( /// /// For more details, please refer to the root documentation. 
fn construct_runtime_explicit_to_explicit_expanded( - input: TokenStream2, - definition: ExplicitRuntimeDeclaration, + input: TokenStream2, + definition: ExplicitRuntimeDeclaration, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let mut expansion = quote::quote!( - #frame_support::construct_runtime! { #input } - ); - for pallet in definition.pallets.iter().filter(|pallet| !pallet.is_expanded) { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(::<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_extra_parts }] - your_tt_return = [{ #frame_support::__private::tt_return }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name: #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let mut expansion = quote::quote!( + #frame_support::construct_runtime! { #input } + ); + for pallet in definition + .pallets + .iter() + .filter(|pallet| !pallet.is_expanded) + { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet + .instance + .as_ref() + .map(|instance| quote::quote!(::<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_extra_parts }] + your_tt_return = [{ #frame_support::__private::tt_return }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name: #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } /// All pallets have explicit definition of parts, this will expand to the runtime declaration. 
fn construct_runtime_final_expansion( - definition: ExplicitRuntimeDeclaration, + definition: ExplicitRuntimeDeclaration, ) -> Result { - let ExplicitRuntimeDeclaration { name, pallets, pallets_token, where_section } = definition; - - let system_pallet = - pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { - syn::Error::new( - pallets_token.span.join(), - "`System` pallet declaration is missing. \ + let ExplicitRuntimeDeclaration { + name, + pallets, + pallets_token, + where_section, + } = definition; + + let system_pallet = pallets + .iter() + .find(|decl| decl.name == SYSTEM_PALLET_NAME) + .ok_or_else(|| { + syn::Error::new( + pallets_token.span.join(), + "`System` pallet declaration is missing. \ Please add this line: `System: frame_system,`", - ) - })?; - if !system_pallet.cfg_pattern.is_empty() { - return Err(syn::Error::new( - system_pallet.name.span(), - "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", - )) - } - - let features = pallets - .iter() - .filter_map(|decl| { - (!decl.cfg_pattern.is_empty()).then(|| { - decl.cfg_pattern.iter().flat_map(|attr| { - attr.predicates().filter_map(|pred| match pred { - Predicate::Feature(feat) => Some(feat), - Predicate::Test => Some("test"), - _ => None, - }) - }) - }) - }) - .flatten() - .collect::>(); - - let hidden_crate_name = "construct_runtime"; - let scrate = generate_crate_access(hidden_crate_name, "frame-support"); - let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let block = quote!(<#name as #frame_system::Config>::Block); - let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); - - let outer_event = - expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Event)?; - let outer_error = - expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Error)?; - - let 
outer_origin = expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?; - let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); - let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); - - let dispatch = expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate); - let tasks = expand::expand_outer_task(&name, &pallets, &scrate); - let metadata = expand::expand_runtime_metadata( - &name, - &pallets, - &scrate, - &unchecked_extrinsic, - &system_pallet.path, - ); - let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); - let inherent = - expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); - let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); - let freeze_reason = expand::expand_outer_freeze_reason(&pallets, &scrate); - let hold_reason = expand::expand_outer_hold_reason(&pallets, &scrate); - let lock_id = expand::expand_outer_lock_id(&pallets, &scrate); - let slash_reason = expand::expand_outer_slash_reason(&pallets, &scrate); - let integrity_test = decl_integrity_test(&scrate); - let static_assertions = decl_static_assertions(&name, &pallets, &scrate); - - let warning = where_section.map_or(None, |where_section| { - Some( - proc_macro_warning::Warning::new_deprecated("WhereSection") - .old("use a `where` clause in `construct_runtime`") - .new( - "use `frame_system::Config` to set the `Block` type and delete this clause. 
+ ) + })?; + if !system_pallet.cfg_pattern.is_empty() { + return Err(syn::Error::new( + system_pallet.name.span(), + "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", + )); + } + + let features = pallets + .iter() + .filter_map(|decl| { + (!decl.cfg_pattern.is_empty()).then(|| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) + }) + .flatten() + .collect::>(); + + let hidden_crate_name = "construct_runtime"; + let scrate = generate_crate_access(hidden_crate_name, "frame-support"); + let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let block = quote!(<#name as #frame_system::Config>::Block); + let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); + + let outer_event = + expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Event)?; + let outer_error = + expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Error)?; + + let outer_origin = expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?; + let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); + let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); + + let dispatch = expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate); + let tasks = expand::expand_outer_task(&name, &pallets, &scrate); + let metadata = expand::expand_runtime_metadata( + &name, + &pallets, + &scrate, + &unchecked_extrinsic, + &system_pallet.path, + ); + let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); + let inherent = + expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); + let validate_unsigned = expand::expand_outer_validate_unsigned(&name, 
&pallets, &scrate); + let freeze_reason = expand::expand_outer_freeze_reason(&pallets, &scrate); + let hold_reason = expand::expand_outer_hold_reason(&pallets, &scrate); + let lock_id = expand::expand_outer_lock_id(&pallets, &scrate); + let slash_reason = expand::expand_outer_slash_reason(&pallets, &scrate); + let integrity_test = decl_integrity_test(&scrate); + let static_assertions = decl_static_assertions(&name, &pallets, &scrate); + + let warning = where_section.map_or(None, |where_section| { + Some( + proc_macro_warning::Warning::new_deprecated("WhereSection") + .old("use a `where` clause in `construct_runtime`") + .new( + "use `frame_system::Config` to set the `Block` type and delete this clause. It is planned to be removed in December 2023", - ) - .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) - .span(where_section.span) - .build_or_panic(), - ) - }); - - let res = quote!( - #warning - - #scrate_decl - - // Prevent UncheckedExtrinsic to print unused warning. - const _: () = { - #[allow(unused)] - type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; - }; - - #[derive( - Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, - #scrate::__private::scale_info::TypeInfo - )] - pub struct #name; - impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { - type RuntimeBlock = #block; - } - - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. - // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. 
- // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` - // is called. - - #[doc(hidden)] - trait InternalConstructRuntime { - #[inline(always)] - fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { - Default::default() - } - } - #[doc(hidden)] - impl InternalConstructRuntime for &#name {} + ) + .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) + .span(where_section.span) + .build_or_panic(), + ) + }); + + let res = quote!( + #warning + + #scrate_decl + + // Prevent UncheckedExtrinsic to print unused warning. + const _: () = { + #[allow(unused)] + type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + }; - #outer_event + #[derive( + Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + pub struct #name; + impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { + type RuntimeBlock = #block; + } - #outer_error + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. + // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. 
+ // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` + // is called. - #outer_origin + #[doc(hidden)] + trait InternalConstructRuntime { + #[inline(always)] + fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + Default::default() + } + } + #[doc(hidden)] + impl InternalConstructRuntime for &#name {} - #all_pallets + #outer_event - #pallet_to_index + #outer_error - #dispatch + #outer_origin - #tasks + #all_pallets - #metadata + #pallet_to_index - #outer_config + #dispatch - #inherent + #tasks - #validate_unsigned + #metadata - #freeze_reason + #outer_config - #hold_reason + #inherent - #lock_id + #validate_unsigned - #slash_reason + #freeze_reason - #integrity_test + #hold_reason - #static_assertions - ); + #lock_id - Ok(res) + #slash_reason + + #integrity_test + + #static_assertions + ); + + Ok(res) } pub(crate) fn decl_all_pallets<'a>( - runtime: &'a Ident, - pallet_declarations: impl Iterator, - features: &HashSet<&str>, + runtime: &'a Ident, + pallet_declarations: impl Iterator, + features: &HashSet<&str>, ) -> TokenStream2 { - let mut types = TokenStream2::new(); - - // Every feature set to the pallet names that should be included by this feature set. 
- let mut features_to_names = features - .iter() - .map(|f| *f) - .powerset() - .map(|feat| (HashSet::from_iter(feat), Vec::new())) - .collect::, Vec<_>)>>(); - - for pallet_declaration in pallet_declarations { - let type_name = &pallet_declaration.name; - let pallet = &pallet_declaration.path; - let mut generics = vec![quote!(#runtime)]; - generics.extend(pallet_declaration.instance.iter().map(|name| quote!(#pallet::#name))); - let mut attrs = Vec::new(); - for cfg in &pallet_declaration.cfg_pattern { - let feat = format!("#[cfg({})]\n", cfg.original()); - attrs.extend(TokenStream2::from_str(&feat).expect("was parsed successfully; qed")); - } - let type_decl = quote!( - #(#attrs)* - pub type #type_name = #pallet::Pallet <#(#generics),*>; - ); - types.extend(type_decl); - - if pallet_declaration.cfg_pattern.is_empty() { - for (_, names) in features_to_names.iter_mut() { - names.push(&pallet_declaration.name); - } - } else { - for (feature_set, names) in &mut features_to_names { - // Rust tidbit: if we have multiple `#[cfg]` feature on the same item, then the - // predicates listed in all `#[cfg]` attributes are effectively joined by `and()`, - // meaning that all of them must match in order to activate the item - let is_feature_active = pallet_declaration.cfg_pattern.iter().all(|expr| { - expr.eval(|pred| match pred { - Predicate::Feature(f) => feature_set.contains(f), - Predicate::Test => feature_set.contains(&"test"), - _ => false, - }) - }); - - if is_feature_active { - names.push(&pallet_declaration.name); - } - } - } - } - - // All possible features. This will be used below for the empty feature set. - let mut all_features = features_to_names - .iter() - .flat_map(|f| f.0.iter().cloned()) - .collect::>(); - let attribute_to_names = features_to_names + let mut types = TokenStream2::new(); + + // Every feature set to the pallet names that should be included by this feature set. 
+ let mut features_to_names = features + .iter() + .map(|f| *f) + .powerset() + .map(|feat| (HashSet::from_iter(feat), Vec::new())) + .collect::, Vec<_>)>>(); + + for pallet_declaration in pallet_declarations { + let type_name = &pallet_declaration.name; + let pallet = &pallet_declaration.path; + let mut generics = vec![quote!(#runtime)]; + generics.extend( + pallet_declaration + .instance + .iter() + .map(|name| quote!(#pallet::#name)), + ); + let mut attrs = Vec::new(); + for cfg in &pallet_declaration.cfg_pattern { + let feat = format!("#[cfg({})]\n", cfg.original()); + attrs.extend(TokenStream2::from_str(&feat).expect("was parsed successfully; qed")); + } + let type_decl = quote!( + #(#attrs)* + pub type #type_name = #pallet::Pallet <#(#generics),*>; + ); + types.extend(type_decl); + + if pallet_declaration.cfg_pattern.is_empty() { + for (_, names) in features_to_names.iter_mut() { + names.push(&pallet_declaration.name); + } + } else { + for (feature_set, names) in &mut features_to_names { + // Rust tidbit: if we have multiple `#[cfg]` feature on the same item, then the + // predicates listed in all `#[cfg]` attributes are effectively joined by `and()`, + // meaning that all of them must match in order to activate the item + let is_feature_active = pallet_declaration.cfg_pattern.iter().all(|expr| { + expr.eval(|pred| match pred { + Predicate::Feature(f) => feature_set.contains(f), + Predicate::Test => feature_set.contains(&"test"), + _ => false, + }) + }); + + if is_feature_active { + names.push(&pallet_declaration.name); + } + } + } + } + + // All possible features. This will be used below for the empty feature set. 
+ let mut all_features = features_to_names + .iter() + .flat_map(|f| f.0.iter().cloned()) + .collect::>(); + let attribute_to_names = features_to_names .into_iter() .map(|(mut features, names)| { // If this is the empty feature set, it needs to be changed to negate all available @@ -598,212 +624,222 @@ pub(crate) fn decl_all_pallets<'a>( }) .collect::>(); - let all_pallets_without_system = attribute_to_names.iter().map(|(attr, names)| { - let names = names.iter().filter(|n| **n != SYSTEM_PALLET_NAME); - quote! { - #attr - /// All pallets included in the runtime as a nested tuple of types. - /// Excludes the System pallet. - pub type AllPalletsWithoutSystem = ( #(#names,)* ); - } - }); - - let all_pallets_with_system = attribute_to_names.iter().map(|(attr, names)| { - quote! { - #attr - /// All pallets included in the runtime as a nested tuple of types. - pub type AllPalletsWithSystem = ( #(#names,)* ); - } - }); - - quote!( - #types - - #( #all_pallets_with_system )* - - #( #all_pallets_without_system )* - ) + let all_pallets_without_system = attribute_to_names.iter().map(|(attr, names)| { + let names = names.iter().filter(|n| **n != SYSTEM_PALLET_NAME); + quote! { + #attr + /// All pallets included in the runtime as a nested tuple of types. + /// Excludes the System pallet. + pub type AllPalletsWithoutSystem = ( #(#names,)* ); + } + }); + + let all_pallets_with_system = attribute_to_names.iter().map(|(attr, names)| { + quote! { + #attr + /// All pallets included in the runtime as a nested tuple of types. 
+ pub type AllPalletsWithSystem = ( #(#names,)* ); + } + }); + + quote!( + #types + + #( #all_pallets_with_system )* + + #( #all_pallets_without_system )* + ) } pub(crate) fn decl_pallet_runtime_setup( - runtime: &Ident, - pallet_declarations: &[Pallet], - scrate: &TokenStream2, + runtime: &Ident, + pallet_declarations: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let names = pallet_declarations.iter().map(|d| &d.name).collect::>(); - let name_strings = pallet_declarations.iter().map(|d| d.name.to_string()); - let name_hashes = pallet_declarations.iter().map(|d| two128_str(&d.name.to_string())); - let module_names = pallet_declarations.iter().map(|d| d.path.module_name()); - let indices = pallet_declarations.iter().map(|pallet| pallet.index as usize); - let pallet_structs = pallet_declarations - .iter() - .map(|pallet| { - let path = &pallet.path; - match pallet.instance.as_ref() { - Some(inst) => quote!(#path::Pallet<#runtime, #path::#inst>), - None => quote!(#path::Pallet<#runtime>), - } - }) - .collect::>(); - let pallet_attrs = pallet_declarations - .iter() - .map(|pallet| { - pallet.cfg_pattern.iter().fold(TokenStream2::new(), |acc, pattern| { - let attr = TokenStream2::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }) - }) - .collect::>(); - - quote!( - /// Provides an implementation of `PalletInfo` to provide information - /// about the pallet setup in the runtime. - pub struct PalletInfo; - - impl #scrate::traits::PalletInfo for PalletInfo { - - fn index() -> Option { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#indices) - } - )* - - None - } - - fn name() -> Option<&'static str> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#name_strings) - } - )* - - None - } - - fn name_hash() -> Option<[u8; 16]> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#name_hashes) - } - )* - - None - } - - fn module_name() -> Option<&'static str> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#module_names) - } - )* - - None - } - - fn crate_version() -> Option<#scrate::traits::CrateVersion> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some( - <#pallet_structs as #scrate::traits::PalletInfoAccess>::crate_version() - ) - } - )* - - None - } - } - ) + let names = pallet_declarations + .iter() + .map(|d| &d.name) + .collect::>(); + let name_strings = pallet_declarations.iter().map(|d| d.name.to_string()); + let name_hashes = pallet_declarations + .iter() + .map(|d| two128_str(&d.name.to_string())); + let module_names = pallet_declarations.iter().map(|d| d.path.module_name()); + let indices = pallet_declarations + .iter() + .map(|pallet| pallet.index as usize); + let pallet_structs = pallet_declarations + .iter() + .map(|pallet| { + let path = &pallet.path; + match pallet.instance.as_ref() { + Some(inst) => quote!(#path::Pallet<#runtime, #path::#inst>), + None => quote!(#path::Pallet<#runtime>), + } + }) + .collect::>(); + let pallet_attrs = pallet_declarations + .iter() + .map(|pallet| { + pallet + .cfg_pattern + .iter() + .fold(TokenStream2::new(), |acc, pattern| { + let attr = TokenStream2::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }) + }) + .collect::>(); + + quote!( + /// Provides an implementation of `PalletInfo` to provide information + /// about the pallet setup in the runtime. + pub struct PalletInfo; + + impl #scrate::traits::PalletInfo for PalletInfo { + + fn index() -> Option { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#indices) + } + )* + + None + } + + fn name() -> Option<&'static str> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#name_strings) + } + )* + + None + } + + fn name_hash() -> Option<[u8; 16]> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#name_hashes) + } + )* + + None + } + + fn module_name() -> Option<&'static str> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#module_names) + } + )* + + None + } + + fn crate_version() -> Option<#scrate::traits::CrateVersion> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some( + <#pallet_structs as #scrate::traits::PalletInfoAccess>::crate_version() + ) + } + )* + + None + } + } + ) } pub(crate) fn decl_integrity_test(scrate: &TokenStream2) -> TokenStream2 { - quote!( - #[cfg(test)] - mod __construct_runtime_integrity_test { - use super::*; - - #[test] - pub fn runtime_integrity_tests() { - #scrate::__private::sp_tracing::try_init_simple(); - ::integrity_test(); - } - } - ) + quote!( + #[cfg(test)] + mod __construct_runtime_integrity_test { + use super::*; + + #[test] + pub fn runtime_integrity_tests() { + #scrate::__private::sp_tracing::try_init_simple(); + ::integrity_test(); + } + } + ) } pub(crate) fn decl_static_assertions( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream2, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let error_encoded_size_check = pallet_decls.iter().map(|decl| { - let path = &decl.path; - let assert_message = format!( - "The maximum encoded size of the error type in the `{}` pallet exceeds \ + let error_encoded_size_check = pallet_decls.iter().map(|decl| { + let path = &decl.path; + let assert_message = format!( + "The maximum encoded size of the error type in the `{}` pallet exceeds \ `MAX_MODULE_ERROR_ENCODED_SIZE`", - decl.name, - ); - - quote! { - #scrate::__private::tt_call! { - macro = [{ #path::tt_error_token }] - your_tt_return = [{ #scrate::__private::tt_return }] - ~~> #scrate::assert_error_encoded_size! { - path = [{ #path }] - runtime = [{ #runtime }] - assert_message = [{ #assert_message }] - } - } - } - }); - - quote! { - #(#error_encoded_size_check)* - } + decl.name, + ); + + quote! { + #scrate::__private::tt_call! { + macro = [{ #path::tt_error_token }] + your_tt_return = [{ #scrate::__private::tt_return }] + ~~> #scrate::assert_error_encoded_size! 
{ + path = [{ #path }] + runtime = [{ #runtime }] + assert_message = [{ #assert_message }] + } + } + } + }); + + quote! { + #(#error_encoded_size_check)* + } } pub(crate) fn check_pallet_number(input: TokenStream2, pallet_num: usize) -> Result<()> { - let max_pallet_num = { - if cfg!(feature = "tuples-96") { - 96 - } else if cfg!(feature = "tuples-128") { - 128 - } else { - 64 - } - }; - - if pallet_num > max_pallet_num { - let no_feature = max_pallet_num == 128; - return Err(syn::Error::new( - input.span(), - format!( - "{} To increase this limit, enable the tuples-{} feature of [frame_support]. {}", - "The number of pallets exceeds the maximum number of tuple elements.", - max_pallet_num + 32, - if no_feature { - "If the feature does not exist - it needs to be implemented." - } else { - "" - }, - ), - )) - } - - Ok(()) + let max_pallet_num = { + if cfg!(feature = "tuples-96") { + 96 + } else if cfg!(feature = "tuples-128") { + 128 + } else { + 64 + } + }; + + if pallet_num > max_pallet_num { + let no_feature = max_pallet_num == 128; + return Err(syn::Error::new( + input.span(), + format!( + "{} To increase this limit, enable the tuples-{} feature of [frame_support]. {}", + "The number of pallets exceeds the maximum number of tuple elements.", + max_pallet_num + 32, + if no_feature { + "If the feature does not exist - it needs to be implemented." 
+ } else { + "" + }, + ), + )); + } + + Ok(()) } diff --git a/support/procedural-fork/src/construct_runtime/parse.rs b/support/procedural-fork/src/construct_runtime/parse.rs index 31866c787..173a8dd12 100644 --- a/support/procedural-fork/src/construct_runtime/parse.rs +++ b/support/procedural-fork/src/construct_runtime/parse.rs @@ -20,34 +20,34 @@ use proc_macro2::{Span, TokenStream}; use quote::ToTokens; use std::collections::{HashMap, HashSet}; use syn::{ - ext::IdentExt, - parse::{Parse, ParseStream}, - punctuated::Punctuated, - spanned::Spanned, - token, Attribute, Error, Ident, Path, Result, Token, + ext::IdentExt, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned, + token, Attribute, Error, Ident, Path, Result, Token, }; mod keyword { - syn::custom_keyword!(Block); - syn::custom_keyword!(NodeBlock); - syn::custom_keyword!(UncheckedExtrinsic); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(Call); - syn::custom_keyword!(Storage); - syn::custom_keyword!(Event); - syn::custom_keyword!(Error); - syn::custom_keyword!(Config); - syn::custom_keyword!(Origin); - syn::custom_keyword!(Inherent); - syn::custom_keyword!(ValidateUnsigned); - syn::custom_keyword!(FreezeReason); - syn::custom_keyword!(HoldReason); - syn::custom_keyword!(Task); - syn::custom_keyword!(LockId); - syn::custom_keyword!(SlashReason); - syn::custom_keyword!(exclude_parts); - syn::custom_keyword!(use_parts); - syn::custom_keyword!(expanded); + syn::custom_keyword!(Block); + syn::custom_keyword!(NodeBlock); + syn::custom_keyword!(UncheckedExtrinsic); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(Call); + syn::custom_keyword!(Storage); + syn::custom_keyword!(Event); + syn::custom_keyword!(Error); + syn::custom_keyword!(Config); + syn::custom_keyword!(Origin); + syn::custom_keyword!(Inherent); + syn::custom_keyword!(ValidateUnsigned); + syn::custom_keyword!(FreezeReason); + syn::custom_keyword!(HoldReason); + syn::custom_keyword!(Task); + 
syn::custom_keyword!(LockId); + syn::custom_keyword!(SlashReason); + syn::custom_keyword!(exclude_parts); + syn::custom_keyword!(use_parts); + syn::custom_keyword!(expanded); } /// Declaration of a runtime. @@ -57,266 +57,298 @@ mod keyword { /// implicit. #[derive(Debug)] pub enum RuntimeDeclaration { - Implicit(ImplicitRuntimeDeclaration), - Explicit(ExplicitRuntimeDeclaration), - ExplicitExpanded(ExplicitRuntimeDeclaration), + Implicit(ImplicitRuntimeDeclaration), + Explicit(ExplicitRuntimeDeclaration), + ExplicitExpanded(ExplicitRuntimeDeclaration), } /// Declaration of a runtime with some pallet with implicit declaration of parts. #[derive(Debug)] pub struct ImplicitRuntimeDeclaration { - pub name: Ident, - pub where_section: Option, - pub pallets: Vec, + pub name: Ident, + pub where_section: Option, + pub pallets: Vec, } /// Declaration of a runtime with all pallet having explicit declaration of parts. #[derive(Debug)] pub struct ExplicitRuntimeDeclaration { - pub name: Ident, - pub where_section: Option, - pub pallets: Vec, - pub pallets_token: token::Brace, + pub name: Ident, + pub where_section: Option, + pub pallets: Vec, + pub pallets_token: token::Brace, } impl Parse for RuntimeDeclaration { - fn parse(input: ParseStream) -> Result { - input.parse::()?; - - // Support either `enum` or `struct`. - if input.peek(Token![struct]) { - input.parse::()?; - } else { - input.parse::()?; - } - - let name = input.parse::()?; - let where_section = if input.peek(token::Where) { Some(input.parse()?) } else { None }; - let pallets = - input.parse::>>()?; - let pallets_token = pallets.token; - - match convert_pallets(pallets.content.inner.into_iter().collect())? 
{ - PalletsConversion::Implicit(pallets) => - Ok(RuntimeDeclaration::Implicit(ImplicitRuntimeDeclaration { - name, - where_section, - pallets, - })), - PalletsConversion::Explicit(pallets) => - Ok(RuntimeDeclaration::Explicit(ExplicitRuntimeDeclaration { - name, - where_section, - pallets, - pallets_token, - })), - PalletsConversion::ExplicitExpanded(pallets) => - Ok(RuntimeDeclaration::ExplicitExpanded(ExplicitRuntimeDeclaration { - name, - where_section, - pallets, - pallets_token, - })), - } - } + fn parse(input: ParseStream) -> Result { + input.parse::()?; + + // Support either `enum` or `struct`. + if input.peek(Token![struct]) { + input.parse::()?; + } else { + input.parse::()?; + } + + let name = input.parse::()?; + let where_section = if input.peek(token::Where) { + Some(input.parse()?) + } else { + None + }; + let pallets = + input.parse::>>()?; + let pallets_token = pallets.token; + + match convert_pallets(pallets.content.inner.into_iter().collect())? { + PalletsConversion::Implicit(pallets) => { + Ok(RuntimeDeclaration::Implicit(ImplicitRuntimeDeclaration { + name, + where_section, + pallets, + })) + } + PalletsConversion::Explicit(pallets) => { + Ok(RuntimeDeclaration::Explicit(ExplicitRuntimeDeclaration { + name, + where_section, + pallets, + pallets_token, + })) + } + PalletsConversion::ExplicitExpanded(pallets) => Ok( + RuntimeDeclaration::ExplicitExpanded(ExplicitRuntimeDeclaration { + name, + where_section, + pallets, + pallets_token, + }), + ), + } + } } #[derive(Debug)] pub struct WhereSection { - pub span: Span, - pub block: syn::TypePath, - pub node_block: syn::TypePath, - pub unchecked_extrinsic: syn::TypePath, + pub span: Span, + pub block: syn::TypePath, + pub node_block: syn::TypePath, + pub unchecked_extrinsic: syn::TypePath, } impl Parse for WhereSection { - fn parse(input: ParseStream) -> Result { - input.parse::()?; - - let mut definitions = Vec::new(); - while !input.peek(token::Brace) { - let definition: WhereDefinition = 
input.parse()?; - definitions.push(definition); - if !input.peek(Token![,]) { - if !input.peek(token::Brace) { - return Err(input.error("Expected `,` or `{`")) - } - break - } - input.parse::()?; - } - let block = remove_kind(input, WhereKind::Block, &mut definitions)?.value; - let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; - let unchecked_extrinsic = - remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?.value; - if let Some(WhereDefinition { ref kind_span, ref kind, .. }) = definitions.first() { - let msg = format!( - "`{:?}` was declared above. Please use exactly one declaration for `{:?}`.", - kind, kind - ); - return Err(Error::new(*kind_span, msg)) - } - Ok(Self { span: input.span(), block, node_block, unchecked_extrinsic }) - } + fn parse(input: ParseStream) -> Result { + input.parse::()?; + + let mut definitions = Vec::new(); + while !input.peek(token::Brace) { + let definition: WhereDefinition = input.parse()?; + definitions.push(definition); + if !input.peek(Token![,]) { + if !input.peek(token::Brace) { + return Err(input.error("Expected `,` or `{`")); + } + break; + } + input.parse::()?; + } + let block = remove_kind(input, WhereKind::Block, &mut definitions)?.value; + let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; + let unchecked_extrinsic = + remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?.value; + if let Some(WhereDefinition { + ref kind_span, + ref kind, + .. + }) = definitions.first() + { + let msg = format!( + "`{:?}` was declared above. 
Please use exactly one declaration for `{:?}`.", + kind, kind + ); + return Err(Error::new(*kind_span, msg)); + } + Ok(Self { + span: input.span(), + block, + node_block, + unchecked_extrinsic, + }) + } } #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum WhereKind { - Block, - NodeBlock, - UncheckedExtrinsic, + Block, + NodeBlock, + UncheckedExtrinsic, } #[derive(Debug)] pub struct WhereDefinition { - pub kind_span: Span, - pub kind: WhereKind, - pub value: syn::TypePath, + pub kind_span: Span, + pub kind: WhereKind, + pub value: syn::TypePath, } impl Parse for WhereDefinition { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - let (kind_span, kind) = if lookahead.peek(keyword::Block) { - (input.parse::()?.span(), WhereKind::Block) - } else if lookahead.peek(keyword::NodeBlock) { - (input.parse::()?.span(), WhereKind::NodeBlock) - } else if lookahead.peek(keyword::UncheckedExtrinsic) { - (input.parse::()?.span(), WhereKind::UncheckedExtrinsic) - } else { - return Err(lookahead.error()) - }; - - Ok(Self { - kind_span, - kind, - value: { - let _: Token![=] = input.parse()?; - input.parse()? - }, - }) - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + let (kind_span, kind) = if lookahead.peek(keyword::Block) { + (input.parse::()?.span(), WhereKind::Block) + } else if lookahead.peek(keyword::NodeBlock) { + ( + input.parse::()?.span(), + WhereKind::NodeBlock, + ) + } else if lookahead.peek(keyword::UncheckedExtrinsic) { + ( + input.parse::()?.span(), + WhereKind::UncheckedExtrinsic, + ) + } else { + return Err(lookahead.error()); + }; + + Ok(Self { + kind_span, + kind, + value: { + let _: Token![=] = input.parse()?; + input.parse()? + }, + }) + } } /// The declaration of a pallet. #[derive(Debug, Clone)] pub struct PalletDeclaration { - /// Is this pallet fully expanded? - pub is_expanded: bool, - /// The name of the pallet, e.g.`System` in `System: frame_system`. 
- pub name: Ident, - /// Optional attributes tagged right above a pallet declaration. - pub attrs: Vec, - /// Optional fixed index, e.g. `MyPallet ... = 3,`. - pub index: Option, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: PalletPath, - /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. - pub instance: Option, - /// The declared pallet parts, - /// e.g. `Some([Pallet, Call])` for `System: system::{Pallet, Call}` - /// or `None` for `System: system`. - pub pallet_parts: Option>, - /// The specified parts, either use_parts or exclude_parts. - pub specified_parts: SpecifiedParts, + /// Is this pallet fully expanded? + pub is_expanded: bool, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Optional attributes tagged right above a pallet declaration. + pub attrs: Vec, + /// Optional fixed index, e.g. `MyPallet ... = 3,`. + pub index: Option, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: PalletPath, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, + /// The declared pallet parts, + /// e.g. `Some([Pallet, Call])` for `System: system::{Pallet, Call}` + /// or `None` for `System: system`. + pub pallet_parts: Option>, + /// The specified parts, either use_parts or exclude_parts. + pub specified_parts: SpecifiedParts, } /// The possible declaration of pallet parts to use. #[derive(Debug, Clone)] pub enum SpecifiedParts { - /// Use all the pallet parts except those specified. - Exclude(Vec), - /// Use only the specified pallet parts. - Use(Vec), - /// Use the all the pallet parts. - All, + /// Use all the pallet parts except those specified. + Exclude(Vec), + /// Use only the specified pallet parts. + Use(Vec), + /// Use the all the pallet parts. 
+ All, } impl Parse for PalletDeclaration { - fn parse(input: ParseStream) -> Result { - let attrs = input.call(Attribute::parse_outer)?; - - let name = input.parse()?; - let _: Token![:] = input.parse()?; - let path = input.parse()?; - - // Parse for instance. - let instance = if input.peek(Token![::]) && input.peek3(Token![<]) { - let _: Token![::] = input.parse()?; - let _: Token![<] = input.parse()?; - let res = Some(input.parse()?); - let _: Token![>] = input.parse()?; - res - } else if !(input.peek(Token![::]) && input.peek3(token::Brace)) && - !input.peek(keyword::expanded) && - !input.peek(keyword::exclude_parts) && - !input.peek(keyword::use_parts) && - !input.peek(Token![=]) && - !input.peek(Token![,]) && - !input.is_empty() - { - return Err(input.error( + fn parse(input: ParseStream) -> Result { + let attrs = input.call(Attribute::parse_outer)?; + + let name = input.parse()?; + let _: Token![:] = input.parse()?; + let path = input.parse()?; + + // Parse for instance. + let instance = if input.peek(Token![::]) && input.peek3(Token![<]) { + let _: Token![::] = input.parse()?; + let _: Token![<] = input.parse()?; + let res = Some(input.parse()?); + let _: Token![>] = input.parse()?; + res + } else if !(input.peek(Token![::]) && input.peek3(token::Brace)) + && !input.peek(keyword::expanded) + && !input.peek(keyword::exclude_parts) + && !input.peek(keyword::use_parts) + && !input.peek(Token![=]) + && !input.peek(Token![,]) + && !input.is_empty() + { + return Err(input.error( "Unexpected tokens, expected one of `::$ident` `::{`, `exclude_parts`, `use_parts`, `=`, `,`", )); - } else { - None - }; - - // Check if the pallet is fully expanded. - let (is_expanded, extra_parts) = if input.peek(keyword::expanded) { - let _: keyword::expanded = input.parse()?; - let _: Token![::] = input.parse()?; - (true, parse_pallet_parts(input)?) 
- } else { - (false, vec![]) - }; - - // Parse for explicit parts - let pallet_parts = if input.peek(Token![::]) && input.peek3(token::Brace) { - let _: Token![::] = input.parse()?; - let mut parts = parse_pallet_parts(input)?; - parts.extend(extra_parts.into_iter()); - Some(parts) - } else if !input.peek(keyword::exclude_parts) && - !input.peek(keyword::use_parts) && - !input.peek(Token![=]) && - !input.peek(Token![,]) && - !input.is_empty() - { - return Err(input.error( - "Unexpected tokens, expected one of `::{`, `exclude_parts`, `use_parts`, `=`, `,`", - )) - } else { - is_expanded.then_some(extra_parts) - }; - - // Parse for specified parts - let specified_parts = if input.peek(keyword::exclude_parts) { - let _: keyword::exclude_parts = input.parse()?; - SpecifiedParts::Exclude(parse_pallet_parts_no_generic(input)?) - } else if input.peek(keyword::use_parts) { - let _: keyword::use_parts = input.parse()?; - SpecifiedParts::Use(parse_pallet_parts_no_generic(input)?) - } else if !input.peek(Token![=]) && !input.peek(Token![,]) && !input.is_empty() { - return Err(input.error("Unexpected tokens, expected one of `exclude_parts`, `=`, `,`")) - } else { - SpecifiedParts::All - }; - - // Parse for pallet index - let index = if input.peek(Token![=]) { - input.parse::()?; - let index = input.parse::()?; - let index = index.base10_parse::()?; - Some(index) - } else if !input.peek(Token![,]) && !input.is_empty() { - return Err(input.error("Unexpected tokens, expected one of `=`, `,`")) - } else { - None - }; - - Ok(Self { is_expanded, attrs, name, path, instance, pallet_parts, specified_parts, index }) - } + } else { + None + }; + + // Check if the pallet is fully expanded. + let (is_expanded, extra_parts) = if input.peek(keyword::expanded) { + let _: keyword::expanded = input.parse()?; + let _: Token![::] = input.parse()?; + (true, parse_pallet_parts(input)?) 
+ } else { + (false, vec![]) + }; + + // Parse for explicit parts + let pallet_parts = if input.peek(Token![::]) && input.peek3(token::Brace) { + let _: Token![::] = input.parse()?; + let mut parts = parse_pallet_parts(input)?; + parts.extend(extra_parts.into_iter()); + Some(parts) + } else if !input.peek(keyword::exclude_parts) + && !input.peek(keyword::use_parts) + && !input.peek(Token![=]) + && !input.peek(Token![,]) + && !input.is_empty() + { + return Err(input.error( + "Unexpected tokens, expected one of `::{`, `exclude_parts`, `use_parts`, `=`, `,`", + )); + } else { + is_expanded.then_some(extra_parts) + }; + + // Parse for specified parts + let specified_parts = if input.peek(keyword::exclude_parts) { + let _: keyword::exclude_parts = input.parse()?; + SpecifiedParts::Exclude(parse_pallet_parts_no_generic(input)?) + } else if input.peek(keyword::use_parts) { + let _: keyword::use_parts = input.parse()?; + SpecifiedParts::Use(parse_pallet_parts_no_generic(input)?) + } else if !input.peek(Token![=]) && !input.peek(Token![,]) && !input.is_empty() { + return Err(input.error("Unexpected tokens, expected one of `exclude_parts`, `=`, `,`")); + } else { + SpecifiedParts::All + }; + + // Parse for pallet index + let index = if input.peek(Token![=]) { + input.parse::()?; + let index = input.parse::()?; + let index = index.base10_parse::()?; + Some(index) + } else if !input.peek(Token![,]) && !input.is_empty() { + return Err(input.error("Unexpected tokens, expected one of `=`, `,`")); + } else { + None + }; + + Ok(Self { + is_expanded, + attrs, + name, + path, + instance, + pallet_parts, + specified_parts, + index, + }) + } } /// A struct representing a path to a pallet. 
`PalletPath` is almost identical to the standard @@ -325,303 +357,312 @@ impl Parse for PalletDeclaration { /// - Path segments can only consist of identifiers separated by colons #[derive(Debug, Clone)] pub struct PalletPath { - pub inner: Path, + pub inner: Path, } impl PalletPath { - pub fn module_name(&self) -> String { - self.inner.segments.iter().fold(String::new(), |mut acc, segment| { - if !acc.is_empty() { - acc.push_str("::"); - } - acc.push_str(&segment.ident.to_string()); - acc - }) - } + pub fn module_name(&self) -> String { + self.inner + .segments + .iter() + .fold(String::new(), |mut acc, segment| { + if !acc.is_empty() { + acc.push_str("::"); + } + acc.push_str(&segment.ident.to_string()); + acc + }) + } } impl Parse for PalletPath { - fn parse(input: ParseStream) -> Result { - let mut res = - PalletPath { inner: Path { leading_colon: None, segments: Punctuated::new() } }; - - let lookahead = input.lookahead1(); - if lookahead.peek(Token![crate]) || - lookahead.peek(Token![self]) || - lookahead.peek(Token![super]) || - lookahead.peek(Ident) - { - let ident = input.call(Ident::parse_any)?; - res.inner.segments.push(ident.into()); - } else { - return Err(lookahead.error()) - } - - while input.peek(Token![::]) && input.peek3(Ident) { - input.parse::()?; - let ident = input.parse::()?; - res.inner.segments.push(ident.into()); - } - Ok(res) - } + fn parse(input: ParseStream) -> Result { + let mut res = PalletPath { + inner: Path { + leading_colon: None, + segments: Punctuated::new(), + }, + }; + + let lookahead = input.lookahead1(); + if lookahead.peek(Token![crate]) + || lookahead.peek(Token![self]) + || lookahead.peek(Token![super]) + || lookahead.peek(Ident) + { + let ident = input.call(Ident::parse_any)?; + res.inner.segments.push(ident.into()); + } else { + return Err(lookahead.error()); + } + + while input.peek(Token![::]) && input.peek3(Ident) { + input.parse::()?; + let ident = input.parse::()?; + res.inner.segments.push(ident.into()); + } + 
Ok(res) + } } impl quote::ToTokens for PalletPath { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.inner.to_tokens(tokens); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + self.inner.to_tokens(tokens); + } } /// Parse [`PalletPart`]'s from a braces enclosed list that is split by commas, e.g. /// /// `{ Call, Event }` fn parse_pallet_parts(input: ParseStream) -> Result> { - let pallet_parts: ext::Braces> = input.parse()?; + let pallet_parts: ext::Braces> = input.parse()?; - let mut resolved = HashSet::new(); - for part in pallet_parts.content.inner.iter() { - if !resolved.insert(part.name()) { - let msg = format!( - "`{}` was already declared before. Please remove the duplicate declaration", - part.name(), - ); - return Err(Error::new(part.keyword.span(), msg)) - } - } + let mut resolved = HashSet::new(); + for part in pallet_parts.content.inner.iter() { + if !resolved.insert(part.name()) { + let msg = format!( + "`{}` was already declared before. Please remove the duplicate declaration", + part.name(), + ); + return Err(Error::new(part.keyword.span(), msg)); + } + } - Ok(pallet_parts.content.inner.into_iter().collect()) + Ok(pallet_parts.content.inner.into_iter().collect()) } #[derive(Debug, Clone)] pub enum PalletPartKeyword { - Pallet(keyword::Pallet), - Call(keyword::Call), - Storage(keyword::Storage), - Event(keyword::Event), - Error(keyword::Error), - Config(keyword::Config), - Origin(keyword::Origin), - Inherent(keyword::Inherent), - ValidateUnsigned(keyword::ValidateUnsigned), - FreezeReason(keyword::FreezeReason), - HoldReason(keyword::HoldReason), - Task(keyword::Task), - LockId(keyword::LockId), - SlashReason(keyword::SlashReason), + Pallet(keyword::Pallet), + Call(keyword::Call), + Storage(keyword::Storage), + Event(keyword::Event), + Error(keyword::Error), + Config(keyword::Config), + Origin(keyword::Origin), + Inherent(keyword::Inherent), + ValidateUnsigned(keyword::ValidateUnsigned), + FreezeReason(keyword::FreezeReason), + 
HoldReason(keyword::HoldReason), + Task(keyword::Task), + LockId(keyword::LockId), + SlashReason(keyword::SlashReason), } impl Parse for PalletPartKeyword { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - - if lookahead.peek(keyword::Pallet) { - Ok(Self::Pallet(input.parse()?)) - } else if lookahead.peek(keyword::Call) { - Ok(Self::Call(input.parse()?)) - } else if lookahead.peek(keyword::Storage) { - Ok(Self::Storage(input.parse()?)) - } else if lookahead.peek(keyword::Event) { - Ok(Self::Event(input.parse()?)) - } else if lookahead.peek(keyword::Error) { - Ok(Self::Error(input.parse()?)) - } else if lookahead.peek(keyword::Config) { - Ok(Self::Config(input.parse()?)) - } else if lookahead.peek(keyword::Origin) { - Ok(Self::Origin(input.parse()?)) - } else if lookahead.peek(keyword::Inherent) { - Ok(Self::Inherent(input.parse()?)) - } else if lookahead.peek(keyword::ValidateUnsigned) { - Ok(Self::ValidateUnsigned(input.parse()?)) - } else if lookahead.peek(keyword::FreezeReason) { - Ok(Self::FreezeReason(input.parse()?)) - } else if lookahead.peek(keyword::HoldReason) { - Ok(Self::HoldReason(input.parse()?)) - } else if lookahead.peek(keyword::Task) { - Ok(Self::Task(input.parse()?)) - } else if lookahead.peek(keyword::LockId) { - Ok(Self::LockId(input.parse()?)) - } else if lookahead.peek(keyword::SlashReason) { - Ok(Self::SlashReason(input.parse()?)) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(keyword::Pallet) { + Ok(Self::Pallet(input.parse()?)) + } else if lookahead.peek(keyword::Call) { + Ok(Self::Call(input.parse()?)) + } else if lookahead.peek(keyword::Storage) { + Ok(Self::Storage(input.parse()?)) + } else if lookahead.peek(keyword::Event) { + Ok(Self::Event(input.parse()?)) + } else if lookahead.peek(keyword::Error) { + Ok(Self::Error(input.parse()?)) + } else if lookahead.peek(keyword::Config) { + 
Ok(Self::Config(input.parse()?)) + } else if lookahead.peek(keyword::Origin) { + Ok(Self::Origin(input.parse()?)) + } else if lookahead.peek(keyword::Inherent) { + Ok(Self::Inherent(input.parse()?)) + } else if lookahead.peek(keyword::ValidateUnsigned) { + Ok(Self::ValidateUnsigned(input.parse()?)) + } else if lookahead.peek(keyword::FreezeReason) { + Ok(Self::FreezeReason(input.parse()?)) + } else if lookahead.peek(keyword::HoldReason) { + Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(keyword::Task) { + Ok(Self::Task(input.parse()?)) + } else if lookahead.peek(keyword::LockId) { + Ok(Self::LockId(input.parse()?)) + } else if lookahead.peek(keyword::SlashReason) { + Ok(Self::SlashReason(input.parse()?)) + } else { + Err(lookahead.error()) + } + } } impl PalletPartKeyword { - /// Returns the name of `Self`. - fn name(&self) -> &'static str { - match self { - Self::Pallet(_) => "Pallet", - Self::Call(_) => "Call", - Self::Storage(_) => "Storage", - Self::Event(_) => "Event", - Self::Error(_) => "Error", - Self::Config(_) => "Config", - Self::Origin(_) => "Origin", - Self::Inherent(_) => "Inherent", - Self::ValidateUnsigned(_) => "ValidateUnsigned", - Self::FreezeReason(_) => "FreezeReason", - Self::HoldReason(_) => "HoldReason", - Self::Task(_) => "Task", - Self::LockId(_) => "LockId", - Self::SlashReason(_) => "SlashReason", - } - } - - /// Returns `true` if this pallet part is allowed to have generic arguments. - fn allows_generic(&self) -> bool { - Self::all_generic_arg().iter().any(|n| *n == self.name()) - } - - /// Returns the names of all pallet parts that allow to have a generic argument. - fn all_generic_arg() -> &'static [&'static str] { - &["Event", "Error", "Origin", "Config", "Task"] - } + /// Returns the name of `Self`. 
+ fn name(&self) -> &'static str { + match self { + Self::Pallet(_) => "Pallet", + Self::Call(_) => "Call", + Self::Storage(_) => "Storage", + Self::Event(_) => "Event", + Self::Error(_) => "Error", + Self::Config(_) => "Config", + Self::Origin(_) => "Origin", + Self::Inherent(_) => "Inherent", + Self::ValidateUnsigned(_) => "ValidateUnsigned", + Self::FreezeReason(_) => "FreezeReason", + Self::HoldReason(_) => "HoldReason", + Self::Task(_) => "Task", + Self::LockId(_) => "LockId", + Self::SlashReason(_) => "SlashReason", + } + } + + /// Returns `true` if this pallet part is allowed to have generic arguments. + fn allows_generic(&self) -> bool { + Self::all_generic_arg().iter().any(|n| *n == self.name()) + } + + /// Returns the names of all pallet parts that allow to have a generic argument. + fn all_generic_arg() -> &'static [&'static str] { + &["Event", "Error", "Origin", "Config", "Task"] + } } impl ToTokens for PalletPartKeyword { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - Self::Pallet(inner) => inner.to_tokens(tokens), - Self::Call(inner) => inner.to_tokens(tokens), - Self::Storage(inner) => inner.to_tokens(tokens), - Self::Event(inner) => inner.to_tokens(tokens), - Self::Error(inner) => inner.to_tokens(tokens), - Self::Config(inner) => inner.to_tokens(tokens), - Self::Origin(inner) => inner.to_tokens(tokens), - Self::Inherent(inner) => inner.to_tokens(tokens), - Self::ValidateUnsigned(inner) => inner.to_tokens(tokens), - Self::FreezeReason(inner) => inner.to_tokens(tokens), - Self::HoldReason(inner) => inner.to_tokens(tokens), - Self::Task(inner) => inner.to_tokens(tokens), - Self::LockId(inner) => inner.to_tokens(tokens), - Self::SlashReason(inner) => inner.to_tokens(tokens), - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Pallet(inner) => inner.to_tokens(tokens), + Self::Call(inner) => inner.to_tokens(tokens), + Self::Storage(inner) => inner.to_tokens(tokens), + Self::Event(inner) => 
inner.to_tokens(tokens), + Self::Error(inner) => inner.to_tokens(tokens), + Self::Config(inner) => inner.to_tokens(tokens), + Self::Origin(inner) => inner.to_tokens(tokens), + Self::Inherent(inner) => inner.to_tokens(tokens), + Self::ValidateUnsigned(inner) => inner.to_tokens(tokens), + Self::FreezeReason(inner) => inner.to_tokens(tokens), + Self::HoldReason(inner) => inner.to_tokens(tokens), + Self::Task(inner) => inner.to_tokens(tokens), + Self::LockId(inner) => inner.to_tokens(tokens), + Self::SlashReason(inner) => inner.to_tokens(tokens), + } + } } #[derive(Debug, Clone)] pub struct PalletPart { - pub keyword: PalletPartKeyword, - pub generics: syn::Generics, + pub keyword: PalletPartKeyword, + pub generics: syn::Generics, } impl Parse for PalletPart { - fn parse(input: ParseStream) -> Result { - let keyword: PalletPartKeyword = input.parse()?; - - let generics: syn::Generics = input.parse()?; - if !generics.params.is_empty() && !keyword.allows_generic() { - let valid_generics = PalletPart::format_names(PalletPartKeyword::all_generic_arg()); - let msg = format!( - "`{}` is not allowed to have generics. \ + fn parse(input: ParseStream) -> Result { + let keyword: PalletPartKeyword = input.parse()?; + + let generics: syn::Generics = input.parse()?; + if !generics.params.is_empty() && !keyword.allows_generic() { + let valid_generics = PalletPart::format_names(PalletPartKeyword::all_generic_arg()); + let msg = format!( + "`{}` is not allowed to have generics. 
\ Only the following pallets are allowed to have generics: {}.", - keyword.name(), - valid_generics, - ); - return Err(syn::Error::new(keyword.span(), msg)) - } + keyword.name(), + valid_generics, + ); + return Err(syn::Error::new(keyword.span(), msg)); + } - Ok(Self { keyword, generics }) - } + Ok(Self { keyword, generics }) + } } impl PalletPart { - pub fn format_names(names: &[&'static str]) -> String { - let res: Vec<_> = names.iter().map(|s| format!("`{}`", s)).collect(); - res.join(", ") - } + pub fn format_names(names: &[&'static str]) -> String { + let res: Vec<_> = names.iter().map(|s| format!("`{}`", s)).collect(); + res.join(", ") + } - /// The name of this pallet part. - pub fn name(&self) -> &'static str { - self.keyword.name() - } + /// The name of this pallet part. + pub fn name(&self) -> &'static str { + self.keyword.name() + } } fn remove_kind( - input: ParseStream, - kind: WhereKind, - definitions: &mut Vec, + input: ParseStream, + kind: WhereKind, + definitions: &mut Vec, ) -> Result { - if let Some(pos) = definitions.iter().position(|d| d.kind == kind) { - Ok(definitions.remove(pos)) - } else { - let msg = format!( - "Missing associated type for `{:?}`. Add `{:?}` = ... to where section.", - kind, kind - ); - Err(input.error(msg)) - } + if let Some(pos) = definitions.iter().position(|d| d.kind == kind) { + Ok(definitions.remove(pos)) + } else { + let msg = format!( + "Missing associated type for `{:?}`. Add `{:?}` = ... to where section.", + kind, kind + ); + Err(input.error(msg)) + } } /// The declaration of a part without its generics #[derive(Debug, Clone)] pub struct PalletPartNoGeneric { - keyword: PalletPartKeyword, + keyword: PalletPartKeyword, } impl Parse for PalletPartNoGeneric { - fn parse(input: ParseStream) -> Result { - Ok(Self { keyword: input.parse()? 
}) - } + fn parse(input: ParseStream) -> Result { + Ok(Self { + keyword: input.parse()?, + }) + } } /// Parse [`PalletPartNoGeneric`]'s from a braces enclosed list that is split by commas, e.g. /// /// `{ Call, Event }` fn parse_pallet_parts_no_generic(input: ParseStream) -> Result> { - let pallet_parts: ext::Braces> = - input.parse()?; + let pallet_parts: ext::Braces> = + input.parse()?; - let mut resolved = HashSet::new(); - for part in pallet_parts.content.inner.iter() { - if !resolved.insert(part.keyword.name()) { - let msg = format!( - "`{}` was already declared before. Please remove the duplicate declaration", - part.keyword.name(), - ); - return Err(Error::new(part.keyword.span(), msg)) - } - } + let mut resolved = HashSet::new(); + for part in pallet_parts.content.inner.iter() { + if !resolved.insert(part.keyword.name()) { + let msg = format!( + "`{}` was already declared before. Please remove the duplicate declaration", + part.keyword.name(), + ); + return Err(Error::new(part.keyword.span(), msg)); + } + } - Ok(pallet_parts.content.inner.into_iter().collect()) + Ok(pallet_parts.content.inner.into_iter().collect()) } /// The final definition of a pallet with the resulting fixed index and explicit parts. #[derive(Debug, Clone)] pub struct Pallet { - /// Is this pallet fully expanded? - pub is_expanded: bool, - /// The name of the pallet, e.g.`System` in `System: frame_system`. - pub name: Ident, - /// Either automatically inferred, or defined (e.g. `MyPallet ... = 3,`). - pub index: u8, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: PalletPath, - /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. - pub instance: Option, - /// The pallet parts to use for the pallet. - pub pallet_parts: Vec, - /// Expressions specified inside of a #[cfg] attribute. - pub cfg_pattern: Vec, + /// Is this pallet fully expanded? 
+ pub is_expanded: bool, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Either automatically inferred, or defined (e.g. `MyPallet ... = 3,`). + pub index: u8, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: PalletPath, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, + /// The pallet parts to use for the pallet. + pub pallet_parts: Vec, + /// Expressions specified inside of a #[cfg] attribute. + pub cfg_pattern: Vec, } impl Pallet { - /// Get resolved pallet parts - pub fn pallet_parts(&self) -> &[PalletPart] { - &self.pallet_parts - } + /// Get resolved pallet parts + pub fn pallet_parts(&self) -> &[PalletPart] { + &self.pallet_parts + } - /// Find matching parts - pub fn find_part(&self, name: &str) -> Option<&PalletPart> { - self.pallet_parts.iter().find(|part| part.name() == name) - } + /// Find matching parts + pub fn find_part(&self, name: &str) -> Option<&PalletPart> { + self.pallet_parts.iter().find(|part| part.name() == name) + } - /// Return whether pallet contains part - pub fn exists_part(&self, name: &str) -> bool { - self.find_part(name).is_some() - } + /// Return whether pallet contains part + pub fn exists_part(&self, name: &str) -> bool { + self.find_part(name).is_some() + } } /// Result of a conversion of a declaration of pallets. @@ -634,26 +675,26 @@ impl Pallet { /// +----------+ +----------+ +------------------+ /// ``` enum PalletsConversion { - /// Pallets implicitly declare parts. - /// - /// `System: frame_system`. - Implicit(Vec), - /// Pallets explicitly declare parts. - /// - /// `System: frame_system::{Pallet, Call}` - /// - /// However, for backwards compatibility with Polkadot/Kusama - /// we must propagate some other parts to the pallet by default. - Explicit(Vec), - /// Pallets explicitly declare parts that are fully expanded. 
- /// - /// This is the end state that contains extra parts included by - /// default by Substrate. - /// - /// `System: frame_system expanded::{Error} ::{Pallet, Call}` - /// - /// For this example, the `Pallet`, `Call` and `Error` parts are collected. - ExplicitExpanded(Vec), + /// Pallets implicitly declare parts. + /// + /// `System: frame_system`. + Implicit(Vec), + /// Pallets explicitly declare parts. + /// + /// `System: frame_system::{Pallet, Call}` + /// + /// However, for backwards compatibility with Polkadot/Kusama + /// we must propagate some other parts to the pallet by default. + Explicit(Vec), + /// Pallets explicitly declare parts that are fully expanded. + /// + /// This is the end state that contains extra parts included by + /// default by Substrate. + /// + /// `System: frame_system expanded::{Error} ::{Pallet, Call}` + /// + /// For this example, the `Pallet`, `Call` and `Error` parts are collected. + ExplicitExpanded(Vec), } /// Convert from the parsed pallet declaration to their final information. @@ -662,125 +703,137 @@ enum PalletsConversion { /// pallet using same rules as rust for fieldless enum. I.e. implicit are assigned number /// incrementally from last explicit or 0. 
fn convert_pallets(pallets: Vec) -> syn::Result { - if pallets.iter().any(|pallet| pallet.pallet_parts.is_none()) { - return Ok(PalletsConversion::Implicit(pallets)) - } - - let mut indices = HashMap::new(); - let mut last_index: Option = None; - let mut names = HashMap::new(); - let mut is_expanded = true; - - let pallets = pallets - .into_iter() - .map(|pallet| { - let final_index = match pallet.index { - Some(i) => i, - None => last_index.map_or(Some(0), |i| i.checked_add(1)).ok_or_else(|| { - let msg = "Pallet index doesn't fit into u8, index is 256"; - syn::Error::new(pallet.name.span(), msg) - })?, - }; - - last_index = Some(final_index); - - if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { - let msg = format!( - "Pallet indices are conflicting: Both pallets {} and {} are at index {}", - used_pallet, pallet.name, final_index, - ); - let mut err = syn::Error::new(used_pallet.span(), &msg); - err.combine(syn::Error::new(pallet.name.span(), msg)); - return Err(err) - } - - if let Some(used_pallet) = names.insert(pallet.name.clone(), pallet.name.span()) { - let msg = "Two pallets with the same name!"; - - let mut err = syn::Error::new(used_pallet, &msg); - err.combine(syn::Error::new(pallet.name.span(), &msg)); - return Err(err) - } - - let mut pallet_parts = pallet.pallet_parts.expect("Checked above"); - - let available_parts = - pallet_parts.iter().map(|part| part.keyword.name()).collect::>(); - - // Check parts are correctly specified - match &pallet.specified_parts { - SpecifiedParts::Exclude(parts) | SpecifiedParts::Use(parts) => - for part in parts { - if !available_parts.contains(part.keyword.name()) { - let msg = format!( - "Invalid pallet part specified, the pallet `{}` doesn't have the \ + if pallets.iter().any(|pallet| pallet.pallet_parts.is_none()) { + return Ok(PalletsConversion::Implicit(pallets)); + } + + let mut indices = HashMap::new(); + let mut last_index: Option = None; + let mut names = HashMap::new(); + let mut 
is_expanded = true; + + let pallets = pallets + .into_iter() + .map(|pallet| { + let final_index = match pallet.index { + Some(i) => i, + None => last_index + .map_or(Some(0), |i| i.checked_add(1)) + .ok_or_else(|| { + let msg = "Pallet index doesn't fit into u8, index is 256"; + syn::Error::new(pallet.name.span(), msg) + })?, + }; + + last_index = Some(final_index); + + if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { + let msg = format!( + "Pallet indices are conflicting: Both pallets {} and {} are at index {}", + used_pallet, pallet.name, final_index, + ); + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err); + } + + if let Some(used_pallet) = names.insert(pallet.name.clone(), pallet.name.span()) { + let msg = "Two pallets with the same name!"; + + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet.name.span(), &msg)); + return Err(err); + } + + let mut pallet_parts = pallet.pallet_parts.expect("Checked above"); + + let available_parts = pallet_parts + .iter() + .map(|part| part.keyword.name()) + .collect::>(); + + // Check parts are correctly specified + match &pallet.specified_parts { + SpecifiedParts::Exclude(parts) | SpecifiedParts::Use(parts) => { + for part in parts { + if !available_parts.contains(part.keyword.name()) { + let msg = format!( + "Invalid pallet part specified, the pallet `{}` doesn't have the \ `{}` part. Available parts are: {}.", - pallet.name, - part.keyword.name(), - pallet_parts.iter().fold(String::new(), |fold, part| { - if fold.is_empty() { - format!("`{}`", part.keyword.name()) - } else { - format!("{}, `{}`", fold, part.keyword.name()) - } - }) - ); - return Err(syn::Error::new(part.keyword.span(), msg)) - } - }, - SpecifiedParts::All => (), - } - - // Set only specified parts. 
- match pallet.specified_parts { - SpecifiedParts::Exclude(excluded_parts) => pallet_parts.retain(|part| { - !excluded_parts - .iter() - .any(|excluded_part| excluded_part.keyword.name() == part.keyword.name()) - }), - SpecifiedParts::Use(used_parts) => pallet_parts.retain(|part| { - used_parts.iter().any(|use_part| use_part.keyword.name() == part.keyword.name()) - }), - SpecifiedParts::All => (), - } - - let cfg_pattern = pallet - .attrs - .iter() - .map(|attr| { - if attr.path().segments.first().map_or(false, |s| s.ident != "cfg") { - let msg = "Unsupported attribute, only #[cfg] is supported on pallet \ + pallet.name, + part.keyword.name(), + pallet_parts.iter().fold(String::new(), |fold, part| { + if fold.is_empty() { + format!("`{}`", part.keyword.name()) + } else { + format!("{}, `{}`", fold, part.keyword.name()) + } + }) + ); + return Err(syn::Error::new(part.keyword.span(), msg)); + } + } + } + SpecifiedParts::All => (), + } + + // Set only specified parts. + match pallet.specified_parts { + SpecifiedParts::Exclude(excluded_parts) => pallet_parts.retain(|part| { + !excluded_parts + .iter() + .any(|excluded_part| excluded_part.keyword.name() == part.keyword.name()) + }), + SpecifiedParts::Use(used_parts) => pallet_parts.retain(|part| { + used_parts + .iter() + .any(|use_part| use_part.keyword.name() == part.keyword.name()) + }), + SpecifiedParts::All => (), + } + + let cfg_pattern = pallet + .attrs + .iter() + .map(|attr| { + if attr + .path() + .segments + .first() + .map_or(false, |s| s.ident != "cfg") + { + let msg = "Unsupported attribute, only #[cfg] is supported on pallet \ declarations in `construct_runtime`"; - return Err(syn::Error::new(attr.span(), msg)) - } - - attr.parse_args_with(|input: syn::parse::ParseStream| { - // Required, otherwise the parse stream doesn't advance and will result in - // an error. 
- let input = input.parse::()?; - cfg_expr::Expression::parse(&input.to_string()) - .map_err(|e| syn::Error::new(attr.span(), e.to_string())) - }) - }) - .collect::>>()?; - - is_expanded &= pallet.is_expanded; - - Ok(Pallet { - is_expanded: pallet.is_expanded, - name: pallet.name, - index: final_index, - path: pallet.path, - instance: pallet.instance, - cfg_pattern, - pallet_parts, - }) - }) - .collect::>>()?; - - if is_expanded { - Ok(PalletsConversion::ExplicitExpanded(pallets)) - } else { - Ok(PalletsConversion::Explicit(pallets)) - } + return Err(syn::Error::new(attr.span(), msg)); + } + + attr.parse_args_with(|input: syn::parse::ParseStream| { + // Required, otherwise the parse stream doesn't advance and will result in + // an error. + let input = input.parse::()?; + cfg_expr::Expression::parse(&input.to_string()) + .map_err(|e| syn::Error::new(attr.span(), e.to_string())) + }) + }) + .collect::>>()?; + + is_expanded &= pallet.is_expanded; + + Ok(Pallet { + is_expanded: pallet.is_expanded, + name: pallet.name, + index: final_index, + path: pallet.path, + instance: pallet.instance, + cfg_pattern, + pallet_parts, + }) + }) + .collect::>>()?; + + if is_expanded { + Ok(PalletsConversion::ExplicitExpanded(pallets)) + } else { + Ok(PalletsConversion::Explicit(pallets)) + } } diff --git a/support/procedural-fork/src/crate_version.rs b/support/procedural-fork/src/crate_version.rs index 8c8975a42..63e7c7279 100644 --- a/support/procedural-fork/src/crate_version.rs +++ b/support/procedural-fork/src/crate_version.rs @@ -24,31 +24,31 @@ use syn::{Error, Result}; /// Create an error that will be shown by rustc at the call site of the macro. fn create_error(message: &str) -> Error { - Error::new(Span::call_site(), message) + Error::new(Span::call_site(), message) } /// Implementation of the `crate_to_crate_version!` macro. 
pub fn crate_to_crate_version(input: proc_macro::TokenStream) -> Result { - if !input.is_empty() { - return Err(create_error("No arguments expected!")) - } + if !input.is_empty() { + return Err(create_error("No arguments expected!")); + } - let major_version = get_cargo_env_var::("CARGO_PKG_VERSION_MAJOR") - .map_err(|_| create_error("Major version needs to fit into `u16`"))?; + let major_version = get_cargo_env_var::("CARGO_PKG_VERSION_MAJOR") + .map_err(|_| create_error("Major version needs to fit into `u16`"))?; - let minor_version = get_cargo_env_var::("CARGO_PKG_VERSION_MINOR") - .map_err(|_| create_error("Minor version needs to fit into `u8`"))?; + let minor_version = get_cargo_env_var::("CARGO_PKG_VERSION_MINOR") + .map_err(|_| create_error("Minor version needs to fit into `u8`"))?; - let patch_version = get_cargo_env_var::("CARGO_PKG_VERSION_PATCH") - .map_err(|_| create_error("Patch version needs to fit into `u8`"))?; + let patch_version = get_cargo_env_var::("CARGO_PKG_VERSION_PATCH") + .map_err(|_| create_error("Patch version needs to fit into `u8`"))?; - let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let crate_ = generate_access_from_frame_or_crate("frame-support")?; - Ok(quote::quote! { - #crate_::traits::CrateVersion { - major: #major_version, - minor: #minor_version, - patch: #patch_version, - } - }) + Ok(quote::quote! 
{ + #crate_::traits::CrateVersion { + major: #major_version, + minor: #minor_version, + patch: #patch_version, + } + }) } diff --git a/support/procedural-fork/src/dummy_part_checker.rs b/support/procedural-fork/src/dummy_part_checker.rs index 34d9a3e23..6bed541d1 100644 --- a/support/procedural-fork/src/dummy_part_checker.rs +++ b/support/procedural-fork/src/dummy_part_checker.rs @@ -19,61 +19,63 @@ use crate::COUNTER; use proc_macro::TokenStream; pub fn generate_dummy_part_checker(input: TokenStream) -> TokenStream { - if !input.is_empty() { - return syn::Error::new(proc_macro2::Span::call_site(), "No arguments expected") - .to_compile_error() - .into() - } + if !input.is_empty() { + return syn::Error::new(proc_macro2::Span::call_site(), "No arguments expected") + .to_compile_error() + .into(); + } - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let no_op_macro_ident = - syn::Ident::new(&format!("__dummy_part_checker_{}", count), proc_macro2::Span::call_site()); + let no_op_macro_ident = syn::Ident::new( + &format!("__dummy_part_checker_{}", count), + proc_macro2::Span::call_site(), + ); - quote::quote!( - #[macro_export] - #[doc(hidden)] - macro_rules! #no_op_macro_ident { - ( $( $tt:tt )* ) => {}; - } + quote::quote!( + #[macro_export] + #[doc(hidden)] + macro_rules! 
#no_op_macro_ident { + ( $( $tt:tt )* ) => {}; + } - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #no_op_macro_ident as is_std_enabled_for_genesis; - } + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #no_op_macro_ident as is_std_enabled_for_genesis; + } - #[doc(hidden)] - pub mod __substrate_event_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_event_part_defined; - } + #[doc(hidden)] + pub mod __substrate_event_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_event_part_defined; + } - #[doc(hidden)] - pub mod __substrate_inherent_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_inherent_part_defined; - } + #[doc(hidden)] + pub mod __substrate_inherent_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_inherent_part_defined; + } - #[doc(hidden)] - pub mod __substrate_validate_unsigned_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_validate_unsigned_part_defined; - } + #[doc(hidden)] + pub mod __substrate_validate_unsigned_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_validate_unsigned_part_defined; + } - #[doc(hidden)] - pub mod __substrate_call_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_call_part_defined; - } + #[doc(hidden)] + pub mod __substrate_call_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_call_part_defined; + } - #[doc(hidden)] - pub mod __substrate_origin_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_origin_part_defined; - } - ) - .into() + #[doc(hidden)] + pub mod __substrate_origin_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_origin_part_defined; + } + ) + .into() } diff --git a/support/procedural-fork/src/dynamic_params.rs b/support/procedural-fork/src/dynamic_params.rs index 
29399a885..70a18bf34 100644 --- a/support/procedural-fork/src/dynamic_params.rs +++ b/support/procedural-fork/src/dynamic_params.rs @@ -26,123 +26,132 @@ use syn::{parse2, spanned::Spanned, visit_mut, visit_mut::VisitMut, Result, Toke /// Parse and expand a `#[dynamic_params(..)]` module. pub fn dynamic_params(attr: TokenStream, item: TokenStream) -> Result { - DynamicParamModAttr::parse(attr, item).map(ToTokens::into_token_stream) + DynamicParamModAttr::parse(attr, item).map(ToTokens::into_token_stream) } /// Parse and expand `#[dynamic_pallet_params(..)]` attribute. pub fn dynamic_pallet_params(attr: TokenStream, item: TokenStream) -> Result { - DynamicPalletParamAttr::parse(attr, item).map(ToTokens::into_token_stream) + DynamicPalletParamAttr::parse(attr, item).map(ToTokens::into_token_stream) } /// Parse and expand `#[dynamic_aggregated_params_internal]` attribute. pub fn dynamic_aggregated_params_internal( - _attr: TokenStream, - item: TokenStream, + _attr: TokenStream, + item: TokenStream, ) -> Result { - parse2::(item).map(ToTokens::into_token_stream) + parse2::(item).map(ToTokens::into_token_stream) } /// A top `#[dynamic_params(..)]` attribute together with a mod. #[derive(derive_syn_parse::Parse)] pub struct DynamicParamModAttr { - params_mod: syn::ItemMod, - meta: DynamicParamModAttrMeta, + params_mod: syn::ItemMod, + meta: DynamicParamModAttrMeta, } /// The inner meta of a `#[dynamic_params(..)]` attribute. 
#[derive(derive_syn_parse::Parse)] pub struct DynamicParamModAttrMeta { - name: syn::Ident, - _comma: Option, - #[parse_if(_comma.is_some())] - params_pallet: Option, + name: syn::Ident, + _comma: Option, + #[parse_if(_comma.is_some())] + params_pallet: Option, } impl DynamicParamModAttr { - pub fn parse(attr: TokenStream, item: TokenStream) -> Result { - let params_mod = parse2(item)?; - let meta = parse2(attr)?; - Ok(Self { params_mod, meta }) - } - - pub fn inner_mods(&self) -> Vec { - self.params_mod.content.as_ref().map_or(Vec::new(), |(_, items)| { - items - .iter() - .filter_map(|i| match i { - syn::Item::Mod(m) => Some(m), - _ => None, - }) - .cloned() - .collect() - }) - } + pub fn parse(attr: TokenStream, item: TokenStream) -> Result { + let params_mod = parse2(item)?; + let meta = parse2(attr)?; + Ok(Self { params_mod, meta }) + } + + pub fn inner_mods(&self) -> Vec { + self.params_mod + .content + .as_ref() + .map_or(Vec::new(), |(_, items)| { + items + .iter() + .filter_map(|i| match i { + syn::Item::Mod(m) => Some(m), + _ => None, + }) + .cloned() + .collect() + }) + } } impl ToTokens for DynamicParamModAttr { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let (mut params_mod, name) = (self.params_mod.clone(), &self.meta.name); - let dynam_params_ident = ¶ms_mod.ident; - - let mut quoted_enum = quote! {}; - for m in self.inner_mods() { - let aggregate_name = - syn::Ident::new(&m.ident.to_string().to_class_case(), m.ident.span()); - let mod_name = &m.ident; - - let mut attrs = m.attrs.clone(); - attrs.retain(|attr| !attr.path().is_ident("dynamic_pallet_params")); - if let Err(err) = ensure_codec_index(&attrs, m.span()) { - tokens.extend(err.into_compile_error()); - return - } - - quoted_enum.extend(quote! 
{ - #(#attrs)* - #aggregate_name(#dynam_params_ident::#mod_name::Parameters), - }); - } - - // Inject the outer args into the inner `#[dynamic_pallet_params(..)]` attribute. - if let Some(params_pallet) = &self.meta.params_pallet { - MacroInjectArgs { runtime_params: name.clone(), params_pallet: params_pallet.clone() } - .visit_item_mod_mut(&mut params_mod); - } - - tokens.extend(quote! { - #params_mod - - #[#scrate::dynamic_params::dynamic_aggregated_params_internal] - pub enum #name { - #quoted_enum - } - }); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let (mut params_mod, name) = (self.params_mod.clone(), &self.meta.name); + let dynam_params_ident = ¶ms_mod.ident; + + let mut quoted_enum = quote! {}; + for m in self.inner_mods() { + let aggregate_name = + syn::Ident::new(&m.ident.to_string().to_class_case(), m.ident.span()); + let mod_name = &m.ident; + + let mut attrs = m.attrs.clone(); + attrs.retain(|attr| !attr.path().is_ident("dynamic_pallet_params")); + if let Err(err) = ensure_codec_index(&attrs, m.span()) { + tokens.extend(err.into_compile_error()); + return; + } + + quoted_enum.extend(quote! { + #(#attrs)* + #aggregate_name(#dynam_params_ident::#mod_name::Parameters), + }); + } + + // Inject the outer args into the inner `#[dynamic_pallet_params(..)]` attribute. + if let Some(params_pallet) = &self.meta.params_pallet { + MacroInjectArgs { + runtime_params: name.clone(), + params_pallet: params_pallet.clone(), + } + .visit_item_mod_mut(&mut params_mod); + } + + tokens.extend(quote! { + #params_mod + + #[#scrate::dynamic_params::dynamic_aggregated_params_internal] + pub enum #name { + #quoted_enum + } + }); + } } /// Ensure there is a `#[codec(index = ..)]` attribute. 
fn ensure_codec_index(attrs: &Vec, span: Span) -> Result<()> { - let mut found = false; - - for attr in attrs.iter() { - if attr.path().is_ident("codec") { - let meta: syn::ExprAssign = attr.parse_args()?; - if meta.left.to_token_stream().to_string() == "index" { - found = true; - break - } - } - } - - if !found { - Err(syn::Error::new(span, "Missing explicit `#[codec(index = ..)]` attribute")) - } else { - Ok(()) - } + let mut found = false; + + for attr in attrs.iter() { + if attr.path().is_ident("codec") { + let meta: syn::ExprAssign = attr.parse_args()?; + if meta.left.to_token_stream().to_string() == "index" { + found = true; + break; + } + } + } + + if !found { + Err(syn::Error::new( + span, + "Missing explicit `#[codec(index = ..)]` attribute", + )) + } else { + Ok(()) + } } /// Used to inject arguments into the inner `#[dynamic_pallet_params(..)]` attribute. @@ -150,110 +159,125 @@ fn ensure_codec_index(attrs: &Vec, span: Span) -> Result<()> { /// This allows the outer `#[dynamic_params(..)]` attribute to specify some arguments that don't /// need to be repeated every time. struct MacroInjectArgs { - runtime_params: syn::Ident, - params_pallet: syn::Type, + runtime_params: syn::Ident, + params_pallet: syn::Type, } impl VisitMut for MacroInjectArgs { - fn visit_item_mod_mut(&mut self, item: &mut syn::ItemMod) { - // Check if the mod has a `#[dynamic_pallet_params(..)]` attribute. - let attr = item.attrs.iter_mut().find(|attr| attr.path().is_ident("dynamic_pallet_params")); - - if let Some(attr) = attr { - match &attr.meta { - syn::Meta::Path(path) => - assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params"), - _ => (), - } - - let runtime_params = &self.runtime_params; - let params_pallet = &self.params_pallet; - - attr.meta = syn::parse2::(quote! 
{ - dynamic_pallet_params(#runtime_params, #params_pallet) - }) - .unwrap() - .into(); - } - - visit_mut::visit_item_mod_mut(self, item); - } + fn visit_item_mod_mut(&mut self, item: &mut syn::ItemMod) { + // Check if the mod has a `#[dynamic_pallet_params(..)]` attribute. + let attr = item + .attrs + .iter_mut() + .find(|attr| attr.path().is_ident("dynamic_pallet_params")); + + if let Some(attr) = attr { + match &attr.meta { + syn::Meta::Path(path) => { + assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params") + } + _ => (), + } + + let runtime_params = &self.runtime_params; + let params_pallet = &self.params_pallet; + + attr.meta = syn::parse2::(quote! { + dynamic_pallet_params(#runtime_params, #params_pallet) + }) + .unwrap() + .into(); + } + + visit_mut::visit_item_mod_mut(self, item); + } } /// The helper attribute of a `#[dynamic_pallet_params(runtime_params, params_pallet)]` /// attribute. #[derive(derive_syn_parse::Parse)] pub struct DynamicPalletParamAttr { - inner_mod: syn::ItemMod, - meta: DynamicPalletParamAttrMeta, + inner_mod: syn::ItemMod, + meta: DynamicPalletParamAttrMeta, } /// The inner meta of a `#[dynamic_pallet_params(..)]` attribute. #[derive(derive_syn_parse::Parse)] pub struct DynamicPalletParamAttrMeta { - runtime_params: syn::Ident, - _comma: Token![,], - parameter_pallet: syn::Type, + runtime_params: syn::Ident, + _comma: Token![,], + parameter_pallet: syn::Type, } impl DynamicPalletParamAttr { - pub fn parse(attr: TokenStream, item: TokenStream) -> Result { - Ok(Self { inner_mod: parse2(item)?, meta: parse2(attr)? 
}) - } - - pub fn statics(&self) -> Vec { - self.inner_mod.content.as_ref().map_or(Vec::new(), |(_, items)| { - items - .iter() - .filter_map(|i| match i { - syn::Item::Static(s) => Some(s), - _ => None, - }) - .cloned() - .collect() - }) - } + pub fn parse(attr: TokenStream, item: TokenStream) -> Result { + Ok(Self { + inner_mod: parse2(item)?, + meta: parse2(attr)?, + }) + } + + pub fn statics(&self) -> Vec { + self.inner_mod + .content + .as_ref() + .map_or(Vec::new(), |(_, items)| { + items + .iter() + .filter_map(|i| match i { + syn::Item::Static(s) => Some(s), + _ => None, + }) + .cloned() + .collect() + }) + } } impl ToTokens for DynamicPalletParamAttr { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let (params_mod, parameter_pallet, runtime_params) = - (&self.inner_mod, &self.meta.parameter_pallet, &self.meta.runtime_params); - - let aggregate_name = - syn::Ident::new(¶ms_mod.ident.to_string().to_class_case(), params_mod.ident.span()); - let (mod_name, vis) = (¶ms_mod.ident, ¶ms_mod.vis); - let statics = self.statics(); - - let (mut key_names, mut key_values, mut defaults, mut attrs, mut value_types): ( - Vec<_>, - Vec<_>, - Vec<_>, - Vec<_>, - Vec<_>, - ) = Default::default(); - - for s in statics.iter() { - if let Err(err) = ensure_codec_index(&s.attrs, s.span()) { - tokens.extend(err.into_compile_error()); - return - } - - key_names.push(&s.ident); - key_values.push(format_ident!("{}Value", &s.ident)); - defaults.push(&s.expr); - attrs.push(&s.attrs); - value_types.push(&s.ty); - } - - let key_ident = syn::Ident::new("ParametersKey", params_mod.ident.span()); - let value_ident = syn::Ident::new("ParametersValue", params_mod.ident.span()); - let runtime_key_ident = format_ident!("{}Key", runtime_params); - let runtime_value_ident = format_ident!("{}Value", runtime_params); - - tokens.extend(quote! 
{ + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let (params_mod, parameter_pallet, runtime_params) = ( + &self.inner_mod, + &self.meta.parameter_pallet, + &self.meta.runtime_params, + ); + + let aggregate_name = syn::Ident::new( + ¶ms_mod.ident.to_string().to_class_case(), + params_mod.ident.span(), + ); + let (mod_name, vis) = (¶ms_mod.ident, ¶ms_mod.vis); + let statics = self.statics(); + + let (mut key_names, mut key_values, mut defaults, mut attrs, mut value_types): ( + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + ) = Default::default(); + + for s in statics.iter() { + if let Err(err) = ensure_codec_index(&s.attrs, s.span()) { + tokens.extend(err.into_compile_error()); + return; + } + + key_names.push(&s.ident); + key_values.push(format_ident!("{}Value", &s.ident)); + defaults.push(&s.expr); + attrs.push(&s.attrs); + value_types.push(&s.ty); + } + + let key_ident = syn::Ident::new("ParametersKey", params_mod.ident.span()); + let value_ident = syn::Ident::new("ParametersValue", params_mod.ident.span()); + let runtime_key_ident = format_ident!("{}Key", runtime_params); + let runtime_value_ident = format_ident!("{}Value", runtime_params); + + tokens.extend(quote! 
{ pub mod #mod_name { use super::*; @@ -426,44 +450,44 @@ impl ToTokens for DynamicPalletParamAttr { )* } }); - } + } } #[derive(derive_syn_parse::Parse)] pub struct DynamicParamAggregatedEnum { - aggregated_enum: syn::ItemEnum, + aggregated_enum: syn::ItemEnum, } impl ToTokens for DynamicParamAggregatedEnum { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let params_enum = &self.aggregated_enum; - let (name, vis) = (¶ms_enum.ident, ¶ms_enum.vis); - - let (mut indices, mut param_names, mut param_types): (Vec<_>, Vec<_>, Vec<_>) = - Default::default(); - let mut attributes = Vec::new(); - for (i, variant) in params_enum.variants.iter().enumerate() { - indices.push(i); - param_names.push(&variant.ident); - attributes.push(&variant.attrs); - - param_types.push(match &variant.fields { + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let params_enum = &self.aggregated_enum; + let (name, vis) = (¶ms_enum.ident, ¶ms_enum.vis); + + let (mut indices, mut param_names, mut param_types): (Vec<_>, Vec<_>, Vec<_>) = + Default::default(); + let mut attributes = Vec::new(); + for (i, variant) in params_enum.variants.iter().enumerate() { + indices.push(i); + param_names.push(&variant.ident); + attributes.push(&variant.attrs); + + param_types.push(match &variant.fields { syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => &fields.unnamed[0].ty, _ => { *tokens = quote! 
{ compile_error!("Only unnamed enum variants with one inner item are supported") }; return }, }); - } + } - let params_key_ident = format_ident!("{}Key", params_enum.ident); - let params_value_ident = format_ident!("{}Value", params_enum.ident); + let params_key_ident = format_ident!("{}Key", params_enum.ident); + let params_value_ident = format_ident!("{}Value", params_enum.ident); - tokens.extend(quote! { + tokens.extend(quote! { #[doc(hidden)] #[derive( Clone, @@ -554,10 +578,10 @@ impl ToTokens for DynamicParamAggregatedEnum { } )* }); - } + } } /// Get access to the current crate and convert the error to a compile error. fn crate_access() -> core::result::Result { - generate_access_from_frame_or_crate("frame-support").map_err(|e| e.to_compile_error()) + generate_access_from_frame_or_crate("frame-support").map_err(|e| e.to_compile_error()) } diff --git a/support/procedural-fork/src/key_prefix.rs b/support/procedural-fork/src/key_prefix.rs index 7f1ab6866..aea60ce3b 100644 --- a/support/procedural-fork/src/key_prefix.rs +++ b/support/procedural-fork/src/key_prefix.rs @@ -23,82 +23,84 @@ use syn::{Ident, Result}; const MAX_IDENTS: usize = 18; pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result { - if !input.is_empty() { - return Err(syn::Error::new(Span::call_site(), "No arguments expected")) - } - - let mut all_trait_impls = TokenStream::new(); - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - - for i in 2..=MAX_IDENTS { - let current_tuple = (0..i) - .map(|n| Ident::new(&format!("Tuple{}", n), Span::call_site())) - .collect::>(); - - for prefix_count in 1..i { - let (prefixes, suffixes) = current_tuple.split_at(prefix_count); - - let hashers = current_tuple - .iter() - .map(|ident| format_ident!("Hasher{}", ident)) - .collect::>(); - let kargs = - prefixes.iter().map(|ident| format_ident!("KArg{}", ident)).collect::>(); - let partial_keygen = generate_keygen(prefixes); - let suffix_keygen = 
generate_keygen(suffixes); - let suffix_tuple = generate_tuple(suffixes); - - let trait_impls = quote! { - impl< - #(#current_tuple: FullCodec + StaticTypeInfo,)* - #(#hashers: StorageHasher,)* - #(#kargs: EncodeLike<#prefixes>),* - > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { - type Suffix = #suffix_tuple; - - fn partial_key(prefix: ( #( #kargs, )* )) -> Vec { - <#partial_keygen>::final_key(prefix) - } - } - - impl< - #(#current_tuple: FullCodec + StaticTypeInfo,)* - #(#hashers: ReversibleStorageHasher,)* - #(#kargs: EncodeLike<#prefixes>),* - > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { - fn decode_partial_key(key_material: &[u8]) -> Result< - Self::Suffix, - #frame_support::__private::codec::Error, - > { - <#suffix_keygen>::decode_final_key(key_material).map(|k| k.0) - } - } - }; - - all_trait_impls.extend(trait_impls); - } - } - - Ok(all_trait_impls) + if !input.is_empty() { + return Err(syn::Error::new(Span::call_site(), "No arguments expected")); + } + + let mut all_trait_impls = TokenStream::new(); + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + + for i in 2..=MAX_IDENTS { + let current_tuple = (0..i) + .map(|n| Ident::new(&format!("Tuple{}", n), Span::call_site())) + .collect::>(); + + for prefix_count in 1..i { + let (prefixes, suffixes) = current_tuple.split_at(prefix_count); + + let hashers = current_tuple + .iter() + .map(|ident| format_ident!("Hasher{}", ident)) + .collect::>(); + let kargs = prefixes + .iter() + .map(|ident| format_ident!("KArg{}", ident)) + .collect::>(); + let partial_keygen = generate_keygen(prefixes); + let suffix_keygen = generate_keygen(suffixes); + let suffix_tuple = generate_tuple(suffixes); + + let trait_impls = quote! 
{ + impl< + #(#current_tuple: FullCodec + StaticTypeInfo,)* + #(#hashers: StorageHasher,)* + #(#kargs: EncodeLike<#prefixes>),* + > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { + type Suffix = #suffix_tuple; + + fn partial_key(prefix: ( #( #kargs, )* )) -> Vec { + <#partial_keygen>::final_key(prefix) + } + } + + impl< + #(#current_tuple: FullCodec + StaticTypeInfo,)* + #(#hashers: ReversibleStorageHasher,)* + #(#kargs: EncodeLike<#prefixes>),* + > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { + fn decode_partial_key(key_material: &[u8]) -> Result< + Self::Suffix, + #frame_support::__private::codec::Error, + > { + <#suffix_keygen>::decode_final_key(key_material).map(|k| k.0) + } + } + }; + + all_trait_impls.extend(trait_impls); + } + } + + Ok(all_trait_impls) } fn generate_tuple(idents: &[Ident]) -> TokenStream { - if idents.len() == 1 { - idents[0].to_token_stream() - } else { - quote!((#(#idents),*)) - } + if idents.len() == 1 { + idents[0].to_token_stream() + } else { + quote!((#(#idents),*)) + } } fn generate_keygen(idents: &[Ident]) -> TokenStream { - if idents.len() == 1 { - let key = &idents[0]; - let hasher = format_ident!("Hasher{}", key); + if idents.len() == 1 { + let key = &idents[0]; + let hasher = format_ident!("Hasher{}", key); - quote!(Key<#hasher, #key>) - } else { - let hashers = idents.iter().map(|ident| format_ident!("Hasher{}", ident)); + quote!(Key<#hasher, #key>) + } else { + let hashers = idents.iter().map(|ident| format_ident!("Hasher{}", ident)); - quote!((#(Key<#hashers, #idents>),*)) - } + quote!((#(Key<#hashers, #idents>),*)) + } } diff --git a/support/procedural-fork/src/match_and_insert.rs b/support/procedural-fork/src/match_and_insert.rs index aa9cc56d7..a80b6e95f 100644 --- a/support/procedural-fork/src/match_and_insert.rs +++ b/support/procedural-fork/src/match_and_insert.rs @@ -22,138 +22,152 @@ use std::iter::once; use syn::spanned::Spanned; mod keyword { 
- syn::custom_keyword!(target); - syn::custom_keyword!(pattern); - syn::custom_keyword!(tokens); + syn::custom_keyword!(target); + syn::custom_keyword!(pattern); + syn::custom_keyword!(tokens); } pub fn match_and_insert(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let MatchAndInsertDef { pattern, tokens, target } = - syn::parse_macro_input!(input as MatchAndInsertDef); - - match expand_in_stream(&pattern, &mut Some(tokens), target) { - Ok(stream) => stream.into(), - Err(err) => err.to_compile_error().into(), - } + let MatchAndInsertDef { + pattern, + tokens, + target, + } = syn::parse_macro_input!(input as MatchAndInsertDef); + + match expand_in_stream(&pattern, &mut Some(tokens), target) { + Ok(stream) => stream.into(), + Err(err) => err.to_compile_error().into(), + } } struct MatchAndInsertDef { - // Token stream to search and insert tokens into. - target: TokenStream, - // Pattern to match against, this is ensured to have no TokenTree::Group nor TokenTree::Literal - // (i.e. contains only Punct or Ident), and not being empty. - pattern: Vec, - // Token stream to insert after the match pattern. - tokens: TokenStream, + // Token stream to search and insert tokens into. + target: TokenStream, + // Pattern to match against, this is ensured to have no TokenTree::Group nor TokenTree::Literal + // (i.e. contains only Punct or Ident), and not being empty. + pattern: Vec, + // Token stream to insert after the match pattern. 
+ tokens: TokenStream, } impl syn::parse::Parse for MatchAndInsertDef { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut target; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(target in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(target in target); - let target = target.parse()?; - - let mut pattern; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(pattern in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(pattern in pattern); - let pattern = pattern.parse::()?.into_iter().collect::>(); - - if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Group(_))) { - return Err(syn::Error::new(t.span(), "Unexpected group token tree")) - } - if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Literal(_))) { - return Err(syn::Error::new(t.span(), "Unexpected literal token tree")) - } - - if pattern.is_empty() { - return Err(syn::Error::new(Span::call_site(), "empty match pattern is invalid")) - } - - let mut tokens; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(tokens in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(tokens in tokens); - let tokens = tokens.parse()?; - - Ok(Self { tokens, pattern, target }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut target; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(target in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(target in target); + let target = target.parse()?; + + let mut pattern; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(pattern in input); + let _replace_with_brace: syn::token::Brace = 
syn::braced!(pattern in pattern); + let pattern = pattern + .parse::()? + .into_iter() + .collect::>(); + + if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Group(_))) { + return Err(syn::Error::new(t.span(), "Unexpected group token tree")); + } + if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Literal(_))) { + return Err(syn::Error::new(t.span(), "Unexpected literal token tree")); + } + + if pattern.is_empty() { + return Err(syn::Error::new( + Span::call_site(), + "empty match pattern is invalid", + )); + } + + let mut tokens; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(tokens in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(tokens in tokens); + let tokens = tokens.parse()?; + + Ok(Self { + tokens, + pattern, + target, + }) + } } // Insert `tokens` after the first matching `pattern`. // `tokens` must be some (Option is used for internal simplification). // `pattern` must not be empty and should only contain Ident or Punct. 
fn expand_in_stream( - pattern: &[TokenTree], - tokens: &mut Option, - stream: TokenStream, + pattern: &[TokenTree], + tokens: &mut Option, + stream: TokenStream, ) -> syn::Result { - assert!( - tokens.is_some(), - "`tokens` must be some, Option is used because `tokens` is used only once" - ); - assert!( - !pattern.is_empty(), - "`pattern` must not be empty, otherwise there is nothing to match against" - ); - - let stream_span = stream.span(); - let mut stream = stream.into_iter(); - let mut extended = TokenStream::new(); - let mut match_cursor = 0; - - while let Some(token) = stream.next() { - match token { - TokenTree::Group(group) => { - match_cursor = 0; - let group_stream = group.stream(); - match expand_in_stream(pattern, tokens, group_stream) { - Ok(s) => { - extended.extend(once(TokenTree::Group(Group::new(group.delimiter(), s)))); - extended.extend(stream); - return Ok(extended) - }, - Err(_) => { - extended.extend(once(TokenTree::Group(group))); - }, - } - }, - other => { - advance_match_cursor(&other, pattern, &mut match_cursor); - - extended.extend(once(other)); - - if match_cursor == pattern.len() { - extended - .extend(once(tokens.take().expect("tokens is used to replace only once"))); - extended.extend(stream); - return Ok(extended) - } - }, - } - } - // if we reach this point, it means the stream is empty and we haven't found a matching pattern - let msg = format!("Cannot find pattern `{:?}` in given token stream", pattern); - Err(syn::Error::new(stream_span, msg)) + assert!( + tokens.is_some(), + "`tokens` must be some, Option is used because `tokens` is used only once" + ); + assert!( + !pattern.is_empty(), + "`pattern` must not be empty, otherwise there is nothing to match against" + ); + + let stream_span = stream.span(); + let mut stream = stream.into_iter(); + let mut extended = TokenStream::new(); + let mut match_cursor = 0; + + while let Some(token) = stream.next() { + match token { + TokenTree::Group(group) => { + match_cursor = 0; + let 
group_stream = group.stream(); + match expand_in_stream(pattern, tokens, group_stream) { + Ok(s) => { + extended.extend(once(TokenTree::Group(Group::new(group.delimiter(), s)))); + extended.extend(stream); + return Ok(extended); + } + Err(_) => { + extended.extend(once(TokenTree::Group(group))); + } + } + } + other => { + advance_match_cursor(&other, pattern, &mut match_cursor); + + extended.extend(once(other)); + + if match_cursor == pattern.len() { + extended.extend(once( + tokens.take().expect("tokens is used to replace only once"), + )); + extended.extend(stream); + return Ok(extended); + } + } + } + } + // if we reach this point, it means the stream is empty and we haven't found a matching pattern + let msg = format!("Cannot find pattern `{:?}` in given token stream", pattern); + Err(syn::Error::new(stream_span, msg)) } fn advance_match_cursor(other: &TokenTree, pattern: &[TokenTree], match_cursor: &mut usize) { - use TokenTree::{Ident, Punct}; - - let does_match_other_pattern = match (other, &pattern[*match_cursor]) { - (Ident(i1), Ident(i2)) => i1 == i2, - (Punct(p1), Punct(p2)) => p1.as_char() == p2.as_char(), - _ => false, - }; - - if does_match_other_pattern { - *match_cursor += 1; - } else { - *match_cursor = 0; - } + use TokenTree::{Ident, Punct}; + + let does_match_other_pattern = match (other, &pattern[*match_cursor]) { + (Ident(i1), Ident(i2)) => i1 == i2, + (Punct(p1), Punct(p2)) => p1.as_char() == p2.as_char(), + _ => false, + }; + + if does_match_other_pattern { + *match_cursor += 1; + } else { + *match_cursor = 0; + } } diff --git a/support/procedural-fork/src/no_bound/clone.rs b/support/procedural-fork/src/no_bound/clone.rs index 346bf450f..17039bdc8 100644 --- a/support/procedural-fork/src/no_bound/clone.rs +++ b/support/procedural-fork/src/no_bound/clone.rs @@ -19,89 +19,93 @@ use syn::spanned::Spanned; /// Derive Clone but do not bound any generic. 
pub fn derive_clone_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named.named.iter().map(|i| &i.ident).map(|i| { - quote::quote_spanned!(i.span() => - #i: ::core::clone::Clone::clone(&self.#i) - ) - }); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named.named.iter().map(|i| &i.ident).map(|i| { + quote::quote_spanned!(i.span() => + #i: ::core::clone::Clone::clone(&self.#i) + ) + }); - quote::quote!( Self { #( #fields, )* } ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = - unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map(|i| { - quote::quote_spanned!(i.span() => - ::core::clone::Clone::clone(&self.#i) - ) - }); + quote::quote!( Self { #( #fields, )* } ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| { + quote::quote_spanned!(i.span() => + ::core::clone::Clone::clone(&self.#i) + ) + }); - quote::quote!( Self ( #( #fields, )* ) ) - }, - syn::Fields::Unit => { - quote::quote!(Self) - }, - }, - syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - match &variant.fields { - syn::Fields::Named(named) => { - let captured = named.named.iter().map(|i| &i.ident); - let cloned = captured.clone().map(|i| { - ::quote::quote_spanned!(i.span() => - #i: ::core::clone::Clone::clone(#i) - ) - }); - 
quote::quote!( - Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} - ) - }, - syn::Fields::Unnamed(unnamed) => { - let captured = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let cloned = captured.clone().map(|i| { - quote::quote_spanned!(i.span() => - ::core::clone::Clone::clone(#i) - ) - }); - quote::quote!( - Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) - ) - }, - syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), - } - }); + quote::quote!( Self ( #( #fields, )* ) ) + } + syn::Fields::Unit => { + quote::quote!(Self) + } + }, + syn::Data::Enum(enum_) => { + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let cloned = captured.clone().map(|i| { + ::quote::quote_spanned!(i.span() => + #i: ::core::clone::Clone::clone(#i) + ) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} + ) + } + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let cloned = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + ::core::clone::Clone::clone(#i) + ) + }); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) + ) + } + syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), + } + }); - quote::quote!(match self { - #( #variants, )* - }) - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(CloneNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!(match self { + #( #variants, )* + }) + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(CloneNoBound)`"; + return 
syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::clone::Clone for #name #ty_generics #where_clause { - fn clone(&self) -> Self { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::clone::Clone for #name #ty_generics #where_clause { + fn clone(&self) -> Self { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/debug.rs b/support/procedural-fork/src/no_bound/debug.rs index a1b3f4f0d..8034bb5ec 100644 --- a/support/procedural-fork/src/no_bound/debug.rs +++ b/support/procedural-fork/src/no_bound/debug.rs @@ -19,103 +19,103 @@ use syn::spanned::Spanned; /// Derive Debug but do not bound any generics. pub fn derive_debug_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let input_ident = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let input_ident = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = - named.named.iter().map(|i| &i.ident).map( - |i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) ), - ); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = + named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) ), + ); - quote::quote!( - fmt.debug_struct(stringify!(#input_ident)) - #( #fields )* - .finish() - ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - 
.enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => .field(&self.#i) )); + quote::quote!( + fmt.debug_struct(stringify!(#input_ident)) + #( #fields )* + .finish() + ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => .field(&self.#i) )); - quote::quote!( - fmt.debug_tuple(stringify!(#input_ident)) - #( #fields )* - .finish() - ) - }, - syn::Fields::Unit => quote::quote!(fmt.write_str(stringify!(#input_ident))), - }, - syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - let full_variant_str = format!("{}::{}", input_ident, ident); - match &variant.fields { - syn::Fields::Named(named) => { - let captured = named.named.iter().map(|i| &i.ident); - let debugged = captured.clone().map(|i| { - quote::quote_spanned!(i.span() => - .field(stringify!(#i), &#i) - ) - }); - quote::quote!( - Self::#ident { #( ref #captured, )* } => { - fmt.debug_struct(#full_variant_str) - #( #debugged )* - .finish() - } - ) - }, - syn::Fields::Unnamed(unnamed) => { - let captured = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let debugged = captured - .clone() - .map(|i| quote::quote_spanned!(i.span() => .field(&#i))); - quote::quote!( - Self::#ident ( #( ref #captured, )* ) => { - fmt.debug_tuple(#full_variant_str) - #( #debugged )* - .finish() - } - ) - }, - syn::Fields::Unit => quote::quote!( - Self::#ident => fmt.write_str(#full_variant_str) - ), - } - }); + quote::quote!( + fmt.debug_tuple(stringify!(#input_ident)) + #( #fields )* + .finish() + ) + } + syn::Fields::Unit => quote::quote!(fmt.write_str(stringify!(#input_ident))), + }, + syn::Data::Enum(enum_) => { + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + let full_variant_str = format!("{}::{}", 
input_ident, ident); + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let debugged = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + .field(stringify!(#i), &#i) + ) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => { + fmt.debug_struct(#full_variant_str) + #( #debugged )* + .finish() + } + ) + } + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let debugged = captured + .clone() + .map(|i| quote::quote_spanned!(i.span() => .field(&#i))); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => { + fmt.debug_tuple(#full_variant_str) + #( #debugged )* + .finish() + } + ) + } + syn::Fields::Unit => quote::quote!( + Self::#ident => fmt.write_str(#full_variant_str) + ), + } + }); - quote::quote!(match *self { - #( #variants, )* - }) - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(DebugNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!(match *self { + #( #variants, )* + }) + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(DebugNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::fmt::Debug for #input_ident #ty_generics #where_clause { - fn fmt(&self, fmt: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::fmt::Debug for #input_ident #ty_generics #where_clause { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/default.rs b/support/procedural-fork/src/no_bound/default.rs 
index 0524247d2..1c0d90531 100644 --- a/support/procedural-fork/src/no_bound/default.rs +++ b/support/procedural-fork/src/no_bound/default.rs @@ -21,51 +21,57 @@ use syn::{spanned::Spanned, Data, DeriveInput, Fields}; /// Derive Default but do not bound any generic. pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as DeriveInput); - - let name = &input.ident; - - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - - let impl_ = match input.data { - Data::Struct(struct_) => match struct_.fields { - Fields::Named(named) => { - let fields = named.named.iter().map(|field| &field.ident).map(|ident| { - quote_spanned! {ident.span() => - #ident: ::core::default::Default::default() - } - }); - - quote!(Self { #( #fields, )* }) - }, - Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().map(|field| { - quote_spanned! {field.span()=> - ::core::default::Default::default() - } - }); - - quote!(Self( #( #fields, )* )) - }, - Fields::Unit => { - quote!(Self) - }, - }, - Data::Enum(enum_) => { - if enum_.variants.is_empty() { - return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") - .to_compile_error() - .into() - } - - // all #[default] attrs with the variant they're on; i.e. a var - let default_variants = enum_ - .variants - .into_iter() - .filter(|variant| variant.attrs.iter().any(|attr| attr.path().is_ident("default"))) - .collect::>(); - - match &*default_variants { + let input = syn::parse_macro_input!(input as DeriveInput); + + let name = &input.ident; + + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = + match input.data { + Data::Struct(struct_) => match struct_.fields { + Fields::Named(named) => { + let fields = named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! 
{ident.span() => + #ident: ::core::default::Default::default() + } + }); + + quote!(Self { #( #fields, )* }) + } + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! {field.span()=> + ::core::default::Default::default() + } + }); + + quote!(Self( #( #fields, )* )) + } + Fields::Unit => { + quote!(Self) + } + }, + Data::Enum(enum_) => { + if enum_.variants.is_empty() { + return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") + .to_compile_error() + .into(); + } + + // all #[default] attrs with the variant they're on; i.e. a var + let default_variants = enum_ + .variants + .into_iter() + .filter(|variant| { + variant + .attrs + .iter() + .any(|attr| attr.path().is_ident("default")) + }) + .collect::>(); + + match &*default_variants { [] => return syn::Error::new( name.clone().span(), "no default declared, make a variant default by placing `#[default]` above it", @@ -137,25 +143,26 @@ pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::To return err.into_compile_error().into() }, } - }, - Data::Union(union_) => - return syn::Error::new_spanned( - union_.union_token, - "Union type not supported by `derive(DefaultNoBound)`", - ) - .to_compile_error() - .into(), - }; - - quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::default::Default for #name #ty_generics #where_clause { - fn default() -> Self { - #impl_ - } - } - }; - ) - .into() + } + Data::Union(union_) => { + return syn::Error::new_spanned( + union_.union_token, + "Union type not supported by `derive(DefaultNoBound)`", + ) + .to_compile_error() + .into() + } + }; + + quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::default::Default for #name #ty_generics #where_clause { + fn default() -> Self { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/ord.rs b/support/procedural-fork/src/no_bound/ord.rs index 
b24d27c04..20f30eb9d 100644 --- a/support/procedural-fork/src/no_bound/ord.rs +++ b/support/procedural-fork/src/no_bound/ord.rs @@ -19,57 +19,57 @@ use syn::spanned::Spanned; /// Derive Ord but do not bound any generic. pub fn derive_ord_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input: syn::DeriveInput = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; + let input: syn::DeriveInput = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named - .named - .iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named + .named + .iter() + .map(|i| &i.ident) + .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); - quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); + quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); - quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) - }, - syn::Fields::Unit => { - 
quote::quote!(core::cmp::Ordering::Equal) - }, - }, - syn::Data::Enum(_) => { - let msg = "Enum type not supported by `derive(OrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(OrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) + } + syn::Fields::Unit => { + quote::quote!(core::cmp::Ordering::Equal) + } + }, + syn::Data::Enum(_) => { + let msg = "Enum type not supported by `derive(OrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(OrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - impl #impl_generics core::cmp::Ord for #name #ty_generics #where_clause { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + impl #impl_generics core::cmp::Ord for #name #ty_generics #where_clause { + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/partial_eq.rs b/support/procedural-fork/src/no_bound/partial_eq.rs index a1be71a96..8833f6e5f 100644 --- a/support/procedural-fork/src/no_bound/partial_eq.rs +++ b/support/procedural-fork/src/no_bound/partial_eq.rs @@ -19,119 +19,119 @@ use syn::spanned::Spanned; /// Derive PartialEq but do not bound any generic. 
pub fn derive_partial_eq_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named - .named - .iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named + .named + .iter() + .map(|i| &i.ident) + .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); - quote::quote!( true #( && #fields )* ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); + quote::quote!( true #( && #fields )* ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); - quote::quote!( true #( && #fields )* ) - }, - syn::Fields::Unit => { - quote::quote!(true) - }, - }, - syn::Data::Enum(enum_) => { - let variants = - enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - match &variant.fields { - syn::Fields::Named(named) => { - let names = named.named.iter().map(|i| &i.ident); - let other_names = names.clone().enumerate().map(|(n, ident)| { - syn::Ident::new(&format!("_{}", n), ident.span()) - }); + quote::quote!( true #( && #fields )* ) + } + syn::Fields::Unit => { + quote::quote!(true) + } + 
}, + syn::Data::Enum(enum_) => { + let variants = + enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let names = named.named.iter().map(|i| &i.ident); + let other_names = names.clone().enumerate().map(|(n, ident)| { + syn::Ident::new(&format!("_{}", n), ident.span()) + }); - let capture = names.clone(); - let other_capture = names - .clone() - .zip(other_names.clone()) - .map(|(i, other_i)| quote::quote!(#i: #other_i)); - let eq = names.zip(other_names).map( - |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), - ); - quote::quote!( - ( - Self::#ident { #( #capture, )* }, - Self::#ident { #( #other_capture, )* }, - ) => true #( && #eq )* - ) - }, - syn::Fields::Unnamed(unnamed) => { - let names = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let other_names = - unnamed.unnamed.iter().enumerate().map(|(i, f)| { - syn::Ident::new(&format!("_{}_other", i), f.span()) - }); - let eq = names.clone().zip(other_names.clone()).map( - |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), - ); - quote::quote!( - ( - Self::#ident ( #( #names, )* ), - Self::#ident ( #( #other_names, )* ), - ) => true #( && #eq )* - ) - }, - syn::Fields::Unit => quote::quote!( (Self::#ident, Self::#ident) => true ), - } - }); + let capture = names.clone(); + let other_capture = names + .clone() + .zip(other_names.clone()) + .map(|(i, other_i)| quote::quote!(#i: #other_i)); + let eq = names.zip(other_names).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); + quote::quote!( + ( + Self::#ident { #( #capture, )* }, + Self::#ident { #( #other_capture, )* }, + ) => true #( && #eq )* + ) + } + syn::Fields::Unnamed(unnamed) => { + let names = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let other_names = + 
unnamed.unnamed.iter().enumerate().map(|(i, f)| { + syn::Ident::new(&format!("_{}_other", i), f.span()) + }); + let eq = names.clone().zip(other_names.clone()).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); + quote::quote!( + ( + Self::#ident ( #( #names, )* ), + Self::#ident ( #( #other_names, )* ), + ) => true #( && #eq )* + ) + } + syn::Fields::Unit => quote::quote!( (Self::#ident, Self::#ident) => true ), + } + }); - let mut different_variants = vec![]; - for (i, i_variant) in enum_.variants.iter().enumerate() { - for (j, j_variant) in enum_.variants.iter().enumerate() { - if i != j { - let i_ident = &i_variant.ident; - let j_ident = &j_variant.ident; - different_variants.push(quote::quote!( - (Self::#i_ident { .. }, Self::#j_ident { .. }) => false - )) - } - } - } + let mut different_variants = vec![]; + for (i, i_variant) in enum_.variants.iter().enumerate() { + for (j, j_variant) in enum_.variants.iter().enumerate() { + if i != j { + let i_ident = &i_variant.ident; + let j_ident = &j_variant.ident; + different_variants.push(quote::quote!( + (Self::#i_ident { .. }, Self::#j_ident { .. 
}) => false + )) + } + } + } - quote::quote!( match (self, other) { - #( #variants, )* - #( #different_variants, )* - }) - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(PartialEqNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!( match (self, other) { + #( #variants, )* + #( #different_variants, )* + }) + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(PartialEqNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause { - fn eq(&self, other: &Self) -> bool { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause { + fn eq(&self, other: &Self) -> bool { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/partial_ord.rs b/support/procedural-fork/src/no_bound/partial_ord.rs index 86aa42be9..c73199d4e 100644 --- a/support/procedural-fork/src/no_bound/partial_ord.rs +++ b/support/procedural-fork/src/no_bound/partial_ord.rs @@ -19,71 +19,72 @@ use syn::spanned::Spanned; /// Derive PartialOrd but do not bound any generic. 
pub fn derive_partial_ord_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input: syn::DeriveInput = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; + let input: syn::DeriveInput = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => - match struct_.fields { - syn::Fields::Named(named) => { - let fields = - named.named.iter().map(|i| &i.ident).map( - |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), - ); + let impl_ = + match input.data { + syn::Data::Struct(struct_) => { + match struct_.fields { + syn::Fields::Named(named) => { + let fields = named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), + ); - quote::quote!( - Some(core::cmp::Ordering::Equal) - #( - .and_then(|order| { - let next_order = #fields?; - Some(order.then(next_order)) - }) - )* - ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = + quote::quote!( + Some(core::cmp::Ordering::Equal) + #( + .and_then(|order| { + let next_order = #fields?; + Some(order.then(next_order)) + }) + )* + ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map( |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), ); - quote::quote!( - Some(core::cmp::Ordering::Equal) - #( - .and_then(|order| { - let next_order = #fields?; - Some(order.then(next_order)) - }) - )* - ) - }, - syn::Fields::Unit => { - quote::quote!(Some(core::cmp::Ordering::Equal)) - }, - }, - syn::Data::Enum(_) => { - let msg = "Enum type not supported by 
`derive(PartialOrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(PartialOrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!( + Some(core::cmp::Ordering::Equal) + #( + .and_then(|order| { + let next_order = #fields?; + Some(order.then(next_order)) + }) + )* + ) + } + syn::Fields::Unit => { + quote::quote!(Some(core::cmp::Ordering::Equal)) + } + } + } + syn::Data::Enum(_) => { + let msg = "Enum type not supported by `derive(PartialOrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(PartialOrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - impl #impl_generics core::cmp::PartialOrd for #name #ty_generics #where_clause { - fn partial_cmp(&self, other: &Self) -> Option { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + impl #impl_generics core::cmp::PartialOrd for #name #ty_generics #where_clause { + fn partial_cmp(&self, other: &Self) -> Option { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/pallet/expand/call.rs b/support/procedural-fork/src/pallet/expand/call.rs index f395872c8..a39e81fd1 100644 --- a/support/procedural-fork/src/pallet/expand/call.rs +++ b/support/procedural-fork/src/pallet/expand/call.rs @@ -16,12 +16,12 @@ // limitations under the License. 
use crate::{ - pallet::{ - expand::warnings::{weight_constant_warning, weight_witness_warning}, - parse::call::CallWeightDef, - Def, - }, - COUNTER, + pallet::{ + expand::warnings::{weight_constant_warning, weight_witness_warning}, + parse::call::CallWeightDef, + Def, + }, + COUNTER, }; use proc_macro2::TokenStream as TokenStream2; use proc_macro_warning::Warning; @@ -32,45 +32,56 @@ use syn::spanned::Spanned; /// * Generate enum call and implement various trait on it. /// * Implement Callable and call_function on `Pallet` pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { - let (span, where_clause, methods, docs) = match def.call.as_ref() { - Some(call) => { - let span = call.attr_span; - let where_clause = call.where_clause.clone(); - let methods = call.methods.clone(); - let docs = call.docs.clone(); - - (span, where_clause, methods, docs) - }, - None => (def.item.span(), def.config.where_clause.clone(), Vec::new(), Vec::new()), - }; - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_generics(span); - let type_decl_bounded_gen = &def.type_decl_bounded_generics(span); - let type_use_gen = &def.type_use_generics(span); - let call_ident = syn::Ident::new("Call", span); - let pallet_ident = &def.pallet_struct.pallet; - - let fn_name = methods.iter().map(|method| &method.name).collect::>(); - let call_index = methods.iter().map(|method| method.call_index).collect::>(); - let new_call_variant_fn_name = fn_name - .iter() - .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) - .collect::>(); - - let new_call_variant_doc = fn_name - .iter() - .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) - .collect::>(); - - let mut call_index_warnings = Vec::new(); - // Emit a warning for each call that is missing `call_index` when not in dev-mode. 
- for method in &methods { - if method.explicit_call_index || def.dev_mode { - continue - } - - let warning = Warning::new_deprecated("ImplicitCallIndex") + let (span, where_clause, methods, docs) = match def.call.as_ref() { + Some(call) => { + let span = call.attr_span; + let where_clause = call.where_clause.clone(); + let methods = call.methods.clone(); + let docs = call.docs.clone(); + + (span, where_clause, methods, docs) + } + None => ( + def.item.span(), + def.config.where_clause.clone(), + Vec::new(), + Vec::new(), + ), + }; + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let type_impl_gen = &def.type_impl_generics(span); + let type_decl_bounded_gen = &def.type_decl_bounded_generics(span); + let type_use_gen = &def.type_use_generics(span); + let call_ident = syn::Ident::new("Call", span); + let pallet_ident = &def.pallet_struct.pallet; + + let fn_name = methods + .iter() + .map(|method| &method.name) + .collect::>(); + let call_index = methods + .iter() + .map(|method| method.call_index) + .collect::>(); + let new_call_variant_fn_name = fn_name + .iter() + .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) + .collect::>(); + + let new_call_variant_doc = fn_name + .iter() + .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) + .collect::>(); + + let mut call_index_warnings = Vec::new(); + // Emit a warning for each call that is missing `call_index` when not in dev-mode. 
+ for method in &methods { + if method.explicit_call_index || def.dev_mode { + continue; + } + + let warning = Warning::new_deprecated("ImplicitCallIndex") .index(call_index_warnings.len()) .old("use implicit call indices") .new("ensure that all calls have a `pallet::call_index` attribute or put the pallet into `dev` mode") @@ -80,373 +91,408 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { ]) .span(method.name.span()) .build_or_panic(); - call_index_warnings.push(warning); - } - - let mut fn_weight = Vec::::new(); - let mut weight_warnings = Vec::new(); - for method in &methods { - match &method.weight { - CallWeightDef::DevModeDefault => fn_weight.push(syn::parse_quote!(0)), - CallWeightDef::Immediate(e) => { - weight_constant_warning(e, def.dev_mode, &mut weight_warnings); - weight_witness_warning(method, def.dev_mode, &mut weight_warnings); - - fn_weight.push(e.into_token_stream()); - }, - CallWeightDef::Inherited => { - let pallet_weight = def - .call - .as_ref() - .expect("we have methods; we have calls; qed") - .inherited_call_weight - .as_ref() - .expect("the parser prevents this"); - - // Expand `<::WeightInfo>::call_name()`. 
- let t = &pallet_weight.typename; - let n = &method.name; - fn_weight.push(quote!({ < #t > :: #n () })); - }, - } - } - debug_assert_eq!(fn_weight.len(), methods.len()); - - let fn_doc = methods.iter().map(|method| &method.docs).collect::>(); - - let args_name = methods - .iter() - .map(|method| method.args.iter().map(|(_, name, _)| name.clone()).collect::>()) - .collect::>(); - - let args_name_stripped = methods - .iter() - .map(|method| { - method - .args - .iter() - .map(|(_, name, _)| { - syn::Ident::new(name.to_string().trim_start_matches('_'), name.span()) - }) - .collect::>() - }) - .collect::>(); - - let make_args_name_pattern = |ref_tok| { - args_name - .iter() - .zip(args_name_stripped.iter()) - .map(|(args_name, args_name_stripped)| { - args_name - .iter() - .zip(args_name_stripped) - .map(|(args_name, args_name_stripped)| { - if args_name == args_name_stripped { - quote::quote!( #ref_tok #args_name ) - } else { - quote::quote!( #args_name_stripped: #ref_tok #args_name ) - } - }) - .collect::>() - }) - .collect::>() - }; - - let args_name_pattern = make_args_name_pattern(None); - let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); - - let args_type = methods - .iter() - .map(|method| method.args.iter().map(|(_, _, type_)| type_.clone()).collect::>()) - .collect::>(); - - let args_compact_attr = methods.iter().map(|method| { - method - .args - .iter() - .map(|(is_compact, _, type_)| { - if *is_compact { - quote::quote_spanned!(type_.span() => #[codec(compact)] ) - } else { - quote::quote!() - } - }) - .collect::>() - }); - - let default_docs = - [syn::parse_quote!(r"Contains a variant per dispatchable extrinsic that this pallet has.")]; - let docs = if docs.is_empty() { &default_docs[..] } else { &docs[..] }; - - let maybe_compile_error = if def.call.is_none() { - quote::quote! 
{ - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::call] defined, perhaps you should remove `Call` from \ - construct_runtime?", - )); - } - } else { - proc_macro2::TokenStream::new() - }; - - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = syn::Ident::new(&format!("__is_call_part_defined_{}", count), span); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; - - // Wrap all calls inside of storage layers - if let Some(syn::Item::Impl(item_impl)) = def - .call - .as_ref() - .map(|c| &mut def.item.content.as_mut().expect("Checked by def parser").1[c.index]) - { - item_impl.items.iter_mut().for_each(|i| { - if let syn::ImplItem::Fn(method) = i { - let block = &method.block; - method.block = syn::parse_quote! {{ - // We execute all dispatchable in a new storage layer, allowing them - // to return an error at any point, and undoing any storage changes. - #frame_support::storage::with_storage_layer(|| #block) - }}; - } - }); - } - - // Extracts #[allow] attributes, necessary so that we don't run into compiler warnings - let maybe_allow_attrs = methods - .iter() - .map(|method| { - method - .attrs - .iter() - .find(|attr| attr.path().is_ident("allow")) - .map_or(proc_macro2::TokenStream::new(), |attr| attr.to_token_stream()) - }) - .collect::>(); - - let cfg_attrs = methods - .iter() - .map(|method| { - let attrs = - method.cfg_attrs.iter().map(|attr| attr.to_token_stream()).collect::>(); - quote::quote!( #( #attrs )* ) - }) - .collect::>(); - - let feeless_check = methods.iter().map(|method| &method.feeless_check).collect::>(); - let feeless_check_result = - feeless_check.iter().zip(args_name.iter()).map(|(feeless_check, arg_name)| { - if let Some(feeless_check) = feeless_check { - quote::quote!(#feeless_check(origin, #( #arg_name, )*)) - } else { - quote::quote!(false) - } - }); - - quote::quote_spanned!(span => - #[doc(hidden)] - mod warnings { - #( - 
#call_index_warnings - )* - #( - #weight_warnings - )* - } - - #[allow(unused_imports)] - #[doc(hidden)] - pub mod __substrate_call_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - }; - } - - #[doc(hidden)] - pub use #macro_ident as is_call_part_defined; - } - - #( #[doc = #docs] )* - #[derive( - #frame_support::RuntimeDebugNoBound, - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - )] - #[codec(encode_bound())] - #[codec(decode_bound())] - #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] - #[allow(non_camel_case_types)] - pub enum #call_ident<#type_decl_bounded_gen> #where_clause { - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData<(#type_use_gen,)>, - #frame_support::Never, - ), - #( - #cfg_attrs - #( #[doc = #fn_doc] )* - #[codec(index = #call_index)] - #fn_name { - #( - #[allow(missing_docs)] - #args_compact_attr #args_name_stripped: #args_type - ),* - }, - )* - } - - impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { - #( - #cfg_attrs - #[doc = #new_call_variant_doc] - pub fn #new_call_variant_fn_name( - #( #args_name_stripped: #args_type ),* - ) -> Self { - Self::#fn_name { - #( #args_name_stripped ),* - } - } - )* - } - - impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo - for #call_ident<#type_use_gen> - #where_clause - { - fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { - match *self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern_ref, )* } => { - let __pallet_base_weight = #fn_weight; - - let __pallet_weight = < - dyn #frame_support::dispatch::WeighData<( #( & #args_type, )* )> - >::weigh_data(&__pallet_base_weight, ( #( #args_name, )* )); - - let __pallet_class = < - dyn 
#frame_support::dispatch::ClassifyDispatch< - ( #( & #args_type, )* ) - > - >::classify_dispatch(&__pallet_base_weight, ( #( #args_name, )* )); - - let __pallet_pays_fee = < - dyn #frame_support::dispatch::PaysFee<( #( & #args_type, )* )> - >::pays_fee(&__pallet_base_weight, ( #( #args_name, )* )); - - #frame_support::dispatch::DispatchInfo { - weight: __pallet_weight, - class: __pallet_class, - pays_fee: __pallet_pays_fee, - } - }, - )* - Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), - } - } - } - - impl<#type_impl_gen> #frame_support::dispatch::CheckIfFeeless for #call_ident<#type_use_gen> - #where_clause - { - type Origin = #frame_system::pallet_prelude::OriginFor; - #[allow(unused_variables)] - fn is_feeless(&self, origin: &Self::Origin) -> bool { - match *self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern_ref, )* } => { - #feeless_check_result - }, - )* - Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), - } - } - } - - impl<#type_impl_gen> #frame_support::traits::GetCallName for #call_ident<#type_use_gen> - #where_clause - { - fn get_call_name(&self) -> &'static str { - match *self { - #( #cfg_attrs Self::#fn_name { .. } => stringify!(#fn_name), )* - Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), - } - } - - fn get_call_names() -> &'static [&'static str] { - &[ #( #cfg_attrs stringify!(#fn_name), )* ] - } - } - - impl<#type_impl_gen> #frame_support::traits::GetCallIndex for #call_ident<#type_use_gen> - #where_clause - { - fn get_call_index(&self) -> u8 { - match *self { - #( #cfg_attrs Self::#fn_name { .. 
} => #call_index, )* - Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), - } - } - - fn get_call_indices() -> &'static [u8] { - &[ #( #cfg_attrs #call_index, )* ] - } - } - - impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable - for #call_ident<#type_use_gen> - #where_clause - { - type RuntimeOrigin = #frame_system::pallet_prelude::OriginFor; - fn dispatch_bypass_filter( - self, - origin: Self::RuntimeOrigin - ) -> #frame_support::dispatch::DispatchResultWithPostInfo { - #frame_support::dispatch_context::run_in_context(|| { - match self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern, )* } => { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!(stringify!(#fn_name)) - ); - #maybe_allow_attrs - <#pallet_ident<#type_use_gen>>::#fn_name(origin, #( #args_name, )* ) - .map(Into::into).map_err(Into::into) - }, - )* - Self::__Ignore(_, _) => { - let _ = origin; // Use origin for empty Call enum - unreachable!("__PhantomItem cannot be used."); - }, - } - }) - } - } - - impl<#type_impl_gen> #frame_support::dispatch::Callable for #pallet_ident<#type_use_gen> - #where_clause - { - type RuntimeCall = #call_ident<#type_use_gen>; - } - - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { - #[allow(dead_code)] - #[doc(hidden)] - pub fn call_functions() -> #frame_support::__private::metadata_ir::PalletCallMetadataIR { - #frame_support::__private::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() - } - } - ) + call_index_warnings.push(warning); + } + + let mut fn_weight = Vec::::new(); + let mut weight_warnings = Vec::new(); + for method in &methods { + match &method.weight { + CallWeightDef::DevModeDefault => fn_weight.push(syn::parse_quote!(0)), + CallWeightDef::Immediate(e) => { + weight_constant_warning(e, def.dev_mode, &mut weight_warnings); + weight_witness_warning(method, def.dev_mode, &mut weight_warnings); + + 
fn_weight.push(e.into_token_stream()); + } + CallWeightDef::Inherited => { + let pallet_weight = def + .call + .as_ref() + .expect("we have methods; we have calls; qed") + .inherited_call_weight + .as_ref() + .expect("the parser prevents this"); + + // Expand `<::WeightInfo>::call_name()`. + let t = &pallet_weight.typename; + let n = &method.name; + fn_weight.push(quote!({ < #t > :: #n () })); + } + } + } + debug_assert_eq!(fn_weight.len(), methods.len()); + + let fn_doc = methods + .iter() + .map(|method| &method.docs) + .collect::>(); + + let args_name = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, name, _)| name.clone()) + .collect::>() + }) + .collect::>(); + + let args_name_stripped = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, name, _)| { + syn::Ident::new(name.to_string().trim_start_matches('_'), name.span()) + }) + .collect::>() + }) + .collect::>(); + + let make_args_name_pattern = |ref_tok| { + args_name + .iter() + .zip(args_name_stripped.iter()) + .map(|(args_name, args_name_stripped)| { + args_name + .iter() + .zip(args_name_stripped) + .map(|(args_name, args_name_stripped)| { + if args_name == args_name_stripped { + quote::quote!( #ref_tok #args_name ) + } else { + quote::quote!( #args_name_stripped: #ref_tok #args_name ) + } + }) + .collect::>() + }) + .collect::>() + }; + + let args_name_pattern = make_args_name_pattern(None); + let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); + + let args_type = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, _, type_)| type_.clone()) + .collect::>() + }) + .collect::>(); + + let args_compact_attr = methods.iter().map(|method| { + method + .args + .iter() + .map(|(is_compact, _, type_)| { + if *is_compact { + quote::quote_spanned!(type_.span() => #[codec(compact)] ) + } else { + quote::quote!() + } + }) + .collect::>() + }); + + let default_docs = [syn::parse_quote!( + r"Contains a variant per 
dispatchable extrinsic that this pallet has." + )]; + let docs = if docs.is_empty() { + &default_docs[..] + } else { + &docs[..] + }; + + let maybe_compile_error = if def.call.is_none() { + quote::quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::call] defined, perhaps you should remove `Call` from \ + construct_runtime?", + )); + } + } else { + proc_macro2::TokenStream::new() + }; + + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = syn::Ident::new(&format!("__is_call_part_defined_{}", count), span); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { + "never" + } else { + "always" + }; + + // Wrap all calls inside of storage layers + if let Some(syn::Item::Impl(item_impl)) = def + .call + .as_ref() + .map(|c| &mut def.item.content.as_mut().expect("Checked by def parser").1[c.index]) + { + item_impl.items.iter_mut().for_each(|i| { + if let syn::ImplItem::Fn(method) = i { + let block = &method.block; + method.block = syn::parse_quote! {{ + // We execute all dispatchable in a new storage layer, allowing them + // to return an error at any point, and undoing any storage changes. 
+ #frame_support::storage::with_storage_layer(|| #block) + }}; + } + }); + } + + // Extracts #[allow] attributes, necessary so that we don't run into compiler warnings + let maybe_allow_attrs = methods + .iter() + .map(|method| { + method + .attrs + .iter() + .find(|attr| attr.path().is_ident("allow")) + .map_or(proc_macro2::TokenStream::new(), |attr| { + attr.to_token_stream() + }) + }) + .collect::>(); + + let cfg_attrs = methods + .iter() + .map(|method| { + let attrs = method + .cfg_attrs + .iter() + .map(|attr| attr.to_token_stream()) + .collect::>(); + quote::quote!( #( #attrs )* ) + }) + .collect::>(); + + let feeless_check = methods + .iter() + .map(|method| &method.feeless_check) + .collect::>(); + let feeless_check_result = + feeless_check + .iter() + .zip(args_name.iter()) + .map(|(feeless_check, arg_name)| { + if let Some(feeless_check) = feeless_check { + quote::quote!(#feeless_check(origin, #( #arg_name, )*)) + } else { + quote::quote!(false) + } + }); + + quote::quote_spanned!(span => + #[doc(hidden)] + mod warnings { + #( + #call_index_warnings + )* + #( + #weight_warnings + )* + } + + #[allow(unused_imports)] + #[doc(hidden)] + pub mod __substrate_call_check { + #[macro_export] + #[doc(hidden)] + macro_rules! 
#macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + }; + } + + #[doc(hidden)] + pub use #macro_ident as is_call_part_defined; + } + + #( #[doc = #docs] )* + #[derive( + #frame_support::RuntimeDebugNoBound, + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] + #[allow(non_camel_case_types)] + pub enum #call_ident<#type_decl_bounded_gen> #where_clause { + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData<(#type_use_gen,)>, + #frame_support::Never, + ), + #( + #cfg_attrs + #( #[doc = #fn_doc] )* + #[codec(index = #call_index)] + #fn_name { + #( + #[allow(missing_docs)] + #args_compact_attr #args_name_stripped: #args_type + ),* + }, + )* + } + + impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { + #( + #cfg_attrs + #[doc = #new_call_variant_doc] + pub fn #new_call_variant_fn_name( + #( #args_name_stripped: #args_type ),* + ) -> Self { + Self::#fn_name { + #( #args_name_stripped ),* + } + } + )* + } + + impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo + for #call_ident<#type_use_gen> + #where_clause + { + fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { + match *self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern_ref, )* } => { + let __pallet_base_weight = #fn_weight; + + let __pallet_weight = < + dyn #frame_support::dispatch::WeighData<( #( & #args_type, )* )> + >::weigh_data(&__pallet_base_weight, ( #( #args_name, )* )); + + let __pallet_class = < + dyn #frame_support::dispatch::ClassifyDispatch< + ( #( & #args_type, )* ) + > + >::classify_dispatch(&__pallet_base_weight, ( #( #args_name, )* )); + + let __pallet_pays_fee = < + dyn 
#frame_support::dispatch::PaysFee<( #( & #args_type, )* )> + >::pays_fee(&__pallet_base_weight, ( #( #args_name, )* )); + + #frame_support::dispatch::DispatchInfo { + weight: __pallet_weight, + class: __pallet_class, + pays_fee: __pallet_pays_fee, + } + }, + )* + Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), + } + } + } + + impl<#type_impl_gen> #frame_support::dispatch::CheckIfFeeless for #call_ident<#type_use_gen> + #where_clause + { + type Origin = #frame_system::pallet_prelude::OriginFor; + #[allow(unused_variables)] + fn is_feeless(&self, origin: &Self::Origin) -> bool { + match *self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern_ref, )* } => { + #feeless_check_result + }, + )* + Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), + } + } + } + + impl<#type_impl_gen> #frame_support::traits::GetCallName for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_name(&self) -> &'static str { + match *self { + #( #cfg_attrs Self::#fn_name { .. } => stringify!(#fn_name), )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_names() -> &'static [&'static str] { + &[ #( #cfg_attrs stringify!(#fn_name), )* ] + } + } + + impl<#type_impl_gen> #frame_support::traits::GetCallIndex for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_index(&self) -> u8 { + match *self { + #( #cfg_attrs Self::#fn_name { .. 
} => #call_index, )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_indices() -> &'static [u8] { + &[ #( #cfg_attrs #call_index, )* ] + } + } + + impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable + for #call_ident<#type_use_gen> + #where_clause + { + type RuntimeOrigin = #frame_system::pallet_prelude::OriginFor; + fn dispatch_bypass_filter( + self, + origin: Self::RuntimeOrigin + ) -> #frame_support::dispatch::DispatchResultWithPostInfo { + #frame_support::dispatch_context::run_in_context(|| { + match self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern, )* } => { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!(stringify!(#fn_name)) + ); + #maybe_allow_attrs + <#pallet_ident<#type_use_gen>>::#fn_name(origin, #( #args_name, )* ) + .map(Into::into).map_err(Into::into) + }, + )* + Self::__Ignore(_, _) => { + let _ = origin; // Use origin for empty Call enum + unreachable!("__PhantomItem cannot be used."); + }, + } + }) + } + } + + impl<#type_impl_gen> #frame_support::dispatch::Callable for #pallet_ident<#type_use_gen> + #where_clause + { + type RuntimeCall = #call_ident<#type_use_gen>; + } + + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { + #[allow(dead_code)] + #[doc(hidden)] + pub fn call_functions() -> #frame_support::__private::metadata_ir::PalletCallMetadataIR { + #frame_support::__private::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/composite.rs b/support/procedural-fork/src/pallet/expand/composite.rs index d449afe8f..49c0ad675 100644 --- a/support/procedural-fork/src/pallet/expand/composite.rs +++ b/support/procedural-fork/src/pallet/expand/composite.rs @@ -20,21 +20,21 @@ use proc_macro2::TokenStream; /// Expands `composite_enum` and adds the `VariantCount` implementation for it. 
pub fn expand_composites(def: &mut Def) -> TokenStream { - let mut expand = quote::quote!(); - let frame_support = &def.frame_support; + let mut expand = quote::quote!(); + let frame_support = &def.frame_support; - for composite in &def.composites { - let name = &composite.ident; - let (impl_generics, ty_generics, where_clause) = composite.generics.split_for_impl(); - let variants_count = composite.variant_count; + for composite in &def.composites { + let name = &composite.ident; + let (impl_generics, ty_generics, where_clause) = composite.generics.split_for_impl(); + let variants_count = composite.variant_count; - // add `VariantCount` implementation for `composite_enum` - expand.extend(quote::quote_spanned!(composite.attr_span => + // add `VariantCount` implementation for `composite_enum` + expand.extend(quote::quote_spanned!(composite.attr_span => impl #impl_generics #frame_support::traits::VariantCount for #name #ty_generics #where_clause { const VARIANT_COUNT: u32 = #variants_count; } )); - } + } - expand + expand } diff --git a/support/procedural-fork/src/pallet/expand/config.rs b/support/procedural-fork/src/pallet/expand/config.rs index 5cf4035a8..836c74ae7 100644 --- a/support/procedural-fork/src/pallet/expand/config.rs +++ b/support/procedural-fork/src/pallet/expand/config.rs @@ -23,20 +23,20 @@ use syn::{parse_quote, Item}; /// /// * Generate default rust doc pub fn expand_config(def: &mut Def) -> TokenStream { - let config = &def.config; - let config_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[config.index]; - if let Item::Trait(item) = item { - item - } else { - unreachable!("Checked by config parser") - } - }; + let config = &def.config; + let config_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[config.index]; + if let Item::Trait(item) = item { + item + } else { + unreachable!("Checked by config parser") + } + }; - config_item.attrs.insert( - 0, - parse_quote!( - #[doc 
= r" + config_item.attrs.insert( + 0, + parse_quote!( + #[doc = r" Configuration trait of this pallet. The main purpose of this trait is to act as an interface between this pallet and the runtime in @@ -44,54 +44,54 @@ which it is embedded in. A type, function, or constant in this trait is essentia configured by the runtime that includes this pallet. Consequently, a runtime that wants to include this pallet must implement this trait." - ] - ), - ); + ] + ), + ); - // we only emit `DefaultConfig` if there are trait items, so an empty `DefaultConfig` is - // impossible consequently. - match &config.default_sub_trait { - Some(default_sub_trait) if default_sub_trait.items.len() > 0 => { - let trait_items = &default_sub_trait - .items - .iter() - .map(|item| { - if item.1 { - if let syn::TraitItem::Type(item) = item.0.clone() { - let mut item = item.clone(); - item.bounds.clear(); - syn::TraitItem::Type(item) - } else { - item.0.clone() - } - } else { - item.0.clone() - } - }) - .collect::>(); + // we only emit `DefaultConfig` if there are trait items, so an empty `DefaultConfig` is + // impossible consequently. + match &config.default_sub_trait { + Some(default_sub_trait) if default_sub_trait.items.len() > 0 => { + let trait_items = &default_sub_trait + .items + .iter() + .map(|item| { + if item.1 { + if let syn::TraitItem::Type(item) = item.0.clone() { + let mut item = item.clone(); + item.bounds.clear(); + syn::TraitItem::Type(item) + } else { + item.0.clone() + } + } else { + item.0.clone() + } + }) + .collect::>(); - let type_param_bounds = if default_sub_trait.has_system { - let system = &def.frame_system; - quote::quote!(: #system::DefaultConfig) - } else { - quote::quote!() - }; + let type_param_bounds = if default_sub_trait.has_system { + let system = &def.frame_system; + quote::quote!(: #system::DefaultConfig) + } else { + quote::quote!() + }; - quote!( - /// Based on [`Config`]. 
Auto-generated by - /// [`#[pallet::config(with_default)]`](`frame_support::pallet_macros::config`). - /// Can be used in tandem with - /// [`#[register_default_config]`](`frame_support::register_default_config`) and - /// [`#[derive_impl]`](`frame_support::derive_impl`) to derive test config traits - /// based on existing pallet config traits in a safe and developer-friendly way. - /// - /// See [here](`frame_support::pallet_macros::config`) for more information and caveats about - /// the auto-generated `DefaultConfig` trait and how it is generated. - pub trait DefaultConfig #type_param_bounds { - #(#trait_items)* - } - ) - }, - _ => Default::default(), - } + quote!( + /// Based on [`Config`]. Auto-generated by + /// [`#[pallet::config(with_default)]`](`frame_support::pallet_macros::config`). + /// Can be used in tandem with + /// [`#[register_default_config]`](`frame_support::register_default_config`) and + /// [`#[derive_impl]`](`frame_support::derive_impl`) to derive test config traits + /// based on existing pallet config traits in a safe and developer-friendly way. + /// + /// See [here](`frame_support::pallet_macros::config`) for more information and caveats about + /// the auto-generated `DefaultConfig` trait and how it is generated. + pub trait DefaultConfig #type_param_bounds { + #(#trait_items)* + } + ) + } + _ => Default::default(), + } } diff --git a/support/procedural-fork/src/pallet/expand/constants.rs b/support/procedural-fork/src/pallet/expand/constants.rs index 57fa8b7f3..5153ccf49 100644 --- a/support/procedural-fork/src/pallet/expand/constants.rs +++ b/support/procedural-fork/src/pallet/expand/constants.rs @@ -18,91 +18,99 @@ use crate::pallet::Def; struct ConstDef { - /// Name of the associated type. - pub ident: syn::Ident, - /// The type in Get, e.g. 
`u32` in `type Foo: Get;`, but `Self` is replaced by `T` - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, - /// default_byte implementation - pub default_byte_impl: proc_macro2::TokenStream, - /// Constant name for Metadata (optional) - pub metadata_name: Option, + /// Name of the associated type. + pub ident: syn::Ident, + /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, + /// default_byte implementation + pub default_byte_impl: proc_macro2::TokenStream, + /// Constant name for Metadata (optional) + pub metadata_name: Option, } /// /// * Impl fn module_constant_metadata for pallet. pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - let pallet_ident = &def.pallet_struct.pallet; - let trait_use_gen = &def.trait_use_generics(proc_macro2::Span::call_site()); - - let mut where_clauses = vec![&def.config.where_clause]; - where_clauses.extend(def.extra_constants.iter().map(|d| &d.where_clause)); - let completed_where_clause = super::merge_where_clauses(&where_clauses); - - let config_consts = def.config.consts_metadata.iter().map(|const_| { - let ident = &const_.ident; - let const_type = &const_.type_; - - ConstDef { - ident: const_.ident.clone(), - type_: const_.type_.clone(), - doc: const_.doc.clone(), - default_byte_impl: quote::quote!( - let value = <::#ident as - #frame_support::traits::Get<#const_type>>::get(); - #frame_support::__private::codec::Encode::encode(&value) - ), - metadata_name: None, - } - }); - - let extra_consts = def.extra_constants.iter().flat_map(|d| &d.extra_constants).map(|const_| { - let ident = &const_.ident; - - ConstDef { - ident: const_.ident.clone(), - type_: const_.type_.clone(), - doc: const_.doc.clone(), - 
default_byte_impl: quote::quote!( - let value = >::#ident(); - #frame_support::__private::codec::Encode::encode(&value) - ), - metadata_name: const_.metadata_name.clone(), - } - }); - - let consts = config_consts.chain(extra_consts).map(|const_| { - let const_type = &const_.type_; - let ident_str = format!("{}", const_.metadata_name.unwrap_or(const_.ident)); - - let no_docs = vec![]; - let doc = if cfg!(feature = "no-metadata-docs") { &no_docs } else { &const_.doc }; - - let default_byte_impl = &const_.default_byte_impl; - - quote::quote!({ - #frame_support::__private::metadata_ir::PalletConstantMetadataIR { - name: #ident_str, - ty: #frame_support::__private::scale_info::meta_type::<#const_type>(), - value: { #default_byte_impl }, - docs: #frame_support::__private::sp_std::vec![ #( #doc ),* ], - } - }) - }); - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ - - #[doc(hidden)] - pub fn pallet_constants_metadata() - -> #frame_support::__private::sp_std::vec::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> - { - #frame_support::__private::sp_std::vec![ #( #consts ),* ] - } - } - ) + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + let pallet_ident = &def.pallet_struct.pallet; + let trait_use_gen = &def.trait_use_generics(proc_macro2::Span::call_site()); + + let mut where_clauses = vec![&def.config.where_clause]; + where_clauses.extend(def.extra_constants.iter().map(|d| &d.where_clause)); + let completed_where_clause = super::merge_where_clauses(&where_clauses); + + let config_consts = def.config.consts_metadata.iter().map(|const_| { + let ident = &const_.ident; + let const_type = &const_.type_; + + ConstDef { + ident: const_.ident.clone(), + type_: const_.type_.clone(), + doc: const_.doc.clone(), + default_byte_impl: quote::quote!( + let value = 
<::#ident as + #frame_support::traits::Get<#const_type>>::get(); + #frame_support::__private::codec::Encode::encode(&value) + ), + metadata_name: None, + } + }); + + let extra_consts = def + .extra_constants + .iter() + .flat_map(|d| &d.extra_constants) + .map(|const_| { + let ident = &const_.ident; + + ConstDef { + ident: const_.ident.clone(), + type_: const_.type_.clone(), + doc: const_.doc.clone(), + default_byte_impl: quote::quote!( + let value = >::#ident(); + #frame_support::__private::codec::Encode::encode(&value) + ), + metadata_name: const_.metadata_name.clone(), + } + }); + + let consts = config_consts.chain(extra_consts).map(|const_| { + let const_type = &const_.type_; + let ident_str = format!("{}", const_.metadata_name.unwrap_or(const_.ident)); + + let no_docs = vec![]; + let doc = if cfg!(feature = "no-metadata-docs") { + &no_docs + } else { + &const_.doc + }; + + let default_byte_impl = &const_.default_byte_impl; + + quote::quote!({ + #frame_support::__private::metadata_ir::PalletConstantMetadataIR { + name: #ident_str, + ty: #frame_support::__private::scale_info::meta_type::<#const_type>(), + value: { #default_byte_impl }, + docs: #frame_support::__private::sp_std::vec![ #( #doc ),* ], + } + }) + }); + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ + + #[doc(hidden)] + pub fn pallet_constants_metadata() + -> #frame_support::__private::sp_std::vec::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> + { + #frame_support::__private::sp_std::vec![ #( #consts ),* ] + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/doc_only.rs b/support/procedural-fork/src/pallet/expand/doc_only.rs index 621a051ac..3e60e9a9b 100644 --- a/support/procedural-fork/src/pallet/expand/doc_only.rs +++ b/support/procedural-fork/src/pallet/expand/doc_only.rs @@ -20,84 +20,84 @@ use proc_macro2::Span; use crate::pallet::Def; pub fn expand_doc_only(def: &mut Def) -> proc_macro2::TokenStream { - let 
dispatchables = if let Some(call_def) = &def.call { - let type_impl_generics = def.type_impl_generics(Span::call_site()); - call_def - .methods - .iter() - .map(|method| { - let name = &method.name; - let args = &method - .args - .iter() - .map(|(_, arg_name, arg_type)| quote::quote!( #arg_name: #arg_type, )) - .collect::(); - let docs = &method.docs; + let dispatchables = if let Some(call_def) = &def.call { + let type_impl_generics = def.type_impl_generics(Span::call_site()); + call_def + .methods + .iter() + .map(|method| { + let name = &method.name; + let args = &method + .args + .iter() + .map(|(_, arg_name, arg_type)| quote::quote!( #arg_name: #arg_type, )) + .collect::(); + let docs = &method.docs; - let real = format!(" [`Pallet::{}`].", name); - quote::quote!( - #( #[doc = #docs] )* - /// - /// # Warning: Doc-Only - /// - /// This function is an automatically generated, and is doc-only, uncallable - /// stub. See the real version in - #[ doc = #real ] - pub fn #name<#type_impl_generics>(#args) { unreachable!(); } - ) - }) - .collect::() - } else { - quote::quote!() - }; + let real = format!(" [`Pallet::{}`].", name); + quote::quote!( + #( #[doc = #docs] )* + /// + /// # Warning: Doc-Only + /// + /// This function is an automatically generated, and is doc-only, uncallable + /// stub. See the real version in + #[ doc = #real ] + pub fn #name<#type_impl_generics>(#args) { unreachable!(); } + ) + }) + .collect::() + } else { + quote::quote!() + }; - let storage_types = def - .storages - .iter() - .map(|storage| { - let storage_name = &storage.ident; - let storage_type_docs = &storage.docs; - let real = format!("[`pallet::{}`].", storage_name); - quote::quote!( - #( #[doc = #storage_type_docs] )* - /// - /// # Warning: Doc-Only - /// - /// This type is automatically generated, and is doc-only. 
See the real version in - #[ doc = #real ] - pub struct #storage_name(); - ) - }) - .collect::(); + let storage_types = def + .storages + .iter() + .map(|storage| { + let storage_name = &storage.ident; + let storage_type_docs = &storage.docs; + let real = format!("[`pallet::{}`].", storage_name); + quote::quote!( + #( #[doc = #storage_type_docs] )* + /// + /// # Warning: Doc-Only + /// + /// This type is automatically generated, and is doc-only. See the real version in + #[ doc = #real ] + pub struct #storage_name(); + ) + }) + .collect::(); - quote::quote!( - /// Auto-generated docs-only module listing all (public and private) defined storage types - /// for this pallet. - /// - /// # Warning: Doc-Only - /// - /// Members of this module cannot be used directly and are only provided for documentation - /// purposes. - /// - /// To see the actual storage type, find a struct with the same name at the root of the - /// pallet, in the list of [*Type Definitions*](../index.html#types). - #[cfg(doc)] - pub mod storage_types { - use super::*; - #storage_types - } + quote::quote!( + /// Auto-generated docs-only module listing all (public and private) defined storage types + /// for this pallet. + /// + /// # Warning: Doc-Only + /// + /// Members of this module cannot be used directly and are only provided for documentation + /// purposes. + /// + /// To see the actual storage type, find a struct with the same name at the root of the + /// pallet, in the list of [*Type Definitions*](../index.html#types). + #[cfg(doc)] + pub mod storage_types { + use super::*; + #storage_types + } - /// Auto-generated docs-only module listing all defined dispatchables for this pallet. - /// - /// # Warning: Doc-Only - /// - /// Members of this module cannot be used directly and are only provided for documentation - /// purposes. To see the real version of each dispatchable, look for them in [`Pallet`] or - /// [`Call`]. 
- #[cfg(doc)] - pub mod dispatchables { - use super::*; - #dispatchables - } - ) + /// Auto-generated docs-only module listing all defined dispatchables for this pallet. + /// + /// # Warning: Doc-Only + /// + /// Members of this module cannot be used directly and are only provided for documentation + /// purposes. To see the real version of each dispatchable, look for them in [`Pallet`] or + /// [`Call`]. + #[cfg(doc)] + pub mod dispatchables { + use super::*; + #dispatchables + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/documentation.rs b/support/procedural-fork/src/pallet/expand/documentation.rs index ec19f889a..adc4f7ce9 100644 --- a/support/procedural-fork/src/pallet/expand/documentation.rs +++ b/support/procedural-fork/src/pallet/expand/documentation.rs @@ -28,12 +28,12 @@ const PALLET_DOC: &'static str = "pallet_doc"; /// Supported format: /// `#[pallet_doc(PATH)]`: The path of the file from which the documentation is loaded fn parse_pallet_doc_value(attr: &Attribute) -> syn::Result { - let lit: syn::LitStr = attr.parse_args().map_err(|_| { + let lit: syn::LitStr = attr.parse_args().map_err(|_| { let msg = "The `pallet_doc` received an unsupported argument. 
Supported format: `pallet_doc(\"PATH\")`"; syn::Error::new(attr.span(), msg) })?; - Ok(DocMetaValue::Path(lit)) + Ok(DocMetaValue::Path(lit)) } /// Get the value from the `doc` comment attribute: @@ -42,46 +42,49 @@ fn parse_pallet_doc_value(attr: &Attribute) -> syn::Result { /// - `#[doc = "A doc string"]`: Documentation as a string literal /// - `#[doc = include_str!(PATH)]`: Documentation obtained from a path fn parse_doc_value(attr: &Attribute) -> syn::Result> { - if !attr.path().is_ident(DOC) { - return Ok(None) - } - - let meta = attr.meta.require_name_value()?; - - match &meta.value { - syn::Expr::Lit(lit) => Ok(Some(DocMetaValue::Lit(lit.lit.clone()))), - syn::Expr::Macro(mac) if mac.mac.path.is_ident("include_str") => - Ok(Some(DocMetaValue::Path(mac.mac.parse_body()?))), - _ => - Err(syn::Error::new(attr.span(), "Expected `= \"docs\"` or `= include_str!(\"PATH\")`")), - } + if !attr.path().is_ident(DOC) { + return Ok(None); + } + + let meta = attr.meta.require_name_value()?; + + match &meta.value { + syn::Expr::Lit(lit) => Ok(Some(DocMetaValue::Lit(lit.lit.clone()))), + syn::Expr::Macro(mac) if mac.mac.path.is_ident("include_str") => { + Ok(Some(DocMetaValue::Path(mac.mac.parse_body()?))) + } + _ => Err(syn::Error::new( + attr.span(), + "Expected `= \"docs\"` or `= include_str!(\"PATH\")`", + )), + } } /// Supported documentation tokens. #[derive(Debug)] enum DocMetaValue { - /// Documentation with string literals. - /// - /// `#[doc = "Lit"]` - Lit(Lit), - /// Documentation with `include_str!` macro. - /// - /// The string literal represents the file `PATH`. - /// - /// `#[doc = include_str!(PATH)]` - Path(LitStr), + /// Documentation with string literals. + /// + /// `#[doc = "Lit"]` + Lit(Lit), + /// Documentation with `include_str!` macro. + /// + /// The string literal represents the file `PATH`. 
+ /// + /// `#[doc = include_str!(PATH)]` + Path(LitStr), } impl ToTokens for DocMetaValue { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - DocMetaValue::Lit(lit) => lit.to_tokens(tokens), - DocMetaValue::Path(path_lit) => { - let decl = quote::quote!(include_str!(#path_lit)); - tokens.extend(decl) - }, - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + DocMetaValue::Lit(lit) => lit.to_tokens(tokens), + DocMetaValue::Path(path_lit) => { + let decl = quote::quote!(include_str!(#path_lit)); + tokens.extend(decl) + } + } + } } /// Extract the documentation from the given pallet definition @@ -110,63 +113,63 @@ impl ToTokens for DocMetaValue { /// Unlike the `doc` attribute, the documentation provided to the `proc_macro` attribute is /// not added to the pallet. pub fn expand_documentation(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - let pallet_ident = &def.pallet_struct.pallet; - let where_clauses = &def.config.where_clause; - - // TODO: Use [drain_filter](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter) when it is stable. - - // The `pallet_doc` attributes are excluded from the generation of the pallet, - // but they are included in the runtime metadata. - let mut pallet_docs = Vec::with_capacity(def.item.attrs.len()); - let mut index = 0; - while index < def.item.attrs.len() { - let attr = &def.item.attrs[index]; - if attr.path().get_ident().map_or(false, |i| *i == PALLET_DOC) { - pallet_docs.push(def.item.attrs.remove(index)); - // Do not increment the index, we have just removed the - // element from the attributes. - continue - } - - index += 1; - } - - // Capture the `#[doc = include_str!("../README.md")]` and `#[doc = "Documentation"]`. 
- let docs = match def - .item - .attrs - .iter() - .filter_map(|v| parse_doc_value(v).transpose()) - .collect::>>() - { - Ok(r) => r, - Err(err) => return err.into_compile_error(), - }; - - // Capture the `#[pallet_doc("../README.md")]`. - let pallet_docs = match pallet_docs - .into_iter() - .map(|attr| parse_pallet_doc_value(&attr)) - .collect::>>() - { - Ok(docs) => docs, - Err(err) => return err.into_compile_error(), - }; - - let docs = docs.iter().chain(pallet_docs.iter()); - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clauses{ - - #[doc(hidden)] - pub fn pallet_documentation_metadata() - -> #frame_support::__private::sp_std::vec::Vec<&'static str> - { - #frame_support::__private::sp_std::vec![ #( #docs ),* ] - } - } - ) + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + let pallet_ident = &def.pallet_struct.pallet; + let where_clauses = &def.config.where_clause; + + // TODO: Use [drain_filter](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter) when it is stable. + + // The `pallet_doc` attributes are excluded from the generation of the pallet, + // but they are included in the runtime metadata. + let mut pallet_docs = Vec::with_capacity(def.item.attrs.len()); + let mut index = 0; + while index < def.item.attrs.len() { + let attr = &def.item.attrs[index]; + if attr.path().get_ident().map_or(false, |i| *i == PALLET_DOC) { + pallet_docs.push(def.item.attrs.remove(index)); + // Do not increment the index, we have just removed the + // element from the attributes. + continue; + } + + index += 1; + } + + // Capture the `#[doc = include_str!("../README.md")]` and `#[doc = "Documentation"]`. 
+ let docs = match def + .item + .attrs + .iter() + .filter_map(|v| parse_doc_value(v).transpose()) + .collect::>>() + { + Ok(r) => r, + Err(err) => return err.into_compile_error(), + }; + + // Capture the `#[pallet_doc("../README.md")]`. + let pallet_docs = match pallet_docs + .into_iter() + .map(|attr| parse_pallet_doc_value(&attr)) + .collect::>>() + { + Ok(docs) => docs, + Err(err) => return err.into_compile_error(), + }; + + let docs = docs.iter().chain(pallet_docs.iter()); + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clauses{ + + #[doc(hidden)] + pub fn pallet_documentation_metadata() + -> #frame_support::__private::sp_std::vec::Vec<&'static str> + { + #frame_support::__private::sp_std::vec![ #( #docs ),* ] + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/error.rs b/support/procedural-fork/src/pallet/expand/error.rs index 72fb6e923..e2c3f680c 100644 --- a/support/procedural-fork/src/pallet/expand/error.rs +++ b/support/procedural-fork/src/pallet/expand/error.rs @@ -16,11 +16,11 @@ // limitations under the License. use crate::{ - pallet::{ - parse::error::{VariantDef, VariantField}, - Def, - }, - COUNTER, + pallet::{ + parse::error::{VariantDef, VariantField}, + Def, + }, + COUNTER, }; use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; @@ -29,49 +29,49 @@ use syn::spanned::Spanned; /// /// * impl various trait on Error pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let error_token_unique_id = - syn::Ident::new(&format!("__tt_error_token_{}", count), def.item.span()); - - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let config_where_clause = &def.config.where_clause; - - let error = if let Some(error) = &def.error { - error - } else { - return quote::quote! { - #[macro_export] - #[doc(hidden)] - macro_rules! 
#error_token_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - } - }; - } - - pub use #error_token_unique_id as tt_error_token; - } - }; - - let error_ident = &error.error; - let type_impl_gen = &def.type_impl_generics(error.attr_span); - let type_use_gen = &def.type_use_generics(error.attr_span); - - let phantom_variant: syn::Variant = syn::parse_quote!( - #[doc(hidden)] - #[codec(skip)] - __Ignore( - #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)>, - #frame_support::Never, - ) - ); - - let as_str_matches = error.variants.iter().map( + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let error_token_unique_id = + syn::Ident::new(&format!("__tt_error_token_{}", count), def.item.span()); + + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let config_where_clause = &def.config.where_clause; + + let error = if let Some(error) = &def.error { + error + } else { + return quote::quote! { + #[macro_export] + #[doc(hidden)] + macro_rules! #error_token_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! 
{ + $caller + } + }; + } + + pub use #error_token_unique_id as tt_error_token; + }; + }; + + let error_ident = &error.error; + let type_impl_gen = &def.type_impl_generics(error.attr_span); + let type_use_gen = &def.type_use_generics(error.attr_span); + + let phantom_variant: syn::Variant = syn::parse_quote!( + #[doc(hidden)] + #[codec(skip)] + __Ignore( + #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)>, + #frame_support::Never, + ) + ); + + let as_str_matches = error.variants.iter().map( |VariantDef { ident: variant, field: field_ty, docs: _, cfg_attrs }| { let variant_str = variant.to_string(); let cfg_attrs = cfg_attrs.iter().map(|attr| attr.to_token_stream()); @@ -89,103 +89,107 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { }, ); - let error_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; - if let syn::Item::Enum(item) = item { - item - } else { - unreachable!("Checked by error parser") - } - }; - - error_item.variants.insert(0, phantom_variant); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; - - // derive TypeInfo for error metadata - error_item.attrs.push(syn::parse_quote! 
{ - #[derive( - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - #frame_support::PalletError, - )] - }); - error_item.attrs.push(syn::parse_quote!( - #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] - )); - - if get_doc_literals(&error_item.attrs).is_empty() { - error_item.attrs.push(syn::parse_quote!( - #[doc = "The `Error` enum of this pallet."] - )); - } - - quote::quote_spanned!(error.attr_span => - impl<#type_impl_gen> #frame_support::__private::sp_std::fmt::Debug for #error_ident<#type_use_gen> - #config_where_clause - { - fn fmt(&self, f: &mut #frame_support::__private::sp_std::fmt::Formatter<'_>) - -> #frame_support::__private::sp_std::fmt::Result - { - f.write_str(self.as_str()) - } - } - - impl<#type_impl_gen> #error_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn as_str(&self) -> &'static str { - match &self { - Self::__Ignore(_, _) => unreachable!("`__Ignore` can never be constructed"), - #( #as_str_matches )* - } - } - } - - impl<#type_impl_gen> From<#error_ident<#type_use_gen>> for &'static str - #config_where_clause - { - fn from(err: #error_ident<#type_use_gen>) -> &'static str { - err.as_str() - } - } - - impl<#type_impl_gen> From<#error_ident<#type_use_gen>> - for #frame_support::sp_runtime::DispatchError - #config_where_clause - { - fn from(err: #error_ident<#type_use_gen>) -> Self { - use #frame_support::__private::codec::Encode; - let index = < - ::PalletInfo - as #frame_support::traits::PalletInfo - >::index::>() - .expect("Every active module has an index in the runtime; qed") as u8; - let mut encoded = err.encode(); - encoded.resize(#frame_support::MAX_MODULE_ERROR_ENCODED_SIZE, 0); - - #frame_support::sp_runtime::DispatchError::Module(#frame_support::sp_runtime::ModuleError { - index, - error: TryInto::try_into(encoded).expect("encoded error is resized to be equal to the maximum encoded error size; 
qed"), - message: Some(err.as_str()), - }) - } - } - - #[macro_export] - #[doc(hidden)] - macro_rules! #error_token_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - error = [{ #error_ident }] - } - }; - } - - pub use #error_token_unique_id as tt_error_token; - ) + let error_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; + if let syn::Item::Enum(item) = item { + item + } else { + unreachable!("Checked by error parser") + } + }; + + error_item.variants.insert(0, phantom_variant); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { + "never" + } else { + "always" + }; + + // derive TypeInfo for error metadata + error_item.attrs.push(syn::parse_quote! { + #[derive( + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + #frame_support::PalletError, + )] + }); + error_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] + )); + + if get_doc_literals(&error_item.attrs).is_empty() { + error_item.attrs.push(syn::parse_quote!( + #[doc = "The `Error` enum of this pallet."] + )); + } + + quote::quote_spanned!(error.attr_span => + impl<#type_impl_gen> #frame_support::__private::sp_std::fmt::Debug for #error_ident<#type_use_gen> + #config_where_clause + { + fn fmt(&self, f: &mut #frame_support::__private::sp_std::fmt::Formatter<'_>) + -> #frame_support::__private::sp_std::fmt::Result + { + f.write_str(self.as_str()) + } + } + + impl<#type_impl_gen> #error_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn as_str(&self) -> &'static str { + match &self { + Self::__Ignore(_, _) => unreachable!("`__Ignore` can never be constructed"), + #( #as_str_matches )* + } + } + } + + impl<#type_impl_gen> From<#error_ident<#type_use_gen>> for &'static str + #config_where_clause + { + fn from(err: 
#error_ident<#type_use_gen>) -> &'static str { + err.as_str() + } + } + + impl<#type_impl_gen> From<#error_ident<#type_use_gen>> + for #frame_support::sp_runtime::DispatchError + #config_where_clause + { + fn from(err: #error_ident<#type_use_gen>) -> Self { + use #frame_support::__private::codec::Encode; + let index = < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::index::>() + .expect("Every active module has an index in the runtime; qed") as u8; + let mut encoded = err.encode(); + encoded.resize(#frame_support::MAX_MODULE_ERROR_ENCODED_SIZE, 0); + + #frame_support::sp_runtime::DispatchError::Module(#frame_support::sp_runtime::ModuleError { + index, + error: TryInto::try_into(encoded).expect("encoded error is resized to be equal to the maximum encoded error size; qed"), + message: Some(err.as_str()), + }) + } + } + + #[macro_export] + #[doc(hidden)] + macro_rules! #error_token_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + error = [{ #error_ident }] + } + }; + } + + pub use #error_token_unique_id as tt_error_token; + ) } diff --git a/support/procedural-fork/src/pallet/expand/event.rs b/support/procedural-fork/src/pallet/expand/event.rs index 655fc5507..931dcd95a 100644 --- a/support/procedural-fork/src/pallet/expand/event.rs +++ b/support/procedural-fork/src/pallet/expand/event.rs @@ -16,8 +16,8 @@ // limitations under the License. use crate::{ - pallet::{parse::event::PalletEventDepositAttr, Def}, - COUNTER, + pallet::{parse::event::PalletEventDepositAttr, Def}, + COUNTER, }; use frame_support_procedural_tools::get_doc_literals; use syn::{spanned::Spanned, Ident}; @@ -27,148 +27,159 @@ use syn::{spanned::Spanned, Ident}; /// * Impl various trait on Event including metadata /// * if deposit_event is defined, implement deposit_event on module. 
pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - - let (event, macro_ident) = if let Some(event) = &def.event { - let ident = Ident::new(&format!("__is_event_part_defined_{}", count), event.attr_span); - (event, ident) - } else { - let macro_ident = - Ident::new(&format!("__is_event_part_defined_{}", count), def.item.span()); - - return quote::quote! { - #[doc(hidden)] - pub mod __substrate_event_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::event] defined, perhaps you should \ - remove `Event` from construct_runtime?", - )); - } - } - - #[doc(hidden)] - pub use #macro_ident as is_event_part_defined; - } - } - }; - - let event_where_clause = &event.where_clause; - - // NOTE: actually event where clause must be a subset of config where clause because of - // `type RuntimeEvent: From>`. But we merge either way for potential better error - // message - let completed_where_clause = - super::merge_where_clauses(&[&event.where_clause, &def.config.where_clause]); - - let event_ident = &event.event; - let frame_system = &def.frame_system; - let frame_support = &def.frame_support; - let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); - let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); - - let event_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; - if let syn::Item::Enum(item) = item { - item - } else { - unreachable!("Checked by event parser") - } - }; - - // Phantom data is added for generic event. - if event.gen_kind.is_generic() { - let variant = syn::parse_quote!( - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData<(#event_use_gen)>, - #frame_support::Never, - ) - ); - - // Push ignore variant at the end. 
- event_item.variants.push(variant); - } - - if get_doc_literals(&event_item.attrs).is_empty() { - event_item - .attrs - .push(syn::parse_quote!(#[doc = "The `Event` enum of this pallet"])); - } - - // derive some traits because system event require Clone, FullCodec, Eq, PartialEq and Debug - event_item.attrs.push(syn::parse_quote!( - #[derive( - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::RuntimeDebugNoBound, - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - )] - )); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; - - // skip requirement for type params to implement `TypeInfo`, and set docs capture - event_item.attrs.push(syn::parse_quote!( - #[scale_info(skip_type_params(#event_use_gen), capture_docs = #capture_docs)] - )); - - let deposit_event = if let Some(deposit_event) = &event.deposit_event { - let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); - let trait_use_gen = &def.trait_use_generics(event.attr_span); - let type_impl_gen = &def.type_impl_generics(event.attr_span); - let type_use_gen = &def.type_use_generics(event.attr_span); - let pallet_ident = &def.pallet_struct.pallet; - - let PalletEventDepositAttr { fn_vis, fn_span, .. } = deposit_event; - - quote::quote_spanned!(*fn_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #fn_vis fn deposit_event(event: Event<#event_use_gen>) { - let event = < - ::RuntimeEvent as - From> - >::from(event); - - let event = < - ::RuntimeEvent as - Into<::RuntimeEvent> - >::into(event); - - <#frame_system::Pallet>::deposit_event(event) - } - } - ) - } else { - Default::default() - }; - - quote::quote_spanned!(event.attr_span => - #[doc(hidden)] - pub mod __substrate_event_check { - #[macro_export] - #[doc(hidden)] - macro_rules! 
#macro_ident { - ($pallet_name:ident) => {}; - } - - #[doc(hidden)] - pub use #macro_ident as is_event_part_defined; - } - - #deposit_event - - impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { - fn from(_: #event_ident<#event_use_gen>) {} - } - ) + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let (event, macro_ident) = if let Some(event) = &def.event { + let ident = Ident::new( + &format!("__is_event_part_defined_{}", count), + event.attr_span, + ); + (event, ident) + } else { + let macro_ident = Ident::new( + &format!("__is_event_part_defined_{}", count), + def.item.span(), + ); + + return quote::quote! { + #[doc(hidden)] + pub mod __substrate_event_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::event] defined, perhaps you should \ + remove `Event` from construct_runtime?", + )); + } + } + + #[doc(hidden)] + pub use #macro_ident as is_event_part_defined; + } + }; + }; + + let event_where_clause = &event.where_clause; + + // NOTE: actually event where clause must be a subset of config where clause because of + // `type RuntimeEvent: From>`. But we merge either way for potential better error + // message + let completed_where_clause = + super::merge_where_clauses(&[&event.where_clause, &def.config.where_clause]); + + let event_ident = &event.event; + let frame_system = &def.frame_system; + let frame_support = &def.frame_support; + let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); + let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); + + let event_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; + if let syn::Item::Enum(item) = item { + item + } else { + unreachable!("Checked by event parser") + } + }; + + // Phantom data is added for generic event. 
+ if event.gen_kind.is_generic() { + let variant = syn::parse_quote!( + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData<(#event_use_gen)>, + #frame_support::Never, + ) + ); + + // Push ignore variant at the end. + event_item.variants.push(variant); + } + + if get_doc_literals(&event_item.attrs).is_empty() { + event_item + .attrs + .push(syn::parse_quote!(#[doc = "The `Event` enum of this pallet"])); + } + + // derive some traits because system event require Clone, FullCodec, Eq, PartialEq and Debug + event_item.attrs.push(syn::parse_quote!( + #[derive( + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::RuntimeDebugNoBound, + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + )] + )); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { + "never" + } else { + "always" + }; + + // skip requirement for type params to implement `TypeInfo`, and set docs capture + event_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#event_use_gen), capture_docs = #capture_docs)] + )); + + let deposit_event = if let Some(deposit_event) = &event.deposit_event { + let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); + let trait_use_gen = &def.trait_use_generics(event.attr_span); + let type_impl_gen = &def.type_impl_generics(event.attr_span); + let type_use_gen = &def.type_use_generics(event.attr_span); + let pallet_ident = &def.pallet_struct.pallet; + + let PalletEventDepositAttr { + fn_vis, fn_span, .. 
+ } = deposit_event; + + quote::quote_spanned!(*fn_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #fn_vis fn deposit_event(event: Event<#event_use_gen>) { + let event = < + ::RuntimeEvent as + From> + >::from(event); + + let event = < + ::RuntimeEvent as + Into<::RuntimeEvent> + >::into(event); + + <#frame_system::Pallet>::deposit_event(event) + } + } + ) + } else { + Default::default() + }; + + quote::quote_spanned!(event.attr_span => + #[doc(hidden)] + pub mod __substrate_event_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => {}; + } + + #[doc(hidden)] + pub use #macro_ident as is_event_part_defined; + } + + #deposit_event + + impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { + fn from(_: #event_ident<#event_use_gen>) {} + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/genesis_build.rs b/support/procedural-fork/src/pallet/expand/genesis_build.rs index 248e83469..c6089550d 100644 --- a/support/procedural-fork/src/pallet/expand/genesis_build.rs +++ b/support/procedural-fork/src/pallet/expand/genesis_build.rs @@ -20,30 +20,34 @@ use crate::pallet::Def; /// /// * implement the trait `sp_runtime::BuildStorage` pub fn expand_genesis_build(def: &mut Def) -> proc_macro2::TokenStream { - let genesis_config = if let Some(genesis_config) = &def.genesis_config { - genesis_config - } else { - return Default::default() - }; - let genesis_build = def.genesis_build.as_ref().expect("Checked by def parser"); + let genesis_config = if let Some(genesis_config) = &def.genesis_config { + genesis_config + } else { + return Default::default(); + }; + let genesis_build = def.genesis_build.as_ref().expect("Checked by def parser"); - let frame_support = &def.frame_support; - let type_impl_gen = &genesis_config.gen_kind.type_impl_gen(genesis_build.attr_span); - let gen_cfg_ident = &genesis_config.genesis_config; - let gen_cfg_use_gen = 
&genesis_config.gen_kind.type_use_gen(genesis_build.attr_span); + let frame_support = &def.frame_support; + let type_impl_gen = &genesis_config + .gen_kind + .type_impl_gen(genesis_build.attr_span); + let gen_cfg_ident = &genesis_config.genesis_config; + let gen_cfg_use_gen = &genesis_config + .gen_kind + .type_use_gen(genesis_build.attr_span); - let where_clause = &genesis_build.where_clause; + let where_clause = &genesis_build.where_clause; - quote::quote_spanned!(genesis_build.attr_span => - #[cfg(feature = "std")] - impl<#type_impl_gen> #frame_support::sp_runtime::BuildStorage for #gen_cfg_ident<#gen_cfg_use_gen> #where_clause - { - fn assimilate_storage(&self, storage: &mut #frame_support::sp_runtime::Storage) -> std::result::Result<(), std::string::String> { - #frame_support::__private::BasicExternalities::execute_with_storage(storage, || { - self.build(); - Ok(()) - }) - } - } - ) + quote::quote_spanned!(genesis_build.attr_span => + #[cfg(feature = "std")] + impl<#type_impl_gen> #frame_support::sp_runtime::BuildStorage for #gen_cfg_ident<#gen_cfg_use_gen> #where_clause + { + fn assimilate_storage(&self, storage: &mut #frame_support::sp_runtime::Storage) -> std::result::Result<(), std::string::String> { + #frame_support::__private::BasicExternalities::execute_with_storage(storage, || { + self.build(); + Ok(()) + }) + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/genesis_config.rs b/support/procedural-fork/src/pallet/expand/genesis_config.rs index 31d519ef2..e171e2468 100644 --- a/support/procedural-fork/src/pallet/expand/genesis_config.rs +++ b/support/procedural-fork/src/pallet/expand/genesis_config.rs @@ -23,125 +23,130 @@ use syn::{spanned::Spanned, Ident}; /// /// * add various derive trait on GenesisConfig struct. 
pub fn expand_genesis_config(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - - let (genesis_config, def_macro_ident, std_macro_ident) = - if let Some(genesis_config) = &def.genesis_config { - let def_macro_ident = Ident::new( - &format!("__is_genesis_config_defined_{}", count), - genesis_config.genesis_config.span(), - ); - - let std_macro_ident = Ident::new( - &format!("__is_std_macro_defined_for_genesis_{}", count), - genesis_config.genesis_config.span(), - ); - - (genesis_config, def_macro_ident, std_macro_ident) - } else { - let def_macro_ident = - Ident::new(&format!("__is_genesis_config_defined_{}", count), def.item.span()); - - let std_macro_ident = - Ident::new(&format!("__is_std_enabled_for_genesis_{}", count), def.item.span()); - - return quote::quote! { - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #def_macro_ident { - ($pallet_name:ident) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::genesis_config] defined, perhaps you should \ - remove `Config` from construct_runtime?", - )); - } - } - - #[macro_export] - #[doc(hidden)] - macro_rules! #std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => {}; - } - - #[doc(hidden)] - pub use #def_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #std_macro_ident as is_std_enabled_for_genesis; - } - } - }; - - let frame_support = &def.frame_support; - - let genesis_config_item = - &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_config.index]; - - let serde_crate = format!("{}::__private::serde", frame_support.to_token_stream()); - - match genesis_config_item { - syn::Item::Enum(syn::ItemEnum { attrs, .. }) | - syn::Item::Struct(syn::ItemStruct { attrs, .. }) | - syn::Item::Type(syn::ItemType { attrs, .. 
}) => { - if get_doc_literals(attrs).is_empty() { - attrs.push(syn::parse_quote!( - #[doc = r" + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let (genesis_config, def_macro_ident, std_macro_ident) = if let Some(genesis_config) = + &def.genesis_config + { + let def_macro_ident = Ident::new( + &format!("__is_genesis_config_defined_{}", count), + genesis_config.genesis_config.span(), + ); + + let std_macro_ident = Ident::new( + &format!("__is_std_macro_defined_for_genesis_{}", count), + genesis_config.genesis_config.span(), + ); + + (genesis_config, def_macro_ident, std_macro_ident) + } else { + let def_macro_ident = Ident::new( + &format!("__is_genesis_config_defined_{}", count), + def.item.span(), + ); + + let std_macro_ident = Ident::new( + &format!("__is_std_enabled_for_genesis_{}", count), + def.item.span(), + ); + + return quote::quote! { + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #def_macro_ident { + ($pallet_name:ident) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::genesis_config] defined, perhaps you should \ + remove `Config` from construct_runtime?", + )); + } + } + + #[macro_export] + #[doc(hidden)] + macro_rules! #std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => {}; + } + + #[doc(hidden)] + pub use #def_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #std_macro_ident as is_std_enabled_for_genesis; + } + }; + }; + + let frame_support = &def.frame_support; + + let genesis_config_item = + &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_config.index]; + + let serde_crate = format!("{}::__private::serde", frame_support.to_token_stream()); + + match genesis_config_item { + syn::Item::Enum(syn::ItemEnum { attrs, .. }) + | syn::Item::Struct(syn::ItemStruct { attrs, .. }) + | syn::Item::Type(syn::ItemType { attrs, .. 
}) => { + if get_doc_literals(attrs).is_empty() { + attrs.push(syn::parse_quote!( + #[doc = r" Can be used to configure the [genesis state](https://docs.substrate.io/build/genesis-configuration/) of this pallet. "] - )); - } - attrs.push(syn::parse_quote!( - #[derive(#frame_support::Serialize, #frame_support::Deserialize)] - )); - attrs.push(syn::parse_quote!( #[serde(rename_all = "camelCase")] )); - attrs.push(syn::parse_quote!( #[serde(deny_unknown_fields)] )); - attrs.push(syn::parse_quote!( #[serde(bound(serialize = ""))] )); - attrs.push(syn::parse_quote!( #[serde(bound(deserialize = ""))] )); - attrs.push(syn::parse_quote!( #[serde(crate = #serde_crate)] )); - }, - _ => unreachable!("Checked by genesis_config parser"), - } - - quote::quote! { - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #def_macro_ident { - ($pallet_name:ident) => {}; - } - - #[cfg(not(feature = "std"))] - #[macro_export] - #[doc(hidden)] - macro_rules! #std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have the std feature enabled, this will cause the `", - $pallet_path, - "::GenesisConfig` type to not implement serde traits." - )); - }; - } - - #[cfg(feature = "std")] - #[macro_export] - #[doc(hidden)] - macro_rules! 
#std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => {}; - } - - #[doc(hidden)] - pub use #def_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #std_macro_ident as is_std_enabled_for_genesis; - } - } + )); + } + attrs.push(syn::parse_quote!( + #[derive(#frame_support::Serialize, #frame_support::Deserialize)] + )); + attrs.push(syn::parse_quote!( #[serde(rename_all = "camelCase")] )); + attrs.push(syn::parse_quote!( #[serde(deny_unknown_fields)] )); + attrs.push(syn::parse_quote!( #[serde(bound(serialize = ""))] )); + attrs.push(syn::parse_quote!( #[serde(bound(deserialize = ""))] )); + attrs.push(syn::parse_quote!( #[serde(crate = #serde_crate)] )); + } + _ => unreachable!("Checked by genesis_config parser"), + } + + quote::quote! { + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #def_macro_ident { + ($pallet_name:ident) => {}; + } + + #[cfg(not(feature = "std"))] + #[macro_export] + #[doc(hidden)] + macro_rules! #std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have the std feature enabled, this will cause the `", + $pallet_path, + "::GenesisConfig` type to not implement serde traits." + )); + }; + } + + #[cfg(feature = "std")] + #[macro_export] + #[doc(hidden)] + macro_rules! 
#std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => {}; + } + + #[doc(hidden)] + pub use #def_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #std_macro_ident as is_std_enabled_for_genesis; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/hooks.rs b/support/procedural-fork/src/pallet/expand/hooks.rs index 3623b5952..6967f4c08 100644 --- a/support/procedural-fork/src/pallet/expand/hooks.rs +++ b/support/procedural-fork/src/pallet/expand/hooks.rs @@ -19,322 +19,326 @@ use crate::pallet::Def; /// * implement the individual traits using the Hooks trait pub fn expand_hooks(def: &mut Def) -> proc_macro2::TokenStream { - let (where_clause, span, has_runtime_upgrade) = match def.hooks.as_ref() { - Some(hooks) => { - let where_clause = hooks.where_clause.clone(); - let span = hooks.attr_span; - let has_runtime_upgrade = hooks.has_runtime_upgrade; - (where_clause, span, has_runtime_upgrade) - }, - None => (def.config.where_clause.clone(), def.pallet_struct.attr_span, false), - }; + let (where_clause, span, has_runtime_upgrade) = match def.hooks.as_ref() { + Some(hooks) => { + let where_clause = hooks.where_clause.clone(); + let span = hooks.attr_span; + let has_runtime_upgrade = hooks.has_runtime_upgrade; + (where_clause, span, has_runtime_upgrade) + } + None => ( + def.config.where_clause.clone(), + def.pallet_struct.attr_span, + false, + ), + }; - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(span); - let type_use_gen = &def.type_use_generics(span); - let pallet_ident = &def.pallet_struct.pallet; - let frame_system = &def.frame_system; - let pallet_name = quote::quote! 
{ - < - ::PalletInfo - as - #frame_support::traits::PalletInfo - >::name::().unwrap_or("") - }; + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(span); + let type_use_gen = &def.type_use_generics(span); + let pallet_ident = &def.pallet_struct.pallet; + let frame_system = &def.frame_system; + let pallet_name = quote::quote! { + < + ::PalletInfo + as + #frame_support::traits::PalletInfo + >::name::().unwrap_or("") + }; - let initialize_on_chain_storage_version = if let Some(in_code_version) = - &def.pallet_struct.storage_version - { - quote::quote! { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🐥 New pallet {:?} detected in the runtime. Initializing the on-chain storage version to match the storage version defined in the pallet: {:?}", - #pallet_name, - #in_code_version - ); - #in_code_version.put::(); - } - } else { - quote::quote! { - let default_version = #frame_support::traits::StorageVersion::new(0); - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🐥 New pallet {:?} detected in the runtime. The pallet has no defined storage version, so the on-chain version is being initialized to {:?}.", - #pallet_name, - default_version - ); - default_version.put::(); - } - }; + let initialize_on_chain_storage_version = if let Some(in_code_version) = + &def.pallet_struct.storage_version + { + quote::quote! { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🐥 New pallet {:?} detected in the runtime. Initializing the on-chain storage version to match the storage version defined in the pallet: {:?}", + #pallet_name, + #in_code_version + ); + #in_code_version.put::(); + } + } else { + quote::quote! { + let default_version = #frame_support::traits::StorageVersion::new(0); + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🐥 New pallet {:?} detected in the runtime. 
The pallet has no defined storage version, so the on-chain version is being initialized to {:?}.", + #pallet_name, + default_version + ); + default_version.put::(); + } + }; - let log_runtime_upgrade = if has_runtime_upgrade { - // a migration is defined here. - quote::quote! { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "⚠️ {} declares internal migrations (which *might* execute). \ - On-chain `{:?}` vs in-code storage version `{:?}`", - #pallet_name, - ::on_chain_storage_version(), - ::in_code_storage_version(), - ); - } - } else { - // default. - quote::quote! { - #frame_support::__private::log::debug!( - target: #frame_support::LOG_TARGET, - "✅ no migration for {}", - #pallet_name, - ); - } - }; + let log_runtime_upgrade = if has_runtime_upgrade { + // a migration is defined here. + quote::quote! { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "⚠️ {} declares internal migrations (which *might* execute). \ + On-chain `{:?}` vs in-code storage version `{:?}`", + #pallet_name, + ::on_chain_storage_version(), + ::in_code_storage_version(), + ); + } + } else { + // default. + quote::quote! { + #frame_support::__private::log::debug!( + target: #frame_support::LOG_TARGET, + "✅ no migration for {}", + #pallet_name, + ); + } + }; - let hooks_impl = if def.hooks.is_none() { - let frame_system = &def.frame_system; - quote::quote! { - impl<#type_impl_gen> - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause {} - } - } else { - proc_macro2::TokenStream::new() - }; + let hooks_impl = if def.hooks.is_none() { + let frame_system = &def.frame_system; + quote::quote! 
{ + impl<#type_impl_gen> + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause {} + } + } else { + proc_macro2::TokenStream::new() + }; - // If a storage version is set, we should ensure that the storage version on chain matches the - // in-code storage version. This assumes that `Executive` is running custom migrations before - // the pallets are called. - let post_storage_version_check = if def.pallet_struct.storage_version.is_some() { - quote::quote! { - let on_chain_version = ::on_chain_storage_version(); - let in_code_version = ::in_code_storage_version(); + // If a storage version is set, we should ensure that the storage version on chain matches the + // in-code storage version. This assumes that `Executive` is running custom migrations before + // the pallets are called. + let post_storage_version_check = if def.pallet_struct.storage_version.is_some() { + quote::quote! { + let on_chain_version = ::on_chain_storage_version(); + let in_code_version = ::in_code_storage_version(); - if on_chain_version != in_code_version { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "{}: On chain storage version {:?} doesn't match in-code storage version {:?}.", - #pallet_name, - on_chain_version, - in_code_version, - ); + if on_chain_version != in_code_version { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "{}: On chain storage version {:?} doesn't match in-code storage version {:?}.", + #pallet_name, + on_chain_version, + in_code_version, + ); - return Err("On chain and in-code storage version do not match. Missing runtime upgrade?".into()); - } - } - } else { - quote::quote! { - let on_chain_version = ::on_chain_storage_version(); + return Err("On chain and in-code storage version do not match. Missing runtime upgrade?".into()); + } + } + } else { + quote::quote! 
{ + let on_chain_version = ::on_chain_storage_version(); - if on_chain_version != #frame_support::traits::StorageVersion::new(0) { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "{}: On chain storage version {:?} is set to non zero, \ - while the pallet is missing the `#[pallet::storage_version(VERSION)]` attribute.", - #pallet_name, - on_chain_version, - ); + if on_chain_version != #frame_support::traits::StorageVersion::new(0) { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "{}: On chain storage version {:?} is set to non zero, \ + while the pallet is missing the `#[pallet::storage_version(VERSION)]` attribute.", + #pallet_name, + on_chain_version, + ); - return Err("On chain storage version set, while the pallet doesn't \ - have the `#[pallet::storage_version(VERSION)]` attribute.".into()); - } - } - }; + return Err("On chain storage version set, while the pallet doesn't \ + have the `#[pallet::storage_version(VERSION)]` attribute.".into()); + } + } + }; - quote::quote_spanned!(span => - #hooks_impl + quote::quote_spanned!(span => + #hooks_impl - impl<#type_impl_gen> - #frame_support::traits::OnFinalize<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_finalize(n: #frame_system::pallet_prelude::BlockNumberFor::) { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_finalize") - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_finalize(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnFinalize<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_finalize(n: #frame_system::pallet_prelude::BlockNumberFor::) { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_finalize") + ); + < + Self as 
#frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_finalize(n) + } + } - impl<#type_impl_gen> - #frame_support::traits::OnIdle<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_idle( - n: #frame_system::pallet_prelude::BlockNumberFor::, - remaining_weight: #frame_support::weights::Weight - ) -> #frame_support::weights::Weight { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_idle(n, remaining_weight) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnIdle<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_idle( + n: #frame_system::pallet_prelude::BlockNumberFor::, + remaining_weight: #frame_support::weights::Weight + ) -> #frame_support::weights::Weight { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_idle(n, remaining_weight) + } + } - impl<#type_impl_gen> - #frame_support::traits::OnPoll<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_poll( - n: #frame_system::pallet_prelude::BlockNumberFor::, - weight: &mut #frame_support::weights::WeightMeter - ) { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_poll(n, weight); - } - } + impl<#type_impl_gen> + #frame_support::traits::OnPoll<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_poll( + n: #frame_system::pallet_prelude::BlockNumberFor::, + weight: &mut #frame_support::weights::WeightMeter + ) { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_poll(n, weight); + } + } - impl<#type_impl_gen> - #frame_support::traits::OnInitialize<#frame_system::pallet_prelude::BlockNumberFor::> - for 
#pallet_ident<#type_use_gen> #where_clause - { - fn on_initialize( - n: #frame_system::pallet_prelude::BlockNumberFor:: - ) -> #frame_support::weights::Weight { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_initialize") - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_initialize(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnInitialize<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_initialize( + n: #frame_system::pallet_prelude::BlockNumberFor:: + ) -> #frame_support::weights::Weight { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_initialize") + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_initialize(n) + } + } - impl<#type_impl_gen> - #frame_support::traits::BeforeAllRuntimeMigrations - for #pallet_ident<#type_use_gen> #where_clause - { - fn before_all_runtime_migrations() -> #frame_support::weights::Weight { - use #frame_support::traits::{Get, PalletInfoAccess}; - use #frame_support::__private::hashing::twox_128; - use #frame_support::storage::unhashed::contains_prefixed_key; - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("before_all") - ); + impl<#type_impl_gen> + #frame_support::traits::BeforeAllRuntimeMigrations + for #pallet_ident<#type_use_gen> #where_clause + { + fn before_all_runtime_migrations() -> #frame_support::weights::Weight { + use #frame_support::traits::{Get, PalletInfoAccess}; + use #frame_support::__private::hashing::twox_128; + use #frame_support::storage::unhashed::contains_prefixed_key; + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("before_all") + ); - // Check if the pallet has any keys set, 
including the storage version. If there are - // no keys set, the pallet was just added to the runtime and needs to have its - // version initialized. - let pallet_hashed_prefix = ::name_hash(); - let exists = contains_prefixed_key(&pallet_hashed_prefix); - if !exists { - #initialize_on_chain_storage_version - ::DbWeight::get().reads_writes(1, 1) - } else { - ::DbWeight::get().reads(1) - } - } - } + // Check if the pallet has any keys set, including the storage version. If there are + // no keys set, the pallet was just added to the runtime and needs to have its + // version initialized. + let pallet_hashed_prefix = ::name_hash(); + let exists = contains_prefixed_key(&pallet_hashed_prefix); + if !exists { + #initialize_on_chain_storage_version + ::DbWeight::get().reads_writes(1, 1) + } else { + ::DbWeight::get().reads(1) + } + } + } - impl<#type_impl_gen> - #frame_support::traits::OnRuntimeUpgrade - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_runtime_upgrade() -> #frame_support::weights::Weight { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_runtime_update") - ); + impl<#type_impl_gen> + #frame_support::traits::OnRuntimeUpgrade + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_runtime_upgrade() -> #frame_support::weights::Weight { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_runtime_update") + ); - // log info about the upgrade. - #log_runtime_upgrade + // log info about the upgrade. 
+ #log_runtime_upgrade - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_runtime_upgrade() - } + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_runtime_upgrade() + } - #[cfg(feature = "try-runtime")] - fn pre_upgrade() -> Result<#frame_support::__private::sp_std::vec::Vec, #frame_support::sp_runtime::TryRuntimeError> { - < - Self - as - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - >::pre_upgrade() - } + #[cfg(feature = "try-runtime")] + fn pre_upgrade() -> Result<#frame_support::__private::sp_std::vec::Vec, #frame_support::sp_runtime::TryRuntimeError> { + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::pre_upgrade() + } - #[cfg(feature = "try-runtime")] - fn post_upgrade(state: #frame_support::__private::sp_std::vec::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { - #post_storage_version_check + #[cfg(feature = "try-runtime")] + fn post_upgrade(state: #frame_support::__private::sp_std::vec::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #post_storage_version_check - < - Self - as - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - >::post_upgrade(state) - } - } + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::post_upgrade(state) + } + } - impl<#type_impl_gen> - #frame_support::traits::OffchainWorker<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn offchain_worker(n: #frame_system::pallet_prelude::BlockNumberFor::) { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::offchain_worker(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OffchainWorker<#frame_system::pallet_prelude::BlockNumberFor::> + for 
#pallet_ident<#type_use_gen> #where_clause + { + fn offchain_worker(n: #frame_system::pallet_prelude::BlockNumberFor::) { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::offchain_worker(n) + } + } - // Integrity tests are only required for when `std` is enabled. - #frame_support::std_enabled! { - impl<#type_impl_gen> - #frame_support::traits::IntegrityTest - for #pallet_ident<#type_use_gen> #where_clause - { - fn integrity_test() { - #frame_support::__private::sp_io::TestExternalities::default().execute_with(|| { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::integrity_test() - }); - } - } - } + // Integrity tests are only required for when `std` is enabled. + #frame_support::std_enabled! { + impl<#type_impl_gen> + #frame_support::traits::IntegrityTest + for #pallet_ident<#type_use_gen> #where_clause + { + fn integrity_test() { + #frame_support::__private::sp_io::TestExternalities::default().execute_with(|| { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::integrity_test() + }); + } + } + } - #[cfg(feature = "try-runtime")] - impl<#type_impl_gen> - #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn try_state( - n: #frame_system::pallet_prelude::BlockNumberFor::, - _s: #frame_support::traits::TryStateSelect - ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🩺 Running {:?} try-state checks", - #pallet_name, - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::try_state(n).map_err(|err| { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "❌ {:?} try_state checks failed: {:?}", - #pallet_name, - err - ); + #[cfg(feature = 
"try-runtime")] + impl<#type_impl_gen> + #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn try_state( + n: #frame_system::pallet_prelude::BlockNumberFor::, + _s: #frame_support::traits::TryStateSelect + ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🩺 Running {:?} try-state checks", + #pallet_name, + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::try_state(n).map_err(|err| { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "❌ {:?} try_state checks failed: {:?}", + #pallet_name, + err + ); - err - }) - } - } - ) + err + }) + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/inherent.rs b/support/procedural-fork/src/pallet/expand/inherent.rs index 182d79f5b..0a80d672a 100644 --- a/support/procedural-fork/src/pallet/expand/inherent.rs +++ b/support/procedural-fork/src/pallet/expand/inherent.rs @@ -21,35 +21,38 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_inherents(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = Ident::new(&format!("__is_inherent_part_defined_{}", count), def.item.span()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new( + &format!("__is_inherent_part_defined_{}", count), + def.item.span(), + ); - let maybe_compile_error = if def.inherent.is_none() { - quote! { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::inherent] defined, perhaps you should \ - remove `Inherent` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.inherent.is_none() { + quote! 
{ + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::inherent] defined, perhaps you should \ + remove `Inherent` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_inherent_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_inherent_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_inherent_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_inherent_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/instances.rs b/support/procedural-fork/src/pallet/expand/instances.rs index b6dfa7e6d..12423409c 100644 --- a/support/procedural-fork/src/pallet/expand/instances.rs +++ b/support/procedural-fork/src/pallet/expand/instances.rs @@ -22,22 +22,22 @@ use proc_macro2::Span; /// * Provide inherent instance to be used by construct_runtime /// * Provide Instance1 ..= Instance16 for instantiable pallet pub fn expand_instances(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let inherent_ident = syn::Ident::new(crate::INHERENT_INSTANCE_NAME, Span::call_site()); - let instances = if def.config.has_instance { - (1..=NUMBER_OF_INSTANCE) - .map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())) - .collect() - } else { - vec![] - }; + let frame_support = &def.frame_support; + let inherent_ident = syn::Ident::new(crate::INHERENT_INSTANCE_NAME, Span::call_site()); + let instances = if def.config.has_instance { + (1..=NUMBER_OF_INSTANCE) + .map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())) + .collect() + } else { + vec![] + }; - quote::quote!( - /// Hidden instance generated to be internally used when 
module is used without - /// instance. - #[doc(hidden)] - pub type #inherent_ident = (); + quote::quote!( + /// Hidden instance generated to be internally used when module is used without + /// instance. + #[doc(hidden)] + pub type #inherent_ident = (); - #( pub use #frame_support::instances::#instances; )* - ) + #( pub use #frame_support::instances::#instances; )* + ) } diff --git a/support/procedural-fork/src/pallet/expand/mod.rs b/support/procedural-fork/src/pallet/expand/mod.rs index 067839c28..ff4423f85 100644 --- a/support/procedural-fork/src/pallet/expand/mod.rs +++ b/support/procedural-fork/src/pallet/expand/mod.rs @@ -42,12 +42,12 @@ use quote::ToTokens; /// Merge where clause together, `where` token span is taken from the first not none one. pub fn merge_where_clauses(clauses: &[&Option]) -> Option { - let mut clauses = clauses.iter().filter_map(|f| f.as_ref()); - let mut res = clauses.next()?.clone(); - for other in clauses { - res.predicates.extend(other.predicates.iter().cloned()) - } - Some(res) + let mut clauses = clauses.iter().filter_map(|f| f.as_ref()); + let mut res = clauses.next()?.clone(); + for other in clauses { + res.predicates.extend(other.predicates.iter().cloned()) + } + Some(res) } /// Expand definition, in particular: @@ -55,32 +55,32 @@ pub fn merge_where_clauses(clauses: &[&Option]) -> Option proc_macro2::TokenStream { - // Remove the `pallet_doc` attribute first. 
- let metadata_docs = documentation::expand_documentation(&mut def); - let constants = constants::expand_constants(&mut def); - let pallet_struct = pallet_struct::expand_pallet_struct(&mut def); - let config = config::expand_config(&mut def); - let call = call::expand_call(&mut def); - let tasks = tasks::expand_tasks(&mut def); - let error = error::expand_error(&mut def); - let event = event::expand_event(&mut def); - let storages = storage::expand_storages(&mut def); - let inherents = inherent::expand_inherents(&mut def); - let instances = instances::expand_instances(&mut def); - let hooks = hooks::expand_hooks(&mut def); - let genesis_build = genesis_build::expand_genesis_build(&mut def); - let genesis_config = genesis_config::expand_genesis_config(&mut def); - let type_values = type_value::expand_type_values(&mut def); - let origins = origin::expand_origins(&mut def); - let validate_unsigned = validate_unsigned::expand_validate_unsigned(&mut def); - let tt_default_parts = tt_default_parts::expand_tt_default_parts(&mut def); - let doc_only = doc_only::expand_doc_only(&mut def); - let composites = composite::expand_composites(&mut def); + // Remove the `pallet_doc` attribute first. 
+ let metadata_docs = documentation::expand_documentation(&mut def); + let constants = constants::expand_constants(&mut def); + let pallet_struct = pallet_struct::expand_pallet_struct(&mut def); + let config = config::expand_config(&mut def); + let call = call::expand_call(&mut def); + let tasks = tasks::expand_tasks(&mut def); + let error = error::expand_error(&mut def); + let event = event::expand_event(&mut def); + let storages = storage::expand_storages(&mut def); + let inherents = inherent::expand_inherents(&mut def); + let instances = instances::expand_instances(&mut def); + let hooks = hooks::expand_hooks(&mut def); + let genesis_build = genesis_build::expand_genesis_build(&mut def); + let genesis_config = genesis_config::expand_genesis_config(&mut def); + let type_values = type_value::expand_type_values(&mut def); + let origins = origin::expand_origins(&mut def); + let validate_unsigned = validate_unsigned::expand_validate_unsigned(&mut def); + let tt_default_parts = tt_default_parts::expand_tt_default_parts(&mut def); + let doc_only = doc_only::expand_doc_only(&mut def); + let composites = composite::expand_composites(&mut def); - def.item.attrs.insert( - 0, - syn::parse_quote!( - #[doc = r"The `pallet` module in each FRAME pallet hosts the most important items needed + def.item.attrs.insert( + 0, + syn::parse_quote!( + #[doc = r"The `pallet` module in each FRAME pallet hosts the most important items needed to construct this pallet. The main components of this pallet are: @@ -93,38 +93,38 @@ storage item. Otherwise, all storage items are listed among [*Type Definitions*] - [`Config`], which contains the configuration trait of this pallet. - [`Event`] and [`Error`], which are listed among the [*Enums*](#enums). 
"] - ), - ); + ), + ); - let new_items = quote::quote!( - #metadata_docs - #constants - #pallet_struct - #config - #call - #tasks - #error - #event - #storages - #inherents - #instances - #hooks - #genesis_build - #genesis_config - #type_values - #origins - #validate_unsigned - #tt_default_parts - #doc_only - #composites - ); + let new_items = quote::quote!( + #metadata_docs + #constants + #pallet_struct + #config + #call + #tasks + #error + #event + #storages + #inherents + #instances + #hooks + #genesis_build + #genesis_config + #type_values + #origins + #validate_unsigned + #tt_default_parts + #doc_only + #composites + ); - def.item - .content - .as_mut() - .expect("This is checked by parsing") - .1 - .push(syn::Item::Verbatim(new_items)); + def.item + .content + .as_mut() + .expect("This is checked by parsing") + .1 + .push(syn::Item::Verbatim(new_items)); - def.item.into_token_stream() + def.item.into_token_stream() } diff --git a/support/procedural-fork/src/pallet/expand/origin.rs b/support/procedural-fork/src/pallet/expand/origin.rs index 55865b424..167445ad6 100644 --- a/support/procedural-fork/src/pallet/expand/origin.rs +++ b/support/procedural-fork/src/pallet/expand/origin.rs @@ -21,35 +21,38 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_origins(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = Ident::new(&format!("__is_origin_part_defined_{}", count), def.item.span()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new( + &format!("__is_origin_part_defined_{}", count), + def.item.span(), + ); - let maybe_compile_error = if def.origin.is_none() { - quote! 
{ - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::origin] defined, perhaps you should \ - remove `Origin` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.origin.is_none() { + quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::origin] defined, perhaps you should \ + remove `Origin` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_origin_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_origin_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_origin_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_origin_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/pallet_struct.rs b/support/procedural-fork/src/pallet/expand/pallet_struct.rs index 7cdf6bde9..c5def65ed 100644 --- a/support/procedural-fork/src/pallet/expand/pallet_struct.rs +++ b/support/procedural-fork/src/pallet/expand/pallet_struct.rs @@ -28,263 +28,275 @@ use frame_support_procedural_tools::get_doc_literals; /// * implementation of `PalletInfoAccess` information /// * implementation of `StorageInfoTrait` on Pallet pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); - let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); - let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); - let pallet_ident = &def.pallet_struct.pallet; - let config_where_clause = &def.config.where_clause; 
- - let mut storages_where_clauses = vec![&def.config.where_clause]; - storages_where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); - let storages_where_clauses = merge_where_clauses(&storages_where_clauses); - - let pallet_item = { - let pallet_module_items = &mut def.item.content.as_mut().expect("Checked by def").1; - let item = &mut pallet_module_items[def.pallet_struct.index]; - if let syn::Item::Struct(item) = item { - item - } else { - unreachable!("Checked by pallet struct parser") - } - }; - - // If the first field type is `_` then we replace with `PhantomData` - if let Some(field) = pallet_item.fields.iter_mut().next() { - if field.ty == syn::parse_quote!(_) { - field.ty = syn::parse_quote!( - #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)> - ); - } - } - - if get_doc_literals(&pallet_item.attrs).is_empty() { - pallet_item.attrs.push(syn::parse_quote!( - #[doc = r" + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); + let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); + let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); + let pallet_ident = &def.pallet_struct.pallet; + let config_where_clause = &def.config.where_clause; + + let mut storages_where_clauses = vec![&def.config.where_clause]; + storages_where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); + let storages_where_clauses = merge_where_clauses(&storages_where_clauses); + + let pallet_item = { + let pallet_module_items = &mut def.item.content.as_mut().expect("Checked by def").1; + let item = &mut pallet_module_items[def.pallet_struct.index]; + if let syn::Item::Struct(item) = item { + item + } else { + unreachable!("Checked by pallet struct parser") + } + }; + + // If the first field type is `_` then we replace with `PhantomData` + if let Some(field) = 
pallet_item.fields.iter_mut().next() { + if field.ty == syn::parse_quote!(_) { + field.ty = syn::parse_quote!( + #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)> + ); + } + } + + if get_doc_literals(&pallet_item.attrs).is_empty() { + pallet_item.attrs.push(syn::parse_quote!( + #[doc = r" The `Pallet` struct, the main type that implements traits and standalone functions within the pallet. "] - )); - } - - pallet_item.attrs.push(syn::parse_quote!( - #[derive( - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::RuntimeDebugNoBound, - )] - )); - - let pallet_error_metadata = if let Some(error_def) = &def.error { - let error_ident = &error_def.error; - quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { - Some(#frame_support::__private::metadata_ir::PalletErrorMetadataIR { - ty: #frame_support::__private::scale_info::meta_type::<#error_ident<#type_use_gen>>() - }) - } - } - ) - } else { - quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { - None - } - } - ) - }; - - let storage_info_span = - def.pallet_struct.without_storage_info.unwrap_or(def.pallet_struct.attr_span); - - let storage_names = &def.storages.iter().map(|storage| &storage.ident).collect::>(); - let storage_cfg_attrs = - &def.storages.iter().map(|storage| &storage.cfg_attrs).collect::>(); - - // Depending on the flag `without_storage_info` and the storage attribute `unbounded`, we use - // partial or full storage info from storage. 
- let storage_info_traits = &def - .storages - .iter() - .map(|storage| { - if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { - quote::quote_spanned!(storage_info_span => PartialStorageInfoTrait) - } else { - quote::quote_spanned!(storage_info_span => StorageInfoTrait) - } - }) - .collect::>(); - - let storage_info_methods = &def - .storages - .iter() - .map(|storage| { - if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { - quote::quote_spanned!(storage_info_span => partial_storage_info) - } else { - quote::quote_spanned!(storage_info_span => storage_info) - } - }) - .collect::>(); - - let storage_info = quote::quote_spanned!(storage_info_span => - impl<#type_impl_gen> #frame_support::traits::StorageInfoTrait - for #pallet_ident<#type_use_gen> - #storages_where_clauses - { - fn storage_info() - -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::StorageInfo> - { - #[allow(unused_mut)] - let mut res = #frame_support::__private::sp_std::vec![]; - - #( - #(#storage_cfg_attrs)* - { - let mut storage_info = < - #storage_names<#type_use_gen> - as #frame_support::traits::#storage_info_traits - >::#storage_info_methods(); - res.append(&mut storage_info); - } - )* - - res - } - } - ); - - let (storage_version, in_code_storage_version_ty) = - if let Some(v) = def.pallet_struct.storage_version.as_ref() { - (quote::quote! { #v }, quote::quote! { #frame_support::traits::StorageVersion }) - } else { - ( - quote::quote! { core::default::Default::default() }, - quote::quote! 
{ #frame_support::traits::NoStorageVersionSet }, - ) - }; - - let whitelisted_storage_idents: Vec = def - .storages - .iter() - .filter_map(|s| s.whitelisted.then_some(s.ident.clone())) - .collect(); - - let whitelisted_storage_keys_impl = quote::quote![ - use #frame_support::traits::{StorageInfoTrait, TrackedStorageKey, WhitelistedStorageKeys}; - impl<#type_impl_gen> WhitelistedStorageKeys for #pallet_ident<#type_use_gen> #storages_where_clauses { - fn whitelisted_storage_keys() -> #frame_support::__private::sp_std::vec::Vec { - use #frame_support::__private::sp_std::vec; - vec![#( - TrackedStorageKey::new(#whitelisted_storage_idents::<#type_use_gen>::hashed_key().to_vec()) - ),*] - } - } - ]; - - quote::quote_spanned!(def.pallet_struct.attr_span => - #pallet_error_metadata - - /// Type alias to `Pallet`, to be used by `construct_runtime`. - /// - /// Generated by `pallet` attribute macro. - #[deprecated(note = "use `Pallet` instead")] - #[allow(dead_code)] - pub type Module<#type_decl_gen> = #pallet_ident<#type_use_gen>; - - // Implement `GetStorageVersion` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::GetStorageVersion - for #pallet_ident<#type_use_gen> - #config_where_clause - { - type InCodeStorageVersion = #in_code_storage_version_ty; - - fn in_code_storage_version() -> Self::InCodeStorageVersion { - #storage_version - } - - fn on_chain_storage_version() -> #frame_support::traits::StorageVersion { - #frame_support::traits::StorageVersion::get::() - } - } - - // Implement `OnGenesis` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::OnGenesis - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn on_genesis() { - let storage_version: #frame_support::traits::StorageVersion = #storage_version; - storage_version.put::(); - } - } - - // Implement `PalletInfoAccess` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::PalletInfoAccess - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn index() -> usize { - < 
- ::PalletInfo as #frame_support::traits::PalletInfo - >::index::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn name() -> &'static str { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::name::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn name_hash() -> [u8; 16] { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::name_hash::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn module_name() -> &'static str { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::module_name::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn crate_version() -> #frame_support::traits::CrateVersion { - #frame_support::crate_to_crate_version!() - } - } - - impl<#type_impl_gen> #frame_support::traits::PalletsInfoAccess - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn count() -> usize { 1 } - fn infos() -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::PalletInfoData> { - use #frame_support::traits::PalletInfoAccess; - let item = #frame_support::traits::PalletInfoData { - index: Self::index(), - name: Self::name(), - module_name: Self::module_name(), - crate_version: Self::crate_version(), - }; - #frame_support::__private::sp_std::vec![item] - } - } - - #storage_info - #whitelisted_storage_keys_impl - ) + )); + } + + pallet_item.attrs.push(syn::parse_quote!( + #[derive( + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::RuntimeDebugNoBound, + )] + )); + + let pallet_error_metadata = if let Some(error_def) = &def.error { + let error_ident = &error_def.error; + quote::quote_spanned!(def.pallet_struct.attr_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> 
#config_where_clause { + #[doc(hidden)] + pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { + Some(#frame_support::__private::metadata_ir::PalletErrorMetadataIR { + ty: #frame_support::__private::scale_info::meta_type::<#error_ident<#type_use_gen>>() + }) + } + } + ) + } else { + quote::quote_spanned!(def.pallet_struct.attr_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { + None + } + } + ) + }; + + let storage_info_span = def + .pallet_struct + .without_storage_info + .unwrap_or(def.pallet_struct.attr_span); + + let storage_names = &def + .storages + .iter() + .map(|storage| &storage.ident) + .collect::>(); + let storage_cfg_attrs = &def + .storages + .iter() + .map(|storage| &storage.cfg_attrs) + .collect::>(); + + // Depending on the flag `without_storage_info` and the storage attribute `unbounded`, we use + // partial or full storage info from storage. 
+ let storage_info_traits = &def + .storages + .iter() + .map(|storage| { + if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { + quote::quote_spanned!(storage_info_span => PartialStorageInfoTrait) + } else { + quote::quote_spanned!(storage_info_span => StorageInfoTrait) + } + }) + .collect::>(); + + let storage_info_methods = &def + .storages + .iter() + .map(|storage| { + if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { + quote::quote_spanned!(storage_info_span => partial_storage_info) + } else { + quote::quote_spanned!(storage_info_span => storage_info) + } + }) + .collect::>(); + + let storage_info = quote::quote_spanned!(storage_info_span => + impl<#type_impl_gen> #frame_support::traits::StorageInfoTrait + for #pallet_ident<#type_use_gen> + #storages_where_clauses + { + fn storage_info() + -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::StorageInfo> + { + #[allow(unused_mut)] + let mut res = #frame_support::__private::sp_std::vec![]; + + #( + #(#storage_cfg_attrs)* + { + let mut storage_info = < + #storage_names<#type_use_gen> + as #frame_support::traits::#storage_info_traits + >::#storage_info_methods(); + res.append(&mut storage_info); + } + )* + + res + } + } + ); + + let (storage_version, in_code_storage_version_ty) = + if let Some(v) = def.pallet_struct.storage_version.as_ref() { + ( + quote::quote! { #v }, + quote::quote! { #frame_support::traits::StorageVersion }, + ) + } else { + ( + quote::quote! { core::default::Default::default() }, + quote::quote! 
{ #frame_support::traits::NoStorageVersionSet }, + ) + }; + + let whitelisted_storage_idents: Vec = def + .storages + .iter() + .filter_map(|s| s.whitelisted.then_some(s.ident.clone())) + .collect(); + + let whitelisted_storage_keys_impl = quote::quote![ + use #frame_support::traits::{StorageInfoTrait, TrackedStorageKey, WhitelistedStorageKeys}; + impl<#type_impl_gen> WhitelistedStorageKeys for #pallet_ident<#type_use_gen> #storages_where_clauses { + fn whitelisted_storage_keys() -> #frame_support::__private::sp_std::vec::Vec { + use #frame_support::__private::sp_std::vec; + vec![#( + TrackedStorageKey::new(#whitelisted_storage_idents::<#type_use_gen>::hashed_key().to_vec()) + ),*] + } + } + ]; + + quote::quote_spanned!(def.pallet_struct.attr_span => + #pallet_error_metadata + + /// Type alias to `Pallet`, to be used by `construct_runtime`. + /// + /// Generated by `pallet` attribute macro. + #[deprecated(note = "use `Pallet` instead")] + #[allow(dead_code)] + pub type Module<#type_decl_gen> = #pallet_ident<#type_use_gen>; + + // Implement `GetStorageVersion` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::GetStorageVersion + for #pallet_ident<#type_use_gen> + #config_where_clause + { + type InCodeStorageVersion = #in_code_storage_version_ty; + + fn in_code_storage_version() -> Self::InCodeStorageVersion { + #storage_version + } + + fn on_chain_storage_version() -> #frame_support::traits::StorageVersion { + #frame_support::traits::StorageVersion::get::() + } + } + + // Implement `OnGenesis` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::OnGenesis + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn on_genesis() { + let storage_version: #frame_support::traits::StorageVersion = #storage_version; + storage_version.put::(); + } + } + + // Implement `PalletInfoAccess` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::PalletInfoAccess + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn index() -> usize { + < 
+ ::PalletInfo as #frame_support::traits::PalletInfo + >::index::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn name() -> &'static str { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::name::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn name_hash() -> [u8; 16] { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::name_hash::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn module_name() -> &'static str { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::module_name::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn crate_version() -> #frame_support::traits::CrateVersion { + #frame_support::crate_to_crate_version!() + } + } + + impl<#type_impl_gen> #frame_support::traits::PalletsInfoAccess + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn count() -> usize { 1 } + fn infos() -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::PalletInfoData> { + use #frame_support::traits::PalletInfoAccess; + let item = #frame_support::traits::PalletInfoData { + index: Self::index(), + name: Self::name(), + module_name: Self::module_name(), + crate_version: Self::crate_version(), + }; + #frame_support::__private::sp_std::vec![item] + } + } + + #storage_info + #whitelisted_storage_keys_impl + ) } diff --git a/support/procedural-fork/src/pallet/expand/storage.rs b/support/procedural-fork/src/pallet/expand/storage.rs index 937b068cf..b77e9846b 100644 --- a/support/procedural-fork/src/pallet/expand/storage.rs +++ b/support/procedural-fork/src/pallet/expand/storage.rs @@ -16,14 +16,14 @@ // limitations under the License. 
use crate::{ - counter_prefix, - pallet::{ - parse::{ - helper::two128_str, - storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, - }, - Def, - }, + counter_prefix, + pallet::{ + parse::{ + helper::two128_str, + storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, + }, + Def, + }, }; use quote::ToTokens; use std::{collections::HashMap, ops::IndexMut}; @@ -32,73 +32,76 @@ use syn::spanned::Spanned; /// Generate the prefix_ident related to the storage. /// prefix_ident is used for the prefix struct to be given to storage as first generic param. fn prefix_ident(storage: &StorageDef) -> syn::Ident { - let storage_ident = &storage.ident; - syn::Ident::new(&format!("_GeneratedPrefixForStorage{}", storage_ident), storage_ident.span()) + let storage_ident = &storage.ident; + syn::Ident::new( + &format!("_GeneratedPrefixForStorage{}", storage_ident), + storage_ident.span(), + ) } /// Generate the counter_prefix_ident related to the storage. /// counter_prefix_ident is used for the prefix struct to be given to counted storage map. fn counter_prefix_ident(storage_ident: &syn::Ident) -> syn::Ident { - syn::Ident::new( - &format!("_GeneratedCounterPrefixForStorage{}", storage_ident), - storage_ident.span(), - ) + syn::Ident::new( + &format!("_GeneratedCounterPrefixForStorage{}", storage_ident), + storage_ident.span(), + ) } /// Check for duplicated storage prefixes. This step is necessary since users can specify an /// alternative storage prefix using the #[pallet::storage_prefix] syntax, and we need to ensure /// that the prefix specified by the user is not a duplicate of an existing one. 
fn check_prefix_duplicates( - storage_def: &StorageDef, - // A hashmap of all already used prefix and their associated error if duplication - used_prefixes: &mut HashMap, + storage_def: &StorageDef, + // A hashmap of all already used prefix and their associated error if duplication + used_prefixes: &mut HashMap, ) -> syn::Result<()> { - let prefix = storage_def.prefix(); - let dup_err = syn::Error::new( - storage_def.prefix_span(), - format!("Duplicate storage prefixes found for `{}`", prefix), - ); - - if let Some(other_dup_err) = used_prefixes.insert(prefix.clone(), dup_err.clone()) { - let mut err = dup_err; - err.combine(other_dup_err); - return Err(err) - } - - if let Metadata::CountedMap { .. } = storage_def.metadata { - let counter_prefix = counter_prefix(&prefix); - let counter_dup_err = syn::Error::new( - storage_def.prefix_span(), - format!( - "Duplicate storage prefixes found for `{}`, used for counter associated to \ + let prefix = storage_def.prefix(); + let dup_err = syn::Error::new( + storage_def.prefix_span(), + format!("Duplicate storage prefixes found for `{}`", prefix), + ); + + if let Some(other_dup_err) = used_prefixes.insert(prefix.clone(), dup_err.clone()) { + let mut err = dup_err; + err.combine(other_dup_err); + return Err(err); + } + + if let Metadata::CountedMap { .. 
} = storage_def.metadata { + let counter_prefix = counter_prefix(&prefix); + let counter_dup_err = syn::Error::new( + storage_def.prefix_span(), + format!( + "Duplicate storage prefixes found for `{}`, used for counter associated to \ counted storage map", - counter_prefix, - ), - ); - - if let Some(other_dup_err) = used_prefixes.insert(counter_prefix, counter_dup_err.clone()) { - let mut err = counter_dup_err; - err.combine(other_dup_err); - return Err(err) - } - } - - Ok(()) + counter_prefix, + ), + ); + + if let Some(other_dup_err) = used_prefixes.insert(counter_prefix, counter_dup_err.clone()) { + let mut err = counter_dup_err; + err.combine(other_dup_err); + return Err(err); + } + } + + Ok(()) } pub struct ResultOnEmptyStructMetadata { - /// The Rust ident that is going to be used as the name of the OnEmpty struct. - pub name: syn::Ident, - /// The path to the error type being returned by the ResultQuery. - pub error_path: syn::Path, - /// The visibility of the OnEmpty struct. - pub visibility: syn::Visibility, - /// The type of the storage item. - pub value_ty: syn::Type, - /// The name of the pallet error enum variant that is going to be returned. - pub variant_name: syn::Ident, - /// The span used to report compilation errors about the OnEmpty struct. - pub span: proc_macro2::Span, + /// The Rust ident that is going to be used as the name of the OnEmpty struct. + pub name: syn::Ident, + /// The path to the error type being returned by the ResultQuery. + pub error_path: syn::Path, + /// The visibility of the OnEmpty struct. + pub visibility: syn::Visibility, + /// The type of the storage item. + pub value_ty: syn::Type, + /// The name of the pallet error enum variant that is going to be returned. + pub variant_name: syn::Ident, + /// The span used to report compilation errors about the OnEmpty struct. 
+ pub span: proc_macro2::Span, } /// @@ -106,277 +109,305 @@ pub struct ResultOnEmptyStructMetadata { /// * if generics are named: reorder the generic, remove their name, and add the missing ones. /// * Add `#[allow(type_alias_bounds)]` pub fn process_generics(def: &mut Def) -> syn::Result> { - let frame_support = &def.frame_support; - let mut on_empty_struct_metadata = Vec::new(); - - for storage_def in def.storages.iter_mut() { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage_def.index]; - - let typ_item = match item { - syn::Item::Type(t) => t, - _ => unreachable!("Checked by def"), - }; - - typ_item.attrs.push(syn::parse_quote!(#[allow(type_alias_bounds)])); - - let typ_path = match &mut *typ_item.ty { - syn::Type::Path(p) => p, - _ => unreachable!("Checked by def"), - }; - - let args = match &mut typ_path.path.segments[0].arguments { - syn::PathArguments::AngleBracketed(args) => args, - _ => unreachable!("Checked by def"), - }; - - let prefix_ident = prefix_ident(storage_def); - let type_use_gen = if def.config.has_instance { - quote::quote_spanned!(storage_def.attr_span => T, I) - } else { - quote::quote_spanned!(storage_def.attr_span => T) - }; - - let default_query_kind: syn::Type = - syn::parse_quote!(#frame_support::storage::types::OptionQuery); - let mut default_on_empty = |value_ty: syn::Type| -> syn::Type { - if let Some(QueryKind::ResultQuery(error_path, variant_name)) = - storage_def.query_kind.as_ref() - { - let on_empty_ident = - quote::format_ident!("__Frame_Internal_Get{}Result", storage_def.ident); - on_empty_struct_metadata.push(ResultOnEmptyStructMetadata { - name: on_empty_ident.clone(), - visibility: storage_def.vis.clone(), - value_ty, - error_path: error_path.clone(), - variant_name: variant_name.clone(), - span: storage_def.attr_span, - }); - return syn::parse_quote!(#on_empty_ident) - } - syn::parse_quote!(#frame_support::traits::GetDefault) - }; - let default_max_values: syn::Type = 
syn::parse_quote!(#frame_support::traits::GetDefault); - - let set_result_query_type_parameter = |query_type: &mut syn::Type| -> syn::Result<()> { - if let Some(QueryKind::ResultQuery(error_path, _)) = storage_def.query_kind.as_ref() { - if let syn::Type::Path(syn::TypePath { path: syn::Path { segments, .. }, .. }) = - query_type - { - if let Some(seg) = segments.last_mut() { - if let syn::PathArguments::AngleBracketed( - syn::AngleBracketedGenericArguments { args, .. }, - ) = &mut seg.arguments - { - args.clear(); - args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))); - } - } - } else { - let msg = format!( - "Invalid pallet::storage, unexpected type for query, expected ResultQuery \ + let frame_support = &def.frame_support; + let mut on_empty_struct_metadata = Vec::new(); + + for storage_def in def.storages.iter_mut() { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage_def.index]; + + let typ_item = match item { + syn::Item::Type(t) => t, + _ => unreachable!("Checked by def"), + }; + + typ_item + .attrs + .push(syn::parse_quote!(#[allow(type_alias_bounds)])); + + let typ_path = match &mut *typ_item.ty { + syn::Type::Path(p) => p, + _ => unreachable!("Checked by def"), + }; + + let args = match &mut typ_path.path.segments[0].arguments { + syn::PathArguments::AngleBracketed(args) => args, + _ => unreachable!("Checked by def"), + }; + + let prefix_ident = prefix_ident(storage_def); + let type_use_gen = if def.config.has_instance { + quote::quote_spanned!(storage_def.attr_span => T, I) + } else { + quote::quote_spanned!(storage_def.attr_span => T) + }; + + let default_query_kind: syn::Type = + syn::parse_quote!(#frame_support::storage::types::OptionQuery); + let mut default_on_empty = |value_ty: syn::Type| -> syn::Type { + if let Some(QueryKind::ResultQuery(error_path, variant_name)) = + storage_def.query_kind.as_ref() + { + let on_empty_ident = + quote::format_ident!("__Frame_Internal_Get{}Result", storage_def.ident); + 
on_empty_struct_metadata.push(ResultOnEmptyStructMetadata { + name: on_empty_ident.clone(), + visibility: storage_def.vis.clone(), + value_ty, + error_path: error_path.clone(), + variant_name: variant_name.clone(), + span: storage_def.attr_span, + }); + return syn::parse_quote!(#on_empty_ident); + } + syn::parse_quote!(#frame_support::traits::GetDefault) + }; + let default_max_values: syn::Type = syn::parse_quote!(#frame_support::traits::GetDefault); + + let set_result_query_type_parameter = |query_type: &mut syn::Type| -> syn::Result<()> { + if let Some(QueryKind::ResultQuery(error_path, _)) = storage_def.query_kind.as_ref() { + if let syn::Type::Path(syn::TypePath { + path: syn::Path { segments, .. }, + .. + }) = query_type + { + if let Some(seg) = segments.last_mut() { + if let syn::PathArguments::AngleBracketed( + syn::AngleBracketedGenericArguments { args, .. }, + ) = &mut seg.arguments + { + args.clear(); + args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))); + } + } + } else { + let msg = format!( + "Invalid pallet::storage, unexpected type for query, expected ResultQuery \ with 1 type parameter, found `{}`", - query_type.to_token_stream().to_string() - ); - return Err(syn::Error::new(query_type.span(), msg)) - } - } - Ok(()) - }; - - if let Some(named_generics) = storage_def.named_generics.clone() { - args.args.clear(); - args.args.push(syn::parse_quote!( #prefix_ident<#type_use_gen> )); - match named_generics { - StorageGenerics::Value { value, query_kind, on_empty } => { - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - }, - StorageGenerics::Map { hasher, key, value, query_kind, on_empty, max_values } | - 
StorageGenerics::CountedMap { - hasher, - key, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(hasher)); - args.args.push(syn::GenericArgument::Type(key)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - }, - StorageGenerics::DoubleMap { - hasher1, - key1, - hasher2, - key2, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(hasher1)); - args.args.push(syn::GenericArgument::Type(key1)); - args.args.push(syn::GenericArgument::Type(hasher2)); - args.args.push(syn::GenericArgument::Type(key2)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - }, - StorageGenerics::NMap { keygen, value, query_kind, on_empty, max_values } | - StorageGenerics::CountedNMap { - keygen, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(keygen)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - 
set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - }, - } - } else { - args.args[0] = syn::parse_quote!( #prefix_ident<#type_use_gen> ); - - let (value_idx, query_idx, on_empty_idx) = match storage_def.metadata { - Metadata::Value { .. } => (1, 2, 3), - Metadata::NMap { .. } | Metadata::CountedNMap { .. } => (2, 3, 4), - Metadata::Map { .. } | Metadata::CountedMap { .. } => (3, 4, 5), - Metadata::DoubleMap { .. } => (5, 6, 7), - }; - - if storage_def.use_default_hasher { - let hasher_indices: Vec = match storage_def.metadata { - Metadata::Map { .. } | Metadata::CountedMap { .. } => vec![1], - Metadata::DoubleMap { .. } => vec![1, 3], - _ => vec![], - }; - for hasher_idx in hasher_indices { - args.args[hasher_idx] = syn::GenericArgument::Type( - syn::parse_quote!(#frame_support::Blake2_128Concat), - ); - } - } - - if query_idx < args.args.len() { - if let syn::GenericArgument::Type(query_kind) = args.args.index_mut(query_idx) { - set_result_query_type_parameter(query_kind)?; - } - } else if let Some(QueryKind::ResultQuery(error_path, _)) = - storage_def.query_kind.as_ref() - { - args.args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))) - } - - // Here, we only need to check if OnEmpty is *not* specified, and if so, then we have to - // generate a default OnEmpty struct for it. 
- if on_empty_idx >= args.args.len() && - matches!(storage_def.query_kind.as_ref(), Some(QueryKind::ResultQuery(_, _))) - { - let value_ty = match args.args[value_idx].clone() { - syn::GenericArgument::Type(ty) => ty, - _ => unreachable!(), - }; - let on_empty = default_on_empty(value_ty); - args.args.push(syn::GenericArgument::Type(on_empty)); - } - } - } - - Ok(on_empty_struct_metadata) + query_type.to_token_stream().to_string() + ); + return Err(syn::Error::new(query_type.span(), msg)); + } + } + Ok(()) + }; + + if let Some(named_generics) = storage_def.named_generics.clone() { + args.args.clear(); + args.args + .push(syn::parse_quote!( #prefix_ident<#type_use_gen> )); + match named_generics { + StorageGenerics::Value { + value, + query_kind, + on_empty, + } => { + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + } + StorageGenerics::Map { + hasher, + key, + value, + query_kind, + on_empty, + max_values, + } + | StorageGenerics::CountedMap { + hasher, + key, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher)); + args.args.push(syn::GenericArgument::Type(key)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + } + 
StorageGenerics::DoubleMap { + hasher1, + key1, + hasher2, + key2, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher1)); + args.args.push(syn::GenericArgument::Type(key1)); + args.args.push(syn::GenericArgument::Type(hasher2)); + args.args.push(syn::GenericArgument::Type(key2)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + } + StorageGenerics::NMap { + keygen, + value, + query_kind, + on_empty, + max_values, + } + | StorageGenerics::CountedNMap { + keygen, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(keygen)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + } + } + } else { + args.args[0] = syn::parse_quote!( #prefix_ident<#type_use_gen> ); + + let (value_idx, query_idx, on_empty_idx) = match storage_def.metadata { + Metadata::Value { .. } => (1, 2, 3), + Metadata::NMap { .. } | Metadata::CountedNMap { .. } => (2, 3, 4), + Metadata::Map { .. } | Metadata::CountedMap { .. } => (3, 4, 5), + Metadata::DoubleMap { .. 
} => (5, 6, 7), + }; + + if storage_def.use_default_hasher { + let hasher_indices: Vec = match storage_def.metadata { + Metadata::Map { .. } | Metadata::CountedMap { .. } => vec![1], + Metadata::DoubleMap { .. } => vec![1, 3], + _ => vec![], + }; + for hasher_idx in hasher_indices { + args.args[hasher_idx] = syn::GenericArgument::Type( + syn::parse_quote!(#frame_support::Blake2_128Concat), + ); + } + } + + if query_idx < args.args.len() { + if let syn::GenericArgument::Type(query_kind) = args.args.index_mut(query_idx) { + set_result_query_type_parameter(query_kind)?; + } + } else if let Some(QueryKind::ResultQuery(error_path, _)) = + storage_def.query_kind.as_ref() + { + args.args + .push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))) + } + + // Here, we only need to check if OnEmpty is *not* specified, and if so, then we have to + // generate a default OnEmpty struct for it. + if on_empty_idx >= args.args.len() + && matches!( + storage_def.query_kind.as_ref(), + Some(QueryKind::ResultQuery(_, _)) + ) + { + let value_ty = match args.args[value_idx].clone() { + syn::GenericArgument::Type(ty) => ty, + _ => unreachable!(), + }; + let on_empty = default_on_empty(value_ty); + args.args.push(syn::GenericArgument::Type(on_empty)); + } + } + } + + Ok(on_empty_struct_metadata) } fn augment_final_docs(def: &mut Def) { - // expand the docs with a new line showing the storage type (value, map, double map, etc), and - // the key/value type(s). 
- let mut push_string_literal = |doc_line: &str, storage: &mut StorageDef| { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage.index]; - let typ_item = match item { - syn::Item::Type(t) => t, - _ => unreachable!("Checked by def"), - }; - typ_item.attrs.push(syn::parse_quote!(#[doc = ""])); - typ_item.attrs.push(syn::parse_quote!(#[doc = #doc_line])); - }; - def.storages.iter_mut().for_each(|storage| match &storage.metadata { - Metadata::Value { value } => { - let doc_line = format!( - "Storage type is [`StorageValue`] with value type `{}`.", - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - Metadata::Map { key, value } => { - let doc_line = format!( - "Storage type is [`StorageMap`] with key type `{}` and value type `{}`.", - key.to_token_stream(), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - Metadata::DoubleMap { key1, key2, value } => { - let doc_line = format!( + // expand the docs with a new line showing the storage type (value, map, double map, etc), and + // the key/value type(s). 
+ let mut push_string_literal = |doc_line: &str, storage: &mut StorageDef| { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage.index]; + let typ_item = match item { + syn::Item::Type(t) => t, + _ => unreachable!("Checked by def"), + }; + typ_item.attrs.push(syn::parse_quote!(#[doc = ""])); + typ_item.attrs.push(syn::parse_quote!(#[doc = #doc_line])); + }; + def.storages + .iter_mut() + .for_each(|storage| match &storage.metadata { + Metadata::Value { value } => { + let doc_line = format!( + "Storage type is [`StorageValue`] with value type `{}`.", + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + Metadata::Map { key, value } => { + let doc_line = format!( + "Storage type is [`StorageMap`] with key type `{}` and value type `{}`.", + key.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + Metadata::DoubleMap { key1, key2, value } => { + let doc_line = format!( "Storage type is [`StorageDoubleMap`] with key1 type {}, key2 type {} and value type {}.", key1.to_token_stream(), key2.to_token_stream(), value.to_token_stream() ); - push_string_literal(&doc_line, storage); - }, - Metadata::NMap { keys, value, .. } => { - let doc_line = format!( - "Storage type is [`StorageNMap`] with keys type ({}) and value type {}.", - keys.iter() - .map(|k| k.to_token_stream().to_string()) - .collect::>() - .join(", "), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - Metadata::CountedNMap { keys, value, .. 
} => { - let doc_line = format!( - "Storage type is [`CountedStorageNMap`] with keys type ({}) and value type {}.", - keys.iter() - .map(|k| k.to_token_stream().to_string()) - .collect::>() - .join(", "), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - Metadata::CountedMap { key, value } => { - let doc_line = format!( - "Storage type is [`CountedStorageMap`] with key type {} and value type {}.", - key.to_token_stream(), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - }); + push_string_literal(&doc_line, storage); + } + Metadata::NMap { keys, value, .. } => { + let doc_line = format!( + "Storage type is [`StorageNMap`] with keys type ({}) and value type {}.", + keys.iter() + .map(|k| k.to_token_stream().to_string()) + .collect::>() + .join(", "), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + Metadata::CountedNMap { keys, value, .. } => { + let doc_line = format!( + "Storage type is [`CountedStorageNMap`] with keys type ({}) and value type {}.", + keys.iter() + .map(|k| k.to_token_stream().to_string()) + .collect::>() + .join(", "), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + Metadata::CountedMap { key, value } => { + let doc_line = format!( + "Storage type is [`CountedStorageMap`] with key type {} and value type {}.", + key.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + }); } /// @@ -387,29 +418,29 @@ fn augment_final_docs(def: &mut Def) { /// * Add `#[allow(type_alias_bounds)]` on storages type alias /// * generate metadatas pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { - let on_empty_struct_metadata = match process_generics(def) { - Ok(idents) => idents, - Err(e) => return e.into_compile_error(), - }; - - augment_final_docs(def); - - // Check for duplicate prefixes - let mut prefix_set = HashMap::new(); - let mut errors = def - .storages - .iter() - 
.filter_map(|storage_def| check_prefix_duplicates(storage_def, &mut prefix_set).err()); - if let Some(mut final_error) = errors.next() { - errors.for_each(|error| final_error.combine(error)); - return final_error.into_compile_error() - } - - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let pallet_ident = &def.pallet_struct.pallet; - - let entries_builder = def.storages.iter().map(|storage| { + let on_empty_struct_metadata = match process_generics(def) { + Ok(idents) => idents, + Err(e) => return e.into_compile_error(), + }; + + augment_final_docs(def); + + // Check for duplicate prefixes + let mut prefix_set = HashMap::new(); + let mut errors = def + .storages + .iter() + .filter_map(|storage_def| check_prefix_duplicates(storage_def, &mut prefix_set).err()); + if let Some(mut final_error) = errors.next() { + errors.for_each(|error| final_error.combine(error)); + return final_error.into_compile_error(); + } + + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let pallet_ident = &def.pallet_struct.pallet; + + let entries_builder = def.storages.iter().map(|storage| { let no_docs = vec![]; let docs = if cfg!(feature = "no-metadata-docs") { &no_docs } else { &storage.docs }; @@ -432,202 +463,202 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { ) }); - let getters = def.storages.iter().map(|storage| { - if let Some(getter) = &storage.getter { - let completed_where_clause = - super::merge_where_clauses(&[&storage.where_clause, &def.config.where_clause]); - - let ident = &storage.ident; - let gen = &def.type_use_generics(storage.attr_span); - let type_impl_gen = &def.type_impl_generics(storage.attr_span); - let type_use_gen = &def.type_use_generics(storage.attr_span); - let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); - - let cfg_attrs = &storage.cfg_attrs; - - // If the storage item is public, link it and otherwise just mention it. 
- // - // We can not just copy the docs from a non-public type as it may links to internal - // types which makes the compiler very unhappy :( - let getter_doc_line = if matches!(storage.vis, syn::Visibility::Public(_)) { - format!("An auto-generated getter for [`{}`].", storage.ident) - } else { - format!("An auto-generated getter for `{}`.", storage.ident) - }; - - match &storage.metadata { - Metadata::Value { value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter() -> #query { - < - #full_ident as #frame_support::storage::StorageValue<#value> - >::get() - } - } - ) - }, - Metadata::Map { key, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k: KArg) -> #query where - KArg: #frame_support::__private::codec::EncodeLike<#key>, - { - < - #full_ident as #frame_support::storage::StorageMap<#key, #value> - >::get(k) - } - } - ) - }, - Metadata::CountedMap { key, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => 
quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k: KArg) -> #query where - KArg: #frame_support::__private::codec::EncodeLike<#key>, - { - // NOTE: we can't use any trait here because CountedStorageMap - // doesn't implement any. - <#full_ident>::get(k) - } - } - ) - }, - Metadata::DoubleMap { key1, key2, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k1: KArg1, k2: KArg2) -> #query where - KArg1: #frame_support::__private::codec::EncodeLike<#key1>, - KArg2: #frame_support::__private::codec::EncodeLike<#key2>, - { - < - #full_ident as - #frame_support::storage::StorageDoubleMap<#key1, #key2, #value> - >::get(k1, k2) - } - } - ) - }, - Metadata::NMap { keygen, value, .. 
} => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(key: KArg) -> #query - where - KArg: #frame_support::storage::types::EncodeLikeTuple< - <#keygen as #frame_support::storage::types::KeyGenerator>::KArg - > - + #frame_support::storage::types::TupleToEncodedIter, - { - < - #full_ident as - #frame_support::storage::StorageNMap<#keygen, #value> - >::get(key) - } - } - ) - }, - Metadata::CountedNMap { keygen, value, .. } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(key: KArg) -> #query - where - KArg: #frame_support::storage::types::EncodeLikeTuple< - <#keygen as #frame_support::storage::types::KeyGenerator>::KArg - > - + #frame_support::storage::types::TupleToEncodedIter, - { - // NOTE: we can't use any trait here because CountedStorageNMap - // doesn't implement any. 
- <#full_ident>::get(key) - } - } - ) - }, - } - } else { - Default::default() - } - }); - - let prefix_structs = def.storages.iter().map(|storage_def| { + let getters = def.storages.iter().map(|storage| { + if let Some(getter) = &storage.getter { + let completed_where_clause = + super::merge_where_clauses(&[&storage.where_clause, &def.config.where_clause]); + + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + let type_impl_gen = &def.type_impl_generics(storage.attr_span); + let type_use_gen = &def.type_use_generics(storage.attr_span); + let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); + + let cfg_attrs = &storage.cfg_attrs; + + // If the storage item is public, link it and otherwise just mention it. + // + // We can not just copy the docs from a non-public type as it may links to internal + // types which makes the compiler very unhappy :( + let getter_doc_line = if matches!(storage.vis, syn::Visibility::Public(_)) { + format!("An auto-generated getter for [`{}`].", storage.ident) + } else { + format!("An auto-generated getter for `{}`.", storage.ident) + }; + + match &storage.metadata { + Metadata::Value { value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter() -> #query { + < + #full_ident as #frame_support::storage::StorageValue<#value> + >::get() + } + } + ) + } + Metadata::Map { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => 
quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::__private::codec::EncodeLike<#key>, + { + < + #full_ident as #frame_support::storage::StorageMap<#key, #value> + >::get(k) + } + } + ) + } + Metadata::CountedMap { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::__private::codec::EncodeLike<#key>, + { + // NOTE: we can't use any trait here because CountedStorageMap + // doesn't implement any. 
+ <#full_ident>::get(k) + } + } + ) + } + Metadata::DoubleMap { key1, key2, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k1: KArg1, k2: KArg2) -> #query where + KArg1: #frame_support::__private::codec::EncodeLike<#key1>, + KArg2: #frame_support::__private::codec::EncodeLike<#key2>, + { + < + #full_ident as + #frame_support::storage::StorageDoubleMap<#key1, #key2, #value> + >::get(k1, k2) + } + } + ) + } + Metadata::NMap { keygen, value, .. } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(key: KArg) -> #query + where + KArg: #frame_support::storage::types::EncodeLikeTuple< + <#keygen as #frame_support::storage::types::KeyGenerator>::KArg + > + + #frame_support::storage::types::TupleToEncodedIter, + { + < + #full_ident as + #frame_support::storage::StorageNMap<#keygen, #value> + >::get(key) + } + } + ) + } + Metadata::CountedNMap { keygen, value, .. 
} => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(key: KArg) -> #query + where + KArg: #frame_support::storage::types::EncodeLikeTuple< + <#keygen as #frame_support::storage::types::KeyGenerator>::KArg + > + + #frame_support::storage::types::TupleToEncodedIter, + { + // NOTE: we can't use any trait here because CountedStorageNMap + // doesn't implement any. + <#full_ident>::get(key) + } + } + ) + } + } + } else { + Default::default() + } + }); + + let prefix_structs = def.storages.iter().map(|storage_def| { let type_impl_gen = &def.type_impl_generics(storage_def.attr_span); let type_use_gen = &def.type_use_generics(storage_def.attr_span); let prefix_struct_ident = prefix_ident(storage_def); @@ -767,153 +798,159 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { ) }); - let on_empty_structs = on_empty_struct_metadata.into_iter().map(|metadata| { - use crate::pallet::parse::GenericKind; - use syn::{GenericArgument, Path, PathArguments, PathSegment, Type, TypePath}; - - let ResultOnEmptyStructMetadata { - name, - visibility, - value_ty, - error_path, - variant_name, - span, - } = metadata; - - let generic_kind = match error_path.segments.last() { - Some(PathSegment { arguments: PathArguments::AngleBracketed(args), .. }) => { - let (has_config, has_instance) = - args.args.iter().fold((false, false), |(has_config, has_instance), arg| { - match arg { - GenericArgument::Type(Type::Path(TypePath { - path: Path { segments, .. }, - .. 
- })) => { - let maybe_config = - segments.first().map_or(false, |seg| seg.ident == "T"); - let maybe_instance = - segments.first().map_or(false, |seg| seg.ident == "I"); - - (has_config || maybe_config, has_instance || maybe_instance) - }, - _ => (has_config, has_instance), - } - }); - GenericKind::from_gens(has_config, has_instance).unwrap_or(GenericKind::None) - }, - _ => GenericKind::None, - }; - let type_impl_gen = generic_kind.type_impl_gen(proc_macro2::Span::call_site()); - let config_where_clause = &def.config.where_clause; - - quote::quote_spanned!(span => - #[doc(hidden)] - #[allow(non_camel_case_types)] - #visibility struct #name; - - impl<#type_impl_gen> #frame_support::traits::Get> - for #name - #config_where_clause - { - fn get() -> Result<#value_ty, #error_path> { - Err(<#error_path>::#variant_name) - } - } - ) - }); - - // aggregated where clause of all storage types and the whole pallet. - let mut where_clauses = vec![&def.config.where_clause]; - where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); - let completed_where_clause = super::merge_where_clauses(&where_clauses); - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - - let try_decode_entire_state = { - let mut storage_names = def - .storages - .iter() - .filter_map(|storage| { - // A little hacky; don't generate for cfg gated storages to not get compile errors - // when building "frame-feature-testing" gated storages in the "frame-support-test" - // crate. 
- if storage.try_decode && storage.cfg_attrs.is_empty() { - let ident = &storage.ident; - let gen = &def.type_use_generics(storage.attr_span); - Some(quote::quote_spanned!(storage.attr_span => #ident<#gen> )) - } else { - None - } - }) - .collect::>(); - storage_names.sort_by_cached_key(|ident| ident.to_string()); - - quote::quote!( - #[cfg(feature = "try-runtime")] - impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage - for #pallet_ident<#type_use_gen> #completed_where_clause - { - fn try_decode_entire_state() -> Result> { - let pallet_name = <::PalletInfo as frame_support::traits::PalletInfo> - ::name::<#pallet_ident<#type_use_gen>>() - .expect("Every active pallet has a name in the runtime; qed"); - - #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); - - // NOTE: for now, we have to exclude storage items that are feature gated. - let mut errors = #frame_support::__private::sp_std::vec::Vec::new(); - let mut decoded = 0usize; - - #( - #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ - {pallet_name}::{}", stringify!(#storage_names)); - - match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { - Ok(count) => { - decoded += count; - }, - Err(err) => { - errors.extend(err); - }, - } - )* - - if errors.is_empty() { - Ok(decoded) - } else { - Err(errors) - } - } - } - ) - }; - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> - #completed_where_clause - { - #[doc(hidden)] - pub fn storage_metadata() -> #frame_support::__private::metadata_ir::PalletStorageMetadataIR { - #frame_support::__private::metadata_ir::PalletStorageMetadataIR { - prefix: < - ::PalletInfo as - #frame_support::traits::PalletInfo - >::name::<#pallet_ident<#type_use_gen>>() - .expect("No name found for the pallet in the runtime! 
This usually means that the pallet wasn't added to `construct_runtime!`."), - entries: { - #[allow(unused_mut)] - let mut entries = #frame_support::__private::sp_std::vec![]; - #( #entries_builder )* - entries - }, - } - } - } - - #( #getters )* - #( #prefix_structs )* - #( #on_empty_structs )* - - #try_decode_entire_state - ) + let on_empty_structs = on_empty_struct_metadata.into_iter().map(|metadata| { + use crate::pallet::parse::GenericKind; + use syn::{GenericArgument, Path, PathArguments, PathSegment, Type, TypePath}; + + let ResultOnEmptyStructMetadata { + name, + visibility, + value_ty, + error_path, + variant_name, + span, + } = metadata; + + let generic_kind = match error_path.segments.last() { + Some(PathSegment { + arguments: PathArguments::AngleBracketed(args), + .. + }) => { + let (has_config, has_instance) = + args.args + .iter() + .fold( + (false, false), + |(has_config, has_instance), arg| match arg { + GenericArgument::Type(Type::Path(TypePath { + path: Path { segments, .. }, + .. + })) => { + let maybe_config = + segments.first().map_or(false, |seg| seg.ident == "T"); + let maybe_instance = + segments.first().map_or(false, |seg| seg.ident == "I"); + + (has_config || maybe_config, has_instance || maybe_instance) + } + _ => (has_config, has_instance), + }, + ); + GenericKind::from_gens(has_config, has_instance).unwrap_or(GenericKind::None) + } + _ => GenericKind::None, + }; + let type_impl_gen = generic_kind.type_impl_gen(proc_macro2::Span::call_site()); + let config_where_clause = &def.config.where_clause; + + quote::quote_spanned!(span => + #[doc(hidden)] + #[allow(non_camel_case_types)] + #visibility struct #name; + + impl<#type_impl_gen> #frame_support::traits::Get> + for #name + #config_where_clause + { + fn get() -> Result<#value_ty, #error_path> { + Err(<#error_path>::#variant_name) + } + } + ) + }); + + // aggregated where clause of all storage types and the whole pallet. 
+ let mut where_clauses = vec![&def.config.where_clause]; + where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); + let completed_where_clause = super::merge_where_clauses(&where_clauses); + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + + let try_decode_entire_state = { + let mut storage_names = def + .storages + .iter() + .filter_map(|storage| { + // A little hacky; don't generate for cfg gated storages to not get compile errors + // when building "frame-feature-testing" gated storages in the "frame-support-test" + // crate. + if storage.try_decode && storage.cfg_attrs.is_empty() { + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + Some(quote::quote_spanned!(storage.attr_span => #ident<#gen> )) + } else { + None + } + }) + .collect::>(); + storage_names.sort_by_cached_key(|ident| ident.to_string()); + + quote::quote!( + #[cfg(feature = "try-runtime")] + impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage + for #pallet_ident<#type_use_gen> #completed_where_clause + { + fn try_decode_entire_state() -> Result> { + let pallet_name = <::PalletInfo as frame_support::traits::PalletInfo> + ::name::<#pallet_ident<#type_use_gen>>() + .expect("Every active pallet has a name in the runtime; qed"); + + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); + + // NOTE: for now, we have to exclude storage items that are feature gated. 
+ let mut errors = #frame_support::__private::sp_std::vec::Vec::new(); + let mut decoded = 0usize; + + #( + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ + {pallet_name}::{}", stringify!(#storage_names)); + + match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { + Ok(count) => { + decoded += count; + }, + Err(err) => { + errors.extend(err); + }, + } + )* + + if errors.is_empty() { + Ok(decoded) + } else { + Err(errors) + } + } + } + ) + }; + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> + #completed_where_clause + { + #[doc(hidden)] + pub fn storage_metadata() -> #frame_support::__private::metadata_ir::PalletStorageMetadataIR { + #frame_support::__private::metadata_ir::PalletStorageMetadataIR { + prefix: < + ::PalletInfo as + #frame_support::traits::PalletInfo + >::name::<#pallet_ident<#type_use_gen>>() + .expect("No name found for the pallet in the runtime! This usually means that the pallet wasn't added to `construct_runtime!`."), + entries: { + #[allow(unused_mut)] + let mut entries = #frame_support::__private::sp_std::vec![]; + #( #entries_builder )* + entries + }, + } + } + } + + #( #getters )* + #( #prefix_structs )* + #( #on_empty_structs )* + + #try_decode_entire_state + ) } diff --git a/support/procedural-fork/src/pallet/expand/tasks.rs b/support/procedural-fork/src/pallet/expand/tasks.rs index 6697e5c82..8c4dfb54f 100644 --- a/support/procedural-fork/src/pallet/expand/tasks.rs +++ b/support/procedural-fork/src/pallet/expand/tasks.rs @@ -27,141 +27,145 @@ use quote::{format_ident, quote, ToTokens}; use syn::{parse_quote, spanned::Spanned, ItemEnum, ItemImpl}; impl TaskEnumDef { - /// Since we optionally allow users to manually specify a `#[pallet::task_enum]`, in the - /// event they _don't_ specify one (which is actually the most common behavior) we have to - /// generate one based on the existing [`TasksDef`]. 
This method performs that generation. - pub fn generate( - tasks: &TasksDef, - type_decl_bounded_generics: TokenStream2, - type_use_generics: TokenStream2, - ) -> Self { - let variants = if tasks.tasks_attr.is_some() { - tasks - .tasks - .iter() - .map(|task| { - let ident = &task.item.sig.ident; - let ident = - format_ident!("{}", ident.to_string().to_class_case(), span = ident.span()); + /// Since we optionally allow users to manually specify a `#[pallet::task_enum]`, in the + /// event they _don't_ specify one (which is actually the most common behavior) we have to + /// generate one based on the existing [`TasksDef`]. This method performs that generation. + pub fn generate( + tasks: &TasksDef, + type_decl_bounded_generics: TokenStream2, + type_use_generics: TokenStream2, + ) -> Self { + let variants = if tasks.tasks_attr.is_some() { + tasks + .tasks + .iter() + .map(|task| { + let ident = &task.item.sig.ident; + let ident = + format_ident!("{}", ident.to_string().to_class_case(), span = ident.span()); - let args = task.item.sig.inputs.iter().collect::>(); + let args = task.item.sig.inputs.iter().collect::>(); - if args.is_empty() { - quote!(#ident) - } else { - quote!(#ident { - #(#args),* - }) - } - }) - .collect::>() - } else { - Vec::new() - }; - let mut task_enum_def: TaskEnumDef = parse_quote! { - /// Auto-generated enum that encapsulates all tasks defined by this pallet. - /// - /// Conceptually similar to the [`Call`] enum, but for tasks. This is only - /// generated if there are tasks present in this pallet. - #[pallet::task_enum] - pub enum Task<#type_decl_bounded_generics> { - #( - #variants, - )* - } - }; - task_enum_def.type_use_generics = type_use_generics; - task_enum_def - } + if args.is_empty() { + quote!(#ident) + } else { + quote!(#ident { + #(#args),* + }) + } + }) + .collect::>() + } else { + Vec::new() + }; + let mut task_enum_def: TaskEnumDef = parse_quote! { + /// Auto-generated enum that encapsulates all tasks defined by this pallet. 
+ /// + /// Conceptually similar to the [`Call`] enum, but for tasks. This is only + /// generated if there are tasks present in this pallet. + #[pallet::task_enum] + pub enum Task<#type_decl_bounded_generics> { + #( + #variants, + )* + } + }; + task_enum_def.type_use_generics = type_use_generics; + task_enum_def + } } impl ToTokens for TaskEnumDef { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let item_enum = &self.item_enum; - let ident = &item_enum.ident; - let vis = &item_enum.vis; - let attrs = &item_enum.attrs; - let generics = &item_enum.generics; - let variants = &item_enum.variants; - let scrate = &self.scrate; - let type_use_generics = &self.type_use_generics; - if self.attr.is_some() { - // `item_enum` is short-hand / generated enum - tokens.extend(quote! { - #(#attrs)* - #[derive( - #scrate::CloneNoBound, - #scrate::EqNoBound, - #scrate::PartialEqNoBound, - #scrate::pallet_prelude::Encode, - #scrate::pallet_prelude::Decode, - #scrate::pallet_prelude::TypeInfo, - )] - #[codec(encode_bound())] - #[codec(decode_bound())] - #[scale_info(skip_type_params(#type_use_generics))] - #vis enum #ident #generics { - #variants - #[doc(hidden)] - #[codec(skip)] - __Ignore(core::marker::PhantomData, #scrate::Never), - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let item_enum = &self.item_enum; + let ident = &item_enum.ident; + let vis = &item_enum.vis; + let attrs = &item_enum.attrs; + let generics = &item_enum.generics; + let variants = &item_enum.variants; + let scrate = &self.scrate; + let type_use_generics = &self.type_use_generics; + if self.attr.is_some() { + // `item_enum` is short-hand / generated enum + tokens.extend(quote! 
{ + #(#attrs)* + #[derive( + #scrate::CloneNoBound, + #scrate::EqNoBound, + #scrate::PartialEqNoBound, + #scrate::pallet_prelude::Encode, + #scrate::pallet_prelude::Decode, + #scrate::pallet_prelude::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_generics))] + #vis enum #ident #generics { + #variants + #[doc(hidden)] + #[codec(skip)] + __Ignore(core::marker::PhantomData, #scrate::Never), + } - impl core::fmt::Debug for #ident<#type_use_generics> { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct(stringify!(#ident)).field("value", self).finish() - } - } - }); - } else { - // `item_enum` is a manually specified enum (no attribute) - tokens.extend(item_enum.to_token_stream()); - } - } + impl core::fmt::Debug for #ident<#type_use_generics> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct(stringify!(#ident)).field("value", self).finish() + } + } + }); + } else { + // `item_enum` is a manually specified enum (no attribute) + tokens.extend(item_enum.to_token_stream()); + } + } } /// Represents an already-expanded [`TasksDef`]. 
#[derive(Parse)] pub struct ExpandedTasksDef { - pub task_item_impl: ItemImpl, - pub task_trait_impl: ItemImpl, + pub task_item_impl: ItemImpl, + pub task_trait_impl: ItemImpl, } impl ToTokens for TasksDef { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let scrate = &self.scrate; - let enum_ident = syn::Ident::new("Task", self.enum_ident.span()); - let enum_arguments = &self.enum_arguments; - let enum_use = quote!(#enum_ident #enum_arguments); + fn to_tokens(&self, tokens: &mut TokenStream2) { + let scrate = &self.scrate; + let enum_ident = syn::Ident::new("Task", self.enum_ident.span()); + let enum_arguments = &self.enum_arguments; + let enum_use = quote!(#enum_ident #enum_arguments); - let task_fn_idents = self - .tasks - .iter() - .map(|task| { - format_ident!( - "{}", - &task.item.sig.ident.to_string().to_class_case(), - span = task.item.sig.ident.span() - ) - }) - .collect::>(); - let task_indices = self.tasks.iter().map(|task| &task.index_attr.meta.index); - let task_conditions = self.tasks.iter().map(|task| &task.condition_attr.meta.expr); - let task_weights = self.tasks.iter().map(|task| &task.weight_attr.meta.expr); - let task_iters = self.tasks.iter().map(|task| &task.list_attr.meta.expr); + let task_fn_idents = self + .tasks + .iter() + .map(|task| { + format_ident!( + "{}", + &task.item.sig.ident.to_string().to_class_case(), + span = task.item.sig.ident.span() + ) + }) + .collect::>(); + let task_indices = self.tasks.iter().map(|task| &task.index_attr.meta.index); + let task_conditions = self.tasks.iter().map(|task| &task.condition_attr.meta.expr); + let task_weights = self.tasks.iter().map(|task| &task.weight_attr.meta.expr); + let task_iters = self.tasks.iter().map(|task| &task.list_attr.meta.expr); - let task_fn_impls = self.tasks.iter().map(|task| { - let mut task_fn_impl = task.item.clone(); - task_fn_impl.attrs = vec![]; - task_fn_impl - }); + let task_fn_impls = self.tasks.iter().map(|task| { + let mut task_fn_impl = task.item.clone(); + 
task_fn_impl.attrs = vec![]; + task_fn_impl + }); - let task_fn_names = self.tasks.iter().map(|task| &task.item.sig.ident); - let task_arg_names = self.tasks.iter().map(|task| &task.arg_names).collect::>(); + let task_fn_names = self.tasks.iter().map(|task| &task.item.sig.ident); + let task_arg_names = self + .tasks + .iter() + .map(|task| &task.arg_names) + .collect::>(); - let sp_std = quote!(#scrate::__private::sp_std); - let impl_generics = &self.item_impl.generics; - tokens.extend(quote! { + let sp_std = quote!(#scrate::__private::sp_std); + let impl_generics = &self.item_impl.generics; + tokens.extend(quote! { impl #impl_generics #enum_use { #(#task_fn_impls)* @@ -212,56 +216,66 @@ impl ToTokens for TasksDef { } } }); - } + } } /// Expands the [`TasksDef`] in the enclosing [`Def`], if present, and returns its tokens. /// /// This modifies the underlying [`Def`] in addition to returning any tokens that were added. pub fn expand_tasks_impl(def: &mut Def) -> TokenStream2 { - let Some(tasks) = &mut def.tasks else { return quote!() }; - let ExpandedTasksDef { task_item_impl, task_trait_impl } = parse_quote!(#tasks); - quote! { - #task_item_impl - #task_trait_impl - } + let Some(tasks) = &mut def.tasks else { + return quote!(); + }; + let ExpandedTasksDef { + task_item_impl, + task_trait_impl, + } = parse_quote!(#tasks); + quote! { + #task_item_impl + #task_trait_impl + } } /// Represents a fully-expanded [`TaskEnumDef`]. #[derive(Parse)] pub struct ExpandedTaskEnum { - pub item_enum: ItemEnum, - pub debug_impl: ItemImpl, + pub item_enum: ItemEnum, + pub debug_impl: ItemImpl, } /// Modifies a [`Def`] to expand the underlying [`TaskEnumDef`] if present, and also returns /// its tokens. A blank [`TokenStream2`] is returned if no [`TaskEnumDef`] has been generated /// or defined. 
pub fn expand_task_enum(def: &mut Def) -> TokenStream2 { - let Some(task_enum) = &mut def.task_enum else { return quote!() }; - let ExpandedTaskEnum { item_enum, debug_impl } = parse_quote!(#task_enum); - quote! { - #item_enum - #debug_impl - } + let Some(task_enum) = &mut def.task_enum else { + return quote!(); + }; + let ExpandedTaskEnum { + item_enum, + debug_impl, + } = parse_quote!(#task_enum); + quote! { + #item_enum + #debug_impl + } } /// Modifies a [`Def`] to expand the underlying [`TasksDef`] and also generate a /// [`TaskEnumDef`] if applicable. The tokens for these items are returned if they are created. pub fn expand_tasks(def: &mut Def) -> TokenStream2 { - if let Some(tasks_def) = &def.tasks { - if def.task_enum.is_none() { - def.task_enum = Some(TaskEnumDef::generate( - &tasks_def, - def.type_decl_bounded_generics(tasks_def.item_impl.span()), - def.type_use_generics(tasks_def.item_impl.span()), - )); - } - } - let tasks_extra_output = expand_tasks_impl(def); - let task_enum_extra_output = expand_task_enum(def); - quote! { - #tasks_extra_output - #task_enum_extra_output - } + if let Some(tasks_def) = &def.tasks { + if def.task_enum.is_none() { + def.task_enum = Some(TaskEnumDef::generate( + &tasks_def, + def.type_decl_bounded_generics(tasks_def.item_impl.span()), + def.type_use_generics(tasks_def.item_impl.span()), + )); + } + } + let tasks_extra_output = expand_tasks_impl(def); + let task_enum_extra_output = expand_task_enum(def); + quote! { + #tasks_extra_output + #task_enum_extra_output + } } diff --git a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs index 99364aaa9..57b78339a 100644 --- a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs +++ b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs @@ -16,201 +16,211 @@ // limitations under the License. 
use crate::{ - pallet::{CompositeKeyword, Def}, - COUNTER, + pallet::{CompositeKeyword, Def}, + COUNTER, }; use syn::spanned::Spanned; /// Generate the `tt_default_parts` macro. pub fn expand_tt_default_parts(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let default_parts_unique_id = - syn::Ident::new(&format!("__tt_default_parts_{}", count), def.item.span()); - let extra_parts_unique_id = - syn::Ident::new(&format!("__tt_extra_parts_{}", count), def.item.span()); - let default_parts_unique_id_v2 = - syn::Ident::new(&format!("__tt_default_parts_v2_{}", count), def.item.span()); - - let call_part = def.call.as_ref().map(|_| quote::quote!(Call,)); - - let task_part = def.task_enum.as_ref().map(|_| quote::quote!(Task,)); - - let storage_part = (!def.storages.is_empty()).then(|| quote::quote!(Storage,)); - - let event_part = def.event.as_ref().map(|event| { - let gen = event.gen_kind.is_generic().then(|| quote::quote!( )); - quote::quote!( Event #gen , ) - }); - - let error_part = def.error.as_ref().map(|_| quote::quote!(Error,)); - - let origin_part = def.origin.as_ref().map(|origin| { - let gen = origin.is_generic.then(|| quote::quote!( )); - quote::quote!( Origin #gen , ) - }); - - let config_part = def.genesis_config.as_ref().map(|genesis_config| { - let gen = genesis_config.gen_kind.is_generic().then(|| quote::quote!( )); - quote::quote!( Config #gen , ) - }); - - let inherent_part = def.inherent.as_ref().map(|_| quote::quote!(Inherent,)); - - let validate_unsigned_part = - def.validate_unsigned.as_ref().map(|_| quote::quote!(ValidateUnsigned,)); - - let freeze_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) - .then_some(quote::quote!(FreezeReason,)); - - let hold_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) - .then_some(quote::quote!(HoldReason,)); - - let 
lock_id_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) - .then_some(quote::quote!(LockId,)); - - let slash_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) - .then_some(quote::quote!(SlashReason,)); - - let call_part_v2 = def.call.as_ref().map(|_| quote::quote!(+ Call)); - - let task_part_v2 = def.task_enum.as_ref().map(|_| quote::quote!(+ Task)); - - let storage_part_v2 = (!def.storages.is_empty()).then(|| quote::quote!(+ Storage)); - - let event_part_v2 = def.event.as_ref().map(|event| { - let gen = event.gen_kind.is_generic().then(|| quote::quote!()); - quote::quote!(+ Event #gen) - }); - - let error_part_v2 = def.error.as_ref().map(|_| quote::quote!(+ Error)); - - let origin_part_v2 = def.origin.as_ref().map(|origin| { - let gen = origin.is_generic.then(|| quote::quote!()); - quote::quote!(+ Origin #gen) - }); - - let config_part_v2 = def.genesis_config.as_ref().map(|genesis_config| { - let gen = genesis_config.gen_kind.is_generic().then(|| quote::quote!()); - quote::quote!(+ Config #gen) - }); - - let inherent_part_v2 = def.inherent.as_ref().map(|_| quote::quote!(+ Inherent)); - - let validate_unsigned_part_v2 = - def.validate_unsigned.as_ref().map(|_| quote::quote!(+ ValidateUnsigned)); - - let freeze_reason_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) - .then_some(quote::quote!(+ FreezeReason)); - - let hold_reason_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) - .then_some(quote::quote!(+ HoldReason)); - - let lock_id_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) - .then_some(quote::quote!(+ LockId)); - - let slash_reason_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) - 
.then_some(quote::quote!(+ SlashReason)); - - quote::quote!( - // This macro follows the conventions as laid out by the `tt-call` crate. It does not - // accept any arguments and simply returns the pallet parts, separated by commas, then - // wrapped inside of braces and finally prepended with double colons, to the caller inside - // of a key named `tokens`. - // - // We need to accept a path argument here, because this macro gets expanded on the - // crate that called the `construct_runtime!` macro, and the actual path is unknown. - #[macro_export] - #[doc(hidden)] - macro_rules! #default_parts_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - tokens = [{ - expanded::{ - Pallet, #call_part #storage_part #event_part #error_part #origin_part #config_part - #inherent_part #validate_unsigned_part #freeze_reason_part #task_part - #hold_reason_part #lock_id_part #slash_reason_part - } - }] - } - }; - } - - pub use #default_parts_unique_id as tt_default_parts; - - - // This macro is similar to the `tt_default_parts!`. It expands the pallets that are declared - // explicitly (`System: frame_system::{Pallet, Call}`) with extra parts. - // - // For example, after expansion an explicit pallet would look like: - // `System: expanded::{Error} ::{Pallet, Call}`. - // - // The `expanded` keyword is a marker of the final state of the `construct_runtime!`. - #[macro_export] - #[doc(hidden)] - macro_rules! #extra_parts_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - tokens = [{ - expanded::{ - #error_part - } - }] - } - }; - } - - pub use #extra_parts_unique_id as tt_extra_parts; - - #[macro_export] - #[doc(hidden)] - macro_rules! #default_parts_unique_id_v2 { - { - $caller:tt - frame_support = [{ $($frame_support:ident)::* }] - } => { - $($frame_support)*::__private::tt_return! 
{ - $caller - tokens = [{ - + Pallet #call_part_v2 #storage_part_v2 #event_part_v2 #error_part_v2 #origin_part_v2 #config_part_v2 - #inherent_part_v2 #validate_unsigned_part_v2 #freeze_reason_part_v2 #task_part_v2 - #hold_reason_part_v2 #lock_id_part_v2 #slash_reason_part_v2 - }] - } - }; - } - - pub use #default_parts_unique_id_v2 as tt_default_parts_v2; - ) + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let default_parts_unique_id = + syn::Ident::new(&format!("__tt_default_parts_{}", count), def.item.span()); + let extra_parts_unique_id = + syn::Ident::new(&format!("__tt_extra_parts_{}", count), def.item.span()); + let default_parts_unique_id_v2 = + syn::Ident::new(&format!("__tt_default_parts_v2_{}", count), def.item.span()); + + let call_part = def.call.as_ref().map(|_| quote::quote!(Call,)); + + let task_part = def.task_enum.as_ref().map(|_| quote::quote!(Task,)); + + let storage_part = (!def.storages.is_empty()).then(|| quote::quote!(Storage,)); + + let event_part = def.event.as_ref().map(|event| { + let gen = event.gen_kind.is_generic().then(|| quote::quote!( )); + quote::quote!( Event #gen , ) + }); + + let error_part = def.error.as_ref().map(|_| quote::quote!(Error,)); + + let origin_part = def.origin.as_ref().map(|origin| { + let gen = origin.is_generic.then(|| quote::quote!( )); + quote::quote!( Origin #gen , ) + }); + + let config_part = def.genesis_config.as_ref().map(|genesis_config| { + let gen = genesis_config + .gen_kind + .is_generic() + .then(|| quote::quote!( )); + quote::quote!( Config #gen , ) + }); + + let inherent_part = def.inherent.as_ref().map(|_| quote::quote!(Inherent,)); + + let validate_unsigned_part = def + .validate_unsigned + .as_ref() + .map(|_| quote::quote!(ValidateUnsigned,)); + + let freeze_reason_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) + .then_some(quote::quote!(FreezeReason,)); + + let hold_reason_part = def + .composites + .iter() + 
.any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) + .then_some(quote::quote!(HoldReason,)); + + let lock_id_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) + .then_some(quote::quote!(LockId,)); + + let slash_reason_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) + .then_some(quote::quote!(SlashReason,)); + + let call_part_v2 = def.call.as_ref().map(|_| quote::quote!(+ Call)); + + let task_part_v2 = def.task_enum.as_ref().map(|_| quote::quote!(+ Task)); + + let storage_part_v2 = (!def.storages.is_empty()).then(|| quote::quote!(+ Storage)); + + let event_part_v2 = def.event.as_ref().map(|event| { + let gen = event.gen_kind.is_generic().then(|| quote::quote!()); + quote::quote!(+ Event #gen) + }); + + let error_part_v2 = def.error.as_ref().map(|_| quote::quote!(+ Error)); + + let origin_part_v2 = def.origin.as_ref().map(|origin| { + let gen = origin.is_generic.then(|| quote::quote!()); + quote::quote!(+ Origin #gen) + }); + + let config_part_v2 = def.genesis_config.as_ref().map(|genesis_config| { + let gen = genesis_config + .gen_kind + .is_generic() + .then(|| quote::quote!()); + quote::quote!(+ Config #gen) + }); + + let inherent_part_v2 = def.inherent.as_ref().map(|_| quote::quote!(+ Inherent)); + + let validate_unsigned_part_v2 = def + .validate_unsigned + .as_ref() + .map(|_| quote::quote!(+ ValidateUnsigned)); + + let freeze_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) + .then_some(quote::quote!(+ FreezeReason)); + + let hold_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) + .then_some(quote::quote!(+ HoldReason)); + + let lock_id_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) + .then_some(quote::quote!(+ 
LockId)); + + let slash_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) + .then_some(quote::quote!(+ SlashReason)); + + quote::quote!( + // This macro follows the conventions as laid out by the `tt-call` crate. It does not + // accept any arguments and simply returns the pallet parts, separated by commas, then + // wrapped inside of braces and finally prepended with double colons, to the caller inside + // of a key named `tokens`. + // + // We need to accept a path argument here, because this macro gets expanded on the + // crate that called the `construct_runtime!` macro, and the actual path is unknown. + #[macro_export] + #[doc(hidden)] + macro_rules! #default_parts_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + tokens = [{ + expanded::{ + Pallet, #call_part #storage_part #event_part #error_part #origin_part #config_part + #inherent_part #validate_unsigned_part #freeze_reason_part #task_part + #hold_reason_part #lock_id_part #slash_reason_part + } + }] + } + }; + } + + pub use #default_parts_unique_id as tt_default_parts; + + + // This macro is similar to the `tt_default_parts!`. It expands the pallets that are declared + // explicitly (`System: frame_system::{Pallet, Call}`) with extra parts. + // + // For example, after expansion an explicit pallet would look like: + // `System: expanded::{Error} ::{Pallet, Call}`. + // + // The `expanded` keyword is a marker of the final state of the `construct_runtime!`. + #[macro_export] + #[doc(hidden)] + macro_rules! #extra_parts_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + tokens = [{ + expanded::{ + #error_part + } + }] + } + }; + } + + pub use #extra_parts_unique_id as tt_extra_parts; + + #[macro_export] + #[doc(hidden)] + macro_rules! 
#default_parts_unique_id_v2 { + { + $caller:tt + frame_support = [{ $($frame_support:ident)::* }] + } => { + $($frame_support)*::__private::tt_return! { + $caller + tokens = [{ + + Pallet #call_part_v2 #storage_part_v2 #event_part_v2 #error_part_v2 #origin_part_v2 #config_part_v2 + #inherent_part_v2 #validate_unsigned_part_v2 #freeze_reason_part_v2 #task_part_v2 + #hold_reason_part_v2 #lock_id_part_v2 #slash_reason_part_v2 + }] + } + }; + } + + pub use #default_parts_unique_id_v2 as tt_default_parts_v2; + ) } diff --git a/support/procedural-fork/src/pallet/expand/type_value.rs b/support/procedural-fork/src/pallet/expand/type_value.rs index 5dc6309c0..84db3e431 100644 --- a/support/procedural-fork/src/pallet/expand/type_value.rs +++ b/support/procedural-fork/src/pallet/expand/type_value.rs @@ -22,56 +22,56 @@ use crate::pallet::Def; /// * implement the `Get<..>` on it /// * Rename the name of the function to internal name pub fn expand_type_values(def: &mut Def) -> proc_macro2::TokenStream { - let mut expand = quote::quote!(); - let frame_support = &def.frame_support; + let mut expand = quote::quote!(); + let frame_support = &def.frame_support; - for type_value in &def.type_values { - let fn_name_str = &type_value.ident.to_string(); - let fn_name_snakecase = inflector::cases::snakecase::to_snake_case(fn_name_str); - let fn_ident_renamed = syn::Ident::new( - &format!("__type_value_for_{}", fn_name_snakecase), - type_value.ident.span(), - ); + for type_value in &def.type_values { + let fn_name_str = &type_value.ident.to_string(); + let fn_name_snakecase = inflector::cases::snakecase::to_snake_case(fn_name_str); + let fn_ident_renamed = syn::Ident::new( + &format!("__type_value_for_{}", fn_name_snakecase), + type_value.ident.span(), + ); - let type_value_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[type_value.index]; - if let syn::Item::Fn(item) = item { - item - } else { - unreachable!("Checked by error parser") - } - }; + let 
type_value_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[type_value.index]; + if let syn::Item::Fn(item) = item { + item + } else { + unreachable!("Checked by error parser") + } + }; - // Rename the type_value function name - type_value_item.sig.ident = fn_ident_renamed.clone(); + // Rename the type_value function name + type_value_item.sig.ident = fn_ident_renamed.clone(); - let vis = &type_value.vis; - let ident = &type_value.ident; - let type_ = &type_value.type_; - let where_clause = &type_value.where_clause; + let vis = &type_value.vis; + let ident = &type_value.ident; + let type_ = &type_value.type_; + let where_clause = &type_value.where_clause; - let (struct_impl_gen, struct_use_gen) = if type_value.is_generic { - ( - def.type_impl_generics(type_value.attr_span), - def.type_use_generics(type_value.attr_span), - ) - } else { - (Default::default(), Default::default()) - }; + let (struct_impl_gen, struct_use_gen) = if type_value.is_generic { + ( + def.type_impl_generics(type_value.attr_span), + def.type_use_generics(type_value.attr_span), + ) + } else { + (Default::default(), Default::default()) + }; - let docs = &type_value.docs; + let docs = &type_value.docs; - expand.extend(quote::quote_spanned!(type_value.attr_span => - #( #[doc = #docs] )* - #vis struct #ident<#struct_use_gen>(core::marker::PhantomData<((), #struct_use_gen)>); - impl<#struct_impl_gen> #frame_support::traits::Get<#type_> for #ident<#struct_use_gen> - #where_clause - { - fn get() -> #type_ { - #fn_ident_renamed::<#struct_use_gen>() - } - } - )); - } - expand + expand.extend(quote::quote_spanned!(type_value.attr_span => + #( #[doc = #docs] )* + #vis struct #ident<#struct_use_gen>(core::marker::PhantomData<((), #struct_use_gen)>); + impl<#struct_impl_gen> #frame_support::traits::Get<#type_> for #ident<#struct_use_gen> + #where_clause + { + fn get() -> #type_ { + #fn_ident_renamed::<#struct_use_gen>() + } + } + )); + } + expand } diff --git 
a/support/procedural-fork/src/pallet/expand/validate_unsigned.rs b/support/procedural-fork/src/pallet/expand/validate_unsigned.rs index 876995585..28c78a1c6 100644 --- a/support/procedural-fork/src/pallet/expand/validate_unsigned.rs +++ b/support/procedural-fork/src/pallet/expand/validate_unsigned.rs @@ -21,36 +21,38 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_validate_unsigned(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = - Ident::new(&format!("__is_validate_unsigned_part_defined_{}", count), def.item.span()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new( + &format!("__is_validate_unsigned_part_defined_{}", count), + def.item.span(), + ); - let maybe_compile_error = if def.validate_unsigned.is_none() { - quote! { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::validate_unsigned] defined, perhaps you should \ - remove `ValidateUnsigned` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.validate_unsigned.is_none() { + quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::validate_unsigned] defined, perhaps you should \ + remove `ValidateUnsigned` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_validate_unsigned_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_validate_unsigned_check { + #[macro_export] + #[doc(hidden)] + macro_rules! 
#macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_validate_unsigned_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_validate_unsigned_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/warnings.rs b/support/procedural-fork/src/pallet/expand/warnings.rs index 030e3ddaf..3d71b83af 100644 --- a/support/procedural-fork/src/pallet/expand/warnings.rs +++ b/support/procedural-fork/src/pallet/expand/warnings.rs @@ -20,79 +20,84 @@ use crate::pallet::parse::call::{CallVariantDef, CallWeightDef}; use proc_macro_warning::Warning; use syn::{ - spanned::Spanned, - visit::{self, Visit}, + spanned::Spanned, + visit::{self, Visit}, }; /// Warn if any of the call arguments starts with a underscore and is used in a weight formula. pub(crate) fn weight_witness_warning( - method: &CallVariantDef, - dev_mode: bool, - warnings: &mut Vec, + method: &CallVariantDef, + dev_mode: bool, + warnings: &mut Vec, ) { - if dev_mode { - return - } - let CallWeightDef::Immediate(w) = &method.weight else { return }; + if dev_mode { + return; + } + let CallWeightDef::Immediate(w) = &method.weight else { + return; + }; - let partial_warning = Warning::new_deprecated("UncheckedWeightWitness") - .old("not check weight witness data") - .new("ensure that all witness data for weight calculation is checked before usage") - .help_link("https://github.com/paritytech/polkadot-sdk/pull/1818"); + let partial_warning = Warning::new_deprecated("UncheckedWeightWitness") + .old("not check weight witness data") + .new("ensure that all witness data for weight calculation is checked before usage") + .help_link("https://github.com/paritytech/polkadot-sdk/pull/1818"); - for (_, arg_ident, _) in method.args.iter() { - if !arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), &arg_ident) { - continue - } + for (_, arg_ident, _) in method.args.iter() { + if 
!arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), &arg_ident) { + continue; + } - let warning = partial_warning - .clone() - .index(warnings.len()) - .span(arg_ident.span()) - .build_or_panic(); + let warning = partial_warning + .clone() + .index(warnings.len()) + .span(arg_ident.span()) + .build_or_panic(); - warnings.push(warning); - } + warnings.push(warning); + } } /// Warn if the weight is a constant and the pallet not in `dev_mode`. pub(crate) fn weight_constant_warning( - weight: &syn::Expr, - dev_mode: bool, - warnings: &mut Vec, + weight: &syn::Expr, + dev_mode: bool, + warnings: &mut Vec, ) { - if dev_mode { - return - } - let syn::Expr::Lit(lit) = weight else { return }; + if dev_mode { + return; + } + let syn::Expr::Lit(lit) = weight else { return }; - let warning = Warning::new_deprecated("ConstantWeight") - .index(warnings.len()) - .old("use hard-coded constant as call weight") - .new("benchmark all calls or put the pallet into `dev` mode") - .help_link("https://github.com/paritytech/substrate/pull/13798") - .span(lit.span()) - .build_or_panic(); + let warning = Warning::new_deprecated("ConstantWeight") + .index(warnings.len()) + .old("use hard-coded constant as call weight") + .new("benchmark all calls or put the pallet into `dev` mode") + .help_link("https://github.com/paritytech/substrate/pull/13798") + .span(lit.span()) + .build_or_panic(); - warnings.push(warning); + warnings.push(warning); } /// Returns whether `expr` contains `ident`. 
fn contains_ident(mut expr: syn::Expr, ident: &syn::Ident) -> bool { - struct ContainsIdent { - ident: syn::Ident, - found: bool, - } + struct ContainsIdent { + ident: syn::Ident, + found: bool, + } - impl<'a> Visit<'a> for ContainsIdent { - fn visit_ident(&mut self, i: &syn::Ident) { - if *i == self.ident { - self.found = true; - } - } - } + impl<'a> Visit<'a> for ContainsIdent { + fn visit_ident(&mut self, i: &syn::Ident) { + if *i == self.ident { + self.found = true; + } + } + } - let mut visitor = ContainsIdent { ident: ident.clone(), found: false }; - visit::visit_expr(&mut visitor, &mut expr); - visitor.found + let mut visitor = ContainsIdent { + ident: ident.clone(), + found: false, + }; + visit::visit_expr(&mut visitor, &mut expr); + visitor.found } diff --git a/support/procedural-fork/src/pallet/mod.rs b/support/procedural-fork/src/pallet/mod.rs index 42d8272fb..d3796662f 100644 --- a/support/procedural-fork/src/pallet/mod.rs +++ b/support/procedural-fork/src/pallet/mod.rs @@ -32,30 +32,30 @@ pub use parse::{composite::keyword::CompositeKeyword, Def}; use syn::spanned::Spanned; mod keyword { - syn::custom_keyword!(dev_mode); + syn::custom_keyword!(dev_mode); } pub fn pallet( - attr: proc_macro::TokenStream, - item: proc_macro::TokenStream, + attr: proc_macro::TokenStream, + item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { - let mut dev_mode = false; - if !attr.is_empty() { - if let Ok(_) = syn::parse::(attr.clone()) { - dev_mode = true; - } else { - let msg = "Invalid pallet macro call: unexpected attribute. Macro call must be \ + let mut dev_mode = false; + if !attr.is_empty() { + if let Ok(_) = syn::parse::(attr.clone()) { + dev_mode = true; + } else { + let msg = "Invalid pallet macro call: unexpected attribute. 
Macro call must be \ bare, such as `#[frame_support::pallet]` or `#[pallet]`, or must specify the \ `dev_mode` attribute, such as `#[frame_support::pallet(dev_mode)]` or \ #[pallet(dev_mode)]."; - let span = proc_macro2::TokenStream::from(attr).span(); - return syn::Error::new(span, msg).to_compile_error().into() - } - } - - let item = syn::parse_macro_input!(item as syn::ItemMod); - match parse::Def::try_from(item, dev_mode) { - Ok(def) => expand::expand(def).into(), - Err(e) => e.to_compile_error().into(), - } + let span = proc_macro2::TokenStream::from(attr).span(); + return syn::Error::new(span, msg).to_compile_error().into(); + } + } + + let item = syn::parse_macro_input!(item as syn::ItemMod); + match parse::Def::try_from(item, dev_mode) { + Ok(def) => expand::expand(def).into(), + Err(e) => e.to_compile_error().into(), + } } diff --git a/support/procedural-fork/src/pallet/parse/call.rs b/support/procedural-fork/src/pallet/parse/call.rs index 4e09b86fd..865c63473 100644 --- a/support/procedural-fork/src/pallet/parse/call.rs +++ b/support/procedural-fork/src/pallet/parse/call.rs @@ -24,124 +24,124 @@ use syn::{spanned::Spanned, ExprClosure}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Call); - syn::custom_keyword!(OriginFor); - syn::custom_keyword!(RuntimeOrigin); - syn::custom_keyword!(weight); - syn::custom_keyword!(call_index); - syn::custom_keyword!(compact); - syn::custom_keyword!(T); - syn::custom_keyword!(pallet); - syn::custom_keyword!(feeless_if); + syn::custom_keyword!(Call); + syn::custom_keyword!(OriginFor); + syn::custom_keyword!(RuntimeOrigin); + syn::custom_keyword!(weight); + syn::custom_keyword!(call_index); + syn::custom_keyword!(compact); + syn::custom_keyword!(T); + syn::custom_keyword!(pallet); + syn::custom_keyword!(feeless_if); } /// Definition of dispatchables typically `impl Pallet { ... }` pub struct CallDef { - /// The where_clause used. 
- pub where_clause: Option, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The index of call item in pallet module. - pub index: usize, - /// Information on methods (used for expansion). - pub methods: Vec, - /// The span of the pallet::call attribute. - pub attr_span: proc_macro2::Span, - /// Docs, specified on the impl Block. - pub docs: Vec, - /// The optional `weight` attribute on the `pallet::call`. - pub inherited_call_weight: Option, + /// The where_clause used. + pub where_clause: Option, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The index of call item in pallet module. + pub index: usize, + /// Information on methods (used for expansion). + pub methods: Vec, + /// The span of the pallet::call attribute. + pub attr_span: proc_macro2::Span, + /// Docs, specified on the impl Block. + pub docs: Vec, + /// The optional `weight` attribute on the `pallet::call`. + pub inherited_call_weight: Option, } /// The weight of a call. #[derive(Clone)] pub enum CallWeightDef { - /// Explicitly set on the call itself with `#[pallet::weight(…)]`. This value is used. - Immediate(syn::Expr), + /// Explicitly set on the call itself with `#[pallet::weight(…)]`. This value is used. + Immediate(syn::Expr), - /// The default value that should be set for dev-mode pallets. Usually zero. - DevModeDefault, + /// The default value that should be set for dev-mode pallets. Usually zero. + DevModeDefault, - /// Inherits whatever value is configured on the pallet level. - /// - /// The concrete value is not known at this point. - Inherited, + /// Inherits whatever value is configured on the pallet level. + /// + /// The concrete value is not known at this point. + Inherited, } /// Definition of dispatchable typically: `#[weight...] fn foo(origin .., param1: ...) -> ..` #[derive(Clone)] pub struct CallVariantDef { - /// Function name. 
- pub name: syn::Ident, - /// Information on args: `(is_compact, name, type)` - pub args: Vec<(bool, syn::Ident, Box)>, - /// Weight for the call. - pub weight: CallWeightDef, - /// Call index of the dispatchable. - pub call_index: u8, - /// Whether an explicit call index was specified. - pub explicit_call_index: bool, - /// Docs, used for metadata. - pub docs: Vec, - /// Attributes annotated at the top of the dispatchable function. - pub attrs: Vec, - /// The `cfg` attributes. - pub cfg_attrs: Vec, - /// The optional `feeless_if` attribute on the `pallet::call`. - pub feeless_check: Option, + /// Function name. + pub name: syn::Ident, + /// Information on args: `(is_compact, name, type)` + pub args: Vec<(bool, syn::Ident, Box)>, + /// Weight for the call. + pub weight: CallWeightDef, + /// Call index of the dispatchable. + pub call_index: u8, + /// Whether an explicit call index was specified. + pub explicit_call_index: bool, + /// Docs, used for metadata. + pub docs: Vec, + /// Attributes annotated at the top of the dispatchable function. + pub attrs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, + /// The optional `feeless_if` attribute on the `pallet::call`. + pub feeless_check: Option, } /// Attributes for functions in call impl block. 
pub enum FunctionAttr { - /// Parse for `#[pallet::call_index(expr)]` - CallIndex(u8), - /// Parse for `#[pallet::weight(expr)]` - Weight(syn::Expr), - /// Parse for `#[pallet::feeless_if(expr)]` - FeelessIf(Span, syn::ExprClosure), + /// Parse for `#[pallet::call_index(expr)]` + CallIndex(u8), + /// Parse for `#[pallet::weight(expr)]` + Weight(syn::Expr), + /// Parse for `#[pallet::feeless_if(expr)]` + FeelessIf(Span, syn::ExprClosure), } impl syn::parse::Parse for FunctionAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::weight) { - content.parse::()?; - let weight_content; - syn::parenthesized!(weight_content in content); - Ok(FunctionAttr::Weight(weight_content.parse::()?)) - } else if lookahead.peek(keyword::call_index) { - content.parse::()?; - let call_index_content; - syn::parenthesized!(call_index_content in content); - let index = call_index_content.parse::()?; - if !index.suffix().is_empty() { - let msg = "Number literal must not have a suffix"; - return Err(syn::Error::new(index.span(), msg)) - } - Ok(FunctionAttr::CallIndex(index.base10_parse()?)) - } else if lookahead.peek(keyword::feeless_if) { - content.parse::()?; - let closure_content; - syn::parenthesized!(closure_content in content); - Ok(FunctionAttr::FeelessIf( - closure_content.span(), - closure_content.parse::().map_err(|e| { - let msg = "Invalid feeless_if attribute: expected a closure"; - let mut err = syn::Error::new(closure_content.span(), msg); - err.combine(e); - err - })?, - )) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::weight) { + 
content.parse::()?; + let weight_content; + syn::parenthesized!(weight_content in content); + Ok(FunctionAttr::Weight(weight_content.parse::()?)) + } else if lookahead.peek(keyword::call_index) { + content.parse::()?; + let call_index_content; + syn::parenthesized!(call_index_content in content); + let index = call_index_content.parse::()?; + if !index.suffix().is_empty() { + let msg = "Number literal must not have a suffix"; + return Err(syn::Error::new(index.span(), msg)); + } + Ok(FunctionAttr::CallIndex(index.base10_parse()?)) + } else if lookahead.peek(keyword::feeless_if) { + content.parse::()?; + let closure_content; + syn::parenthesized!(closure_content in content); + Ok(FunctionAttr::FeelessIf( + closure_content.span(), + closure_content.parse::().map_err(|e| { + let msg = "Invalid feeless_if attribute: expected a closure"; + let mut err = syn::Error::new(closure_content.span(), msg); + err.combine(e); + err + })?, + )) + } else { + Err(lookahead.error()) + } + } } /// Attribute for arguments in function in call impl block. @@ -149,319 +149,324 @@ impl syn::parse::Parse for FunctionAttr { pub struct ArgAttrIsCompact; impl syn::parse::Parse for ArgAttrIsCompact { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - content.parse::()?; - Ok(ArgAttrIsCompact) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + content.parse::()?; + Ok(ArgAttrIsCompact) + } } /// Check the syntax is `OriginFor`, `&OriginFor` or `T::RuntimeOrigin`. 
pub fn check_dispatchable_first_arg_type(ty: &syn::Type, is_ref: bool) -> syn::Result<()> { - pub struct CheckOriginFor(bool); - impl syn::parse::Parse for CheckOriginFor { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let is_ref = input.parse::().is_ok(); - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - - Ok(Self(is_ref)) - } - } - - pub struct CheckRuntimeOrigin; - impl syn::parse::Parse for CheckRuntimeOrigin { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self) - } - } - - let result_origin_for = syn::parse2::(ty.to_token_stream()); - let result_runtime_origin = syn::parse2::(ty.to_token_stream()); - return match (result_origin_for, result_runtime_origin) { - (Ok(CheckOriginFor(has_ref)), _) if is_ref == has_ref => Ok(()), - (_, Ok(_)) => Ok(()), - (_, _) => { - let msg = if is_ref { - "Invalid type: expected `&OriginFor`" - } else { - "Invalid type: expected `OriginFor` or `T::RuntimeOrigin`" - }; - return Err(syn::Error::new(ty.span(), msg)) - }, - } + pub struct CheckOriginFor(bool); + impl syn::parse::Parse for CheckOriginFor { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let is_ref = input.parse::().is_ok(); + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(is_ref)) + } + } + + pub struct CheckRuntimeOrigin; + impl syn::parse::Parse for CheckRuntimeOrigin { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self) + } + } + + let result_origin_for = syn::parse2::(ty.to_token_stream()); + let result_runtime_origin = syn::parse2::(ty.to_token_stream()); + return match (result_origin_for, result_runtime_origin) { + (Ok(CheckOriginFor(has_ref)), _) if is_ref == has_ref => Ok(()), + (_, Ok(_)) => Ok(()), + (_, _) => { + let msg = if is_ref { + "Invalid type: expected `&OriginFor`" + } else { + 
"Invalid type: expected `OriginFor` or `T::RuntimeOrigin`" + }; + return Err(syn::Error::new(ty.span(), msg)); + } + }; } impl CallDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - dev_mode: bool, - inherited_call_weight: Option, - ) -> syn::Result { - let item_impl = if let syn::Item::Impl(item) = item { - item - } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::call, expected item impl")) - }; - - let instances = vec![ - helper::check_impl_gen(&item_impl.generics, item_impl.impl_token.span())?, - helper::check_pallet_struct_usage(&item_impl.self_ty)?, - ]; - - if let Some((_, _, for_)) = item_impl.trait_ { - let msg = "Invalid pallet::call, expected no trait ident as in \ + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + dev_mode: bool, + inherited_call_weight: Option, + ) -> syn::Result { + let item_impl = if let syn::Item::Impl(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::call, expected item impl", + )); + }; + + let instances = vec![ + helper::check_impl_gen(&item_impl.generics, item_impl.impl_token.span())?, + helper::check_pallet_struct_usage(&item_impl.self_ty)?, + ]; + + if let Some((_, _, for_)) = item_impl.trait_ { + let msg = "Invalid pallet::call, expected no trait ident as in \ `impl<..> Pallet<..> { .. 
}`"; - return Err(syn::Error::new(for_.span(), msg)) - } - - let mut methods = vec![]; - let mut indices = HashMap::new(); - let mut last_index: Option = None; - for item in &mut item_impl.items { - if let syn::ImplItem::Fn(method) = item { - if !matches!(method.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::call, dispatchable function must be public: \ + return Err(syn::Error::new(for_.span(), msg)); + } + + let mut methods = vec![]; + let mut indices = HashMap::new(); + let mut last_index: Option = None; + for item in &mut item_impl.items { + if let syn::ImplItem::Fn(method) = item { + if !matches!(method.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::call, dispatchable function must be public: \ `pub fn`"; - let span = match method.vis { - syn::Visibility::Inherited => method.sig.span(), - _ => method.vis.span(), - }; - - return Err(syn::Error::new(span, msg)) - } - - match method.sig.inputs.first() { - None => { - let msg = "Invalid pallet::call, must have at least origin arg"; - return Err(syn::Error::new(method.sig.span(), msg)) - }, - Some(syn::FnArg::Receiver(_)) => { - let msg = "Invalid pallet::call, first argument must be a typed argument, \ + let span = match method.vis { + syn::Visibility::Inherited => method.sig.span(), + _ => method.vis.span(), + }; + + return Err(syn::Error::new(span, msg)); + } + + match method.sig.inputs.first() { + None => { + let msg = "Invalid pallet::call, must have at least origin arg"; + return Err(syn::Error::new(method.sig.span(), msg)); + } + Some(syn::FnArg::Receiver(_)) => { + let msg = "Invalid pallet::call, first argument must be a typed argument, \ e.g. 
`origin: OriginFor`"; - return Err(syn::Error::new(method.sig.span(), msg)) - }, - Some(syn::FnArg::Typed(arg)) => { - check_dispatchable_first_arg_type(&arg.ty, false)?; - }, - } - - if let syn::ReturnType::Type(_, type_) = &method.sig.output { - helper::check_pallet_call_return_type(type_)?; - } else { - let msg = "Invalid pallet::call, require return type \ + return Err(syn::Error::new(method.sig.span(), msg)); + } + Some(syn::FnArg::Typed(arg)) => { + check_dispatchable_first_arg_type(&arg.ty, false)?; + } + } + + if let syn::ReturnType::Type(_, type_) = &method.sig.output { + helper::check_pallet_call_return_type(type_)?; + } else { + let msg = "Invalid pallet::call, require return type \ DispatchResultWithPostInfo"; - return Err(syn::Error::new(method.sig.span(), msg)) - } - - let cfg_attrs: Vec = helper::get_item_cfg_attrs(&method.attrs); - let mut call_idx_attrs = vec![]; - let mut weight_attrs = vec![]; - let mut feeless_attrs = vec![]; - for attr in helper::take_item_pallet_attrs(&mut method.attrs)?.into_iter() { - match attr { - FunctionAttr::CallIndex(_) => { - call_idx_attrs.push(attr); - }, - FunctionAttr::Weight(_) => { - weight_attrs.push(attr); - }, - FunctionAttr::FeelessIf(span, _) => { - feeless_attrs.push((span, attr)); - }, - } - } - - if weight_attrs.is_empty() && dev_mode { - // inject a default O(1) weight when dev mode is enabled and no weight has - // been specified on the call - let empty_weight: syn::Expr = syn::parse_quote!(0); - weight_attrs.push(FunctionAttr::Weight(empty_weight)); - } - - let weight = match weight_attrs.len() { - 0 if inherited_call_weight.is_some() => CallWeightDef::Inherited, - 0 if dev_mode => CallWeightDef::DevModeDefault, - 0 => return Err(syn::Error::new( - method.sig.span(), - "A pallet::call requires either a concrete `#[pallet::weight($expr)]` or an + return Err(syn::Error::new(method.sig.span(), msg)); + } + + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&method.attrs); + let mut call_idx_attrs = 
vec![]; + let mut weight_attrs = vec![]; + let mut feeless_attrs = vec![]; + for attr in helper::take_item_pallet_attrs(&mut method.attrs)?.into_iter() { + match attr { + FunctionAttr::CallIndex(_) => { + call_idx_attrs.push(attr); + } + FunctionAttr::Weight(_) => { + weight_attrs.push(attr); + } + FunctionAttr::FeelessIf(span, _) => { + feeless_attrs.push((span, attr)); + } + } + } + + if weight_attrs.is_empty() && dev_mode { + // inject a default O(1) weight when dev mode is enabled and no weight has + // been specified on the call + let empty_weight: syn::Expr = syn::parse_quote!(0); + weight_attrs.push(FunctionAttr::Weight(empty_weight)); + } + + let weight = match weight_attrs.len() { + 0 if inherited_call_weight.is_some() => CallWeightDef::Inherited, + 0 if dev_mode => CallWeightDef::DevModeDefault, + 0 => return Err(syn::Error::new( + method.sig.span(), + "A pallet::call requires either a concrete `#[pallet::weight($expr)]` or an inherited weight from the `#[pallet:call(weight($type))]` attribute, but none were given.", - )), - 1 => match weight_attrs.pop().unwrap() { - FunctionAttr::Weight(w) => CallWeightDef::Immediate(w), - _ => unreachable!("checked during creation of the let binding"), - }, - _ => { - let msg = "Invalid pallet::call, too many weight attributes given"; - return Err(syn::Error::new(method.sig.span(), msg)) - }, - }; - - if call_idx_attrs.len() > 1 { - let msg = "Invalid pallet::call, too many call_index attributes given"; - return Err(syn::Error::new(method.sig.span(), msg)) - } - let call_index = call_idx_attrs.pop().map(|attr| match attr { - FunctionAttr::CallIndex(idx) => idx, - _ => unreachable!("checked during creation of the let binding"), - }); - let explicit_call_index = call_index.is_some(); - - let final_index = match call_index { - Some(i) => i, - None => - last_index.map_or(Some(0), |idx| idx.checked_add(1)).ok_or_else(|| { - let msg = "Call index doesn't fit into u8, index is 256"; - syn::Error::new(method.sig.span(), msg) - 
})?, - }; - last_index = Some(final_index); - - if let Some(used_fn) = indices.insert(final_index, method.sig.ident.clone()) { - let msg = format!( - "Call indices are conflicting: Both functions {} and {} are at index {}", - used_fn, method.sig.ident, final_index, - ); - let mut err = syn::Error::new(used_fn.span(), &msg); - err.combine(syn::Error::new(method.sig.ident.span(), msg)); - return Err(err) - } - - let mut args = vec![]; - for arg in method.sig.inputs.iter_mut().skip(1) { - let arg = if let syn::FnArg::Typed(arg) = arg { - arg - } else { - unreachable!("Only first argument can be receiver"); - }; - - let arg_attrs: Vec = - helper::take_item_pallet_attrs(&mut arg.attrs)?; - - if arg_attrs.len() > 1 { - let msg = "Invalid pallet::call, argument has too many attributes"; - return Err(syn::Error::new(arg.span(), msg)) - } - - let arg_ident = if let syn::Pat::Ident(pat) = &*arg.pat { - pat.ident.clone() - } else { - let msg = "Invalid pallet::call, argument must be ident"; - return Err(syn::Error::new(arg.pat.span(), msg)) - }; - - args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); - } - - let docs = get_doc_literals(&method.attrs); - - if feeless_attrs.len() > 1 { - let msg = "Invalid pallet::call, there can only be one feeless_if attribute"; - return Err(syn::Error::new(feeless_attrs[1].0, msg)) - } - let feeless_check: Option = - feeless_attrs.pop().map(|(_, attr)| match attr { - FunctionAttr::FeelessIf(_, closure) => closure, - _ => unreachable!("checked during creation of the let binding"), - }); - - if let Some(ref feeless_check) = feeless_check { - if feeless_check.inputs.len() != args.len() + 1 { - let msg = "Invalid pallet::call, feeless_if closure must have same \ + )), + 1 => match weight_attrs.pop().unwrap() { + FunctionAttr::Weight(w) => CallWeightDef::Immediate(w), + _ => unreachable!("checked during creation of the let binding"), + }, + _ => { + let msg = "Invalid pallet::call, too many weight attributes given"; + return 
Err(syn::Error::new(method.sig.span(), msg)); + } + }; + + if call_idx_attrs.len() > 1 { + let msg = "Invalid pallet::call, too many call_index attributes given"; + return Err(syn::Error::new(method.sig.span(), msg)); + } + let call_index = call_idx_attrs.pop().map(|attr| match attr { + FunctionAttr::CallIndex(idx) => idx, + _ => unreachable!("checked during creation of the let binding"), + }); + let explicit_call_index = call_index.is_some(); + + let final_index = match call_index { + Some(i) => i, + None => last_index + .map_or(Some(0), |idx| idx.checked_add(1)) + .ok_or_else(|| { + let msg = "Call index doesn't fit into u8, index is 256"; + syn::Error::new(method.sig.span(), msg) + })?, + }; + last_index = Some(final_index); + + if let Some(used_fn) = indices.insert(final_index, method.sig.ident.clone()) { + let msg = format!( + "Call indices are conflicting: Both functions {} and {} are at index {}", + used_fn, method.sig.ident, final_index, + ); + let mut err = syn::Error::new(used_fn.span(), &msg); + err.combine(syn::Error::new(method.sig.ident.span(), msg)); + return Err(err); + } + + let mut args = vec![]; + for arg in method.sig.inputs.iter_mut().skip(1) { + let arg = if let syn::FnArg::Typed(arg) = arg { + arg + } else { + unreachable!("Only first argument can be receiver"); + }; + + let arg_attrs: Vec = + helper::take_item_pallet_attrs(&mut arg.attrs)?; + + if arg_attrs.len() > 1 { + let msg = "Invalid pallet::call, argument has too many attributes"; + return Err(syn::Error::new(arg.span(), msg)); + } + + let arg_ident = if let syn::Pat::Ident(pat) = &*arg.pat { + pat.ident.clone() + } else { + let msg = "Invalid pallet::call, argument must be ident"; + return Err(syn::Error::new(arg.pat.span(), msg)); + }; + + args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); + } + + let docs = get_doc_literals(&method.attrs); + + if feeless_attrs.len() > 1 { + let msg = "Invalid pallet::call, there can only be one feeless_if attribute"; + return 
Err(syn::Error::new(feeless_attrs[1].0, msg)); + } + let feeless_check: Option = + feeless_attrs.pop().map(|(_, attr)| match attr { + FunctionAttr::FeelessIf(_, closure) => closure, + _ => unreachable!("checked during creation of the let binding"), + }); + + if let Some(ref feeless_check) = feeless_check { + if feeless_check.inputs.len() != args.len() + 1 { + let msg = "Invalid pallet::call, feeless_if closure must have same \ number of arguments as the dispatchable function"; - return Err(syn::Error::new(feeless_check.span(), msg)) - } - - match feeless_check.inputs.first() { - None => { - let msg = "Invalid pallet::call, feeless_if closure must have at least origin arg"; - return Err(syn::Error::new(feeless_check.span(), msg)) - }, - Some(syn::Pat::Type(arg)) => { - check_dispatchable_first_arg_type(&arg.ty, true)?; - }, - _ => { - let msg = "Invalid pallet::call, feeless_if closure first argument must be a typed argument, \ + return Err(syn::Error::new(feeless_check.span(), msg)); + } + + match feeless_check.inputs.first() { + None => { + let msg = "Invalid pallet::call, feeless_if closure must have at least origin arg"; + return Err(syn::Error::new(feeless_check.span(), msg)); + } + Some(syn::Pat::Type(arg)) => { + check_dispatchable_first_arg_type(&arg.ty, true)?; + } + _ => { + let msg = "Invalid pallet::call, feeless_if closure first argument must be a typed argument, \ e.g. `origin: OriginFor`"; - return Err(syn::Error::new(feeless_check.span(), msg)) - }, - } - - for (feeless_arg, arg) in feeless_check.inputs.iter().skip(1).zip(args.iter()) { - let feeless_arg_type = - if let syn::Pat::Type(syn::PatType { ty, .. 
}) = feeless_arg.clone() { - if let syn::Type::Reference(pat) = *ty { - pat.elem.clone() - } else { - let msg = "Invalid pallet::call, feeless_if closure argument must be a reference"; - return Err(syn::Error::new(ty.span(), msg)) - } - } else { - let msg = "Invalid pallet::call, feeless_if closure argument must be a type ascription pattern"; - return Err(syn::Error::new(feeless_arg.span(), msg)) - }; - - if feeless_arg_type != arg.2 { - let msg = - "Invalid pallet::call, feeless_if closure argument must have \ + return Err(syn::Error::new(feeless_check.span(), msg)); + } + } + + for (feeless_arg, arg) in feeless_check.inputs.iter().skip(1).zip(args.iter()) { + let feeless_arg_type = if let syn::Pat::Type(syn::PatType { ty, .. }) = + feeless_arg.clone() + { + if let syn::Type::Reference(pat) = *ty { + pat.elem.clone() + } else { + let msg = "Invalid pallet::call, feeless_if closure argument must be a reference"; + return Err(syn::Error::new(ty.span(), msg)); + } + } else { + let msg = "Invalid pallet::call, feeless_if closure argument must be a type ascription pattern"; + return Err(syn::Error::new(feeless_arg.span(), msg)); + }; + + if feeless_arg_type != arg.2 { + let msg = + "Invalid pallet::call, feeless_if closure argument must have \ a reference to the same type as the dispatchable function argument"; - return Err(syn::Error::new(feeless_arg.span(), msg)) - } - } - - let valid_return = match &feeless_check.output { - syn::ReturnType::Type(_, type_) => match *(type_.clone()) { - syn::Type::Path(syn::TypePath { path, .. 
}) => path.is_ident("bool"), - _ => false, - }, - _ => false, - }; - if !valid_return { - let msg = "Invalid pallet::call, feeless_if closure must return `bool`"; - return Err(syn::Error::new(feeless_check.output.span(), msg)) - } - } - - methods.push(CallVariantDef { - name: method.sig.ident.clone(), - weight, - call_index: final_index, - explicit_call_index, - args, - docs, - attrs: method.attrs.clone(), - cfg_attrs, - feeless_check, - }); - } else { - let msg = "Invalid pallet::call, only method accepted"; - return Err(syn::Error::new(item.span(), msg)) - } - } - - Ok(Self { - index, - attr_span, - instances, - methods, - where_clause: item_impl.generics.where_clause.clone(), - docs: get_doc_literals(&item_impl.attrs), - inherited_call_weight, - }) - } + return Err(syn::Error::new(feeless_arg.span(), msg)); + } + } + + let valid_return = match &feeless_check.output { + syn::ReturnType::Type(_, type_) => match *(type_.clone()) { + syn::Type::Path(syn::TypePath { path, .. }) => path.is_ident("bool"), + _ => false, + }, + _ => false, + }; + if !valid_return { + let msg = "Invalid pallet::call, feeless_if closure must return `bool`"; + return Err(syn::Error::new(feeless_check.output.span(), msg)); + } + } + + methods.push(CallVariantDef { + name: method.sig.ident.clone(), + weight, + call_index: final_index, + explicit_call_index, + args, + docs, + attrs: method.attrs.clone(), + cfg_attrs, + feeless_check, + }); + } else { + let msg = "Invalid pallet::call, only method accepted"; + return Err(syn::Error::new(item.span(), msg)); + } + } + + Ok(Self { + index, + attr_span, + instances, + methods, + where_clause: item_impl.generics.where_clause.clone(), + docs: get_doc_literals(&item_impl.attrs), + inherited_call_weight, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/composite.rs b/support/procedural-fork/src/pallet/parse/composite.rs index c3ac74846..38da1f205 100644 --- a/support/procedural-fork/src/pallet/parse/composite.rs +++ 
b/support/procedural-fork/src/pallet/parse/composite.rs @@ -20,172 +20,178 @@ use quote::ToTokens; use syn::spanned::Spanned; pub mod keyword { - use super::*; - - syn::custom_keyword!(FreezeReason); - syn::custom_keyword!(HoldReason); - syn::custom_keyword!(LockId); - syn::custom_keyword!(SlashReason); - syn::custom_keyword!(Task); - - pub enum CompositeKeyword { - FreezeReason(FreezeReason), - HoldReason(HoldReason), - LockId(LockId), - SlashReason(SlashReason), - Task(Task), - } - - impl ToTokens for CompositeKeyword { - fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { - use CompositeKeyword::*; - match self { - FreezeReason(inner) => inner.to_tokens(tokens), - HoldReason(inner) => inner.to_tokens(tokens), - LockId(inner) => inner.to_tokens(tokens), - SlashReason(inner) => inner.to_tokens(tokens), - Task(inner) => inner.to_tokens(tokens), - } - } - } - - impl syn::parse::Parse for CompositeKeyword { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lookahead = input.lookahead1(); - if lookahead.peek(FreezeReason) { - Ok(Self::FreezeReason(input.parse()?)) - } else if lookahead.peek(HoldReason) { - Ok(Self::HoldReason(input.parse()?)) - } else if lookahead.peek(LockId) { - Ok(Self::LockId(input.parse()?)) - } else if lookahead.peek(SlashReason) { - Ok(Self::SlashReason(input.parse()?)) - } else if lookahead.peek(Task) { - Ok(Self::Task(input.parse()?)) - } else { - Err(lookahead.error()) - } - } - } - - impl std::fmt::Display for CompositeKeyword { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - use CompositeKeyword::*; - write!( - f, - "{}", - match self { - FreezeReason(_) => "FreezeReason", - HoldReason(_) => "HoldReason", - Task(_) => "Task", - LockId(_) => "LockId", - SlashReason(_) => "SlashReason", - } - ) - } - } + use super::*; + + syn::custom_keyword!(FreezeReason); + syn::custom_keyword!(HoldReason); + syn::custom_keyword!(LockId); + syn::custom_keyword!(SlashReason); + syn::custom_keyword!(Task); + 
+ pub enum CompositeKeyword { + FreezeReason(FreezeReason), + HoldReason(HoldReason), + LockId(LockId), + SlashReason(SlashReason), + Task(Task), + } + + impl ToTokens for CompositeKeyword { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + use CompositeKeyword::*; + match self { + FreezeReason(inner) => inner.to_tokens(tokens), + HoldReason(inner) => inner.to_tokens(tokens), + LockId(inner) => inner.to_tokens(tokens), + SlashReason(inner) => inner.to_tokens(tokens), + Task(inner) => inner.to_tokens(tokens), + } + } + } + + impl syn::parse::Parse for CompositeKeyword { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + if lookahead.peek(FreezeReason) { + Ok(Self::FreezeReason(input.parse()?)) + } else if lookahead.peek(HoldReason) { + Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(LockId) { + Ok(Self::LockId(input.parse()?)) + } else if lookahead.peek(SlashReason) { + Ok(Self::SlashReason(input.parse()?)) + } else if lookahead.peek(Task) { + Ok(Self::Task(input.parse()?)) + } else { + Err(lookahead.error()) + } + } + } + + impl std::fmt::Display for CompositeKeyword { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use CompositeKeyword::*; + write!( + f, + "{}", + match self { + FreezeReason(_) => "FreezeReason", + HoldReason(_) => "HoldReason", + Task(_) => "Task", + LockId(_) => "LockId", + SlashReason(_) => "SlashReason", + } + ) + } + } } pub struct CompositeDef { - /// The index of the CompositeDef item in the pallet module. - pub index: usize, - /// The composite keyword used (contains span). - pub composite_keyword: keyword::CompositeKeyword, - /// Name of the associated type. - pub ident: syn::Ident, - /// Type parameters and where clause attached to a declaration of the pallet::composite_enum. - pub generics: syn::Generics, - /// The span of the pallet::composite_enum attribute. 
- pub attr_span: proc_macro2::Span, - /// Variant count of the pallet::composite_enum. - pub variant_count: u32, + /// The index of the CompositeDef item in the pallet module. + pub index: usize, + /// The composite keyword used (contains span). + pub composite_keyword: keyword::CompositeKeyword, + /// Name of the associated type. + pub ident: syn::Ident, + /// Type parameters and where clause attached to a declaration of the pallet::composite_enum. + pub generics: syn::Generics, + /// The span of the pallet::composite_enum attribute. + pub attr_span: proc_macro2::Span, + /// Variant count of the pallet::composite_enum. + pub variant_count: u32, } impl CompositeDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - scrate: &syn::Path, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - // check variants: composite enums support only field-less enum variants. This is - // because fields can introduce too many possibilities, making it challenging to compute - // a fixed variant count. - for variant in &item.variants { - match variant.fields { - syn::Fields::Named(_) | syn::Fields::Unnamed(_) => - return Err(syn::Error::new( - variant.ident.span(), - "The composite enum does not support variants with fields!", - )), - syn::Fields::Unit => (), - } - } - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::composite_enum, expected enum item", - )) - }; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = format!("Invalid pallet::composite_enum, `{}` must be public", item.ident); - return Err(syn::Error::new(item.span(), msg)) - } - - let has_instance = if item.generics.params.first().is_some() { - helper::check_config_def_gen(&item.generics, item.ident.span())?; - true - } else { - false - }; - - let has_derive_attr = item.attrs.iter().any(|attr| { - if let syn::Meta::List(syn::MetaList { path, .. 
}) = &attr.meta { - path.get_ident().map(|ident| ident == "derive").unwrap_or(false) - } else { - false - } - }); - - if !has_derive_attr { - let derive_attr: syn::Attribute = syn::parse_quote! { - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - }; - item.attrs.push(derive_attr); - } - - if has_instance { - item.attrs.push(syn::parse_quote! { - #[scale_info(skip_type_params(I))] - }); - - item.variants.push(syn::parse_quote! { - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData, - ) - }); - } - - let composite_keyword = - syn::parse2::(item.ident.to_token_stream())?; - - Ok(CompositeDef { - index, - composite_keyword, - attr_span, - generics: item.generics.clone(), - variant_count: item.variants.len() as u32, - ident: item.ident.clone(), - }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + scrate: &syn::Path, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + // check variants: composite enums support only field-less enum variants. This is + // because fields can introduce too many possibilities, making it challenging to compute + // a fixed variant count. 
+ for variant in &item.variants { + match variant.fields { + syn::Fields::Named(_) | syn::Fields::Unnamed(_) => { + return Err(syn::Error::new( + variant.ident.span(), + "The composite enum does not support variants with fields!", + )) + } + syn::Fields::Unit => (), + } + } + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::composite_enum, expected enum item", + )); + }; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = format!( + "Invalid pallet::composite_enum, `{}` must be public", + item.ident + ); + return Err(syn::Error::new(item.span(), msg)); + } + + let has_instance = if item.generics.params.first().is_some() { + helper::check_config_def_gen(&item.generics, item.ident.span())?; + true + } else { + false + }; + + let has_derive_attr = item.attrs.iter().any(|attr| { + if let syn::Meta::List(syn::MetaList { path, .. }) = &attr.meta { + path.get_ident() + .map(|ident| ident == "derive") + .unwrap_or(false) + } else { + false + } + }); + + if !has_derive_attr { + let derive_attr: syn::Attribute = syn::parse_quote! { + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + }; + item.attrs.push(derive_attr); + } + + if has_instance { + item.attrs.push(syn::parse_quote! { + #[scale_info(skip_type_params(I))] + }); + + item.variants.push(syn::parse_quote! 
{ + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData, + ) + }); + } + + let composite_keyword = + syn::parse2::(item.ident.to_token_stream())?; + + Ok(CompositeDef { + index, + composite_keyword, + attr_span, + generics: item.generics.clone(), + variant_count: item.variants.len() as u32, + ident: item.ident.clone(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/config.rs b/support/procedural-fork/src/pallet/parse/config.rs index fbab92db1..cde565245 100644 --- a/support/procedural-fork/src/pallet/parse/config.rs +++ b/support/procedural-fork/src/pallet/parse/config.rs @@ -22,569 +22,592 @@ use syn::{spanned::Spanned, token, Token}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Config); - syn::custom_keyword!(From); - syn::custom_keyword!(T); - syn::custom_keyword!(I); - syn::custom_keyword!(config); - syn::custom_keyword!(pallet); - syn::custom_keyword!(IsType); - syn::custom_keyword!(RuntimeEvent); - syn::custom_keyword!(Event); - syn::custom_keyword!(frame_system); - syn::custom_keyword!(disable_frame_system_supertrait_check); - syn::custom_keyword!(no_default); - syn::custom_keyword!(no_default_bounds); - syn::custom_keyword!(constant); + syn::custom_keyword!(Config); + syn::custom_keyword!(From); + syn::custom_keyword!(T); + syn::custom_keyword!(I); + syn::custom_keyword!(config); + syn::custom_keyword!(pallet); + syn::custom_keyword!(IsType); + syn::custom_keyword!(RuntimeEvent); + syn::custom_keyword!(Event); + syn::custom_keyword!(frame_system); + syn::custom_keyword!(disable_frame_system_supertrait_check); + syn::custom_keyword!(no_default); + syn::custom_keyword!(no_default_bounds); + syn::custom_keyword!(constant); } #[derive(Default)] pub struct DefaultTrait { - /// A bool for each sub-trait item indicates whether the item has - /// `#[pallet::no_default_bounds]` attached to it. If true, the item will not have any bounds - /// in the generated default sub-trait. 
- pub items: Vec<(syn::TraitItem, bool)>, - pub has_system: bool, + /// A bool for each sub-trait item indicates whether the item has + /// `#[pallet::no_default_bounds]` attached to it. If true, the item will not have any bounds + /// in the generated default sub-trait. + pub items: Vec<(syn::TraitItem, bool)>, + pub has_system: bool, } /// Input definition for the pallet config. pub struct ConfigDef { - /// The index of item in pallet module. - pub index: usize, - /// Whether the trait has instance (i.e. define with `Config`) - pub has_instance: bool, - /// Const associated type. - pub consts_metadata: Vec, - /// Whether the trait has the associated type `Event`, note that those bounds are - /// checked: - /// * `IsType::RuntimeEvent` - /// * `From` or `From>` or `From>` - pub has_event_type: bool, - /// The where clause on trait definition but modified so `Self` is `T`. - pub where_clause: Option, - /// The span of the pallet::config attribute. - pub attr_span: proc_macro2::Span, - /// Whether a default sub-trait should be generated. - /// - /// Contains default sub-trait items (instantiated by `#[pallet::config(with_default)]`). - /// Vec will be empty if `#[pallet::config(with_default)]` is not specified or if there are - /// no trait items. - pub default_sub_trait: Option, + /// The index of item in pallet module. + pub index: usize, + /// Whether the trait has instance (i.e. define with `Config`) + pub has_instance: bool, + /// Const associated type. + pub consts_metadata: Vec, + /// Whether the trait has the associated type `Event`, note that those bounds are + /// checked: + /// * `IsType::RuntimeEvent` + /// * `From` or `From>` or `From>` + pub has_event_type: bool, + /// The where clause on trait definition but modified so `Self` is `T`. + pub where_clause: Option, + /// The span of the pallet::config attribute. + pub attr_span: proc_macro2::Span, + /// Whether a default sub-trait should be generated. 
+ /// + /// Contains default sub-trait items (instantiated by `#[pallet::config(with_default)]`). + /// Vec will be empty if `#[pallet::config(with_default)]` is not specified or if there are + /// no trait items. + pub default_sub_trait: Option, } /// Input definition for a constant in pallet config. pub struct ConstMetadataDef { - /// Name of the associated type. - pub ident: syn::Ident, - /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, + /// Name of the associated type. + pub ident: syn::Ident, + /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, } impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { - type Error = syn::Error; - - fn try_from(trait_ty: &syn::TraitItemType) -> Result { - let err = |span, msg| { - syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)) - }; - let doc = get_doc_literals(&trait_ty.attrs); - let ident = trait_ty.ident.clone(); - let bound = trait_ty - .bounds - .iter() - .find_map(|b| { - if let syn::TypeParamBound::Trait(tb) = b { - tb.path - .segments - .last() - .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) - } else { - None - } - }) - .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; - let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { - if ab.args.len() == 1 { - if let syn::GenericArgument::Type(ref ty) = ab.args[0] { - Ok(ty) - } else { - Err(err(ab.args[0].span(), "Expected a type argument")) - } - } else { - Err(err(bound.span(), "Expected a single type argument")) - } - } else { - Err(err(bound.span(), "Expected trait generic args")) - }?; - let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) - .expect("Internal error: replacing `Self` by `T` should result in valid type"); - - Ok(Self { ident, type_, doc }) - } + 
type Error = syn::Error; + + fn try_from(trait_ty: &syn::TraitItemType) -> Result { + let err = |span, msg| { + syn::Error::new( + span, + format!("Invalid usage of `#[pallet::constant]`: {}", msg), + ) + }; + let doc = get_doc_literals(&trait_ty.attrs); + let ident = trait_ty.ident.clone(); + let bound = trait_ty + .bounds + .iter() + .find_map(|b| { + if let syn::TypeParamBound::Trait(tb) = b { + tb.path + .segments + .last() + .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) + } else { + None + } + }) + .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; + let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { + if ab.args.len() == 1 { + if let syn::GenericArgument::Type(ref ty) = ab.args[0] { + Ok(ty) + } else { + Err(err(ab.args[0].span(), "Expected a type argument")) + } + } else { + Err(err(bound.span(), "Expected a single type argument")) + } + } else { + Err(err(bound.span(), "Expected trait generic args")) + }?; + let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) + .expect("Internal error: replacing `Self` by `T` should result in valid type"); + + Ok(Self { ident, type_, doc }) + } } /// Parse for `#[pallet::disable_frame_system_supertrait_check]` pub struct DisableFrameSystemSupertraitCheck; impl syn::parse::Parse for DisableFrameSystemSupertraitCheck { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - content.parse::()?; - Ok(Self) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + content.parse::()?; + Ok(Self) + } } /// Parsing for the `typ` portion of `PalletAttr` #[derive(derive_syn_parse::Parse, PartialEq, Eq)] pub enum PalletAttrType { - #[peek(keyword::no_default, name = "no_default")] - 
NoDefault(keyword::no_default), - #[peek(keyword::no_default_bounds, name = "no_default_bounds")] - NoBounds(keyword::no_default_bounds), - #[peek(keyword::constant, name = "constant")] - Constant(keyword::constant), + #[peek(keyword::no_default, name = "no_default")] + NoDefault(keyword::no_default), + #[peek(keyword::no_default_bounds, name = "no_default_bounds")] + NoBounds(keyword::no_default_bounds), + #[peek(keyword::constant, name = "constant")] + Constant(keyword::constant), } /// Parsing for `#[pallet::X]` #[derive(derive_syn_parse::Parse)] pub struct PalletAttr { - _pound: Token![#], - #[bracket] - _bracket: token::Bracket, - #[inside(_bracket)] - _pallet: keyword::pallet, - #[prefix(Token![::] in _bracket)] - #[inside(_bracket)] - typ: PalletAttrType, + _pound: Token![#], + #[bracket] + _bracket: token::Bracket, + #[inside(_bracket)] + _pallet: keyword::pallet, + #[prefix(Token![::] in _bracket)] + #[inside(_bracket)] + typ: PalletAttrType, } /// Parse for `IsType<::RuntimeEvent>` and retrieve `$path` pub struct IsTypeBoundEventParse(syn::Path); impl syn::parse::Parse for IsTypeBoundEventParse { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - let config_path = input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - - Ok(Self(config_path)) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + let config_path = input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(config_path)) + } } /// Parse for `From` or `From>` or `From>` pub struct FromEventParse { - is_generic: bool, - has_instance: bool, + is_generic: bool, + has_instance: bool, } impl syn::parse::Parse for FromEventParse { - fn parse(input: syn::parse::ParseStream) -> 
syn::Result { - let mut is_generic = false; - let mut has_instance = false; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![<]) { - is_generic = true; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - input.parse::()?; - input.parse::()?; - has_instance = true; - } - input.parse::]>()?; - } - input.parse::]>()?; - - Ok(Self { is_generic, has_instance }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut is_generic = false; + let mut has_instance = false; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![<]) { + is_generic = true; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + input.parse::()?; + input.parse::()?; + has_instance = true; + } + input.parse::]>()?; + } + input.parse::]>()?; + + Ok(Self { + is_generic, + has_instance, + }) + } } /// Check if trait_item is `type RuntimeEvent`, if so checks its bounds are those expected. 
/// (Event type is reserved type) fn check_event_type( - frame_system: &syn::Path, - trait_item: &syn::TraitItem, - trait_has_instance: bool, + frame_system: &syn::Path, + trait_item: &syn::TraitItem, + trait_has_instance: bool, ) -> syn::Result { - if let syn::TraitItem::Type(type_) = trait_item { - if type_.ident == "RuntimeEvent" { - // Check event has no generics - if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ + if let syn::TraitItem::Type(type_) = trait_item { + if type_.ident == "RuntimeEvent" { + // Check event has no generics + if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ no generics nor where_clause"; - return Err(syn::Error::new(trait_item.span(), msg)) - } + return Err(syn::Error::new(trait_item.span(), msg)); + } - // Check bound contains IsType and From - let has_is_type_bound = type_.bounds.iter().any(|s| { - syn::parse2::(s.to_token_stream()) - .map_or(false, |b| has_expected_system_config(b.0, frame_system)) - }); + // Check bound contains IsType and From + let has_is_type_bound = type_.bounds.iter().any(|s| { + syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b.0, frame_system)) + }); - if !has_is_type_bound { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ + if !has_is_type_bound { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ bound: `IsType<::RuntimeEvent>`".to_string(); - return Err(syn::Error::new(type_.span(), msg)) - } - - let from_event_bound = type_ - .bounds - .iter() - .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); - - let from_event_bound = if let Some(b) = from_event_bound { - b - } else { - let msg = "Invalid `type RuntimeEvent`, 
associated type `RuntimeEvent` is reserved and must \ + return Err(syn::Error::new(type_.span(), msg)); + } + + let from_event_bound = type_ + .bounds + .iter() + .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); + + let from_event_bound = if let Some(b) = from_event_bound { + b + } else { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ bound: `From` or `From>` or `From>`"; - return Err(syn::Error::new(type_.span(), msg)) - }; + return Err(syn::Error::new(type_.span(), msg)); + }; - if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) - { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ + if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) + { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ `From`. Config and generic Event must be both with instance or \ without instance"; - return Err(syn::Error::new(type_.span(), msg)) - } - - Ok(true) - } else { - Ok(false) - } - } else { - Ok(false) - } + return Err(syn::Error::new(type_.span(), msg)); + } + + Ok(true) + } else { + Ok(false) + } + } else { + Ok(false) + } } /// Check that the path to `frame_system::Config` is valid, this is that the path is just /// `frame_system::Config` or when using the `frame` crate it is `frame::xyz::frame_system::Config`. fn has_expected_system_config(path: syn::Path, frame_system: &syn::Path) -> bool { - // Check if `frame_system` is actually 'frame_system'. - if path.segments.iter().all(|s| s.ident != "frame_system") { - return false - } - - let mut expected_system_config = - match (is_using_frame_crate(&path), is_using_frame_crate(&frame_system)) { - (true, false) => - // We can't use the path to `frame_system` from `frame` if `frame_system` is not being - // in scope through `frame`. 
- return false, - (false, true) => - // We know that the only valid frame_system path is one that is `frame_system`, as - // `frame` re-exports it as such. - syn::parse2::(quote::quote!(frame_system)).expect("is a valid path; qed"), - (_, _) => - // They are either both `frame_system` or both `frame::xyz::frame_system`. - frame_system.clone(), - }; - - expected_system_config - .segments - .push(syn::PathSegment::from(syn::Ident::new("Config", path.span()))); - - // the parse path might be something like `frame_system::Config<...>`, so we - // only compare the idents along the path. - expected_system_config - .segments - .into_iter() - .map(|ps| ps.ident) - .collect::>() == - path.segments.into_iter().map(|ps| ps.ident).collect::>() + // Check if `frame_system` is actually 'frame_system'. + if path.segments.iter().all(|s| s.ident != "frame_system") { + return false; + } + + let mut expected_system_config = match ( + is_using_frame_crate(&path), + is_using_frame_crate(&frame_system), + ) { + (true, false) => + // We can't use the path to `frame_system` from `frame` if `frame_system` is not being + // in scope through `frame`. + { + return false + } + (false, true) => + // We know that the only valid frame_system path is one that is `frame_system`, as + // `frame` re-exports it as such. + { + syn::parse2::(quote::quote!(frame_system)).expect("is a valid path; qed") + } + (_, _) => + // They are either both `frame_system` or both `frame::xyz::frame_system`. + { + frame_system.clone() + } + }; + + expected_system_config + .segments + .push(syn::PathSegment::from(syn::Ident::new( + "Config", + path.span(), + ))); + + // the parse path might be something like `frame_system::Config<...>`, so we + // only compare the idents along the path. 
+ expected_system_config + .segments + .into_iter() + .map(|ps| ps.ident) + .collect::>() + == path + .segments + .into_iter() + .map(|ps| ps.ident) + .collect::>() } /// Replace ident `Self` by `T` pub fn replace_self_by_t(input: proc_macro2::TokenStream) -> proc_macro2::TokenStream { - input - .into_iter() - .map(|token_tree| match token_tree { - proc_macro2::TokenTree::Group(group) => - proc_macro2::Group::new(group.delimiter(), replace_self_by_t(group.stream())).into(), - proc_macro2::TokenTree::Ident(ident) if ident == "Self" => - proc_macro2::Ident::new("T", ident.span()).into(), - other => other, - }) - .collect() + input + .into_iter() + .map(|token_tree| match token_tree { + proc_macro2::TokenTree::Group(group) => { + proc_macro2::Group::new(group.delimiter(), replace_self_by_t(group.stream())).into() + } + proc_macro2::TokenTree::Ident(ident) if ident == "Self" => { + proc_macro2::Ident::new("T", ident.span()).into() + } + other => other, + }) + .collect() } impl ConfigDef { - pub fn try_from( - frame_system: &syn::Path, - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - enable_default: bool, - ) -> syn::Result { - let item = if let syn::Item::Trait(item) = item { - item - } else { - let msg = "Invalid pallet::config, expected trait definition"; - return Err(syn::Error::new(item.span(), msg)) - }; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::config, trait must be public"; - return Err(syn::Error::new(item.span(), msg)) - } - - syn::parse2::(item.ident.to_token_stream())?; - - let where_clause = { - let stream = replace_self_by_t(item.generics.where_clause.to_token_stream()); - syn::parse2::>(stream).expect( - "Internal error: replacing `Self` by `T` should result in valid where + pub fn try_from( + frame_system: &syn::Path, + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + enable_default: bool, + ) -> syn::Result { + let item = if let syn::Item::Trait(item) = item { + 
item + } else { + let msg = "Invalid pallet::config, expected trait definition"; + return Err(syn::Error::new(item.span(), msg)); + }; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::config, trait must be public"; + return Err(syn::Error::new(item.span(), msg)); + } + + syn::parse2::(item.ident.to_token_stream())?; + + let where_clause = { + let stream = replace_self_by_t(item.generics.where_clause.to_token_stream()); + syn::parse2::>(stream).expect( + "Internal error: replacing `Self` by `T` should result in valid where clause", - ) - }; - - if item.generics.params.len() > 1 { - let msg = "Invalid pallet::config, expected no more than one generic"; - return Err(syn::Error::new(item.generics.params[2].span(), msg)) - } - - let has_instance = if item.generics.params.first().is_some() { - helper::check_config_def_gen(&item.generics, item.ident.span())?; - true - } else { - false - }; - - let has_frame_system_supertrait = item.supertraits.iter().any(|s| { - syn::parse2::(s.to_token_stream()) - .map_or(false, |b| has_expected_system_config(b, frame_system)) - }); - - let mut has_event_type = false; - let mut consts_metadata = vec![]; - let mut default_sub_trait = if enable_default { - Some(DefaultTrait { - items: Default::default(), - has_system: has_frame_system_supertrait, - }) - } else { - None - }; - for trait_item in &mut item.items { - let is_event = check_event_type(frame_system, trait_item, has_instance)?; - has_event_type = has_event_type || is_event; - - let mut already_no_default = false; - let mut already_constant = false; - let mut already_no_default_bounds = false; - - while let Ok(Some(pallet_attr)) = - helper::take_first_item_pallet_attr::(trait_item) - { - match (pallet_attr.typ, &trait_item) { - (PalletAttrType::Constant(_), syn::TraitItem::Type(ref typ)) => { - if already_constant { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::constant] attribute not allowed.", - )) - } 
- already_constant = true; - consts_metadata.push(ConstMetadataDef::try_from(typ)?); - }, - (PalletAttrType::Constant(_), _) => - return Err(syn::Error::new( - trait_item.span(), - "Invalid #[pallet::constant] in #[pallet::config], expected type item", - )), - (PalletAttrType::NoDefault(_), _) => { - if !enable_default { - return Err(syn::Error::new( + ) + }; + + if item.generics.params.len() > 1 { + let msg = "Invalid pallet::config, expected no more than one generic"; + return Err(syn::Error::new(item.generics.params[2].span(), msg)); + } + + let has_instance = if item.generics.params.first().is_some() { + helper::check_config_def_gen(&item.generics, item.ident.span())?; + true + } else { + false + }; + + let has_frame_system_supertrait = item.supertraits.iter().any(|s| { + syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b, frame_system)) + }); + + let mut has_event_type = false; + let mut consts_metadata = vec![]; + let mut default_sub_trait = if enable_default { + Some(DefaultTrait { + items: Default::default(), + has_system: has_frame_system_supertrait, + }) + } else { + None + }; + for trait_item in &mut item.items { + let is_event = check_event_type(frame_system, trait_item, has_instance)?; + has_event_type = has_event_type || is_event; + + let mut already_no_default = false; + let mut already_constant = false; + let mut already_no_default_bounds = false; + + while let Ok(Some(pallet_attr)) = + helper::take_first_item_pallet_attr::(trait_item) + { + match (pallet_attr.typ, &trait_item) { + (PalletAttrType::Constant(_), syn::TraitItem::Type(ref typ)) => { + if already_constant { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::constant] attribute not allowed.", + )); + } + already_constant = true; + consts_metadata.push(ConstMetadataDef::try_from(typ)?); + } + (PalletAttrType::Constant(_), _) => { + return Err(syn::Error::new( + trait_item.span(), + "Invalid #[pallet::constant] in 
#[pallet::config], expected type item", + )) + } + (PalletAttrType::NoDefault(_), _) => { + if !enable_default { + return Err(syn::Error::new( pallet_attr._bracket.span.join(), "`#[pallet:no_default]` can only be used if `#[pallet::config(with_default)]` \ has been specified" - )) - } - if already_no_default { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::no_default] attribute not allowed.", - )) - } - - already_no_default = true; - }, - (PalletAttrType::NoBounds(_), _) => { - if !enable_default { - return Err(syn::Error::new( + )); + } + if already_no_default { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::no_default] attribute not allowed.", + )); + } + + already_no_default = true; + } + (PalletAttrType::NoBounds(_), _) => { + if !enable_default { + return Err(syn::Error::new( pallet_attr._bracket.span.join(), "`#[pallet:no_default_bounds]` can only be used if `#[pallet::config(with_default)]` \ has been specified" - )) - } - if already_no_default_bounds { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::no_default_bounds] attribute not allowed.", - )) - } - already_no_default_bounds = true; - }, - } - } - - if !already_no_default && enable_default { - default_sub_trait - .as_mut() - .expect("is 'Some(_)' if 'enable_default'; qed") - .items - .push((trait_item.clone(), already_no_default_bounds)); - } - } - - let attr: Option = - helper::take_first_item_pallet_attr(&mut item.attrs)?; - let disable_system_supertrait_check = attr.is_some(); - - if !has_frame_system_supertrait && !disable_system_supertrait_check { - let found = if item.supertraits.is_empty() { - "none".to_string() - } else { - let mut found = item - .supertraits - .iter() - .fold(String::new(), |acc, s| format!("{}`{}`, ", acc, quote::quote!(#s))); - found.pop(); - found.pop(); - found - }; - - let msg = format!( - "Invalid pallet::trait, expected explicit `{}::Config` as 
supertrait, \ + )); + } + if already_no_default_bounds { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::no_default_bounds] attribute not allowed.", + )); + } + already_no_default_bounds = true; + } + } + } + + if !already_no_default && enable_default { + default_sub_trait + .as_mut() + .expect("is 'Some(_)' if 'enable_default'; qed") + .items + .push((trait_item.clone(), already_no_default_bounds)); + } + } + + let attr: Option = + helper::take_first_item_pallet_attr(&mut item.attrs)?; + let disable_system_supertrait_check = attr.is_some(); + + if !has_frame_system_supertrait && !disable_system_supertrait_check { + let found = if item.supertraits.is_empty() { + "none".to_string() + } else { + let mut found = item.supertraits.iter().fold(String::new(), |acc, s| { + format!("{}`{}`, ", acc, quote::quote!(#s)) + }); + found.pop(); + found.pop(); + found + }; + + let msg = format!( + "Invalid pallet::trait, expected explicit `{}::Config` as supertrait, \ found {}. \ (try `pub trait Config: frame_system::Config {{ ...` or \ `pub trait Config: frame_system::Config {{ ...`). 
\ To disable this check, use `#[pallet::disable_frame_system_supertrait_check]`", - frame_system.to_token_stream(), - found, - ); - return Err(syn::Error::new(item.span(), msg)) - } - - Ok(Self { - index, - has_instance, - consts_metadata, - has_event_type, - where_clause, - attr_span, - default_sub_trait, - }) - } + frame_system.to_token_stream(), + found, + ); + return Err(syn::Error::new(item.span(), msg)); + } + + Ok(Self { + index, + has_instance, + consts_metadata, + has_event_type, + where_clause, + attr_span, + default_sub_trait, + }) + } } #[cfg(test)] mod tests { - use super::*; - #[test] - fn has_expected_system_config_works() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_assoc_type() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame_system::Config)) - .unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_frame() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_other_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); - let path = - 
syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_works_with_mixed_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_works_with_other_mixed_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_work_with_frame_full_path_if_not_frame_crate() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_unexpected_frame_system() { - let frame_system = - syn::parse2::(quote::quote!(framez::deps::frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_unexpected_path() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::ConfigSystem)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_not_frame_system() { - let frame_system = syn::parse2::(quote::quote!(something)).unwrap(); - let path = syn::parse2::(quote::quote!(something::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } + use 
super::*; + #[test] + fn has_expected_system_config_works() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_assoc_type() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame_system::Config)) + .unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_frame() { + let frame_system = + syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_other_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_works_with_mixed_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_works_with_other_mixed_frame_full_path() { + let frame_system = + 
syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_work_with_frame_full_path_if_not_frame_crate() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_unexpected_frame_system() { + let frame_system = + syn::parse2::(quote::quote!(framez::deps::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_unexpected_path() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::ConfigSystem)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_not_frame_system() { + let frame_system = syn::parse2::(quote::quote!(something)).unwrap(); + let path = syn::parse2::(quote::quote!(something::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } } diff --git a/support/procedural-fork/src/pallet/parse/error.rs b/support/procedural-fork/src/pallet/parse/error.rs index 362df8d73..e93e2113f 100644 --- a/support/procedural-fork/src/pallet/parse/error.rs +++ b/support/procedural-fork/src/pallet/parse/error.rs @@ -22,94 +22,108 @@ use syn::{spanned::Spanned, Fields}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Error); + syn::custom_keyword!(Error); } /// Records information about the error enum variant field. pub struct VariantField { - /// Whether or not the field is named, i.e. 
whether it is a tuple variant or struct variant. - pub is_named: bool, + /// Whether or not the field is named, i.e. whether it is a tuple variant or struct variant. + pub is_named: bool, } /// Records information about the error enum variants. pub struct VariantDef { - /// The variant ident. - pub ident: syn::Ident, - /// The variant field, if any. - pub field: Option, - /// The variant doc literals. - pub docs: Vec, - /// The `cfg` attributes. - pub cfg_attrs: Vec, + /// The variant ident. + pub ident: syn::Ident, + /// The variant field, if any. + pub field: Option, + /// The variant doc literals. + pub docs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, } /// This checks error declaration as a enum declaration with only variants without fields nor /// discriminant. pub struct ErrorDef { - /// The index of error item in pallet module. - pub index: usize, - /// Variant definitions. - pub variants: Vec, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The keyword error used (contains span). - pub error: keyword::Error, - /// The span of the pallet::error attribute. - pub attr_span: proc_macro2::Span, + /// The index of error item in pallet module. + pub index: usize, + /// Variant definitions. + pub variants: Vec, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The keyword error used (contains span). + pub error: keyword::Error, + /// The span of the pallet::error attribute. 
+ pub attr_span: proc_macro2::Span, } impl ErrorDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - item - } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::error, expected item enum")) - }; - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::error, `Error` must be public"; - return Err(syn::Error::new(item.span(), msg)) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::error, expected item enum", + )); + }; + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::error, `Error` must be public"; + return Err(syn::Error::new(item.span(), msg)); + } - let instances = - vec![helper::check_type_def_gen_no_bounds(&item.generics, item.ident.span())?]; + let instances = vec![helper::check_type_def_gen_no_bounds( + &item.generics, + item.ident.span(), + )?]; - if item.generics.where_clause.is_some() { - let msg = "Invalid pallet::error, where clause is not allowed on pallet error item"; - return Err(syn::Error::new(item.generics.where_clause.as_ref().unwrap().span(), msg)) - } + if item.generics.where_clause.is_some() { + let msg = "Invalid pallet::error, where clause is not allowed on pallet error item"; + return Err(syn::Error::new( + item.generics.where_clause.as_ref().unwrap().span(), + msg, + )); + } - let error = syn::parse2::(item.ident.to_token_stream())?; + let error = syn::parse2::(item.ident.to_token_stream())?; - let variants = item - .variants - .iter() - .map(|variant| { - let field_ty = match &variant.fields { - Fields::Unit => None, - Fields::Named(_) => Some(VariantField { is_named: true }), - Fields::Unnamed(_) => Some(VariantField { is_named: false }), - }; - if 
variant.discriminant.is_some() { - let msg = "Invalid pallet::error, unexpected discriminant, discriminants \ + let variants = item + .variants + .iter() + .map(|variant| { + let field_ty = match &variant.fields { + Fields::Unit => None, + Fields::Named(_) => Some(VariantField { is_named: true }), + Fields::Unnamed(_) => Some(VariantField { is_named: false }), + }; + if variant.discriminant.is_some() { + let msg = "Invalid pallet::error, unexpected discriminant, discriminants \ are not supported"; - let span = variant.discriminant.as_ref().unwrap().0.span(); - return Err(syn::Error::new(span, msg)) - } - let cfg_attrs: Vec = helper::get_item_cfg_attrs(&variant.attrs); + let span = variant.discriminant.as_ref().unwrap().0.span(); + return Err(syn::Error::new(span, msg)); + } + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&variant.attrs); - Ok(VariantDef { - ident: variant.ident.clone(), - field: field_ty, - docs: get_doc_literals(&variant.attrs), - cfg_attrs, - }) - }) - .collect::>()?; + Ok(VariantDef { + ident: variant.ident.clone(), + field: field_ty, + docs: get_doc_literals(&variant.attrs), + cfg_attrs, + }) + }) + .collect::>()?; - Ok(ErrorDef { attr_span, index, variants, instances, error }) - } + Ok(ErrorDef { + attr_span, + index, + variants, + instances, + error, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/event.rs b/support/procedural-fork/src/pallet/parse/event.rs index 0fb8ee4f5..6102dd31f 100644 --- a/support/procedural-fork/src/pallet/parse/event.rs +++ b/support/procedural-fork/src/pallet/parse/event.rs @@ -21,28 +21,28 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(Event); - syn::custom_keyword!(pallet); - syn::custom_keyword!(generate_deposit); - syn::custom_keyword!(deposit_event); + syn::custom_keyword!(Event); + syn::custom_keyword!(pallet); + syn::custom_keyword!(generate_deposit); + syn::custom_keyword!(deposit_event); } /// Definition for pallet event enum. pub struct EventDef { - /// The index of event item in pallet module. - pub index: usize, - /// The keyword Event used (contains span). - pub event: keyword::Event, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The kind of generic the type `Event` has. - pub gen_kind: super::GenericKind, - /// Whether the function `deposit_event` must be generated. - pub deposit_event: Option, - /// Where clause used in event definition. - pub where_clause: Option, - /// The span of the pallet::event attribute. - pub attr_span: proc_macro2::Span, + /// The index of event item in pallet module. + pub index: usize, + /// The keyword Event used (contains span). + pub event: keyword::Event, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The kind of generic the type `Event` has. + pub gen_kind: super::GenericKind, + /// Whether the function `deposit_event` must be generated. + pub deposit_event: Option, + /// Where clause used in event definition. + pub where_clause: Option, + /// The span of the pallet::event attribute. + pub attr_span: proc_macro2::Span, } /// Attribute for a pallet's Event. 
@@ -50,92 +50,110 @@ pub struct EventDef { /// Syntax is: /// * `#[pallet::generate_deposit($vis fn deposit_event)]` pub struct PalletEventDepositAttr { - pub fn_vis: syn::Visibility, - // Span for the keyword deposit_event - pub fn_span: proc_macro2::Span, - // Span of the attribute - pub span: proc_macro2::Span, + pub fn_vis: syn::Visibility, + // Span for the keyword deposit_event + pub fn_span: proc_macro2::Span, + // Span of the attribute + pub span: proc_macro2::Span, } impl syn::parse::Parse for PalletEventDepositAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let span = content.parse::()?.span(); - let generate_content; - syn::parenthesized!(generate_content in content); - let fn_vis = generate_content.parse::()?; - generate_content.parse::()?; - let fn_span = generate_content.parse::()?.span(); - - Ok(PalletEventDepositAttr { fn_vis, span, fn_span }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let span = content.parse::()?.span(); + let generate_content; + syn::parenthesized!(generate_content in content); + let fn_vis = generate_content.parse::()?; + generate_content.parse::()?; + let fn_span = generate_content.parse::()?.span(); + + Ok(PalletEventDepositAttr { + fn_vis, + span, + fn_span, + }) + } } struct PalletEventAttrInfo { - deposit_event: Option, + deposit_event: Option, } impl PalletEventAttrInfo { - fn from_attrs(attrs: Vec) -> syn::Result { - let mut deposit_event = None; - for attr in attrs { - if deposit_event.is_none() { - deposit_event = Some(attr) - } else { - return Err(syn::Error::new(attr.span, "Duplicate attribute")) - } - } - - Ok(PalletEventAttrInfo { deposit_event }) - } + fn from_attrs(attrs: Vec) -> syn::Result { + let mut deposit_event = None; + for attr in 
attrs { + if deposit_event.is_none() { + deposit_event = Some(attr) + } else { + return Err(syn::Error::new(attr.span, "Duplicate attribute")); + } + } + + Ok(PalletEventAttrInfo { deposit_event }) + } } impl EventDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - item - } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::event, expected enum item")) - }; - - let event_attrs: Vec = - helper::take_item_pallet_attrs(&mut item.attrs)?; - let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; - let deposit_event = attr_info.deposit_event; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::event, `Event` must be public"; - return Err(syn::Error::new(item.span(), msg)) - } - - let where_clause = item.generics.where_clause.clone(); - - let mut instances = vec![]; - // NOTE: Event is not allowed to be only generic on I because it is not supported - // by construct_runtime. - if let Some(u) = helper::check_type_def_optional_gen(&item.generics, item.ident.span())? { - instances.push(u); - } else { - // construct_runtime only allow non generic event for non instantiable pallet. 
- instances.push(helper::InstanceUsage { has_instance: false, span: item.ident.span() }) - } - - let has_instance = item.generics.type_params().any(|t| t.ident == "I"); - let has_config = item.generics.type_params().any(|t| t.ident == "T"); - let gen_kind = super::GenericKind::from_gens(has_config, has_instance) - .expect("Checked by `helper::check_type_def_optional_gen` above"); - - let event = syn::parse2::(item.ident.to_token_stream())?; - - Ok(EventDef { attr_span, index, instances, deposit_event, event, gen_kind, where_clause }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::event, expected enum item", + )); + }; + + let event_attrs: Vec = + helper::take_item_pallet_attrs(&mut item.attrs)?; + let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; + let deposit_event = attr_info.deposit_event; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::event, `Event` must be public"; + return Err(syn::Error::new(item.span(), msg)); + } + + let where_clause = item.generics.where_clause.clone(); + + let mut instances = vec![]; + // NOTE: Event is not allowed to be only generic on I because it is not supported + // by construct_runtime. + if let Some(u) = helper::check_type_def_optional_gen(&item.generics, item.ident.span())? { + instances.push(u); + } else { + // construct_runtime only allow non generic event for non instantiable pallet. 
+ instances.push(helper::InstanceUsage { + has_instance: false, + span: item.ident.span(), + }) + } + + let has_instance = item.generics.type_params().any(|t| t.ident == "I"); + let has_config = item.generics.type_params().any(|t| t.ident == "T"); + let gen_kind = super::GenericKind::from_gens(has_config, has_instance) + .expect("Checked by `helper::check_type_def_optional_gen` above"); + + let event = syn::parse2::(item.ident.to_token_stream())?; + + Ok(EventDef { + attr_span, + index, + instances, + deposit_event, + event, + gen_kind, + where_clause, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/extra_constants.rs b/support/procedural-fork/src/pallet/parse/extra_constants.rs index 2ba6c44b7..38acea21a 100644 --- a/support/procedural-fork/src/pallet/parse/extra_constants.rs +++ b/support/procedural-fork/src/pallet/parse/extra_constants.rs @@ -21,140 +21,148 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(DispatchResultWithPostInfo); - syn::custom_keyword!(Call); - syn::custom_keyword!(OriginFor); - syn::custom_keyword!(weight); - syn::custom_keyword!(compact); - syn::custom_keyword!(T); - syn::custom_keyword!(pallet); - syn::custom_keyword!(constant_name); + syn::custom_keyword!(DispatchResultWithPostInfo); + syn::custom_keyword!(Call); + syn::custom_keyword!(OriginFor); + syn::custom_keyword!(weight); + syn::custom_keyword!(compact); + syn::custom_keyword!(T); + syn::custom_keyword!(pallet); + syn::custom_keyword!(constant_name); } /// Definition of extra constants typically `impl Pallet { ... }` pub struct ExtraConstantsDef { - /// The where_clause used. - pub where_clause: Option, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The index of call item in pallet module. - pub index: usize, - /// The extra constant defined. - pub extra_constants: Vec, + /// The where_clause used. 
+ pub where_clause: Option, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The index of call item in pallet module. + pub index: usize, + /// The extra constant defined. + pub extra_constants: Vec, } /// Input definition for an constant in pallet. pub struct ExtraConstantDef { - /// Name of the function - pub ident: syn::Ident, - /// The type returned by the function - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, - /// Optional MetaData Name - pub metadata_name: Option, + /// Name of the function + pub ident: syn::Ident, + /// The type returned by the function + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, + /// Optional MetaData Name + pub metadata_name: Option, } /// Attributes for functions in extra_constants impl block. /// Parse for `#[pallet::constant_name(ConstantName)]` pub struct ExtraConstAttr { - metadata_name: syn::Ident, + metadata_name: syn::Ident, } impl syn::parse::Parse for ExtraConstAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - content.parse::()?; - - let metadata_name; - syn::parenthesized!(metadata_name in content); - Ok(ExtraConstAttr { metadata_name: metadata_name.parse::()? 
}) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + content.parse::()?; + + let metadata_name; + syn::parenthesized!(metadata_name in content); + Ok(ExtraConstAttr { + metadata_name: metadata_name.parse::()?, + }) + } } impl ExtraConstantsDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::extra_constants, expected item impl", - )) - }; - - let instances = vec![ - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - helper::check_pallet_struct_usage(&item.self_ty)?, - ]; - - if let Some((_, _, for_)) = item.trait_ { - let msg = "Invalid pallet::call, expected no trait ident as in \ + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::extra_constants, expected item impl", + )); + }; + + let instances = vec![ + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + helper::check_pallet_struct_usage(&item.self_ty)?, + ]; + + if let Some((_, _, for_)) = item.trait_ { + let msg = "Invalid pallet::call, expected no trait ident as in \ `impl<..> Pallet<..> { .. 
}`"; - return Err(syn::Error::new(for_.span(), msg)) - } - - let mut extra_constants = vec![]; - for impl_item in &mut item.items { - let method = if let syn::ImplItem::Fn(method) = impl_item { - method - } else { - let msg = "Invalid pallet::call, only method accepted"; - return Err(syn::Error::new(impl_item.span(), msg)) - }; - - if !method.sig.inputs.is_empty() { - let msg = "Invalid pallet::extra_constants, method must have 0 args"; - return Err(syn::Error::new(method.sig.span(), msg)) - } - - if !method.sig.generics.params.is_empty() { - let msg = "Invalid pallet::extra_constants, method must have 0 generics"; - return Err(syn::Error::new(method.sig.generics.params[0].span(), msg)) - } - - if method.sig.generics.where_clause.is_some() { - let msg = "Invalid pallet::extra_constants, method must have no where clause"; - return Err(syn::Error::new(method.sig.generics.where_clause.span(), msg)) - } - - let type_ = match &method.sig.output { - syn::ReturnType::Default => { - let msg = "Invalid pallet::extra_constants, method must have a return type"; - return Err(syn::Error::new(method.span(), msg)) - }, - syn::ReturnType::Type(_, type_) => *type_.clone(), - }; - - // parse metadata_name - let mut extra_constant_attrs: Vec = - helper::take_item_pallet_attrs(method)?; - - if extra_constant_attrs.len() > 1 { - let msg = - "Invalid attribute in pallet::constant_name, only one attribute is expected"; - return Err(syn::Error::new(extra_constant_attrs[1].metadata_name.span(), msg)) - } - - let metadata_name = extra_constant_attrs.pop().map(|attr| attr.metadata_name); - - extra_constants.push(ExtraConstantDef { - ident: method.sig.ident.clone(), - type_, - doc: get_doc_literals(&method.attrs), - metadata_name, - }); - } - - Ok(Self { - index, - instances, - where_clause: item.generics.where_clause.clone(), - extra_constants, - }) - } + return Err(syn::Error::new(for_.span(), msg)); + } + + let mut extra_constants = vec![]; + for impl_item in &mut item.items { + let method 
= if let syn::ImplItem::Fn(method) = impl_item { + method + } else { + let msg = "Invalid pallet::call, only method accepted"; + return Err(syn::Error::new(impl_item.span(), msg)); + }; + + if !method.sig.inputs.is_empty() { + let msg = "Invalid pallet::extra_constants, method must have 0 args"; + return Err(syn::Error::new(method.sig.span(), msg)); + } + + if !method.sig.generics.params.is_empty() { + let msg = "Invalid pallet::extra_constants, method must have 0 generics"; + return Err(syn::Error::new(method.sig.generics.params[0].span(), msg)); + } + + if method.sig.generics.where_clause.is_some() { + let msg = "Invalid pallet::extra_constants, method must have no where clause"; + return Err(syn::Error::new( + method.sig.generics.where_clause.span(), + msg, + )); + } + + let type_ = match &method.sig.output { + syn::ReturnType::Default => { + let msg = "Invalid pallet::extra_constants, method must have a return type"; + return Err(syn::Error::new(method.span(), msg)); + } + syn::ReturnType::Type(_, type_) => *type_.clone(), + }; + + // parse metadata_name + let mut extra_constant_attrs: Vec = + helper::take_item_pallet_attrs(method)?; + + if extra_constant_attrs.len() > 1 { + let msg = + "Invalid attribute in pallet::constant_name, only one attribute is expected"; + return Err(syn::Error::new( + extra_constant_attrs[1].metadata_name.span(), + msg, + )); + } + + let metadata_name = extra_constant_attrs.pop().map(|attr| attr.metadata_name); + + extra_constants.push(ExtraConstantDef { + ident: method.sig.ident.clone(), + type_, + doc: get_doc_literals(&method.attrs), + metadata_name, + }); + } + + Ok(Self { + index, + instances, + where_clause: item.generics.where_clause.clone(), + extra_constants, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/genesis_build.rs b/support/procedural-fork/src/pallet/parse/genesis_build.rs index d0e1d9ec9..670d4d5ef 100644 --- a/support/procedural-fork/src/pallet/parse/genesis_build.rs +++ 
b/support/procedural-fork/src/pallet/parse/genesis_build.rs @@ -20,42 +20,47 @@ use syn::spanned::Spanned; /// Definition for pallet genesis build implementation. pub struct GenesisBuildDef { - /// The index of item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Option>, - /// The where_clause used. - pub where_clause: Option, - /// The span of the pallet::genesis_build attribute. - pub attr_span: proc_macro2::Span, + /// The index of item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Option>, + /// The where_clause used. + pub where_clause: Option, + /// The span of the pallet::genesis_build attribute. + pub attr_span: proc_macro2::Span, } impl GenesisBuildDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::genesis_build, expected item impl"; - return Err(syn::Error::new(item.span(), msg)) - }; - - let item_trait = &item - .trait_ - .as_ref() - .ok_or_else(|| { - let msg = "Invalid pallet::genesis_build, expected impl<..> GenesisBuild<..> \ + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::genesis_build, expected item impl"; + return Err(syn::Error::new(item.span(), msg)); + }; + + let item_trait = &item + .trait_ + .as_ref() + .ok_or_else(|| { + let msg = "Invalid pallet::genesis_build, expected impl<..> GenesisBuild<..> \ for GenesisConfig<..>"; - syn::Error::new(item.span(), msg) - })? - .1; + syn::Error::new(item.span(), msg) + })? 
+ .1; - let instances = - helper::check_genesis_builder_usage(item_trait)?.map(|instances| vec![instances]); + let instances = + helper::check_genesis_builder_usage(item_trait)?.map(|instances| vec![instances]); - Ok(Self { attr_span, index, instances, where_clause: item.generics.where_clause.clone() }) - } + Ok(Self { + attr_span, + index, + instances, + where_clause: item.generics.where_clause.clone(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/genesis_config.rs b/support/procedural-fork/src/pallet/parse/genesis_config.rs index 62da6ba13..1c52345eb 100644 --- a/support/procedural-fork/src/pallet/parse/genesis_config.rs +++ b/support/procedural-fork/src/pallet/parse/genesis_config.rs @@ -24,50 +24,55 @@ use syn::spanned::Spanned; /// * `struct GenesisConfig` /// * `enum GenesisConfig` pub struct GenesisConfigDef { - /// The index of item in pallet module. - pub index: usize, - /// The kind of generic the type `GenesisConfig` has. - pub gen_kind: super::GenericKind, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The ident of genesis_config, can be used for span. - pub genesis_config: syn::Ident, + /// The index of item in pallet module. + pub index: usize, + /// The kind of generic the type `GenesisConfig` has. + pub gen_kind: super::GenericKind, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The ident of genesis_config, can be used for span. 
+ pub genesis_config: syn::Ident, } impl GenesisConfigDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item_span = item.span(); - let (vis, ident, generics) = match &item { - syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), - _ => { - let msg = "Invalid pallet::genesis_config, expected enum or struct"; - return Err(syn::Error::new(item.span(), msg)) - }, - }; + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item_span = item.span(); + let (vis, ident, generics) = match &item { + syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), + _ => { + let msg = "Invalid pallet::genesis_config, expected enum or struct"; + return Err(syn::Error::new(item.span(), msg)); + } + }; - let mut instances = vec![]; - // NOTE: GenesisConfig is not allowed to be only generic on I because it is not supported - // by construct_runtime. - if let Some(u) = helper::check_type_def_optional_gen(generics, ident.span())? { - instances.push(u); - } + let mut instances = vec![]; + // NOTE: GenesisConfig is not allowed to be only generic on I because it is not supported + // by construct_runtime. + if let Some(u) = helper::check_type_def_optional_gen(generics, ident.span())? 
{ + instances.push(u); + } - let has_instance = generics.type_params().any(|t| t.ident == "I"); - let has_config = generics.type_params().any(|t| t.ident == "T"); - let gen_kind = super::GenericKind::from_gens(has_config, has_instance) - .expect("Checked by `helper::check_type_def_optional_gen` above"); + let has_instance = generics.type_params().any(|t| t.ident == "I"); + let has_config = generics.type_params().any(|t| t.ident == "T"); + let gen_kind = super::GenericKind::from_gens(has_config, has_instance) + .expect("Checked by `helper::check_type_def_optional_gen` above"); - if !matches!(vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::genesis_config, GenesisConfig must be public"; - return Err(syn::Error::new(item_span, msg)) - } + if !matches!(vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::genesis_config, GenesisConfig must be public"; + return Err(syn::Error::new(item_span, msg)); + } - if ident != "GenesisConfig" { - let msg = "Invalid pallet::genesis_config, ident must `GenesisConfig`"; - return Err(syn::Error::new(ident.span(), msg)) - } + if ident != "GenesisConfig" { + let msg = "Invalid pallet::genesis_config, ident must `GenesisConfig`"; + return Err(syn::Error::new(ident.span(), msg)); + } - Ok(GenesisConfigDef { index, genesis_config: ident.clone(), instances, gen_kind }) - } + Ok(GenesisConfigDef { + index, + genesis_config: ident.clone(), + instances, + gen_kind, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/helper.rs b/support/procedural-fork/src/pallet/parse/helper.rs index 3187c9139..f58c8d81c 100644 --- a/support/procedural-fork/src/pallet/parse/helper.rs +++ b/support/procedural-fork/src/pallet/parse/helper.rs @@ -21,164 +21,176 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(I); - syn::custom_keyword!(compact); - syn::custom_keyword!(GenesisBuild); - syn::custom_keyword!(BuildGenesisConfig); - syn::custom_keyword!(Config); - syn::custom_keyword!(T); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(origin); - syn::custom_keyword!(DispatchResult); - syn::custom_keyword!(DispatchResultWithPostInfo); + syn::custom_keyword!(I); + syn::custom_keyword!(compact); + syn::custom_keyword!(GenesisBuild); + syn::custom_keyword!(BuildGenesisConfig); + syn::custom_keyword!(Config); + syn::custom_keyword!(T); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(origin); + syn::custom_keyword!(DispatchResult); + syn::custom_keyword!(DispatchResultWithPostInfo); } /// A usage of instance, either the trait `Config` has been used with instance or without instance. /// Used to check for consistency. #[derive(Clone)] pub struct InstanceUsage { - pub has_instance: bool, - pub span: proc_macro2::Span, + pub has_instance: bool, + pub span: proc_macro2::Span, } /// Trait implemented for syn items to get mutable references on their attributes. /// /// NOTE: verbatim variants are not supported. pub trait MutItemAttrs { - fn mut_item_attrs(&mut self) -> Option<&mut Vec>; + fn mut_item_attrs(&mut self) -> Option<&mut Vec>; } /// Take the first pallet attribute (e.g. 
attribute like `#[pallet..]`) and decode it to `Attr` pub(crate) fn take_first_item_pallet_attr( - item: &mut impl MutItemAttrs, + item: &mut impl MutItemAttrs, ) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; - - if let Some(index) = attrs.iter().position(|attr| { - attr.path().segments.first().map_or(false, |segment| segment.ident == "pallet") - }) { - let pallet_attr = attrs.remove(index); - Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) - } else { - Ok(None) - } + let attrs = if let Some(attrs) = item.mut_item_attrs() { + attrs + } else { + return Ok(None); + }; + + if let Some(index) = attrs.iter().position(|attr| { + attr.path() + .segments + .first() + .map_or(false, |segment| segment.ident == "pallet") + }) { + let pallet_attr = attrs.remove(index); + Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) + } else { + Ok(None) + } } /// Take all the pallet attributes (e.g. attribute like `#[pallet..]`) and decode them to `Attr` pub(crate) fn take_item_pallet_attrs(item: &mut impl MutItemAttrs) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let mut pallet_attrs = Vec::new(); + let mut pallet_attrs = Vec::new(); - while let Some(attr) = take_first_item_pallet_attr(item)? { - pallet_attrs.push(attr) - } + while let Some(attr) = take_first_item_pallet_attr(item)? { + pallet_attrs.push(attr) + } - Ok(pallet_attrs) + Ok(pallet_attrs) } /// Get all the cfg attributes (e.g. 
attribute like `#[cfg..]`) and decode them to `Attr` pub fn get_item_cfg_attrs(attrs: &[syn::Attribute]) -> Vec { - attrs - .iter() - .filter_map(|attr| { - if attr.path().segments.first().map_or(false, |segment| segment.ident == "cfg") { - Some(attr.clone()) - } else { - None - } - }) - .collect::>() + attrs + .iter() + .filter_map(|attr| { + if attr + .path() + .segments + .first() + .map_or(false, |segment| segment.ident == "cfg") + { + Some(attr.clone()) + } else { + None + } + }) + .collect::>() } impl MutItemAttrs for syn::Item { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - match self { - Self::Const(item) => Some(item.attrs.as_mut()), - Self::Enum(item) => Some(item.attrs.as_mut()), - Self::ExternCrate(item) => Some(item.attrs.as_mut()), - Self::Fn(item) => Some(item.attrs.as_mut()), - Self::ForeignMod(item) => Some(item.attrs.as_mut()), - Self::Impl(item) => Some(item.attrs.as_mut()), - Self::Macro(item) => Some(item.attrs.as_mut()), - Self::Mod(item) => Some(item.attrs.as_mut()), - Self::Static(item) => Some(item.attrs.as_mut()), - Self::Struct(item) => Some(item.attrs.as_mut()), - Self::Trait(item) => Some(item.attrs.as_mut()), - Self::TraitAlias(item) => Some(item.attrs.as_mut()), - Self::Type(item) => Some(item.attrs.as_mut()), - Self::Union(item) => Some(item.attrs.as_mut()), - Self::Use(item) => Some(item.attrs.as_mut()), - _ => None, - } - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + match self { + Self::Const(item) => Some(item.attrs.as_mut()), + Self::Enum(item) => Some(item.attrs.as_mut()), + Self::ExternCrate(item) => Some(item.attrs.as_mut()), + Self::Fn(item) => Some(item.attrs.as_mut()), + Self::ForeignMod(item) => Some(item.attrs.as_mut()), + Self::Impl(item) => Some(item.attrs.as_mut()), + Self::Macro(item) => Some(item.attrs.as_mut()), + Self::Mod(item) => Some(item.attrs.as_mut()), + Self::Static(item) => Some(item.attrs.as_mut()), + Self::Struct(item) => Some(item.attrs.as_mut()), + Self::Trait(item) => 
Some(item.attrs.as_mut()), + Self::TraitAlias(item) => Some(item.attrs.as_mut()), + Self::Type(item) => Some(item.attrs.as_mut()), + Self::Union(item) => Some(item.attrs.as_mut()), + Self::Use(item) => Some(item.attrs.as_mut()), + _ => None, + } + } } impl MutItemAttrs for syn::TraitItem { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - match self { - Self::Const(item) => Some(item.attrs.as_mut()), - Self::Fn(item) => Some(item.attrs.as_mut()), - Self::Type(item) => Some(item.attrs.as_mut()), - Self::Macro(item) => Some(item.attrs.as_mut()), - _ => None, - } - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + match self { + Self::Const(item) => Some(item.attrs.as_mut()), + Self::Fn(item) => Some(item.attrs.as_mut()), + Self::Type(item) => Some(item.attrs.as_mut()), + Self::Macro(item) => Some(item.attrs.as_mut()), + _ => None, + } + } } impl MutItemAttrs for Vec { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(self) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(self) + } } impl MutItemAttrs for syn::ItemMod { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } impl MutItemAttrs for syn::ImplItemFn { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } impl MutItemAttrs for syn::ItemType { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } /// Parse for `()` struct Unit; impl syn::parse::Parse for Unit { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let content; - syn::parenthesized!(content in input); - if !content.is_empty() { - let msg = "unexpected tokens, expected nothing inside parenthesis as `()`"; - return Err(syn::Error::new(content.span(), msg)) - } - Ok(Self) - } + 
fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + syn::parenthesized!(content in input); + if !content.is_empty() { + let msg = "unexpected tokens, expected nothing inside parenthesis as `()`"; + return Err(syn::Error::new(content.span(), msg)); + } + Ok(Self) + } } /// Parse for `'static` struct StaticLifetime; impl syn::parse::Parse for StaticLifetime { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lifetime = input.parse::()?; - if lifetime.ident != "static" { - let msg = "unexpected tokens, expected `static`"; - return Err(syn::Error::new(lifetime.ident.span(), msg)) - } - Ok(Self) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lifetime = input.parse::()?; + if lifetime.ident != "static" { + let msg = "unexpected tokens, expected `static`"; + return Err(syn::Error::new(lifetime.ident.span(), msg)); + } + Ok(Self) + } } /// Check the syntax: `I: 'static = ()` @@ -187,28 +199,28 @@ impl syn::parse::Parse for StaticLifetime { /// /// return the instance if found. 
pub fn check_config_def_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Result<()> { - let expected = "expected `I: 'static = ()`"; - pub struct CheckTraitDefGenerics; - impl syn::parse::Parse for CheckTraitDefGenerics { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self) - } - } - - syn::parse2::(gen.params.to_token_stream()).map_err(|e| { - let msg = format!("Invalid generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })?; - - Ok(()) + let expected = "expected `I: 'static = ()`"; + pub struct CheckTraitDefGenerics; + impl syn::parse::Parse for CheckTraitDefGenerics { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self) + } + } + + syn::parse2::(gen.params.to_token_stream()).map_err(|e| { + let msg = format!("Invalid generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })?; + + Ok(()) } /// Check the syntax: @@ -219,38 +231,41 @@ pub fn check_config_def_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn /// /// return the instance if found. 
pub fn check_type_def_gen_no_bounds( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result { - let expected = "expected `T` or `T, I = ()`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { has_instance: false, span: input.span() }; - - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - } - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `T` or `T, I = ()`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + has_instance: false, + span: input.span(), + }; + + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + } + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -264,76 +279,79 @@ pub fn check_type_def_gen_no_bounds( /// /// return some instance usage if there is some generic, or none otherwise. 
pub fn check_type_def_optional_gen( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result> { - let expected = "expected `` or `T` or `T: Config` or `T, I = ()` or \ + let expected = "expected `` or `T` or `T: Config` or `T, I = ()` or \ `T: Config, I: 'static = ()`"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - if input.is_empty() { - return Ok(Self(None)) - } - - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - input.parse::()?; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))) - } - - let lookahead = input.lookahead1(); - if lookahead.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } else if lookahead.peek(syn::Token![:]) { - input.parse::()?; - input.parse::()?; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))) - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } else { - Err(lookahead.error()) - } - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0 - // Span can be call_site if generic is empty. Thus we replace it. 
- .map(|mut i| { - i.span = span; - i - }); - - Ok(i) + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(Self(None)); + } + + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + input.parse::()?; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))); + } + + let lookahead = input.lookahead1(); + if lookahead.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } else if lookahead.peek(syn::Token![:]) { + input.parse::()?; + input.parse::()?; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))); + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } else { + Err(lookahead.error()) + } + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0 + // Span can be call_site if generic is empty. Thus we replace it. + .map(|mut i| { + i.span = span; + i + }); + + Ok(i) } /// Check the syntax: @@ -342,36 +360,39 @@ pub fn check_type_def_optional_gen( /// /// return the instance if found. 
pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result { - let expected = "expected `Pallet` or `Pallet`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - } - input.parse::]>()?; - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(type_.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid pallet struct: {}", expected); - let mut err = syn::Error::new(type_.span(), msg); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `Pallet` or `Pallet`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + } + input.parse::]>()?; + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(type_.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid pallet struct: {}", expected); + let mut err = syn::Error::new(type_.span(), msg); + err.combine(e); + err + })? 
+ .0; + + Ok(i) } /// Check the generic is: @@ -382,39 +403,42 @@ pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result syn::Result { - let expected = "expected `impl` or `impl, I: 'static>`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![<]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - } - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let mut err = syn::Error::new(span, format!("Invalid generics: {}", expected)); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `impl` or `impl, I: 'static>`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![<]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + } + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let mut err = syn::Error::new(span, format!("Invalid generics: {}", expected)); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -427,70 +451,73 @@ pub fn check_impl_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Resu /// /// return the instance if found. 
pub fn check_type_def_gen( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result { - let expected = "expected `T` or `T: Config` or `T, I = ()` or \ + let expected = "expected `T` or `T: Config` or `T, I = ()` or \ `T: Config, I: 'static = ()`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - input.parse::()?; - - if input.is_empty() { - return Ok(Self(instance_usage)) - } - - let lookahead = input.lookahead1(); - if lookahead.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(instance_usage)) - } else if lookahead.peek(syn::Token![:]) { - input.parse::()?; - input.parse::()?; - - if input.is_empty() { - return Ok(Self(instance_usage)) - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(instance_usage)) - } else { - Err(lookahead.error()) - } - } - } - - let mut i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0; - - // Span can be call_site if generic is empty. Thus we replace it. 
- i.span = span; - - Ok(i) + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + input.parse::()?; + + if input.is_empty() { + return Ok(Self(instance_usage)); + } + + let lookahead = input.lookahead1(); + if lookahead.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(instance_usage)) + } else if lookahead.peek(syn::Token![:]) { + input.parse::()?; + input.parse::()?; + + if input.is_empty() { + return Ok(Self(instance_usage)); + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(instance_usage)) + } else { + Err(lookahead.error()) + } + } + } + + let mut i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0; + + // Span can be call_site if generic is empty. Thus we replace it. 
+ i.span = span; + + Ok(i) } /// Check the syntax: @@ -501,40 +528,43 @@ pub fn check_type_def_gen( /// return the instance if found for `GenesisBuild` /// return None for BuildGenesisConfig pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result> { - let expected = "expected `BuildGenesisConfig` (or the deprecated `GenesisBuild` or `GenesisBuild`)"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - if input.peek(keyword::GenesisBuild) { - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - } - input.parse::]>()?; - return Ok(Self(Some(instance_usage))) - } else { - input.parse::()?; - return Ok(Self(None)) - } - } - } - - let i = syn::parse2::(type_.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid genesis builder: {}", expected); - let mut err = syn::Error::new(type_.span(), msg); - err.combine(e); - err - })? 
- .0; - - Ok(i) + let expected = "expected `BuildGenesisConfig` (or the deprecated `GenesisBuild` or `GenesisBuild`)"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + if input.peek(keyword::GenesisBuild) { + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + } + input.parse::]>()?; + return Ok(Self(Some(instance_usage))); + } else { + input.parse::()?; + return Ok(Self(None)); + } + } + } + + let i = syn::parse2::(type_.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid genesis builder: {}", expected); + let mut err = syn::Error::new(type_.span(), msg); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -546,87 +576,90 @@ pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result syn::Result> { - let expected = "expected `` or `T: Config` or `T: Config, I: 'static`"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - if input.is_empty() { - return Ok(Self(None)) - } - - input.parse::()?; - input.parse::()?; - input.parse::()?; - - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))) - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? 
- .0 - // Span can be call_site if generic is empty. Thus we replace it. - .map(|mut i| { - i.span = span; - i - }); - - Ok(i) + let expected = "expected `` or `T: Config` or `T: Config, I: 'static`"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(Self(None)); + } + + input.parse::()?; + input.parse::()?; + input.parse::()?; + + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))); + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0 + // Span can be call_site if generic is empty. Thus we replace it. + .map(|mut i| { + i.span = span; + i + }); + + Ok(i) } /// Check the keyword `DispatchResultWithPostInfo` or `DispatchResult`. 
pub fn check_pallet_call_return_type(type_: &syn::Type) -> syn::Result<()> { - pub struct Checker; - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lookahead = input.lookahead1(); - if lookahead.peek(keyword::DispatchResultWithPostInfo) { - input.parse::()?; - Ok(Self) - } else if lookahead.peek(keyword::DispatchResult) { - input.parse::()?; - Ok(Self) - } else { - Err(lookahead.error()) - } - } - } - - syn::parse2::(type_.to_token_stream()).map(|_| ()) + pub struct Checker; + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keyword::DispatchResultWithPostInfo) { + input.parse::()?; + Ok(Self) + } else if lookahead.peek(keyword::DispatchResult) { + input.parse::()?; + Ok(Self) + } else { + Err(lookahead.error()) + } + } + } + + syn::parse2::(type_.to_token_stream()).map(|_| ()) } pub(crate) fn two128_str(s: &str) -> TokenStream { - bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes()).into_iter()) + bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes()).into_iter()) } pub(crate) fn bytes_to_array(bytes: impl IntoIterator) -> TokenStream { - let bytes = bytes.into_iter(); + let bytes = bytes.into_iter(); - quote!( - [ #( #bytes ),* ] - ) - .into() + quote!( + [ #( #bytes ),* ] + ) + .into() } diff --git a/support/procedural-fork/src/pallet/parse/hooks.rs b/support/procedural-fork/src/pallet/parse/hooks.rs index 37d7d22f4..1cf5c72cc 100644 --- a/support/procedural-fork/src/pallet/parse/hooks.rs +++ b/support/procedural-fork/src/pallet/parse/hooks.rs @@ -20,67 +20,67 @@ use syn::spanned::Spanned; /// Implementation of the pallet hooks. pub struct HooksDef { - /// The index of item in pallet. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The where_clause used. 
- pub where_clause: Option, - /// The span of the pallet::hooks attribute. - pub attr_span: proc_macro2::Span, - /// Boolean flag, set to true if the `on_runtime_upgrade` method of hooks was implemented. - pub has_runtime_upgrade: bool, + /// The index of item in pallet. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The where_clause used. + pub where_clause: Option, + /// The span of the pallet::hooks attribute. + pub attr_span: proc_macro2::Span, + /// Boolean flag, set to true if the `on_runtime_upgrade` method of hooks was implemented. + pub has_runtime_upgrade: bool, } impl HooksDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::hooks, expected item impl"; - return Err(syn::Error::new(item.span(), msg)) - }; + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::hooks, expected item impl"; + return Err(syn::Error::new(item.span(), msg)); + }; - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; - let item_trait = &item - .trait_ - .as_ref() - .ok_or_else(|| { - let msg = "Invalid pallet::hooks, expected impl<..> Hooks \ + let item_trait = &item + .trait_ + .as_ref() + .ok_or_else(|| { + let msg = "Invalid pallet::hooks, expected impl<..> Hooks \ for Pallet<..>"; - syn::Error::new(item.span(), msg) - })? - .1; + syn::Error::new(item.span(), msg) + })? 
+ .1; - if item_trait.segments.len() != 1 || item_trait.segments[0].ident != "Hooks" { - let msg = format!( - "Invalid pallet::hooks, expected trait to be `Hooks` found `{}`\ + if item_trait.segments.len() != 1 || item_trait.segments[0].ident != "Hooks" { + let msg = format!( + "Invalid pallet::hooks, expected trait to be `Hooks` found `{}`\ , you can import from `frame_support::pallet_prelude`", - quote::quote!(#item_trait) - ); + quote::quote!(#item_trait) + ); - return Err(syn::Error::new(item_trait.span(), msg)) - } + return Err(syn::Error::new(item_trait.span(), msg)); + } - let has_runtime_upgrade = item.items.iter().any(|i| match i { - syn::ImplItem::Fn(method) => method.sig.ident == "on_runtime_upgrade", - _ => false, - }); + let has_runtime_upgrade = item.items.iter().any(|i| match i { + syn::ImplItem::Fn(method) => method.sig.ident == "on_runtime_upgrade", + _ => false, + }); - Ok(Self { - attr_span, - index, - instances, - has_runtime_upgrade, - where_clause: item.generics.where_clause.clone(), - }) - } + Ok(Self { + attr_span, + index, + instances, + has_runtime_upgrade, + where_clause: item.generics.where_clause.clone(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/inherent.rs b/support/procedural-fork/src/pallet/parse/inherent.rs index d8641691a..4eb04e914 100644 --- a/support/procedural-fork/src/pallet/parse/inherent.rs +++ b/support/procedural-fork/src/pallet/parse/inherent.rs @@ -20,41 +20,41 @@ use syn::spanned::Spanned; /// The definition of the pallet inherent implementation. pub struct InherentDef { - /// The index of inherent item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, + /// The index of inherent item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. 
+ pub instances: Vec, } impl InherentDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::inherent, expected item impl"; - return Err(syn::Error::new(item.span(), msg)) - }; - - if item.trait_.is_none() { - let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)) - } - - if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { - if last.ident != "ProvideInherent" { - let msg = "Invalid pallet::inherent, expected trait ProvideInherent"; - return Err(syn::Error::new(last.span(), msg)) - } - } else { - let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)) - } - - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; - - Ok(InherentDef { index, instances }) - } + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::inherent, expected item impl"; + return Err(syn::Error::new(item.span(), msg)); + }; + + if item.trait_.is_none() { + let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)); + } + + if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { + if last.ident != "ProvideInherent" { + let msg = "Invalid pallet::inherent, expected trait ProvideInherent"; + return Err(syn::Error::new(last.span(), msg)); + } + } else { + let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)); + } + + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, 
item.impl_token.span())?, + ]; + + Ok(InherentDef { index, instances }) + } } diff --git a/support/procedural-fork/src/pallet/parse/mod.rs b/support/procedural-fork/src/pallet/parse/mod.rs index 6e1277461..57c252473 100644 --- a/support/procedural-fork/src/pallet/parse/mod.rs +++ b/support/procedural-fork/src/pallet/parse/mod.rs @@ -47,68 +47,68 @@ use syn::spanned::Spanned; /// Parsed definition of a pallet. pub struct Def { - /// The module items. - /// (their order must not be modified because they are registered in individual definitions). - pub item: syn::ItemMod, - pub config: config::ConfigDef, - pub pallet_struct: pallet_struct::PalletStructDef, - pub hooks: Option, - pub call: Option, - pub tasks: Option, - pub task_enum: Option, - pub storages: Vec, - pub error: Option, - pub event: Option, - pub origin: Option, - pub inherent: Option, - pub genesis_config: Option, - pub genesis_build: Option, - pub validate_unsigned: Option, - pub extra_constants: Option, - pub composites: Vec, - pub type_values: Vec, - pub frame_system: syn::Path, - pub frame_support: syn::Path, - pub dev_mode: bool, + /// The module items. + /// (their order must not be modified because they are registered in individual definitions). 
+ pub item: syn::ItemMod, + pub config: config::ConfigDef, + pub pallet_struct: pallet_struct::PalletStructDef, + pub hooks: Option, + pub call: Option, + pub tasks: Option, + pub task_enum: Option, + pub storages: Vec, + pub error: Option, + pub event: Option, + pub origin: Option, + pub inherent: Option, + pub genesis_config: Option, + pub genesis_build: Option, + pub validate_unsigned: Option, + pub extra_constants: Option, + pub composites: Vec, + pub type_values: Vec, + pub frame_system: syn::Path, + pub frame_support: syn::Path, + pub dev_mode: bool, } impl Def { - pub fn try_from(mut item: syn::ItemMod, dev_mode: bool) -> syn::Result { - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - - let item_span = item.span(); - let items = &mut item - .content - .as_mut() - .ok_or_else(|| { - let msg = "Invalid pallet definition, expected mod to be inlined."; - syn::Error::new(item_span, msg) - })? 
- .1; - - let mut config = None; - let mut pallet_struct = None; - let mut hooks = None; - let mut call = None; - let mut tasks = None; - let mut task_enum = None; - let mut error = None; - let mut event = None; - let mut origin = None; - let mut inherent = None; - let mut genesis_config = None; - let mut genesis_build = None; - let mut validate_unsigned = None; - let mut extra_constants = None; - let mut storages = vec![]; - let mut type_values = vec![]; - let mut composites: Vec = vec![]; - - for (index, item) in items.iter_mut().enumerate() { - let pallet_attr: Option = helper::take_first_item_pallet_attr(item)?; - - match pallet_attr { + pub fn try_from(mut item: syn::ItemMod, dev_mode: bool) -> syn::Result { + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + + let item_span = item.span(); + let items = &mut item + .content + .as_mut() + .ok_or_else(|| { + let msg = "Invalid pallet definition, expected mod to be inlined."; + syn::Error::new(item_span, msg) + })? 
+ .1; + + let mut config = None; + let mut pallet_struct = None; + let mut hooks = None; + let mut call = None; + let mut tasks = None; + let mut task_enum = None; + let mut error = None; + let mut event = None; + let mut origin = None; + let mut inherent = None; + let mut genesis_config = None; + let mut genesis_build = None; + let mut validate_unsigned = None; + let mut extra_constants = None; + let mut storages = vec![]; + let mut type_values = vec![]; + let mut composites: Vec = vec![]; + + for (index, item) in items.iter_mut().enumerate() { + let pallet_attr: Option = helper::take_first_item_pallet_attr(item)?; + + match pallet_attr { Some(PalletAttr::Config(span, with_default)) if config.is_none() => config = Some(config::ConfigDef::try_from( &frame_system, @@ -212,538 +212,596 @@ impl Def { }, None => (), } - } + } - if genesis_config.is_some() != genesis_build.is_some() { - let msg = format!( - "`#[pallet::genesis_config]` and `#[pallet::genesis_build]` attributes must be \ + if genesis_config.is_some() != genesis_build.is_some() { + let msg = format!( + "`#[pallet::genesis_config]` and `#[pallet::genesis_build]` attributes must be \ either both used or both not used, instead genesis_config is {} and genesis_build \ is {}", - genesis_config.as_ref().map_or("unused", |_| "used"), - genesis_build.as_ref().map_or("unused", |_| "used"), - ); - return Err(syn::Error::new(item_span, msg)) - } - - Self::resolve_tasks(&item_span, &mut tasks, &mut task_enum, items)?; - - let def = Def { - item, - config: config - .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, - pallet_struct: pallet_struct - .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, - hooks, - call, - tasks, - task_enum, - extra_constants, - genesis_config, - genesis_build, - validate_unsigned, - error, - event, - origin, - inherent, - storages, - composites, - type_values, - frame_system, - frame_support, - dev_mode, - }; - - 
def.check_instance_usage()?; - def.check_event_usage()?; - - Ok(def) - } - - /// Performs extra logic checks necessary for the `#[pallet::tasks_experimental]` feature. - fn resolve_tasks( - item_span: &proc_macro2::Span, - tasks: &mut Option, - task_enum: &mut Option, - items: &mut Vec, - ) -> syn::Result<()> { - // fallback for manual (without macros) definition of tasks impl - Self::resolve_manual_tasks_impl(tasks, task_enum, items)?; - - // fallback for manual (without macros) definition of task enum - Self::resolve_manual_task_enum(tasks, task_enum, items)?; - - // ensure that if `task_enum` is specified, `tasks` is also specified - match (&task_enum, &tasks) { - (Some(_), None) => - return Err(syn::Error::new( - *item_span, - "Missing `#[pallet::tasks_experimental]` impl", - )), - (None, Some(tasks)) => - if tasks.tasks_attr.is_none() { - return Err(syn::Error::new( + genesis_config.as_ref().map_or("unused", |_| "used"), + genesis_build.as_ref().map_or("unused", |_| "used"), + ); + return Err(syn::Error::new(item_span, msg)); + } + + Self::resolve_tasks(&item_span, &mut tasks, &mut task_enum, items)?; + + let def = Def { + item, + config: config + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, + pallet_struct: pallet_struct + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, + hooks, + call, + tasks, + task_enum, + extra_constants, + genesis_config, + genesis_build, + validate_unsigned, + error, + event, + origin, + inherent, + storages, + composites, + type_values, + frame_system, + frame_support, + dev_mode, + }; + + def.check_instance_usage()?; + def.check_event_usage()?; + + Ok(def) + } + + /// Performs extra logic checks necessary for the `#[pallet::tasks_experimental]` feature. 
+ fn resolve_tasks( + item_span: &proc_macro2::Span, + tasks: &mut Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + // fallback for manual (without macros) definition of tasks impl + Self::resolve_manual_tasks_impl(tasks, task_enum, items)?; + + // fallback for manual (without macros) definition of task enum + Self::resolve_manual_task_enum(tasks, task_enum, items)?; + + // ensure that if `task_enum` is specified, `tasks` is also specified + match (&task_enum, &tasks) { + (Some(_), None) => { + return Err(syn::Error::new( + *item_span, + "Missing `#[pallet::tasks_experimental]` impl", + )) + } + (None, Some(tasks)) => { + if tasks.tasks_attr.is_none() { + return Err(syn::Error::new( tasks.item_impl.impl_token.span(), "A `#[pallet::tasks_experimental]` attribute must be attached to your `Task` impl if the \ task enum has been omitted", - )) - } else { - }, - _ => (), - } - - Ok(()) - } - - /// Tries to locate task enum based on the tasks impl target if attribute is not specified - /// but impl is present. If one is found, `task_enum` is set appropriately. - fn resolve_manual_task_enum( - tasks: &Option, - task_enum: &mut Option, - items: &mut Vec, - ) -> syn::Result<()> { - let (None, Some(tasks)) = (&task_enum, &tasks) else { return Ok(()) }; - let syn::Type::Path(type_path) = &*tasks.item_impl.self_ty else { return Ok(()) }; - let type_path = type_path.path.segments.iter().collect::>(); - let (Some(seg), None) = (type_path.get(0), type_path.get(1)) else { return Ok(()) }; - let mut result = None; - for item in items { - let syn::Item::Enum(item_enum) = item else { continue }; - if item_enum.ident == seg.ident { - result = Some(syn::parse2::(item_enum.to_token_stream())?); - // replace item with a no-op because it will be handled by the expansion of - // `task_enum`. 
We use a no-op instead of simply removing it from the vec - // so that any indices collected by `Def::try_from` remain accurate - *item = syn::Item::Verbatim(quote::quote!()); - break - } - } - *task_enum = result; - Ok(()) - } - - /// Tries to locate a manual tasks impl (an impl implementing a trait whose last path segment is - /// `Task`) in the event that one has not been found already via the attribute macro - pub fn resolve_manual_tasks_impl( - tasks: &mut Option, - task_enum: &Option, - items: &Vec, - ) -> syn::Result<()> { - let None = tasks else { return Ok(()) }; - let mut result = None; - for item in items { - let syn::Item::Impl(item_impl) = item else { continue }; - let Some((_, path, _)) = &item_impl.trait_ else { continue }; - let Some(trait_last_seg) = path.segments.last() else { continue }; - let syn::Type::Path(target_path) = &*item_impl.self_ty else { continue }; - let target_path = target_path.path.segments.iter().collect::>(); - let (Some(target_ident), None) = (target_path.get(0), target_path.get(1)) else { - continue - }; - let matches_task_enum = match task_enum { - Some(task_enum) => task_enum.item_enum.ident == target_ident.ident, - None => true, - }; - if trait_last_seg.ident == "Task" && matches_task_enum { - result = Some(syn::parse2::(item_impl.to_token_stream())?); - break - } - } - *tasks = result; - Ok(()) - } - - /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared - /// and trait defines type RuntimeEvent, or not declared and no trait associated type. - fn check_event_usage(&self) -> syn::Result<()> { - match (self.config.has_event_type, self.event.is_some()) { - (true, false) => { - let msg = "Invalid usage of RuntimeEvent, `Config` contains associated type `RuntimeEvent`, \ + )); + } else { + } + } + _ => (), + } + + Ok(()) + } + + /// Tries to locate task enum based on the tasks impl target if attribute is not specified + /// but impl is present. 
If one is found, `task_enum` is set appropriately. + fn resolve_manual_task_enum( + tasks: &Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + let (None, Some(tasks)) = (&task_enum, &tasks) else { + return Ok(()); + }; + let syn::Type::Path(type_path) = &*tasks.item_impl.self_ty else { + return Ok(()); + }; + let type_path = type_path.path.segments.iter().collect::>(); + let (Some(seg), None) = (type_path.get(0), type_path.get(1)) else { + return Ok(()); + }; + let mut result = None; + for item in items { + let syn::Item::Enum(item_enum) = item else { + continue; + }; + if item_enum.ident == seg.ident { + result = Some(syn::parse2::( + item_enum.to_token_stream(), + )?); + // replace item with a no-op because it will be handled by the expansion of + // `task_enum`. We use a no-op instead of simply removing it from the vec + // so that any indices collected by `Def::try_from` remain accurate + *item = syn::Item::Verbatim(quote::quote!()); + break; + } + } + *task_enum = result; + Ok(()) + } + + /// Tries to locate a manual tasks impl (an impl implementing a trait whose last path segment is + /// `Task`) in the event that one has not been found already via the attribute macro + pub fn resolve_manual_tasks_impl( + tasks: &mut Option, + task_enum: &Option, + items: &Vec, + ) -> syn::Result<()> { + let None = tasks else { return Ok(()) }; + let mut result = None; + for item in items { + let syn::Item::Impl(item_impl) = item else { + continue; + }; + let Some((_, path, _)) = &item_impl.trait_ else { + continue; + }; + let Some(trait_last_seg) = path.segments.last() else { + continue; + }; + let syn::Type::Path(target_path) = &*item_impl.self_ty else { + continue; + }; + let target_path = target_path.path.segments.iter().collect::>(); + let (Some(target_ident), None) = (target_path.get(0), target_path.get(1)) else { + continue; + }; + let matches_task_enum = match task_enum { + Some(task_enum) => task_enum.item_enum.ident == 
target_ident.ident, + None => true, + }; + if trait_last_seg.ident == "Task" && matches_task_enum { + result = Some(syn::parse2::(item_impl.to_token_stream())?); + break; + } + } + *tasks = result; + Ok(()) + } + + /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared + /// and trait defines type RuntimeEvent, or not declared and no trait associated type. + fn check_event_usage(&self) -> syn::Result<()> { + match (self.config.has_event_type, self.event.is_some()) { + (true, false) => { + let msg = "Invalid usage of RuntimeEvent, `Config` contains associated type `RuntimeEvent`, \ but enum `Event` is not declared (i.e. no use of `#[pallet::event]`). \ Note that type `RuntimeEvent` in trait is reserved to work alongside pallet event."; - Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) - }, - (false, true) => { - let msg = "Invalid usage of RuntimeEvent, `Config` contains no associated type \ + Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) + } + (false, true) => { + let msg = "Invalid usage of RuntimeEvent, `Config` contains no associated type \ `RuntimeEvent`, but enum `Event` is declared (in use of `#[pallet::event]`). \ An RuntimeEvent associated type must be declare on trait `Config`."; - Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) - }, - _ => Ok(()), - } - } - - /// Check that usage of trait `Config` is consistent with the definition, i.e. it is used with - /// instance iff it is defined with instance. 
- fn check_instance_usage(&self) -> syn::Result<()> { - let mut instances = vec![]; - instances.extend_from_slice(&self.pallet_struct.instances[..]); - instances.extend(&mut self.storages.iter().flat_map(|s| s.instances.clone())); - if let Some(call) = &self.call { - instances.extend_from_slice(&call.instances[..]); - } - if let Some(hooks) = &self.hooks { - instances.extend_from_slice(&hooks.instances[..]); - } - if let Some(event) = &self.event { - instances.extend_from_slice(&event.instances[..]); - } - if let Some(error) = &self.error { - instances.extend_from_slice(&error.instances[..]); - } - if let Some(inherent) = &self.inherent { - instances.extend_from_slice(&inherent.instances[..]); - } - if let Some(origin) = &self.origin { - instances.extend_from_slice(&origin.instances[..]); - } - if let Some(genesis_config) = &self.genesis_config { - instances.extend_from_slice(&genesis_config.instances[..]); - } - if let Some(genesis_build) = &self.genesis_build { - genesis_build.instances.as_ref().map(|i| instances.extend_from_slice(&i)); - } - if let Some(extra_constants) = &self.extra_constants { - instances.extend_from_slice(&extra_constants.instances[..]); - } - - let mut errors = instances.into_iter().filter_map(|instances| { - if instances.has_instance == self.config.has_instance { - return None - } - let msg = if self.config.has_instance { - "Invalid generic declaration, trait is defined with instance but generic use none" - } else { - "Invalid generic declaration, trait is defined without instance but generic use \ + Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) + } + _ => Ok(()), + } + } + + /// Check that usage of trait `Config` is consistent with the definition, i.e. it is used with + /// instance iff it is defined with instance. 
+ fn check_instance_usage(&self) -> syn::Result<()> { + let mut instances = vec![]; + instances.extend_from_slice(&self.pallet_struct.instances[..]); + instances.extend(&mut self.storages.iter().flat_map(|s| s.instances.clone())); + if let Some(call) = &self.call { + instances.extend_from_slice(&call.instances[..]); + } + if let Some(hooks) = &self.hooks { + instances.extend_from_slice(&hooks.instances[..]); + } + if let Some(event) = &self.event { + instances.extend_from_slice(&event.instances[..]); + } + if let Some(error) = &self.error { + instances.extend_from_slice(&error.instances[..]); + } + if let Some(inherent) = &self.inherent { + instances.extend_from_slice(&inherent.instances[..]); + } + if let Some(origin) = &self.origin { + instances.extend_from_slice(&origin.instances[..]); + } + if let Some(genesis_config) = &self.genesis_config { + instances.extend_from_slice(&genesis_config.instances[..]); + } + if let Some(genesis_build) = &self.genesis_build { + genesis_build + .instances + .as_ref() + .map(|i| instances.extend_from_slice(&i)); + } + if let Some(extra_constants) = &self.extra_constants { + instances.extend_from_slice(&extra_constants.instances[..]); + } + + let mut errors = instances.into_iter().filter_map(|instances| { + if instances.has_instance == self.config.has_instance { + return None; + } + let msg = if self.config.has_instance { + "Invalid generic declaration, trait is defined with instance but generic use none" + } else { + "Invalid generic declaration, trait is defined without instance but generic use \ some" - }; - Some(syn::Error::new(instances.span, msg)) - }); - - if let Some(mut first_error) = errors.next() { - for error in errors { - first_error.combine(error) - } - Err(first_error) - } else { - Ok(()) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T: Config` - /// * or `T: Config, I: 'static` - pub fn type_impl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if 
self.config.has_instance { - quote::quote_spanned!(span => T: Config, I: 'static) - } else { - quote::quote_spanned!(span => T: Config) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T: Config` - /// * or `T: Config, I: 'static = ()` - pub fn type_decl_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T: Config, I: 'static = ()) - } else { - quote::quote_spanned!(span => T: Config) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T` - /// * or `T, I = ()` - pub fn type_decl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T, I = ()) - } else { - quote::quote_spanned!(span => T) - } - } - - /// Depending on if pallet is instantiable: - /// * either `` - /// * or `` - /// to be used when using pallet trait `Config` - pub fn trait_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => ) - } else { - quote::quote_spanned!(span => ) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T` - /// * or `T, I` - pub fn type_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T, I) - } else { - quote::quote_spanned!(span => T) - } - } + }; + Some(syn::Error::new(instances.span, msg)) + }); + + if let Some(mut first_error) = errors.next() { + for error in errors { + first_error.combine(error) + } + Err(first_error) + } else { + Ok(()) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T: Config` + /// * or `T: Config, I: 'static` + pub fn type_impl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T: Config, I: 'static) + } else { + quote::quote_spanned!(span => 
T: Config) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T: Config` + /// * or `T: Config, I: 'static = ()` + pub fn type_decl_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T: Config, I: 'static = ()) + } else { + quote::quote_spanned!(span => T: Config) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T` + /// * or `T, I = ()` + pub fn type_decl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T, I = ()) + } else { + quote::quote_spanned!(span => T) + } + } + + /// Depending on if pallet is instantiable: + /// * either `` + /// * or `` + /// to be used when using pallet trait `Config` + pub fn trait_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => ) + } else { + quote::quote_spanned!(span => ) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T` + /// * or `T, I` + pub fn type_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T, I) + } else { + quote::quote_spanned!(span => T) + } + } } /// Some generic kind for type which can be not generic, or generic over config, /// or generic over config and instance, but not generic only over instance. pub enum GenericKind { - None, - Config, - ConfigAndInstance, + None, + Config, + ConfigAndInstance, } impl GenericKind { - /// Return Err if it is only generics over instance but not over config. 
- pub fn from_gens(has_config: bool, has_instance: bool) -> Result { - match (has_config, has_instance) { - (false, false) => Ok(GenericKind::None), - (true, false) => Ok(GenericKind::Config), - (true, true) => Ok(GenericKind::ConfigAndInstance), - (false, true) => Err(()), - } - } - - /// Return the generic to be used when using the type. - /// - /// Depending on its definition it can be: ``, `T` or `T, I` - pub fn type_use_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - match self { - GenericKind::None => quote::quote!(), - GenericKind::Config => quote::quote_spanned!(span => T), - GenericKind::ConfigAndInstance => quote::quote_spanned!(span => T, I), - } - } - - /// Return the generic to be used in `impl<..>` when implementing on the type. - pub fn type_impl_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - match self { - GenericKind::None => quote::quote!(), - GenericKind::Config => quote::quote_spanned!(span => T: Config), - GenericKind::ConfigAndInstance => { - quote::quote_spanned!(span => T: Config, I: 'static) - }, - } - } - - /// Return whereas the type has some generic. - pub fn is_generic(&self) -> bool { - match self { - GenericKind::None => false, - GenericKind::Config | GenericKind::ConfigAndInstance => true, - } - } + /// Return Err if it is only generics over instance but not over config. + pub fn from_gens(has_config: bool, has_instance: bool) -> Result { + match (has_config, has_instance) { + (false, false) => Ok(GenericKind::None), + (true, false) => Ok(GenericKind::Config), + (true, true) => Ok(GenericKind::ConfigAndInstance), + (false, true) => Err(()), + } + } + + /// Return the generic to be used when using the type. 
+ /// + /// Depending on its definition it can be: ``, `T` or `T, I` + pub fn type_use_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + match self { + GenericKind::None => quote::quote!(), + GenericKind::Config => quote::quote_spanned!(span => T), + GenericKind::ConfigAndInstance => quote::quote_spanned!(span => T, I), + } + } + + /// Return the generic to be used in `impl<..>` when implementing on the type. + pub fn type_impl_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + match self { + GenericKind::None => quote::quote!(), + GenericKind::Config => quote::quote_spanned!(span => T: Config), + GenericKind::ConfigAndInstance => { + quote::quote_spanned!(span => T: Config, I: 'static) + } + } + } + + /// Return whereas the type has some generic. + pub fn is_generic(&self) -> bool { + match self { + GenericKind::None => false, + GenericKind::Config | GenericKind::ConfigAndInstance => true, + } + } } /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(origin); - syn::custom_keyword!(call); - syn::custom_keyword!(tasks_experimental); - syn::custom_keyword!(task_enum); - syn::custom_keyword!(task_list); - syn::custom_keyword!(task_condition); - syn::custom_keyword!(task_index); - syn::custom_keyword!(weight); - syn::custom_keyword!(event); - syn::custom_keyword!(config); - syn::custom_keyword!(with_default); - syn::custom_keyword!(hooks); - syn::custom_keyword!(inherent); - syn::custom_keyword!(error); - syn::custom_keyword!(storage); - syn::custom_keyword!(genesis_build); - syn::custom_keyword!(genesis_config); - syn::custom_keyword!(validate_unsigned); - syn::custom_keyword!(type_value); - syn::custom_keyword!(pallet); - syn::custom_keyword!(extra_constants); - syn::custom_keyword!(composite_enum); + syn::custom_keyword!(origin); + syn::custom_keyword!(call); + syn::custom_keyword!(tasks_experimental); + syn::custom_keyword!(task_enum); + syn::custom_keyword!(task_list); + syn::custom_keyword!(task_condition); + syn::custom_keyword!(task_index); + syn::custom_keyword!(weight); + syn::custom_keyword!(event); + syn::custom_keyword!(config); + syn::custom_keyword!(with_default); + syn::custom_keyword!(hooks); + syn::custom_keyword!(inherent); + syn::custom_keyword!(error); + syn::custom_keyword!(storage); + syn::custom_keyword!(genesis_build); + syn::custom_keyword!(genesis_config); + syn::custom_keyword!(validate_unsigned); + syn::custom_keyword!(type_value); + syn::custom_keyword!(pallet); + syn::custom_keyword!(extra_constants); + syn::custom_keyword!(composite_enum); } /// Parse attributes for item in pallet module /// syntax must be `pallet::` (e.g. `#[pallet::config]`) enum PalletAttr { - Config(proc_macro2::Span, bool), - Pallet(proc_macro2::Span), - Hooks(proc_macro2::Span), - /// A `#[pallet::call]` with optional attributes to specialize the behaviour. 
- /// - /// # Attributes - /// - /// Each attribute `attr` can take the form of `#[pallet::call(attr = …)]` or - /// `#[pallet::call(attr(…))]`. The possible attributes are: - /// - /// ## `weight` - /// - /// Can be used to reduce the repetitive weight annotation in the trivial case. It accepts one - /// argument that is expected to be an implementation of the `WeightInfo` or something that - /// behaves syntactically equivalent. This allows to annotate a `WeightInfo` for all the calls. - /// Now each call does not need to specify its own `#[pallet::weight]` but can instead use the - /// one from the `#[pallet::call]` definition. So instead of having to write it on each call: - /// - /// ```ignore - /// #[pallet::call] - /// impl Pallet { - /// #[pallet::weight(T::WeightInfo::create())] - /// pub fn create( - /// ``` - /// you can now omit it on the call itself, if the name of the weigh function matches the call: - /// - /// ```ignore - /// #[pallet::call(weight = ::WeightInfo)] - /// impl Pallet { - /// pub fn create( - /// ``` - /// - /// It is possible to use this syntax together with instantiated pallets by using `Config` - /// instead. - /// - /// ### Dev Mode - /// - /// Normally the `dev_mode` sets all weights of calls without a `#[pallet::weight]` annotation - /// to zero. Now when there is a `weight` attribute on the `#[pallet::call]`, then that is used - /// instead of the zero weight. So to say: it works together with `dev_mode`. 
- RuntimeCall(Option, proc_macro2::Span), - Error(proc_macro2::Span), - Tasks(proc_macro2::Span), - TaskList(proc_macro2::Span), - TaskCondition(proc_macro2::Span), - TaskIndex(proc_macro2::Span), - RuntimeTask(proc_macro2::Span), - RuntimeEvent(proc_macro2::Span), - RuntimeOrigin(proc_macro2::Span), - Inherent(proc_macro2::Span), - Storage(proc_macro2::Span), - GenesisConfig(proc_macro2::Span), - GenesisBuild(proc_macro2::Span), - ValidateUnsigned(proc_macro2::Span), - TypeValue(proc_macro2::Span), - ExtraConstants(proc_macro2::Span), - Composite(proc_macro2::Span), + Config(proc_macro2::Span, bool), + Pallet(proc_macro2::Span), + Hooks(proc_macro2::Span), + /// A `#[pallet::call]` with optional attributes to specialize the behaviour. + /// + /// # Attributes + /// + /// Each attribute `attr` can take the form of `#[pallet::call(attr = …)]` or + /// `#[pallet::call(attr(…))]`. The possible attributes are: + /// + /// ## `weight` + /// + /// Can be used to reduce the repetitive weight annotation in the trivial case. It accepts one + /// argument that is expected to be an implementation of the `WeightInfo` or something that + /// behaves syntactically equivalent. This allows to annotate a `WeightInfo` for all the calls. + /// Now each call does not need to specify its own `#[pallet::weight]` but can instead use the + /// one from the `#[pallet::call]` definition. So instead of having to write it on each call: + /// + /// ```ignore + /// #[pallet::call] + /// impl Pallet { + /// #[pallet::weight(T::WeightInfo::create())] + /// pub fn create( + /// ``` + /// you can now omit it on the call itself, if the name of the weigh function matches the call: + /// + /// ```ignore + /// #[pallet::call(weight = ::WeightInfo)] + /// impl Pallet { + /// pub fn create( + /// ``` + /// + /// It is possible to use this syntax together with instantiated pallets by using `Config` + /// instead. 
+ /// + /// ### Dev Mode + /// + /// Normally the `dev_mode` sets all weights of calls without a `#[pallet::weight]` annotation + /// to zero. Now when there is a `weight` attribute on the `#[pallet::call]`, then that is used + /// instead of the zero weight. So to say: it works together with `dev_mode`. + RuntimeCall(Option, proc_macro2::Span), + Error(proc_macro2::Span), + Tasks(proc_macro2::Span), + TaskList(proc_macro2::Span), + TaskCondition(proc_macro2::Span), + TaskIndex(proc_macro2::Span), + RuntimeTask(proc_macro2::Span), + RuntimeEvent(proc_macro2::Span), + RuntimeOrigin(proc_macro2::Span), + Inherent(proc_macro2::Span), + Storage(proc_macro2::Span), + GenesisConfig(proc_macro2::Span), + GenesisBuild(proc_macro2::Span), + ValidateUnsigned(proc_macro2::Span), + TypeValue(proc_macro2::Span), + ExtraConstants(proc_macro2::Span), + Composite(proc_macro2::Span), } impl PalletAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::Config(span, _) => *span, - Self::Pallet(span) => *span, - Self::Hooks(span) => *span, - Self::Tasks(span) => *span, - Self::TaskCondition(span) => *span, - Self::TaskIndex(span) => *span, - Self::TaskList(span) => *span, - Self::Error(span) => *span, - Self::RuntimeTask(span) => *span, - Self::RuntimeCall(_, span) => *span, - Self::RuntimeEvent(span) => *span, - Self::RuntimeOrigin(span) => *span, - Self::Inherent(span) => *span, - Self::Storage(span) => *span, - Self::GenesisConfig(span) => *span, - Self::GenesisBuild(span) => *span, - Self::ValidateUnsigned(span) => *span, - Self::TypeValue(span) => *span, - Self::ExtraConstants(span) => *span, - Self::Composite(span) => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::Config(span, _) => *span, + Self::Pallet(span) => *span, + Self::Hooks(span) => *span, + Self::Tasks(span) => *span, + Self::TaskCondition(span) => *span, + Self::TaskIndex(span) => *span, + Self::TaskList(span) => *span, + Self::Error(span) => *span, + 
Self::RuntimeTask(span) => *span, + Self::RuntimeCall(_, span) => *span, + Self::RuntimeEvent(span) => *span, + Self::RuntimeOrigin(span) => *span, + Self::Inherent(span) => *span, + Self::Storage(span) => *span, + Self::GenesisConfig(span) => *span, + Self::GenesisBuild(span) => *span, + Self::ValidateUnsigned(span) => *span, + Self::TypeValue(span) => *span, + Self::ExtraConstants(span) => *span, + Self::Composite(span) => *span, + } + } } impl syn::parse::Parse for PalletAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::config) { - let span = content.parse::()?.span(); - let with_default = content.peek(syn::token::Paren); - if with_default { - let inside_config; - let _paren = syn::parenthesized!(inside_config in content); - inside_config.parse::()?; - } - Ok(PalletAttr::Config(span, with_default)) - } else if lookahead.peek(keyword::pallet) { - Ok(PalletAttr::Pallet(content.parse::()?.span())) - } else if lookahead.peek(keyword::hooks) { - Ok(PalletAttr::Hooks(content.parse::()?.span())) - } else if lookahead.peek(keyword::call) { - let span = content.parse::().expect("peeked").span(); - let attr = match content.is_empty() { - true => None, - false => Some(InheritedCallWeightAttr::parse(&content)?), - }; - Ok(PalletAttr::RuntimeCall(attr, span)) - } else if lookahead.peek(keyword::tasks_experimental) { - Ok(PalletAttr::Tasks(content.parse::()?.span())) - } else if lookahead.peek(keyword::task_enum) { - Ok(PalletAttr::RuntimeTask(content.parse::()?.span())) - } else if lookahead.peek(keyword::task_condition) { - Ok(PalletAttr::TaskCondition(content.parse::()?.span())) - } else if lookahead.peek(keyword::task_index) { - Ok(PalletAttr::TaskIndex(content.parse::()?.span())) - } else if lookahead.peek(keyword::task_list) { - 
Ok(PalletAttr::TaskList(content.parse::()?.span())) - } else if lookahead.peek(keyword::error) { - Ok(PalletAttr::Error(content.parse::()?.span())) - } else if lookahead.peek(keyword::event) { - Ok(PalletAttr::RuntimeEvent(content.parse::()?.span())) - } else if lookahead.peek(keyword::origin) { - Ok(PalletAttr::RuntimeOrigin(content.parse::()?.span())) - } else if lookahead.peek(keyword::inherent) { - Ok(PalletAttr::Inherent(content.parse::()?.span())) - } else if lookahead.peek(keyword::storage) { - Ok(PalletAttr::Storage(content.parse::()?.span())) - } else if lookahead.peek(keyword::genesis_config) { - Ok(PalletAttr::GenesisConfig(content.parse::()?.span())) - } else if lookahead.peek(keyword::genesis_build) { - Ok(PalletAttr::GenesisBuild(content.parse::()?.span())) - } else if lookahead.peek(keyword::validate_unsigned) { - Ok(PalletAttr::ValidateUnsigned(content.parse::()?.span())) - } else if lookahead.peek(keyword::type_value) { - Ok(PalletAttr::TypeValue(content.parse::()?.span())) - } else if lookahead.peek(keyword::extra_constants) { - Ok(PalletAttr::ExtraConstants(content.parse::()?.span())) - } else if lookahead.peek(keyword::composite_enum) { - Ok(PalletAttr::Composite(content.parse::()?.span())) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::config) { + let span = content.parse::()?.span(); + let with_default = content.peek(syn::token::Paren); + if with_default { + let inside_config; + let _paren = syn::parenthesized!(inside_config in content); + inside_config.parse::()?; + } + Ok(PalletAttr::Config(span, with_default)) + } else if lookahead.peek(keyword::pallet) { + Ok(PalletAttr::Pallet( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::hooks) { + 
Ok(PalletAttr::Hooks(content.parse::()?.span())) + } else if lookahead.peek(keyword::call) { + let span = content.parse::().expect("peeked").span(); + let attr = match content.is_empty() { + true => None, + false => Some(InheritedCallWeightAttr::parse(&content)?), + }; + Ok(PalletAttr::RuntimeCall(attr, span)) + } else if lookahead.peek(keyword::tasks_experimental) { + Ok(PalletAttr::Tasks( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::task_enum) { + Ok(PalletAttr::RuntimeTask( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::task_condition) { + Ok(PalletAttr::TaskCondition( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::task_index) { + Ok(PalletAttr::TaskIndex( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::task_list) { + Ok(PalletAttr::TaskList( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::error) { + Ok(PalletAttr::Error(content.parse::()?.span())) + } else if lookahead.peek(keyword::event) { + Ok(PalletAttr::RuntimeEvent( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::origin) { + Ok(PalletAttr::RuntimeOrigin( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::inherent) { + Ok(PalletAttr::Inherent( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::storage) { + Ok(PalletAttr::Storage( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::genesis_config) { + Ok(PalletAttr::GenesisConfig( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::genesis_build) { + Ok(PalletAttr::GenesisBuild( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::validate_unsigned) { + Ok(PalletAttr::ValidateUnsigned( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::type_value) { + Ok(PalletAttr::TypeValue( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::extra_constants) { + Ok(PalletAttr::ExtraConstants( + 
content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::composite_enum) { + Ok(PalletAttr::Composite( + content.parse::()?.span(), + )) + } else { + Err(lookahead.error()) + } + } } /// The optional weight annotation on a `#[pallet::call]` like `#[pallet::call(weight($type))]`. #[derive(Clone)] pub struct InheritedCallWeightAttr { - pub typename: syn::Type, - pub span: proc_macro2::Span, + pub typename: syn::Type, + pub span: proc_macro2::Span, } impl syn::parse::Parse for InheritedCallWeightAttr { - // Parses `(weight($type))` or `(weight = $type)`. - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let content; - syn::parenthesized!(content in input); - content.parse::()?; - let lookahead = content.lookahead1(); - - let buffer = if lookahead.peek(syn::token::Paren) { - let inner; - syn::parenthesized!(inner in content); - inner - } else if lookahead.peek(syn::Token![=]) { - content.parse::().expect("peeked"); - content - } else { - return Err(lookahead.error()) - }; - - Ok(Self { typename: buffer.parse()?, span: input.span() }) - } + // Parses `(weight($type))` or `(weight = $type)`. 
+ fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + syn::parenthesized!(content in input); + content.parse::()?; + let lookahead = content.lookahead1(); + + let buffer = if lookahead.peek(syn::token::Paren) { + let inner; + syn::parenthesized!(inner in content); + inner + } else if lookahead.peek(syn::Token![=]) { + content.parse::().expect("peeked"); + content + } else { + return Err(lookahead.error()); + }; + + Ok(Self { + typename: buffer.parse()?, + span: input.span(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/origin.rs b/support/procedural-fork/src/pallet/parse/origin.rs index 76e2a8841..2dd84c40d 100644 --- a/support/procedural-fork/src/pallet/parse/origin.rs +++ b/support/procedural-fork/src/pallet/parse/origin.rs @@ -25,48 +25,56 @@ use syn::spanned::Spanned; /// * `struct Origin` /// * `enum Origin` pub struct OriginDef { - /// The index of item in pallet module. - pub index: usize, - pub has_instance: bool, - pub is_generic: bool, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, + /// The index of item in pallet module. + pub index: usize, + pub has_instance: bool, + pub is_generic: bool, + /// A set of usage of instance, must be check for consistency with trait. 
+ pub instances: Vec, } impl OriginDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item_span = item.span(); - let (vis, ident, generics) = match &item { - syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Type(item) => (&item.vis, &item.ident, &item.generics), - _ => { - let msg = "Invalid pallet::origin, expected enum or struct or type"; - return Err(syn::Error::new(item.span(), msg)) - }, - }; + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item_span = item.span(); + let (vis, ident, generics) = match &item { + syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Type(item) => (&item.vis, &item.ident, &item.generics), + _ => { + let msg = "Invalid pallet::origin, expected enum or struct or type"; + return Err(syn::Error::new(item.span(), msg)); + } + }; - let has_instance = generics.params.len() == 2; - let is_generic = !generics.params.is_empty(); + let has_instance = generics.params.len() == 2; + let is_generic = !generics.params.is_empty(); - let mut instances = vec![]; - if let Some(u) = helper::check_type_def_optional_gen(generics, item.span())? { - instances.push(u); - } else { - // construct_runtime only allow generic event for instantiable pallet. - instances.push(helper::InstanceUsage { has_instance: false, span: ident.span() }) - } + let mut instances = vec![]; + if let Some(u) = helper::check_type_def_optional_gen(generics, item.span())? { + instances.push(u); + } else { + // construct_runtime only allow generic event for instantiable pallet. 
+ instances.push(helper::InstanceUsage { + has_instance: false, + span: ident.span(), + }) + } - if !matches!(vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::origin, Origin must be public"; - return Err(syn::Error::new(item_span, msg)) - } + if !matches!(vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::origin, Origin must be public"; + return Err(syn::Error::new(item_span, msg)); + } - if ident != "Origin" { - let msg = "Invalid pallet::origin, ident must `Origin`"; - return Err(syn::Error::new(ident.span(), msg)) - } + if ident != "Origin" { + let msg = "Invalid pallet::origin, ident must `Origin`"; + return Err(syn::Error::new(ident.span(), msg)); + } - Ok(OriginDef { index, has_instance, is_generic, instances }) - } + Ok(OriginDef { + index, + has_instance, + is_generic, + instances, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/pallet_struct.rs b/support/procedural-fork/src/pallet/parse/pallet_struct.rs index b64576099..320cf01fa 100644 --- a/support/procedural-fork/src/pallet/parse/pallet_struct.rs +++ b/support/procedural-fork/src/pallet/parse/pallet_struct.rs @@ -21,129 +21,137 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(pallet); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(without_storage_info); - syn::custom_keyword!(storage_version); + syn::custom_keyword!(pallet); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(without_storage_info); + syn::custom_keyword!(storage_version); } /// Definition of the pallet pallet. pub struct PalletStructDef { - /// The index of item in pallet pallet. - pub index: usize, - /// A set of usage of instance, must be check for consistency with config trait. - pub instances: Vec, - /// The keyword Pallet used (contains span). - pub pallet: keyword::Pallet, - /// The span of the pallet::pallet attribute. 
- pub attr_span: proc_macro2::Span, - /// Whether to specify the storages max encoded len when implementing `StorageInfoTrait`. - /// Contains the span of the attribute. - pub without_storage_info: Option, - /// The in-code storage version of the pallet. - pub storage_version: Option, + /// The index of item in pallet pallet. + pub index: usize, + /// A set of usage of instance, must be check for consistency with config trait. + pub instances: Vec, + /// The keyword Pallet used (contains span). + pub pallet: keyword::Pallet, + /// The span of the pallet::pallet attribute. + pub attr_span: proc_macro2::Span, + /// Whether to specify the storages max encoded len when implementing `StorageInfoTrait`. + /// Contains the span of the attribute. + pub without_storage_info: Option, + /// The in-code storage version of the pallet. + pub storage_version: Option, } /// Parse for one variant of: /// * `#[pallet::without_storage_info]` /// * `#[pallet::storage_version(STORAGE_VERSION)]` pub enum PalletStructAttr { - WithoutStorageInfoTrait(proc_macro2::Span), - StorageVersion { storage_version: syn::Path, span: proc_macro2::Span }, + WithoutStorageInfoTrait(proc_macro2::Span), + StorageVersion { + storage_version: syn::Path, + span: proc_macro2::Span, + }, } impl PalletStructAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::WithoutStorageInfoTrait(span) | Self::StorageVersion { span, .. } => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::WithoutStorageInfoTrait(span) | Self::StorageVersion { span, .. 
} => *span, + } + } } impl syn::parse::Parse for PalletStructAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::without_storage_info) { - let span = content.parse::()?.span(); - Ok(Self::WithoutStorageInfoTrait(span)) - } else if lookahead.peek(keyword::storage_version) { - let span = content.parse::()?.span(); - - let version_content; - syn::parenthesized!(version_content in content); - let storage_version = version_content.parse::()?; - - Ok(Self::StorageVersion { storage_version, span }) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::without_storage_info) { + let span = content.parse::()?.span(); + Ok(Self::WithoutStorageInfoTrait(span)) + } else if lookahead.peek(keyword::storage_version) { + let span = content.parse::()?.span(); + + let version_content; + syn::parenthesized!(version_content in content); + let storage_version = version_content.parse::()?; + + Ok(Self::StorageVersion { + storage_version, + span, + }) + } else { + Err(lookahead.error()) + } + } } impl PalletStructDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Struct(item) = item { - item - } else { - let msg = "Invalid pallet::pallet, expected struct definition"; - return Err(syn::Error::new(item.span(), msg)) - }; - - let mut without_storage_info = None; - let mut storage_version_found = None; - - let struct_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; - for attr in struct_attrs { - match attr { - 
PalletStructAttr::WithoutStorageInfoTrait(span) - if without_storage_info.is_none() => - { - without_storage_info = Some(span); - }, - PalletStructAttr::StorageVersion { storage_version, .. } - if storage_version_found.is_none() => - { - storage_version_found = Some(storage_version); - }, - attr => { - let msg = "Unexpected duplicated attribute"; - return Err(syn::Error::new(attr.span(), msg)) - }, - } - } - - let pallet = syn::parse2::(item.ident.to_token_stream())?; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::pallet, Pallet must be public"; - return Err(syn::Error::new(item.span(), msg)) - } - - if item.generics.where_clause.is_some() { - let msg = "Invalid pallet::pallet, where clause not supported on Pallet declaration"; - return Err(syn::Error::new(item.generics.where_clause.span(), msg)) - } - - let instances = - vec![helper::check_type_def_gen_no_bounds(&item.generics, item.ident.span())?]; - - Ok(Self { - index, - instances, - pallet, - attr_span, - without_storage_info, - storage_version: storage_version_found, - }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Struct(item) = item { + item + } else { + let msg = "Invalid pallet::pallet, expected struct definition"; + return Err(syn::Error::new(item.span(), msg)); + }; + + let mut without_storage_info = None; + let mut storage_version_found = None; + + let struct_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + for attr in struct_attrs { + match attr { + PalletStructAttr::WithoutStorageInfoTrait(span) + if without_storage_info.is_none() => + { + without_storage_info = Some(span); + } + PalletStructAttr::StorageVersion { + storage_version, .. 
+ } if storage_version_found.is_none() => { + storage_version_found = Some(storage_version); + } + attr => { + let msg = "Unexpected duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)); + } + } + } + + let pallet = syn::parse2::(item.ident.to_token_stream())?; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::pallet, Pallet must be public"; + return Err(syn::Error::new(item.span(), msg)); + } + + if item.generics.where_clause.is_some() { + let msg = "Invalid pallet::pallet, where clause not supported on Pallet declaration"; + return Err(syn::Error::new(item.generics.where_clause.span(), msg)); + } + + let instances = vec![helper::check_type_def_gen_no_bounds( + &item.generics, + item.ident.span(), + )?]; + + Ok(Self { + index, + instances, + pallet, + attr_span, + without_storage_info, + storage_version: storage_version_found, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/storage.rs b/support/procedural-fork/src/pallet/parse/storage.rs index 9d96a18b5..811832427 100644 --- a/support/procedural-fork/src/pallet/parse/storage.rs +++ b/support/procedural-fork/src/pallet/parse/storage.rs @@ -23,16 +23,16 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(Error); - syn::custom_keyword!(pallet); - syn::custom_keyword!(getter); - syn::custom_keyword!(storage_prefix); - syn::custom_keyword!(unbounded); - syn::custom_keyword!(whitelist_storage); - syn::custom_keyword!(disable_try_decode_storage); - syn::custom_keyword!(OptionQuery); - syn::custom_keyword!(ResultQuery); - syn::custom_keyword!(ValueQuery); + syn::custom_keyword!(Error); + syn::custom_keyword!(pallet); + syn::custom_keyword!(getter); + syn::custom_keyword!(storage_prefix); + syn::custom_keyword!(unbounded); + syn::custom_keyword!(whitelist_storage); + syn::custom_keyword!(disable_try_decode_storage); + syn::custom_keyword!(OptionQuery); + syn::custom_keyword!(ResultQuery); + syn::custom_keyword!(ValueQuery); } /// Parse for one of the following: @@ -42,906 +42,1003 @@ mod keyword { /// * `#[pallet::whitelist_storage] /// * `#[pallet::disable_try_decode_storage]` pub enum PalletStorageAttr { - Getter(syn::Ident, proc_macro2::Span), - StorageName(syn::LitStr, proc_macro2::Span), - Unbounded(proc_macro2::Span), - WhitelistStorage(proc_macro2::Span), - DisableTryDecodeStorage(proc_macro2::Span), + Getter(syn::Ident, proc_macro2::Span), + StorageName(syn::LitStr, proc_macro2::Span), + Unbounded(proc_macro2::Span), + WhitelistStorage(proc_macro2::Span), + DisableTryDecodeStorage(proc_macro2::Span), } impl PalletStorageAttr { - fn attr_span(&self) -> proc_macro2::Span { - match self { - Self::Getter(_, span) | - Self::StorageName(_, span) | - Self::Unbounded(span) | - Self::WhitelistStorage(span) => *span, - Self::DisableTryDecodeStorage(span) => *span, - } - } + fn attr_span(&self) -> proc_macro2::Span { + match self { + Self::Getter(_, span) + | Self::StorageName(_, span) + | Self::Unbounded(span) + | Self::WhitelistStorage(span) => *span, + Self::DisableTryDecodeStorage(span) => *span, + } + } } impl syn::parse::Parse for PalletStorageAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - 
let attr_span = input.span(); - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::getter) { - content.parse::()?; - - let generate_content; - syn::parenthesized!(generate_content in content); - generate_content.parse::()?; - Ok(Self::Getter(generate_content.parse::()?, attr_span)) - } else if lookahead.peek(keyword::storage_prefix) { - content.parse::()?; - content.parse::()?; - - let renamed_prefix = content.parse::()?; - // Ensure the renamed prefix is a proper Rust identifier - syn::parse_str::(&renamed_prefix.value()).map_err(|_| { - let msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); - syn::Error::new(renamed_prefix.span(), msg) - })?; - - Ok(Self::StorageName(renamed_prefix, attr_span)) - } else if lookahead.peek(keyword::unbounded) { - content.parse::()?; - - Ok(Self::Unbounded(attr_span)) - } else if lookahead.peek(keyword::whitelist_storage) { - content.parse::()?; - Ok(Self::WhitelistStorage(attr_span)) - } else if lookahead.peek(keyword::disable_try_decode_storage) { - content.parse::()?; - Ok(Self::DisableTryDecodeStorage(attr_span)) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let attr_span = input.span(); + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::getter) { + content.parse::()?; + + let generate_content; + syn::parenthesized!(generate_content in content); + generate_content.parse::()?; + Ok(Self::Getter( + generate_content.parse::()?, + attr_span, + )) + } else if lookahead.peek(keyword::storage_prefix) { + content.parse::()?; + content.parse::()?; + + let renamed_prefix = content.parse::()?; + // Ensure the renamed prefix is a proper Rust identifier + syn::parse_str::(&renamed_prefix.value()).map_err(|_| { + let 
msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); + syn::Error::new(renamed_prefix.span(), msg) + })?; + + Ok(Self::StorageName(renamed_prefix, attr_span)) + } else if lookahead.peek(keyword::unbounded) { + content.parse::()?; + + Ok(Self::Unbounded(attr_span)) + } else if lookahead.peek(keyword::whitelist_storage) { + content.parse::()?; + Ok(Self::WhitelistStorage(attr_span)) + } else if lookahead.peek(keyword::disable_try_decode_storage) { + content.parse::()?; + Ok(Self::DisableTryDecodeStorage(attr_span)) + } else { + Err(lookahead.error()) + } + } } struct PalletStorageAttrInfo { - getter: Option, - rename_as: Option, - unbounded: bool, - whitelisted: bool, - try_decode: bool, + getter: Option, + rename_as: Option, + unbounded: bool, + whitelisted: bool, + try_decode: bool, } impl PalletStorageAttrInfo { - fn from_attrs(attrs: Vec) -> syn::Result { - let mut getter = None; - let mut rename_as = None; - let mut unbounded = false; - let mut whitelisted = false; - let mut disable_try_decode_storage = false; - for attr in attrs { - match attr { - PalletStorageAttr::Getter(ident, ..) if getter.is_none() => getter = Some(ident), - PalletStorageAttr::StorageName(name, ..) if rename_as.is_none() => - rename_as = Some(name), - PalletStorageAttr::Unbounded(..) if !unbounded => unbounded = true, - PalletStorageAttr::WhitelistStorage(..) if !whitelisted => whitelisted = true, - PalletStorageAttr::DisableTryDecodeStorage(..) 
if !disable_try_decode_storage => - disable_try_decode_storage = true, - attr => - return Err(syn::Error::new( - attr.attr_span(), - "Invalid attribute: Duplicate attribute", - )), - } - } - - Ok(PalletStorageAttrInfo { - getter, - rename_as, - unbounded, - whitelisted, - try_decode: !disable_try_decode_storage, - }) - } + fn from_attrs(attrs: Vec) -> syn::Result { + let mut getter = None; + let mut rename_as = None; + let mut unbounded = false; + let mut whitelisted = false; + let mut disable_try_decode_storage = false; + for attr in attrs { + match attr { + PalletStorageAttr::Getter(ident, ..) if getter.is_none() => getter = Some(ident), + PalletStorageAttr::StorageName(name, ..) if rename_as.is_none() => { + rename_as = Some(name) + } + PalletStorageAttr::Unbounded(..) if !unbounded => unbounded = true, + PalletStorageAttr::WhitelistStorage(..) if !whitelisted => whitelisted = true, + PalletStorageAttr::DisableTryDecodeStorage(..) if !disable_try_decode_storage => { + disable_try_decode_storage = true + } + attr => { + return Err(syn::Error::new( + attr.attr_span(), + "Invalid attribute: Duplicate attribute", + )) + } + } + } + + Ok(PalletStorageAttrInfo { + getter, + rename_as, + unbounded, + whitelisted, + try_decode: !disable_try_decode_storage, + }) + } } /// The value and key types used by storages. Needed to expand metadata. 
pub enum Metadata { - Value { value: syn::Type }, - Map { value: syn::Type, key: syn::Type }, - CountedMap { value: syn::Type, key: syn::Type }, - DoubleMap { value: syn::Type, key1: syn::Type, key2: syn::Type }, - NMap { keys: Vec, keygen: syn::Type, value: syn::Type }, - CountedNMap { keys: Vec, keygen: syn::Type, value: syn::Type }, + Value { + value: syn::Type, + }, + Map { + value: syn::Type, + key: syn::Type, + }, + CountedMap { + value: syn::Type, + key: syn::Type, + }, + DoubleMap { + value: syn::Type, + key1: syn::Type, + key2: syn::Type, + }, + NMap { + keys: Vec, + keygen: syn::Type, + value: syn::Type, + }, + CountedNMap { + keys: Vec, + keygen: syn::Type, + value: syn::Type, + }, } pub enum QueryKind { - OptionQuery, - ResultQuery(syn::Path, syn::Ident), - ValueQuery, + OptionQuery, + ResultQuery(syn::Path, syn::Ident), + ValueQuery, } /// Definition of a storage, storage is a storage type like /// `type MyStorage = StorageValue` /// The keys and values types are parsed in order to get metadata pub struct StorageDef { - /// The index of storage item in pallet module. - pub index: usize, - /// Visibility of the storage type. - pub vis: syn::Visibility, - /// The type ident, to generate the StoragePrefix for. - pub ident: syn::Ident, - /// The keys and value metadata of the storage. - pub metadata: Metadata, - /// The doc associated to the storage. - pub docs: Vec, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, - /// Optional getter to generate. If some then query_kind is ensured to be some as well. - pub getter: Option, - /// Optional expression that evaluates to a type that can be used as StoragePrefix instead of - /// ident. - pub rename_as: Option, - /// Whereas the querytype of the storage is OptionQuery, ResultQuery or ValueQuery. - /// Note that this is best effort as it can't be determined when QueryKind is generic, and - /// result can be false if user do some unexpected type alias. 
- pub query_kind: Option, - /// Where clause of type definition. - pub where_clause: Option, - /// The span of the pallet::storage attribute. - pub attr_span: proc_macro2::Span, - /// The `cfg` attributes. - pub cfg_attrs: Vec, - /// If generics are named (e.g. `StorageValue`) then this contains all the - /// generics of the storage. - /// If generics are not named, this is none. - pub named_generics: Option, - /// If the value stored in this storage is unbounded. - pub unbounded: bool, - /// Whether or not reads to this storage key will be ignored by benchmarking - pub whitelisted: bool, - /// Whether or not to try to decode the storage key when running try-runtime checks. - pub try_decode: bool, - /// Whether or not a default hasher is allowed to replace `_` - pub use_default_hasher: bool, + /// The index of storage item in pallet module. + pub index: usize, + /// Visibility of the storage type. + pub vis: syn::Visibility, + /// The type ident, to generate the StoragePrefix for. + pub ident: syn::Ident, + /// The keys and value metadata of the storage. + pub metadata: Metadata, + /// The doc associated to the storage. + pub docs: Vec, + /// A set of usage of instance, must be check for consistency with config. + pub instances: Vec, + /// Optional getter to generate. If some then query_kind is ensured to be some as well. + pub getter: Option, + /// Optional expression that evaluates to a type that can be used as StoragePrefix instead of + /// ident. + pub rename_as: Option, + /// Whereas the querytype of the storage is OptionQuery, ResultQuery or ValueQuery. + /// Note that this is best effort as it can't be determined when QueryKind is generic, and + /// result can be false if user do some unexpected type alias. + pub query_kind: Option, + /// Where clause of type definition. + pub where_clause: Option, + /// The span of the pallet::storage attribute. + pub attr_span: proc_macro2::Span, + /// The `cfg` attributes. 
+ pub cfg_attrs: Vec, + /// If generics are named (e.g. `StorageValue`) then this contains all the + /// generics of the storage. + /// If generics are not named, this is none. + pub named_generics: Option, + /// If the value stored in this storage is unbounded. + pub unbounded: bool, + /// Whether or not reads to this storage key will be ignored by benchmarking + pub whitelisted: bool, + /// Whether or not to try to decode the storage key when running try-runtime checks. + pub try_decode: bool, + /// Whether or not a default hasher is allowed to replace `_` + pub use_default_hasher: bool, } /// The parsed generic from the #[derive(Clone)] pub enum StorageGenerics { - DoubleMap { - hasher1: syn::Type, - key1: syn::Type, - hasher2: syn::Type, - key2: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - Map { - hasher: syn::Type, - key: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - CountedMap { - hasher: syn::Type, - key: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - Value { - value: syn::Type, - query_kind: Option, - on_empty: Option, - }, - NMap { - keygen: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - CountedNMap { - keygen: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, + DoubleMap { + hasher1: syn::Type, + key1: syn::Type, + hasher2: syn::Type, + key2: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + Map { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + CountedMap { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + Value { + value: syn::Type, + query_kind: Option, + on_empty: Option, + 
}, + NMap { + keygen: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + CountedNMap { + keygen: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, } impl StorageGenerics { - /// Return the metadata from the defined generics - fn metadata(&self) -> syn::Result { - let res = match self.clone() { - Self::DoubleMap { value, key1, key2, .. } => Metadata::DoubleMap { value, key1, key2 }, - Self::Map { value, key, .. } => Metadata::Map { value, key }, - Self::CountedMap { value, key, .. } => Metadata::CountedMap { value, key }, - Self::Value { value, .. } => Metadata::Value { value }, - Self::NMap { keygen, value, .. } => - Metadata::NMap { keys: collect_keys(&keygen)?, keygen, value }, - Self::CountedNMap { keygen, value, .. } => - Metadata::CountedNMap { keys: collect_keys(&keygen)?, keygen, value }, - }; - - Ok(res) - } - - /// Return the query kind from the defined generics - fn query_kind(&self) -> Option { - match &self { - Self::DoubleMap { query_kind, .. } | - Self::Map { query_kind, .. } | - Self::CountedMap { query_kind, .. } | - Self::Value { query_kind, .. } | - Self::NMap { query_kind, .. } | - Self::CountedNMap { query_kind, .. } => query_kind.clone(), - } - } + /// Return the metadata from the defined generics + fn metadata(&self) -> syn::Result { + let res = match self.clone() { + Self::DoubleMap { + value, key1, key2, .. + } => Metadata::DoubleMap { value, key1, key2 }, + Self::Map { value, key, .. } => Metadata::Map { value, key }, + Self::CountedMap { value, key, .. } => Metadata::CountedMap { value, key }, + Self::Value { value, .. } => Metadata::Value { value }, + Self::NMap { keygen, value, .. } => Metadata::NMap { + keys: collect_keys(&keygen)?, + keygen, + value, + }, + Self::CountedNMap { keygen, value, .. 
} => Metadata::CountedNMap { + keys: collect_keys(&keygen)?, + keygen, + value, + }, + }; + + Ok(res) + } + + /// Return the query kind from the defined generics + fn query_kind(&self) -> Option { + match &self { + Self::DoubleMap { query_kind, .. } + | Self::Map { query_kind, .. } + | Self::CountedMap { query_kind, .. } + | Self::Value { query_kind, .. } + | Self::NMap { query_kind, .. } + | Self::CountedNMap { query_kind, .. } => query_kind.clone(), + } + } } enum StorageKind { - Value, - Map, - CountedMap, - DoubleMap, - NMap, - CountedNMap, + Value, + Map, + CountedMap, + DoubleMap, + NMap, + CountedNMap, } /// Check the generics in the `map` contains the generics in `gen` may contains generics in /// `optional_gen`, and doesn't contains any other. fn check_generics( - map: &HashMap, - mandatory_generics: &[&str], - optional_generics: &[&str], - storage_type_name: &str, - args_span: proc_macro2::Span, + map: &HashMap, + mandatory_generics: &[&str], + optional_generics: &[&str], + storage_type_name: &str, + args_span: proc_macro2::Span, ) -> syn::Result<()> { - let mut errors = vec![]; - - let expectation = { - let mut e = format!( - "`{}` expect generics {}and optional generics {}", - storage_type_name, - mandatory_generics - .iter() - .map(|name| format!("`{}`, ", name)) - .collect::(), - &optional_generics.iter().map(|name| format!("`{}`, ", name)).collect::(), - ); - e.pop(); - e.pop(); - e.push('.'); - e - }; - - for (gen_name, gen_binding) in map { - if !mandatory_generics.contains(&gen_name.as_str()) && - !optional_generics.contains(&gen_name.as_str()) - { - let msg = format!( - "Invalid pallet::storage, Unexpected generic `{}` for `{}`. 
{}", - gen_name, storage_type_name, expectation, - ); - errors.push(syn::Error::new(gen_binding.span(), msg)); - } - } - - for mandatory_generic in mandatory_generics { - if !map.contains_key(&mandatory_generic.to_string()) { - let msg = format!( - "Invalid pallet::storage, cannot find `{}` generic, required for `{}`.", - mandatory_generic, storage_type_name - ); - errors.push(syn::Error::new(args_span, msg)); - } - } - - let mut errors = errors.drain(..); - if let Some(mut error) = errors.next() { - for other_error in errors { - error.combine(other_error); - } - Err(error) - } else { - Ok(()) - } + let mut errors = vec![]; + + let expectation = { + let mut e = format!( + "`{}` expect generics {}and optional generics {}", + storage_type_name, + mandatory_generics + .iter() + .map(|name| format!("`{}`, ", name)) + .collect::(), + &optional_generics + .iter() + .map(|name| format!("`{}`, ", name)) + .collect::(), + ); + e.pop(); + e.pop(); + e.push('.'); + e + }; + + for (gen_name, gen_binding) in map { + if !mandatory_generics.contains(&gen_name.as_str()) + && !optional_generics.contains(&gen_name.as_str()) + { + let msg = format!( + "Invalid pallet::storage, Unexpected generic `{}` for `{}`. 
{}", + gen_name, storage_type_name, expectation, + ); + errors.push(syn::Error::new(gen_binding.span(), msg)); + } + } + + for mandatory_generic in mandatory_generics { + if !map.contains_key(&mandatory_generic.to_string()) { + let msg = format!( + "Invalid pallet::storage, cannot find `{}` generic, required for `{}`.", + mandatory_generic, storage_type_name + ); + errors.push(syn::Error::new(args_span, msg)); + } + } + + let mut errors = errors.drain(..); + if let Some(mut error) = errors.next() { + for other_error in errors { + error.combine(other_error); + } + Err(error) + } else { + Ok(()) + } } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_named_generics( - storage: &StorageKind, - args_span: proc_macro2::Span, - args: &[syn::AssocType], - dev_mode: bool, + storage: &StorageKind, + args_span: proc_macro2::Span, + args: &[syn::AssocType], + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let mut parsed = HashMap::::new(); - - // Ensure no duplicate. 
- for arg in args { - if let Some(other) = parsed.get(&arg.ident.to_string()) { - let msg = "Invalid pallet::storage, Duplicated named generic"; - let mut err = syn::Error::new(arg.ident.span(), msg); - err.combine(syn::Error::new(other.ident.span(), msg)); - return Err(err) - } - parsed.insert(arg.ident.to_string(), arg.clone()); - } - - let mut map_mandatory_generics = vec!["Key", "Value"]; - let mut map_optional_generics = vec!["QueryKind", "OnEmpty", "MaxValues"]; - if dev_mode { - map_optional_generics.push("Hasher"); - } else { - map_mandatory_generics.push("Hasher"); - } - - let generics = match storage { - StorageKind::Value => { - check_generics( - &parsed, - &["Value"], - &["QueryKind", "OnEmpty"], - "StorageValue", - args_span, - )?; - - StorageGenerics::Value { - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - } - }, - StorageKind::Map => { - check_generics( - &parsed, - &map_mandatory_generics, - &map_optional_generics, - "StorageMap", - args_span, - )?; - - StorageGenerics::Map { - hasher: parsed - .remove("Hasher") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - StorageKind::CountedMap => { - check_generics( - &parsed, - &map_mandatory_generics, - &map_optional_generics, - "CountedStorageMap", - args_span, - )?; - - StorageGenerics::CountedMap { - hasher: parsed - .remove("Hasher") - .map(|binding| 
binding.ty) - .unwrap_or(syn::Type::Verbatim(quote::quote! { Blake2_128Concat })), - key: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - StorageKind::DoubleMap => { - let mut double_map_mandatory_generics = vec!["Key1", "Key2", "Value"]; - if dev_mode { - map_optional_generics.extend(["Hasher1", "Hasher2"]); - } else { - double_map_mandatory_generics.extend(["Hasher1", "Hasher2"]); - } - - check_generics( - &parsed, - &double_map_mandatory_generics, - &map_optional_generics, - "StorageDoubleMap", - args_span, - )?; - - StorageGenerics::DoubleMap { - hasher1: parsed - .remove("Hasher1") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key1: parsed - .remove("Key1") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - hasher2: parsed - .remove("Hasher2") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key2: parsed - .remove("Key2") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - StorageKind::NMap => { - check_generics( - &parsed, - &["Key", "Value"], - &["QueryKind", "OnEmpty", "MaxValues"], - "StorageNMap", - args_span, - )?; - - StorageGenerics::NMap { - keygen: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory 
generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - StorageKind::CountedNMap => { - check_generics( - &parsed, - &["Key", "Value"], - &["QueryKind", "OnEmpty", "MaxValues"], - "CountedStorageNMap", - args_span, - )?; - - StorageGenerics::CountedNMap { - keygen: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - }; - - let metadata = generics.metadata()?; - let query_kind = generics.query_kind(); - - Ok((Some(generics), metadata, query_kind, false)) + let mut parsed = HashMap::::new(); + + // Ensure no duplicate. 
+ for arg in args { + if let Some(other) = parsed.get(&arg.ident.to_string()) { + let msg = "Invalid pallet::storage, Duplicated named generic"; + let mut err = syn::Error::new(arg.ident.span(), msg); + err.combine(syn::Error::new(other.ident.span(), msg)); + return Err(err); + } + parsed.insert(arg.ident.to_string(), arg.clone()); + } + + let mut map_mandatory_generics = vec!["Key", "Value"]; + let mut map_optional_generics = vec!["QueryKind", "OnEmpty", "MaxValues"]; + if dev_mode { + map_optional_generics.push("Hasher"); + } else { + map_mandatory_generics.push("Hasher"); + } + + let generics = match storage { + StorageKind::Value => { + check_generics( + &parsed, + &["Value"], + &["QueryKind", "OnEmpty"], + "StorageValue", + args_span, + )?; + + StorageGenerics::Value { + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + } + } + StorageKind::Map => { + check_generics( + &parsed, + &map_mandatory_generics, + &map_optional_generics, + "StorageMap", + args_span, + )?; + + StorageGenerics::Map { + hasher: parsed + .remove("Hasher") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + StorageKind::CountedMap => { + check_generics( + &parsed, + &map_mandatory_generics, + &map_optional_generics, + "CountedStorageMap", + args_span, + )?; + + StorageGenerics::CountedMap { + hasher: parsed + .remove("Hasher") + .map(|binding| 
binding.ty) + .unwrap_or(syn::Type::Verbatim(quote::quote! { Blake2_128Concat })), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + StorageKind::DoubleMap => { + let mut double_map_mandatory_generics = vec!["Key1", "Key2", "Value"]; + if dev_mode { + map_optional_generics.extend(["Hasher1", "Hasher2"]); + } else { + double_map_mandatory_generics.extend(["Hasher1", "Hasher2"]); + } + + check_generics( + &parsed, + &double_map_mandatory_generics, + &map_optional_generics, + "StorageDoubleMap", + args_span, + )?; + + StorageGenerics::DoubleMap { + hasher1: parsed + .remove("Hasher1") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key1: parsed + .remove("Key1") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + hasher2: parsed + .remove("Hasher2") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key2: parsed + .remove("Key2") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + StorageKind::NMap => { + check_generics( + &parsed, + &["Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "StorageNMap", + args_span, + )?; + + StorageGenerics::NMap { + keygen: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory 
generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + StorageKind::CountedNMap => { + check_generics( + &parsed, + &["Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "CountedStorageNMap", + args_span, + )?; + + StorageGenerics::CountedNMap { + keygen: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + }; + + let metadata = generics.metadata()?; + let query_kind = generics.query_kind(); + + Ok((Some(generics), metadata, query_kind, false)) } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_unnamed_generics( - storage: &StorageKind, - args_span: proc_macro2::Span, - args: &[syn::Type], - dev_mode: bool, + storage: &StorageKind, + args_span: proc_macro2::Span, + args: &[syn::Type], + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let retrieve_arg = |arg_pos| { - args.get(arg_pos).cloned().ok_or_else(|| { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic argument, \ + let retrieve_arg = |arg_pos| { + args.get(arg_pos).cloned().ok_or_else(|| { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic argument, \ expect at least {} args, found {}.", - arg_pos + 1, - args.len(), - ); - syn::Error::new(args_span, msg) - }) - }; - - let prefix_arg = retrieve_arg(0)?; - 
syn::parse2::(prefix_arg.to_token_stream()).map_err(|e| { - let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ + arg_pos + 1, + args.len(), + ); + syn::Error::new(args_span, msg) + }) + }; + + let prefix_arg = retrieve_arg(0)?; + syn::parse2::(prefix_arg.to_token_stream()).map_err(|e| { + let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ first generic argument must be `_`, the argument is then replaced by macro."; - let mut err = syn::Error::new(prefix_arg.span(), msg); - err.combine(e); - err - })?; - - let use_default_hasher = |arg_pos| { - let arg = retrieve_arg(arg_pos)?; - if syn::parse2::(arg.to_token_stream()).is_ok() { - if dev_mode { - Ok(true) - } else { - let msg = "`_` can only be used in dev_mode. Please specify an appropriate hasher."; - Err(syn::Error::new(arg.span(), msg)) - } - } else { - Ok(false) - } - }; - - let res = match storage { - StorageKind::Value => - (None, Metadata::Value { value: retrieve_arg(1)? }, retrieve_arg(2).ok(), false), - StorageKind::Map => ( - None, - Metadata::Map { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, - retrieve_arg(4).ok(), - use_default_hasher(1)?, - ), - StorageKind::CountedMap => ( - None, - Metadata::CountedMap { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, - retrieve_arg(4).ok(), - use_default_hasher(1)?, - ), - StorageKind::DoubleMap => ( - None, - Metadata::DoubleMap { - key1: retrieve_arg(2)?, - key2: retrieve_arg(4)?, - value: retrieve_arg(5)?, - }, - retrieve_arg(6).ok(), - use_default_hasher(1)? && use_default_hasher(3)?, - ), - StorageKind::NMap => { - let keygen = retrieve_arg(1)?; - let keys = collect_keys(&keygen)?; - ( - None, - Metadata::NMap { keys, keygen, value: retrieve_arg(2)? }, - retrieve_arg(3).ok(), - false, - ) - }, - StorageKind::CountedNMap => { - let keygen = retrieve_arg(1)?; - let keys = collect_keys(&keygen)?; - ( - None, - Metadata::CountedNMap { keys, keygen, value: retrieve_arg(2)? 
}, - retrieve_arg(3).ok(), - false, - ) - }, - }; - - Ok(res) + let mut err = syn::Error::new(prefix_arg.span(), msg); + err.combine(e); + err + })?; + + let use_default_hasher = |arg_pos| { + let arg = retrieve_arg(arg_pos)?; + if syn::parse2::(arg.to_token_stream()).is_ok() { + if dev_mode { + Ok(true) + } else { + let msg = "`_` can only be used in dev_mode. Please specify an appropriate hasher."; + Err(syn::Error::new(arg.span(), msg)) + } + } else { + Ok(false) + } + }; + + let res = match storage { + StorageKind::Value => ( + None, + Metadata::Value { + value: retrieve_arg(1)?, + }, + retrieve_arg(2).ok(), + false, + ), + StorageKind::Map => ( + None, + Metadata::Map { + key: retrieve_arg(2)?, + value: retrieve_arg(3)?, + }, + retrieve_arg(4).ok(), + use_default_hasher(1)?, + ), + StorageKind::CountedMap => ( + None, + Metadata::CountedMap { + key: retrieve_arg(2)?, + value: retrieve_arg(3)?, + }, + retrieve_arg(4).ok(), + use_default_hasher(1)?, + ), + StorageKind::DoubleMap => ( + None, + Metadata::DoubleMap { + key1: retrieve_arg(2)?, + key2: retrieve_arg(4)?, + value: retrieve_arg(5)?, + }, + retrieve_arg(6).ok(), + use_default_hasher(1)? 
&& use_default_hasher(3)?, + ), + StorageKind::NMap => { + let keygen = retrieve_arg(1)?; + let keys = collect_keys(&keygen)?; + ( + None, + Metadata::NMap { + keys, + keygen, + value: retrieve_arg(2)?, + }, + retrieve_arg(3).ok(), + false, + ) + } + StorageKind::CountedNMap => { + let keygen = retrieve_arg(1)?; + let keys = collect_keys(&keygen)?; + ( + None, + Metadata::CountedNMap { + keys, + keygen, + value: retrieve_arg(2)?, + }, + retrieve_arg(3).ok(), + false, + ) + } + }; + + Ok(res) } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_generics( - segment: &syn::PathSegment, - dev_mode: bool, + segment: &syn::PathSegment, + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let storage_kind = match &*segment.ident.to_string() { - "StorageValue" => StorageKind::Value, - "StorageMap" => StorageKind::Map, - "CountedStorageMap" => StorageKind::CountedMap, - "StorageDoubleMap" => StorageKind::DoubleMap, - "StorageNMap" => StorageKind::NMap, - "CountedStorageNMap" => StorageKind::CountedNMap, - found => { - let msg = format!( + let storage_kind = match &*segment.ident.to_string() { + "StorageValue" => StorageKind::Value, + "StorageMap" => StorageKind::Map, + "CountedStorageMap" => StorageKind::CountedMap, + "StorageDoubleMap" => StorageKind::DoubleMap, + "StorageNMap" => StorageKind::NMap, + "CountedStorageNMap" => StorageKind::CountedNMap, + found => { + let msg = format!( "Invalid pallet::storage, expected ident: `StorageValue` or \ `StorageMap` or `CountedStorageMap` or `StorageDoubleMap` or `StorageNMap` or `CountedStorageNMap` \ in order to expand metadata, found `{}`.", found, ); - return Err(syn::Error::new(segment.ident.span(), msg)) - }, - }; + return Err(syn::Error::new(segment.ident.span(), msg)); + } + }; - let args_span = segment.arguments.span(); + let args_span = segment.arguments.span(); - let args = match &segment.arguments { - syn::PathArguments::AngleBracketed(args) if !args.args.is_empty() 
=> args, - _ => { - let msg = "Invalid pallet::storage, invalid number of generic generic arguments, \ + let args = match &segment.arguments { + syn::PathArguments::AngleBracketed(args) if !args.args.is_empty() => args, + _ => { + let msg = "Invalid pallet::storage, invalid number of generic generic arguments, \ expect more that 0 generic arguments."; - return Err(syn::Error::new(segment.span(), msg)) - }, - }; - - if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::Type(_))) { - let args = args - .args - .iter() - .map(|gen| match gen { - syn::GenericArgument::Type(gen) => gen.clone(), - _ => unreachable!("It is asserted above that all generics are types"), - }) - .collect::>(); - process_unnamed_generics(&storage_kind, args_span, &args, dev_mode) - } else if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::AssocType(_))) { - let args = args - .args - .iter() - .map(|gen| match gen { - syn::GenericArgument::AssocType(gen) => gen.clone(), - _ => unreachable!("It is asserted above that all generics are bindings"), - }) - .collect::>(); - process_named_generics(&storage_kind, args_span, &args, dev_mode) - } else { - let msg = "Invalid pallet::storage, invalid generic declaration for storage. 
Expect only \ + return Err(syn::Error::new(segment.span(), msg)); + } + }; + + if args + .args + .iter() + .all(|gen| matches!(gen, syn::GenericArgument::Type(_))) + { + let args = args + .args + .iter() + .map(|gen| match gen { + syn::GenericArgument::Type(gen) => gen.clone(), + _ => unreachable!("It is asserted above that all generics are types"), + }) + .collect::>(); + process_unnamed_generics(&storage_kind, args_span, &args, dev_mode) + } else if args + .args + .iter() + .all(|gen| matches!(gen, syn::GenericArgument::AssocType(_))) + { + let args = args + .args + .iter() + .map(|gen| match gen { + syn::GenericArgument::AssocType(gen) => gen.clone(), + _ => unreachable!("It is asserted above that all generics are bindings"), + }) + .collect::>(); + process_named_generics(&storage_kind, args_span, &args, dev_mode) + } else { + let msg = "Invalid pallet::storage, invalid generic declaration for storage. Expect only \ type generics or binding generics, e.g. `` or \ ``."; - Err(syn::Error::new(segment.span(), msg)) - } + Err(syn::Error::new(segment.span(), msg)) + } } /// Parse the 2nd type argument to `StorageNMap` and return its keys. fn collect_keys(keygen: &syn::Type) -> syn::Result> { - if let syn::Type::Tuple(tup) = keygen { - tup.elems.iter().map(extract_key).collect::>>() - } else { - Ok(vec![extract_key(keygen)?]) - } + if let syn::Type::Tuple(tup) = keygen { + tup.elems + .iter() + .map(extract_key) + .collect::>>() + } else { + Ok(vec![extract_key(keygen)?]) + } } /// In `Key`, extract K and return it. 
fn extract_key(ty: &syn::Type) -> syn::Result { - let typ = if let syn::Type::Path(typ) = ty { - typ - } else { - let msg = "Invalid pallet::storage, expected type path"; - return Err(syn::Error::new(ty.span(), msg)) - }; - - let key_struct = typ.path.segments.last().ok_or_else(|| { - let msg = "Invalid pallet::storage, expected type path with at least one segment"; - syn::Error::new(typ.path.span(), msg) - })?; - if key_struct.ident != "Key" && key_struct.ident != "NMapKey" { - let msg = "Invalid pallet::storage, expected Key or NMapKey struct"; - return Err(syn::Error::new(key_struct.ident.span(), msg)) - } - - let ty_params = if let syn::PathArguments::AngleBracketed(args) = &key_struct.arguments { - args - } else { - let msg = "Invalid pallet::storage, expected angle bracketed arguments"; - return Err(syn::Error::new(key_struct.arguments.span(), msg)) - }; - - if ty_params.args.len() != 2 { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic arguments \ + let typ = if let syn::Type::Path(typ) = ty { + typ + } else { + let msg = "Invalid pallet::storage, expected type path"; + return Err(syn::Error::new(ty.span(), msg)); + }; + + let key_struct = typ.path.segments.last().ok_or_else(|| { + let msg = "Invalid pallet::storage, expected type path with at least one segment"; + syn::Error::new(typ.path.span(), msg) + })?; + if key_struct.ident != "Key" && key_struct.ident != "NMapKey" { + let msg = "Invalid pallet::storage, expected Key or NMapKey struct"; + return Err(syn::Error::new(key_struct.ident.span(), msg)); + } + + let ty_params = if let syn::PathArguments::AngleBracketed(args) = &key_struct.arguments { + args + } else { + let msg = "Invalid pallet::storage, expected angle bracketed arguments"; + return Err(syn::Error::new(key_struct.arguments.span(), msg)); + }; + + if ty_params.args.len() != 2 { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ for Key struct, expected 2 args, found {}", - 
ty_params.args.len() - ); - return Err(syn::Error::new(ty_params.span(), msg)) - } - - let key = match &ty_params.args[1] { - syn::GenericArgument::Type(key_ty) => key_ty.clone(), - _ => { - let msg = "Invalid pallet::storage, expected type"; - return Err(syn::Error::new(ty_params.args[1].span(), msg)) - }, - }; - - Ok(key) + ty_params.args.len() + ); + return Err(syn::Error::new(ty_params.span(), msg)); + } + + let key = match &ty_params.args[1] { + syn::GenericArgument::Type(key_ty) => key_ty.clone(), + _ => { + let msg = "Invalid pallet::storage, expected type"; + return Err(syn::Error::new(ty_params.args[1].span(), msg)); + } + }; + + Ok(key) } impl StorageDef { - /// Return the storage prefix for this storage item - pub fn prefix(&self) -> String { - self.rename_as - .as_ref() - .map(syn::LitStr::value) - .unwrap_or_else(|| self.ident.to_string()) - } - - /// Return either the span of the ident or the span of the literal in the - /// #[storage_prefix] attribute - pub fn prefix_span(&self) -> proc_macro2::Span { - self.rename_as - .as_ref() - .map(syn::LitStr::span) - .unwrap_or_else(|| self.ident.span()) - } - - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - dev_mode: bool, - ) -> syn::Result { - let item = if let syn::Item::Type(item) = item { - item - } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::storage, expect item type.")) - }; - - let attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; - let PalletStorageAttrInfo { getter, rename_as, mut unbounded, whitelisted, try_decode } = - PalletStorageAttrInfo::from_attrs(attrs)?; - - // set all storages to be unbounded if dev_mode is enabled - unbounded |= dev_mode; - let cfg_attrs = helper::get_item_cfg_attrs(&item.attrs); - - let instances = vec![helper::check_type_def_gen(&item.generics, item.ident.span())?]; - - let where_clause = item.generics.where_clause.clone(); - let docs = get_doc_literals(&item.attrs); - - let typ = if let 
syn::Type::Path(typ) = &*item.ty { - typ - } else { - let msg = "Invalid pallet::storage, expected type path"; - return Err(syn::Error::new(item.ty.span(), msg)) - }; - - if typ.path.segments.len() != 1 { - let msg = "Invalid pallet::storage, expected type path with one segment"; - return Err(syn::Error::new(item.ty.span(), msg)) - } - - let (named_generics, metadata, query_kind, use_default_hasher) = - process_generics(&typ.path.segments[0], dev_mode)?; - - let query_kind = query_kind - .map(|query_kind| { - use syn::{ - AngleBracketedGenericArguments, GenericArgument, Path, PathArguments, Type, - TypePath, - }; - - let result_query = match query_kind { - Type::Path(path) - if path - .path - .segments - .last() - .map_or(false, |s| s.ident == "OptionQuery") => - return Ok(Some(QueryKind::OptionQuery)), - Type::Path(TypePath { path: Path { segments, .. }, .. }) - if segments.last().map_or(false, |s| s.ident == "ResultQuery") => - segments - .last() - .expect("segments is checked to have the last value; qed") - .clone(), - Type::Path(path) - if path.path.segments.last().map_or(false, |s| s.ident == "ValueQuery") => - return Ok(Some(QueryKind::ValueQuery)), - _ => return Ok(None), - }; - - let error_type = match result_query.arguments { - PathArguments::AngleBracketed(AngleBracketedGenericArguments { - args, .. 
- }) => { - if args.len() != 1 { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic arguments \ + /// Return the storage prefix for this storage item + pub fn prefix(&self) -> String { + self.rename_as + .as_ref() + .map(syn::LitStr::value) + .unwrap_or_else(|| self.ident.to_string()) + } + + /// Return either the span of the ident or the span of the literal in the + /// #[storage_prefix] attribute + pub fn prefix_span(&self) -> proc_macro2::Span { + self.rename_as + .as_ref() + .map(syn::LitStr::span) + .unwrap_or_else(|| self.ident.span()) + } + + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + dev_mode: bool, + ) -> syn::Result { + let item = if let syn::Item::Type(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::storage, expect item type.", + )); + }; + + let attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + let PalletStorageAttrInfo { + getter, + rename_as, + mut unbounded, + whitelisted, + try_decode, + } = PalletStorageAttrInfo::from_attrs(attrs)?; + + // set all storages to be unbounded if dev_mode is enabled + unbounded |= dev_mode; + let cfg_attrs = helper::get_item_cfg_attrs(&item.attrs); + + let instances = vec![helper::check_type_def_gen( + &item.generics, + item.ident.span(), + )?]; + + let where_clause = item.generics.where_clause.clone(); + let docs = get_doc_literals(&item.attrs); + + let typ = if let syn::Type::Path(typ) = &*item.ty { + typ + } else { + let msg = "Invalid pallet::storage, expected type path"; + return Err(syn::Error::new(item.ty.span(), msg)); + }; + + if typ.path.segments.len() != 1 { + let msg = "Invalid pallet::storage, expected type path with one segment"; + return Err(syn::Error::new(item.ty.span(), msg)); + } + + let (named_generics, metadata, query_kind, use_default_hasher) = + process_generics(&typ.path.segments[0], dev_mode)?; + + let query_kind = query_kind + .map(|query_kind| { + use 
syn::{ + AngleBracketedGenericArguments, GenericArgument, Path, PathArguments, Type, + TypePath, + }; + + let result_query = match query_kind { + Type::Path(path) + if path + .path + .segments + .last() + .map_or(false, |s| s.ident == "OptionQuery") => + { + return Ok(Some(QueryKind::OptionQuery)) + } + Type::Path(TypePath { + path: Path { segments, .. }, + .. + }) if segments.last().map_or(false, |s| s.ident == "ResultQuery") => segments + .last() + .expect("segments is checked to have the last value; qed") + .clone(), + Type::Path(path) + if path + .path + .segments + .last() + .map_or(false, |s| s.ident == "ValueQuery") => + { + return Ok(Some(QueryKind::ValueQuery)) + } + _ => return Ok(None), + }; + + let error_type = match result_query.arguments { + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + args, .. + }) => { + if args.len() != 1 { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ for ResultQuery, expected 1 type argument, found {}", - args.len(), - ); - return Err(syn::Error::new(args.span(), msg)) - } - - args[0].clone() - }, - args => { - let msg = format!( - "Invalid pallet::storage, unexpected generic args for ResultQuery, \ + args.len(), + ); + return Err(syn::Error::new(args.span(), msg)); + } + + args[0].clone() + } + args => { + let msg = format!( + "Invalid pallet::storage, unexpected generic args for ResultQuery, \ expected angle-bracketed arguments, found `{}`", - args.to_token_stream().to_string() - ); - return Err(syn::Error::new(args.span(), msg)) - }, - }; - - match error_type { - GenericArgument::Type(Type::Path(TypePath { - path: Path { segments: err_variant, leading_colon }, - .. 
- })) => { - if err_variant.len() < 2 { - let msg = format!( - "Invalid pallet::storage, unexpected number of path segments for \ + args.to_token_stream().to_string() + ); + return Err(syn::Error::new(args.span(), msg)); + } + }; + + match error_type { + GenericArgument::Type(Type::Path(TypePath { + path: + Path { + segments: err_variant, + leading_colon, + }, + .. + })) => { + if err_variant.len() < 2 { + let msg = format!( + "Invalid pallet::storage, unexpected number of path segments for \ the generics in ResultQuery, expected a path with at least 2 \ segments, found {}", - err_variant.len(), - ); - return Err(syn::Error::new(err_variant.span(), msg)) - } - let mut error = err_variant.clone(); - let err_variant = error - .pop() - .expect("Checked to have at least 2; qed") - .into_value() - .ident; - - // Necessary here to eliminate the last double colon - let last = - error.pop().expect("Checked to have at least 2; qed").into_value(); - error.push_value(last); - - Ok(Some(QueryKind::ResultQuery( - syn::Path { leading_colon, segments: error }, - err_variant, - ))) - }, - gen_arg => { - let msg = format!( + err_variant.len(), + ); + return Err(syn::Error::new(err_variant.span(), msg)); + } + let mut error = err_variant.clone(); + let err_variant = error + .pop() + .expect("Checked to have at least 2; qed") + .into_value() + .ident; + + // Necessary here to eliminate the last double colon + let last = error + .pop() + .expect("Checked to have at least 2; qed") + .into_value(); + error.push_value(last); + + Ok(Some(QueryKind::ResultQuery( + syn::Path { + leading_colon, + segments: error, + }, + err_variant, + ))) + } + gen_arg => { + let msg = format!( "Invalid pallet::storage, unexpected generic argument kind, expected a \ type path to a `PalletError` enum variant, found `{}`", gen_arg.to_token_stream().to_string(), ); - Err(syn::Error::new(gen_arg.span(), msg)) - }, - } - }) - .transpose()? 
- .unwrap_or(Some(QueryKind::OptionQuery)); - - if let (None, Some(getter)) = (query_kind.as_ref(), getter.as_ref()) { - let msg = "Invalid pallet::storage, cannot generate getter because QueryKind is not \ + Err(syn::Error::new(gen_arg.span(), msg)) + } + } + }) + .transpose()? + .unwrap_or(Some(QueryKind::OptionQuery)); + + if let (None, Some(getter)) = (query_kind.as_ref(), getter.as_ref()) { + let msg = "Invalid pallet::storage, cannot generate getter because QueryKind is not \ identifiable. QueryKind must be `OptionQuery`, `ResultQuery`, `ValueQuery`, or default \ one to be identifiable."; - return Err(syn::Error::new(getter.span(), msg)) - } - - Ok(StorageDef { - attr_span, - index, - vis: item.vis.clone(), - ident: item.ident.clone(), - instances, - metadata, - docs, - getter, - rename_as, - query_kind, - where_clause, - cfg_attrs, - named_generics, - unbounded, - whitelisted, - try_decode, - use_default_hasher, - }) - } + return Err(syn::Error::new(getter.span(), msg)); + } + + Ok(StorageDef { + attr_span, + index, + vis: item.vis.clone(), + ident: item.ident.clone(), + instances, + metadata, + docs, + getter, + rename_as, + query_kind, + where_clause, + cfg_attrs, + named_generics, + unbounded, + whitelisted, + try_decode, + use_default_hasher, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/type_value.rs b/support/procedural-fork/src/pallet/parse/type_value.rs index 4d9db30b3..d5c85248f 100644 --- a/support/procedural-fork/src/pallet/parse/type_value.rs +++ b/support/procedural-fork/src/pallet/parse/type_value.rs @@ -20,104 +20,104 @@ use syn::spanned::Spanned; /// Definition of type value. Just a function which is expanded to a struct implementing `Get`. pub struct TypeValueDef { - /// The index of error item in pallet module. - pub index: usize, - /// Visibility of the struct to generate. - pub vis: syn::Visibility, - /// Ident of the struct to generate. - pub ident: syn::Ident, - /// The type return by Get. 
- pub type_: Box, - /// The block returning the value to get - pub block: Box, - /// If type value is generic over `T` (or `T` and `I` for instantiable pallet) - pub is_generic: bool, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, - /// The where clause of the function. - pub where_clause: Option, - /// The span of the pallet::type_value attribute. - pub attr_span: proc_macro2::Span, - /// Docs on the item. - pub docs: Vec, + /// The index of error item in pallet module. + pub index: usize, + /// Visibility of the struct to generate. + pub vis: syn::Visibility, + /// Ident of the struct to generate. + pub ident: syn::Ident, + /// The type return by Get. + pub type_: Box, + /// The block returning the value to get + pub block: Box, + /// If type value is generic over `T` (or `T` and `I` for instantiable pallet) + pub is_generic: bool, + /// A set of usage of instance, must be check for consistency with config. + pub instances: Vec, + /// The where clause of the function. + pub where_clause: Option, + /// The span of the pallet::type_value attribute. + pub attr_span: proc_macro2::Span, + /// Docs on the item. 
+ pub docs: Vec, } impl TypeValueDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Fn(item) = item { - item - } else { - let msg = "Invalid pallet::type_value, expected item fn"; - return Err(syn::Error::new(item.span(), msg)) - }; + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Fn(item) = item { + item + } else { + let msg = "Invalid pallet::type_value, expected item fn"; + return Err(syn::Error::new(item.span(), msg)); + }; - let mut docs = vec![]; - for attr in &item.attrs { - if let syn::Meta::NameValue(meta) = &attr.meta { - if meta.path.get_ident().map_or(false, |ident| ident == "doc") { - docs.push(meta.value.clone()); - continue - } - } + let mut docs = vec![]; + for attr in &item.attrs { + if let syn::Meta::NameValue(meta) = &attr.meta { + if meta.path.get_ident().map_or(false, |ident| ident == "doc") { + docs.push(meta.value.clone()); + continue; + } + } - let msg = "Invalid pallet::type_value, unexpected attribute, only doc attribute are \ + let msg = "Invalid pallet::type_value, unexpected attribute, only doc attribute are \ allowed"; - return Err(syn::Error::new(attr.span(), msg)) - } + return Err(syn::Error::new(attr.span(), msg)); + } - if let Some(span) = item - .sig - .constness - .as_ref() - .map(|t| t.span()) - .or_else(|| item.sig.asyncness.as_ref().map(|t| t.span())) - .or_else(|| item.sig.unsafety.as_ref().map(|t| t.span())) - .or_else(|| item.sig.abi.as_ref().map(|t| t.span())) - .or_else(|| item.sig.variadic.as_ref().map(|t| t.span())) - { - let msg = "Invalid pallet::type_value, unexpected token"; - return Err(syn::Error::new(span, msg)) - } + if let Some(span) = item + .sig + .constness + .as_ref() + .map(|t| t.span()) + .or_else(|| item.sig.asyncness.as_ref().map(|t| t.span())) + .or_else(|| item.sig.unsafety.as_ref().map(|t| t.span())) + 
.or_else(|| item.sig.abi.as_ref().map(|t| t.span())) + .or_else(|| item.sig.variadic.as_ref().map(|t| t.span())) + { + let msg = "Invalid pallet::type_value, unexpected token"; + return Err(syn::Error::new(span, msg)); + } - if !item.sig.inputs.is_empty() { - let msg = "Invalid pallet::type_value, unexpected argument"; - return Err(syn::Error::new(item.sig.inputs[0].span(), msg)) - } + if !item.sig.inputs.is_empty() { + let msg = "Invalid pallet::type_value, unexpected argument"; + return Err(syn::Error::new(item.sig.inputs[0].span(), msg)); + } - let vis = item.vis.clone(); - let ident = item.sig.ident.clone(); - let block = item.block.clone(); - let type_ = match item.sig.output.clone() { - syn::ReturnType::Type(_, type_) => type_, - syn::ReturnType::Default => { - let msg = "Invalid pallet::type_value, expected return type"; - return Err(syn::Error::new(item.sig.span(), msg)) - }, - }; + let vis = item.vis.clone(); + let ident = item.sig.ident.clone(); + let block = item.block.clone(); + let type_ = match item.sig.output.clone() { + syn::ReturnType::Type(_, type_) => type_, + syn::ReturnType::Default => { + let msg = "Invalid pallet::type_value, expected return type"; + return Err(syn::Error::new(item.sig.span(), msg)); + } + }; - let mut instances = vec![]; - if let Some(usage) = helper::check_type_value_gen(&item.sig.generics, item.sig.span())? { - instances.push(usage); - } + let mut instances = vec![]; + if let Some(usage) = helper::check_type_value_gen(&item.sig.generics, item.sig.span())? 
{ + instances.push(usage); + } - let is_generic = item.sig.generics.type_params().count() > 0; - let where_clause = item.sig.generics.where_clause.clone(); + let is_generic = item.sig.generics.type_params().count() > 0; + let where_clause = item.sig.generics.where_clause.clone(); - Ok(TypeValueDef { - attr_span, - index, - is_generic, - vis, - ident, - block, - type_, - instances, - where_clause, - docs, - }) - } + Ok(TypeValueDef { + attr_span, + index, + is_generic, + vis, + ident, + block, + type_, + instances, + where_clause, + docs, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs index 2bf0a1b6c..6e5109a74 100644 --- a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs +++ b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs @@ -20,43 +20,43 @@ use syn::spanned::Spanned; /// The definition of the pallet validate unsigned implementation. pub struct ValidateUnsignedDef { - /// The index of validate unsigned item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, + /// The index of validate unsigned item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with config. 
+ pub instances: Vec, } impl ValidateUnsignedDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::validate_unsigned, expected item impl"; - return Err(syn::Error::new(item.span(), msg)) - }; - - if item.trait_.is_none() { - let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::validate_unsigned, expected item impl"; + return Err(syn::Error::new(item.span(), msg)); + }; + + if item.trait_.is_none() { + let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)) - } - - if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { - if last.ident != "ValidateUnsigned" { - let msg = "Invalid pallet::validate_unsigned, expected trait ValidateUnsigned"; - return Err(syn::Error::new(last.span(), msg)) - } - } else { - let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ + return Err(syn::Error::new(item.span(), msg)); + } + + if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { + if last.ident != "ValidateUnsigned" { + let msg = "Invalid pallet::validate_unsigned, expected trait ValidateUnsigned"; + return Err(syn::Error::new(last.span(), msg)); + } + } else { + let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)) - } + return Err(syn::Error::new(item.span(), msg)); + } - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + 
helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; - Ok(ValidateUnsignedDef { index, instances }) - } + Ok(ValidateUnsignedDef { index, instances }) + } } diff --git a/support/procedural-fork/src/pallet_error.rs b/support/procedural-fork/src/pallet_error.rs index 693a1e982..bdf8330cd 100644 --- a/support/procedural-fork/src/pallet_error.rs +++ b/support/procedural-fork/src/pallet_error.rs @@ -20,159 +20,172 @@ use quote::ToTokens; // Derive `PalletError` pub fn derive_pallet_error(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let syn::DeriveInput { ident: name, generics, data, .. } = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; - - let frame_support = match generate_access_from_frame_or_crate("frame-support") { - Ok(c) => c, - Err(e) => return e.into_compile_error().into(), - }; - let frame_support = &frame_support; - let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - - let max_encoded_size = match data { - syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { - syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) | - syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }) => { - let maybe_field_tys = fields - .iter() - .map(|f| generate_field_types(f, &frame_support)) - .collect::>>(); - let field_tys = match maybe_field_tys { - Ok(tys) => tys.into_iter().flatten(), - Err(e) => return e.into_compile_error().into(), - }; - quote::quote! { - 0_usize - #( - .saturating_add(< - #field_tys as #frame_support::traits::PalletError - >::MAX_ENCODED_SIZE) - )* - } - }, - syn::Fields::Unit => quote::quote!(0), - }, - syn::Data::Enum(syn::DataEnum { variants, .. 
}) => { - let field_tys = variants - .iter() - .map(|variant| generate_variant_field_types(variant, &frame_support)) - .collect::>>, syn::Error>>(); - - let field_tys = match field_tys { - Ok(tys) => tys.into_iter().flatten().collect::>(), - Err(e) => return e.to_compile_error().into(), - }; - - // We start with `1`, because the discriminant of an enum is stored as u8 - if field_tys.is_empty() { - quote::quote!(1) - } else { - let variant_sizes = field_tys.into_iter().map(|variant_field_tys| { - quote::quote! { - 1_usize - #(.saturating_add(< - #variant_field_tys as #frame_support::traits::PalletError - >::MAX_ENCODED_SIZE))* - } - }); - - quote::quote! {{ - let mut size = 1_usize; - let mut tmp = 0_usize; - #( - tmp = #variant_sizes; - size = if tmp > size { tmp } else { size }; - tmp = 0_usize; - )* - size - }} - } - }, - syn::Data::Union(syn::DataUnion { union_token, .. }) => { - let msg = "Cannot derive `PalletError` for union; please implement it directly"; - return syn::Error::new(union_token.span, msg).into_compile_error().into() - }, - }; - - quote::quote!( - const _: () = { - impl #impl_generics #frame_support::traits::PalletError - for #name #ty_generics #where_clause - { - const MAX_ENCODED_SIZE: usize = #max_encoded_size; - } - }; - ) - .into() + let syn::DeriveInput { + ident: name, + generics, + data, + .. + } = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; + + let frame_support = match generate_access_from_frame_or_crate("frame-support") { + Ok(c) => c, + Err(e) => return e.into_compile_error().into(), + }; + let frame_support = &frame_support; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + let max_encoded_size = match data { + syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { + syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) + | syn::Fields::Unnamed(syn::FieldsUnnamed { + unnamed: fields, .. 
+ }) => { + let maybe_field_tys = fields + .iter() + .map(|f| generate_field_types(f, &frame_support)) + .collect::>>(); + let field_tys = match maybe_field_tys { + Ok(tys) => tys.into_iter().flatten(), + Err(e) => return e.into_compile_error().into(), + }; + quote::quote! { + 0_usize + #( + .saturating_add(< + #field_tys as #frame_support::traits::PalletError + >::MAX_ENCODED_SIZE) + )* + } + } + syn::Fields::Unit => quote::quote!(0), + }, + syn::Data::Enum(syn::DataEnum { variants, .. }) => { + let field_tys = variants + .iter() + .map(|variant| generate_variant_field_types(variant, &frame_support)) + .collect::>>, syn::Error>>(); + + let field_tys = match field_tys { + Ok(tys) => tys.into_iter().flatten().collect::>(), + Err(e) => return e.to_compile_error().into(), + }; + + // We start with `1`, because the discriminant of an enum is stored as u8 + if field_tys.is_empty() { + quote::quote!(1) + } else { + let variant_sizes = field_tys.into_iter().map(|variant_field_tys| { + quote::quote! { + 1_usize + #(.saturating_add(< + #variant_field_tys as #frame_support::traits::PalletError + >::MAX_ENCODED_SIZE))* + } + }); + + quote::quote! {{ + let mut size = 1_usize; + let mut tmp = 0_usize; + #( + tmp = #variant_sizes; + size = if tmp > size { tmp } else { size }; + tmp = 0_usize; + )* + size + }} + } + } + syn::Data::Union(syn::DataUnion { union_token, .. 
}) => { + let msg = "Cannot derive `PalletError` for union; please implement it directly"; + return syn::Error::new(union_token.span, msg) + .into_compile_error() + .into(); + } + }; + + quote::quote!( + const _: () = { + impl #impl_generics #frame_support::traits::PalletError + for #name #ty_generics #where_clause + { + const MAX_ENCODED_SIZE: usize = #max_encoded_size; + } + }; + ) + .into() } fn generate_field_types( - field: &syn::Field, - scrate: &syn::Path, + field: &syn::Field, + scrate: &syn::Path, ) -> syn::Result> { - let attrs = &field.attrs; - - for attr in attrs { - if attr.path().is_ident("codec") { - let mut res = None; - - attr.parse_nested_meta(|meta| { - if meta.path.is_ident("skip") { - res = Some(None); - } else if meta.path.is_ident("compact") { - let field_ty = &field.ty; - res = Some(Some(quote::quote!(#scrate::__private::codec::Compact<#field_ty>))); - } else if meta.path.is_ident("compact") { - res = Some(Some(meta.value()?.parse()?)); - } - - Ok(()) - })?; - - if let Some(v) = res { - return Ok(v) - } - } - } - - Ok(Some(field.ty.to_token_stream())) + let attrs = &field.attrs; + + for attr in attrs { + if attr.path().is_ident("codec") { + let mut res = None; + + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("skip") { + res = Some(None); + } else if meta.path.is_ident("compact") { + let field_ty = &field.ty; + res = Some(Some( + quote::quote!(#scrate::__private::codec::Compact<#field_ty>), + )); + } else if meta.path.is_ident("compact") { + res = Some(Some(meta.value()?.parse()?)); + } + + Ok(()) + })?; + + if let Some(v) = res { + return Ok(v); + } + } + } + + Ok(Some(field.ty.to_token_stream())) } fn generate_variant_field_types( - variant: &syn::Variant, - scrate: &syn::Path, + variant: &syn::Variant, + scrate: &syn::Path, ) -> syn::Result>> { - let attrs = &variant.attrs; - - for attr in attrs { - if attr.path().is_ident("codec") { - let mut skip = false; - - // We ignore the error intentionally as this isn't `codec(skip)` 
when - // `parse_nested_meta` fails. - let _ = attr.parse_nested_meta(|meta| { - skip = meta.path.is_ident("skip"); - Ok(()) - }); - - if skip { - return Ok(None) - } - } - } - - match &variant.fields { - syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) | - syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }) => { - let field_tys = fields - .iter() - .map(|field| generate_field_types(field, scrate)) - .collect::>>()?; - Ok(Some(field_tys.into_iter().flatten().collect())) - }, - syn::Fields::Unit => Ok(None), - } + let attrs = &variant.attrs; + + for attr in attrs { + if attr.path().is_ident("codec") { + let mut skip = false; + + // We ignore the error intentionally as this isn't `codec(skip)` when + // `parse_nested_meta` fails. + let _ = attr.parse_nested_meta(|meta| { + skip = meta.path.is_ident("skip"); + Ok(()) + }); + + if skip { + return Ok(None); + } + } + } + + match &variant.fields { + syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) + | syn::Fields::Unnamed(syn::FieldsUnnamed { + unnamed: fields, .. 
+ }) => { + let field_tys = fields + .iter() + .map(|field| generate_field_types(field, scrate)) + .collect::>>()?; + Ok(Some(field_tys.into_iter().flatten().collect())) + } + syn::Fields::Unit => Ok(None), + } } diff --git a/support/procedural-fork/src/runtime/expand/mod.rs b/support/procedural-fork/src/runtime/expand/mod.rs index 93c88fce9..c26cbccb7 100644 --- a/support/procedural-fork/src/runtime/expand/mod.rs +++ b/support/procedural-fork/src/runtime/expand/mod.rs @@ -17,20 +17,20 @@ use super::parse::runtime_types::RuntimeType; use crate::{ - construct_runtime::{ - check_pallet_number, decl_all_pallets, decl_integrity_test, decl_pallet_runtime_setup, - decl_static_assertions, expand, - }, - runtime::{ - parse::{ - AllPalletsDeclaration, ExplicitAllPalletsDeclaration, ImplicitAllPalletsDeclaration, - }, - Def, - }, + construct_runtime::{ + check_pallet_number, decl_all_pallets, decl_integrity_test, decl_pallet_runtime_setup, + decl_static_assertions, expand, + }, + runtime::{ + parse::{ + AllPalletsDeclaration, ExplicitAllPalletsDeclaration, ImplicitAllPalletsDeclaration, + }, + Def, + }, }; use cfg_expr::Predicate; use frame_support_procedural_tools::{ - generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, + generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, }; use proc_macro2::TokenStream as TokenStream2; use quote::quote; @@ -41,280 +41,300 @@ use syn::{Ident, Result}; const SYSTEM_PALLET_NAME: &str = "System"; pub fn expand(def: Def, legacy_ordering: bool) -> TokenStream2 { - let input = def.input; - - let (check_pallet_number_res, res) = match def.pallets { - AllPalletsDeclaration::Implicit(ref decl) => ( - check_pallet_number(input.clone(), decl.pallet_count), - construct_runtime_implicit_to_explicit(input.into(), decl.clone(), legacy_ordering), - ), - AllPalletsDeclaration::Explicit(ref decl) => ( - check_pallet_number(input, decl.pallets.len()), - construct_runtime_final_expansion( - 
def.runtime_struct.ident.clone(), - decl.clone(), - def.runtime_types.clone(), - legacy_ordering, - ), - ), - }; - - let res = res.unwrap_or_else(|e| e.to_compile_error()); - - // We want to provide better error messages to the user and thus, handle the error here - // separately. If there is an error, we print the error and still generate all of the code to - // get in overall less errors for the user. - let res = if let Err(error) = check_pallet_number_res { - let error = error.to_compile_error(); - - quote! { - #error - - #res - } - } else { - res - }; - - let res = expander::Expander::new("construct_runtime") - .dry(std::env::var("FRAME_EXPAND").is_err()) - .verbose(true) - .write_to_out_dir(res) - .expect("Does not fail because of IO in OUT_DIR; qed"); - - res.into() + let input = def.input; + + let (check_pallet_number_res, res) = match def.pallets { + AllPalletsDeclaration::Implicit(ref decl) => ( + check_pallet_number(input.clone(), decl.pallet_count), + construct_runtime_implicit_to_explicit(input.into(), decl.clone(), legacy_ordering), + ), + AllPalletsDeclaration::Explicit(ref decl) => ( + check_pallet_number(input, decl.pallets.len()), + construct_runtime_final_expansion( + def.runtime_struct.ident.clone(), + decl.clone(), + def.runtime_types.clone(), + legacy_ordering, + ), + ), + }; + + let res = res.unwrap_or_else(|e| e.to_compile_error()); + + // We want to provide better error messages to the user and thus, handle the error here + // separately. If there is an error, we print the error and still generate all of the code to + // get in overall less errors for the user. + let res = if let Err(error) = check_pallet_number_res { + let error = error.to_compile_error(); + + quote! 
{ + #error + + #res + } + } else { + res + }; + + let res = expander::Expander::new("construct_runtime") + .dry(std::env::var("FRAME_EXPAND").is_err()) + .verbose(true) + .write_to_out_dir(res) + .expect("Does not fail because of IO in OUT_DIR; qed"); + + res.into() } fn construct_runtime_implicit_to_explicit( - input: TokenStream2, - definition: ImplicitAllPalletsDeclaration, - legacy_ordering: bool, + input: TokenStream2, + definition: ImplicitAllPalletsDeclaration, + legacy_ordering: bool, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let attr = if legacy_ordering { quote!((legacy_ordering)) } else { quote!() }; - let mut expansion = quote::quote!( - #[frame_support::runtime #attr] - #input - ); - for pallet in definition.pallet_decls.iter() { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_default_parts_v2 }] - frame_support = [{ #frame_support }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name = #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let attr = if legacy_ordering { + quote!((legacy_ordering)) + } else { + quote!() + }; + let mut expansion = quote::quote!( + #[frame_support::runtime #attr] + #input + ); + for pallet in definition.pallet_decls.iter() { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet + .instance + .as_ref() + .map(|instance| quote::quote!(<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_default_parts_v2 }] + frame_support = [{ #frame_support }] + ~~> #frame_support::match_and_insert! 
{ + target = [{ #expansion }] + pattern = [{ #pallet_name = #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } fn construct_runtime_final_expansion( - name: Ident, - definition: ExplicitAllPalletsDeclaration, - runtime_types: Vec, - legacy_ordering: bool, + name: Ident, + definition: ExplicitAllPalletsDeclaration, + runtime_types: Vec, + legacy_ordering: bool, ) -> Result { - let ExplicitAllPalletsDeclaration { mut pallets, name: pallets_name } = definition; - - if !legacy_ordering { - // Ensure that order of hooks is based on the pallet index - pallets.sort_by_key(|p| p.index); - } - - let system_pallet = - pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { - syn::Error::new( - pallets_name.span(), - "`System` pallet declaration is missing. \ + let ExplicitAllPalletsDeclaration { + mut pallets, + name: pallets_name, + } = definition; + + if !legacy_ordering { + // Ensure that order of hooks is based on the pallet index + pallets.sort_by_key(|p| p.index); + } + + let system_pallet = pallets + .iter() + .find(|decl| decl.name == SYSTEM_PALLET_NAME) + .ok_or_else(|| { + syn::Error::new( + pallets_name.span(), + "`System` pallet declaration is missing. 
\ Please add this line: `pub type System = frame_system;`", - ) - })?; - if !system_pallet.cfg_pattern.is_empty() { - return Err(syn::Error::new( - system_pallet.name.span(), - "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", - )) - } - - let features = pallets - .iter() - .filter_map(|decl| { - (!decl.cfg_pattern.is_empty()).then(|| { - decl.cfg_pattern.iter().flat_map(|attr| { - attr.predicates().filter_map(|pred| match pred { - Predicate::Feature(feat) => Some(feat), - Predicate::Test => Some("test"), - _ => None, - }) - }) - }) - }) - .flatten() - .collect::>(); - - let hidden_crate_name = "construct_runtime"; - let scrate = generate_crate_access(hidden_crate_name, "frame-support"); - let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let block = quote!(<#name as #frame_system::Config>::Block); - let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); - - let mut dispatch = None; - let mut outer_event = None; - let mut outer_error = None; - let mut outer_origin = None; - let mut freeze_reason = None; - let mut hold_reason = None; - let mut slash_reason = None; - let mut lock_id = None; - let mut task = None; - - for runtime_type in runtime_types.iter() { - match runtime_type { - RuntimeType::RuntimeCall(_) => { - dispatch = - Some(expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate)); - }, - RuntimeType::RuntimeEvent(_) => { - outer_event = Some(expand::expand_outer_enum( - &name, - &pallets, - &scrate, - expand::OuterEnumType::Event, - )?); - }, - RuntimeType::RuntimeError(_) => { - outer_error = Some(expand::expand_outer_enum( - &name, - &pallets, - &scrate, - expand::OuterEnumType::Error, - )?); - }, - RuntimeType::RuntimeOrigin(_) => { - outer_origin = - Some(expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?); - }, - 
RuntimeType::RuntimeFreezeReason(_) => { - freeze_reason = Some(expand::expand_outer_freeze_reason(&pallets, &scrate)); - }, - RuntimeType::RuntimeHoldReason(_) => { - hold_reason = Some(expand::expand_outer_hold_reason(&pallets, &scrate)); - }, - RuntimeType::RuntimeSlashReason(_) => { - slash_reason = Some(expand::expand_outer_slash_reason(&pallets, &scrate)); - }, - RuntimeType::RuntimeLockId(_) => { - lock_id = Some(expand::expand_outer_lock_id(&pallets, &scrate)); - }, - RuntimeType::RuntimeTask(_) => { - task = Some(expand::expand_outer_task(&name, &pallets, &scrate)); - }, - } - } - - let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); - let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); - - let metadata = expand::expand_runtime_metadata( - &name, - &pallets, - &scrate, - &unchecked_extrinsic, - &system_pallet.path, - ); - let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); - let inherent = - expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); - let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); - let integrity_test = decl_integrity_test(&scrate); - let static_assertions = decl_static_assertions(&name, &pallets, &scrate); - - let res = quote!( - #scrate_decl - - // Prevent UncheckedExtrinsic to print unused warning. - const _: () = { - #[allow(unused)] - type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; - }; - - #[derive( - Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, - #scrate::__private::scale_info::TypeInfo - )] - pub struct #name; - impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { - type RuntimeBlock = #block; - } - - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `runtime` may be used without calling `impl_runtime_apis!`. 
- // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro runtime). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. - // `InternalConstructRuntime` is implemented by the `runtime` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `runtime` - // is used. - - #[doc(hidden)] - trait InternalConstructRuntime { - #[inline(always)] - fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { - Default::default() - } - } - #[doc(hidden)] - impl InternalConstructRuntime for &#name {} - - #outer_event - - #outer_error - - #outer_origin - - #all_pallets - - #pallet_to_index - - #dispatch - - #task - - #metadata - - #outer_config - - #inherent - - #validate_unsigned - - #freeze_reason - - #hold_reason - - #lock_id - - #slash_reason - - #integrity_test - - #static_assertions - ); + ) + })?; + if !system_pallet.cfg_pattern.is_empty() { + return Err(syn::Error::new( + system_pallet.name.span(), + "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", + )); + } + + let features = pallets + .iter() + .filter_map(|decl| { + (!decl.cfg_pattern.is_empty()).then(|| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) + }) + .flatten() + .collect::>(); + + let hidden_crate_name = "construct_runtime"; + let scrate = generate_crate_access(hidden_crate_name, "frame-support"); + let scrate_decl 
= generate_hidden_includes(hidden_crate_name, "frame-support"); + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let block = quote!(<#name as #frame_system::Config>::Block); + let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); + + let mut dispatch = None; + let mut outer_event = None; + let mut outer_error = None; + let mut outer_origin = None; + let mut freeze_reason = None; + let mut hold_reason = None; + let mut slash_reason = None; + let mut lock_id = None; + let mut task = None; + + for runtime_type in runtime_types.iter() { + match runtime_type { + RuntimeType::RuntimeCall(_) => { + dispatch = Some(expand::expand_outer_dispatch( + &name, + system_pallet, + &pallets, + &scrate, + )); + } + RuntimeType::RuntimeEvent(_) => { + outer_event = Some(expand::expand_outer_enum( + &name, + &pallets, + &scrate, + expand::OuterEnumType::Event, + )?); + } + RuntimeType::RuntimeError(_) => { + outer_error = Some(expand::expand_outer_enum( + &name, + &pallets, + &scrate, + expand::OuterEnumType::Error, + )?); + } + RuntimeType::RuntimeOrigin(_) => { + outer_origin = Some(expand::expand_outer_origin( + &name, + system_pallet, + &pallets, + &scrate, + )?); + } + RuntimeType::RuntimeFreezeReason(_) => { + freeze_reason = Some(expand::expand_outer_freeze_reason(&pallets, &scrate)); + } + RuntimeType::RuntimeHoldReason(_) => { + hold_reason = Some(expand::expand_outer_hold_reason(&pallets, &scrate)); + } + RuntimeType::RuntimeSlashReason(_) => { + slash_reason = Some(expand::expand_outer_slash_reason(&pallets, &scrate)); + } + RuntimeType::RuntimeLockId(_) => { + lock_id = Some(expand::expand_outer_lock_id(&pallets, &scrate)); + } + RuntimeType::RuntimeTask(_) => { + task = Some(expand::expand_outer_task(&name, &pallets, &scrate)); + } + } + } + + let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); + let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); + + let metadata 
= expand::expand_runtime_metadata( + &name, + &pallets, + &scrate, + &unchecked_extrinsic, + &system_pallet.path, + ); + let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); + let inherent = + expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); + let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); + let integrity_test = decl_integrity_test(&scrate); + let static_assertions = decl_static_assertions(&name, &pallets, &scrate); + + let res = quote!( + #scrate_decl + + // Prevent UncheckedExtrinsic to print unused warning. + const _: () = { + #[allow(unused)] + type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + }; + + #[derive( + Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + pub struct #name; + impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { + type RuntimeBlock = #block; + } + + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. + // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `runtime` may be used without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro runtime). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. + // `InternalConstructRuntime` is implemented by the `runtime` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `runtime` + // is used. 
+ + #[doc(hidden)] + trait InternalConstructRuntime { + #[inline(always)] + fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + Default::default() + } + } + #[doc(hidden)] + impl InternalConstructRuntime for &#name {} + + #outer_event + + #outer_error + + #outer_origin + + #all_pallets + + #pallet_to_index + + #dispatch + + #task + + #metadata + + #outer_config + + #inherent + + #validate_unsigned + + #freeze_reason + + #hold_reason + + #lock_id + + #slash_reason + + #integrity_test + + #static_assertions + ); - Ok(res) + Ok(res) } diff --git a/support/procedural-fork/src/runtime/mod.rs b/support/procedural-fork/src/runtime/mod.rs index aaae579eb..589acff6c 100644 --- a/support/procedural-fork/src/runtime/mod.rs +++ b/support/procedural-fork/src/runtime/mod.rs @@ -210,27 +210,27 @@ mod expand; mod parse; mod keyword { - syn::custom_keyword!(legacy_ordering); + syn::custom_keyword!(legacy_ordering); } pub fn runtime(attr: TokenStream, tokens: TokenStream) -> TokenStream { - let mut legacy_ordering = false; - if !attr.is_empty() { - if let Ok(_) = syn::parse::(attr.clone()) { - legacy_ordering = true; - } else { - let msg = "Invalid runtime macro call: unexpected attribute. Macro call must be \ + let mut legacy_ordering = false; + if !attr.is_empty() { + if let Ok(_) = syn::parse::(attr.clone()) { + legacy_ordering = true; + } else { + let msg = "Invalid runtime macro call: unexpected attribute. 
Macro call must be \ bare, such as `#[frame_support::runtime]` or `#[runtime]`, or must specify the \ `legacy_ordering` attribute, such as `#[frame_support::runtime(legacy_ordering)]` or \ #[runtime(legacy_ordering)]."; - let span = proc_macro2::TokenStream::from(attr).span(); - return syn::Error::new(span, msg).to_compile_error().into() - } - } + let span = proc_macro2::TokenStream::from(attr).span(); + return syn::Error::new(span, msg).to_compile_error().into(); + } + } - let item = syn::parse_macro_input!(tokens as syn::ItemMod); - match parse::Def::try_from(item) { - Ok(def) => expand::expand(def, legacy_ordering).into(), - Err(e) => e.to_compile_error().into(), - } + let item = syn::parse_macro_input!(tokens as syn::ItemMod); + match parse::Def::try_from(item) { + Ok(def) => expand::expand(def, legacy_ordering).into(), + Err(e) => e.to_compile_error().into(), + } } diff --git a/support/procedural-fork/src/runtime/parse/helper.rs b/support/procedural-fork/src/runtime/parse/helper.rs index f05395f9b..17e362410 100644 --- a/support/procedural-fork/src/runtime/parse/helper.rs +++ b/support/procedural-fork/src/runtime/parse/helper.rs @@ -19,19 +19,26 @@ use crate::pallet::parse::helper::MutItemAttrs; use quote::ToTokens; pub(crate) fn take_first_item_runtime_attr( - item: &mut impl MutItemAttrs, + item: &mut impl MutItemAttrs, ) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; + let attrs = if let Some(attrs) = item.mut_item_attrs() { + attrs + } else { + return Ok(None); + }; - if let Some(index) = attrs.iter().position(|attr| { - attr.path().segments.first().map_or(false, |segment| segment.ident == "runtime") - }) { - let runtime_attr = attrs.remove(index); - Ok(Some(syn::parse2(runtime_attr.into_token_stream())?)) - } else { - Ok(None) - } + if let Some(index) = attrs.iter().position(|attr| { + attr.path() + .segments + .first() + 
.map_or(false, |segment| segment.ident == "runtime") + }) { + let runtime_attr = attrs.remove(index); + Ok(Some(syn::parse2(runtime_attr.into_token_stream())?)) + } else { + Ok(None) + } } diff --git a/support/procedural-fork/src/runtime/parse/mod.rs b/support/procedural-fork/src/runtime/parse/mod.rs index 893cb4726..01245187f 100644 --- a/support/procedural-fork/src/runtime/parse/mod.rs +++ b/support/procedural-fork/src/runtime/parse/mod.rs @@ -32,220 +32,229 @@ use frame_support_procedural_tools::syn_ext as ext; use runtime_types::RuntimeType; mod keyword { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(runtime); - custom_keyword!(derive); - custom_keyword!(pallet_index); - custom_keyword!(disable_call); - custom_keyword!(disable_unsigned); + custom_keyword!(runtime); + custom_keyword!(derive); + custom_keyword!(pallet_index); + custom_keyword!(disable_call); + custom_keyword!(disable_unsigned); } enum RuntimeAttr { - Runtime(proc_macro2::Span), - Derive(proc_macro2::Span, Vec), - PalletIndex(proc_macro2::Span, u8), - DisableCall(proc_macro2::Span), - DisableUnsigned(proc_macro2::Span), + Runtime(proc_macro2::Span), + Derive(proc_macro2::Span, Vec), + PalletIndex(proc_macro2::Span, u8), + DisableCall(proc_macro2::Span), + DisableUnsigned(proc_macro2::Span), } impl RuntimeAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::Runtime(span) => *span, - Self::Derive(span, _) => *span, - Self::PalletIndex(span, _) => *span, - Self::DisableCall(span) => *span, - Self::DisableUnsigned(span) => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::Runtime(span) => *span, + Self::Derive(span, _) => *span, + Self::PalletIndex(span, _) => *span, + Self::DisableCall(span) => *span, + Self::DisableUnsigned(span) => *span, + } + } } impl syn::parse::Parse for RuntimeAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - 
content.parse::()?; - content.parse::()?; + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::runtime) { - Ok(RuntimeAttr::Runtime(content.parse::()?.span())) - } else if lookahead.peek(keyword::derive) { - let _ = content.parse::(); - let derive_content; - syn::parenthesized!(derive_content in content); - let runtime_types = - derive_content.parse::>()?; - let runtime_types = runtime_types.inner.into_iter().collect(); - Ok(RuntimeAttr::Derive(derive_content.span(), runtime_types)) - } else if lookahead.peek(keyword::pallet_index) { - let _ = content.parse::(); - let pallet_index_content; - syn::parenthesized!(pallet_index_content in content); - let pallet_index = pallet_index_content.parse::()?; - if !pallet_index.suffix().is_empty() { - let msg = "Number literal must not have a suffix"; - return Err(syn::Error::new(pallet_index.span(), msg)) - } - Ok(RuntimeAttr::PalletIndex(pallet_index.span(), pallet_index.base10_parse()?)) - } else if lookahead.peek(keyword::disable_call) { - Ok(RuntimeAttr::DisableCall(content.parse::()?.span())) - } else if lookahead.peek(keyword::disable_unsigned) { - Ok(RuntimeAttr::DisableUnsigned(content.parse::()?.span())) - } else { - Err(lookahead.error()) - } - } + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::runtime) { + Ok(RuntimeAttr::Runtime( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::derive) { + let _ = content.parse::(); + let derive_content; + syn::parenthesized!(derive_content in content); + let runtime_types = + derive_content.parse::>()?; + let runtime_types = runtime_types.inner.into_iter().collect(); + Ok(RuntimeAttr::Derive(derive_content.span(), runtime_types)) + } else if lookahead.peek(keyword::pallet_index) { + let _ = content.parse::(); + let pallet_index_content; + 
syn::parenthesized!(pallet_index_content in content); + let pallet_index = pallet_index_content.parse::()?; + if !pallet_index.suffix().is_empty() { + let msg = "Number literal must not have a suffix"; + return Err(syn::Error::new(pallet_index.span(), msg)); + } + Ok(RuntimeAttr::PalletIndex( + pallet_index.span(), + pallet_index.base10_parse()?, + )) + } else if lookahead.peek(keyword::disable_call) { + Ok(RuntimeAttr::DisableCall( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::disable_unsigned) { + Ok(RuntimeAttr::DisableUnsigned( + content.parse::()?.span(), + )) + } else { + Err(lookahead.error()) + } + } } #[derive(Debug, Clone)] pub enum AllPalletsDeclaration { - Implicit(ImplicitAllPalletsDeclaration), - Explicit(ExplicitAllPalletsDeclaration), + Implicit(ImplicitAllPalletsDeclaration), + Explicit(ExplicitAllPalletsDeclaration), } /// Declaration of a runtime with some pallet with implicit declaration of parts. #[derive(Debug, Clone)] pub struct ImplicitAllPalletsDeclaration { - pub name: Ident, - pub pallet_decls: Vec, - pub pallet_count: usize, + pub name: Ident, + pub pallet_decls: Vec, + pub pallet_count: usize, } /// Declaration of a runtime with all pallet having explicit declaration of parts. 
#[derive(Debug, Clone)] pub struct ExplicitAllPalletsDeclaration { - pub name: Ident, - pub pallets: Vec, + pub name: Ident, + pub pallets: Vec, } pub struct Def { - pub input: TokenStream2, - pub item: syn::ItemMod, - pub runtime_struct: runtime_struct::RuntimeStructDef, - pub pallets: AllPalletsDeclaration, - pub runtime_types: Vec, + pub input: TokenStream2, + pub item: syn::ItemMod, + pub runtime_struct: runtime_struct::RuntimeStructDef, + pub pallets: AllPalletsDeclaration, + pub runtime_types: Vec, } impl Def { - pub fn try_from(mut item: syn::ItemMod) -> syn::Result { - let input: TokenStream2 = item.to_token_stream().into(); - let item_span = item.span(); - let items = &mut item - .content - .as_mut() - .ok_or_else(|| { - let msg = "Invalid runtime definition, expected mod to be inlined."; - syn::Error::new(item_span, msg) - })? - .1; + pub fn try_from(mut item: syn::ItemMod) -> syn::Result { + let input: TokenStream2 = item.to_token_stream().into(); + let item_span = item.span(); + let items = &mut item + .content + .as_mut() + .ok_or_else(|| { + let msg = "Invalid runtime definition, expected mod to be inlined."; + syn::Error::new(item_span, msg) + })? + .1; - let mut runtime_struct = None; - let mut runtime_types = None; + let mut runtime_struct = None; + let mut runtime_types = None; - let mut indices = HashMap::new(); - let mut names = HashMap::new(); + let mut indices = HashMap::new(); + let mut names = HashMap::new(); - let mut pallet_decls = vec![]; - let mut pallets = vec![]; + let mut pallet_decls = vec![]; + let mut pallets = vec![]; - for item in items.iter_mut() { - let mut pallet_item = None; - let mut pallet_index = 0; + for item in items.iter_mut() { + let mut pallet_item = None; + let mut pallet_index = 0; - let mut disable_call = false; - let mut disable_unsigned = false; + let mut disable_call = false; + let mut disable_unsigned = false; - while let Some(runtime_attr) = - helper::take_first_item_runtime_attr::(item)? 
- { - match runtime_attr { - RuntimeAttr::Runtime(span) if runtime_struct.is_none() => { - let p = runtime_struct::RuntimeStructDef::try_from(span, item)?; - runtime_struct = Some(p); - }, - RuntimeAttr::Derive(_, types) if runtime_types.is_none() => { - runtime_types = Some(types); - }, - RuntimeAttr::PalletIndex(span, index) => { - pallet_index = index; - pallet_item = if let syn::Item::Type(item) = item { - Some(item.clone()) - } else { - let msg = "Invalid runtime::pallet_index, expected type definition"; - return Err(syn::Error::new(span, msg)) - }; - }, - RuntimeAttr::DisableCall(_) => disable_call = true, - RuntimeAttr::DisableUnsigned(_) => disable_unsigned = true, - attr => { - let msg = "Invalid duplicated attribute"; - return Err(syn::Error::new(attr.span(), msg)) - }, - } - } + while let Some(runtime_attr) = + helper::take_first_item_runtime_attr::(item)? + { + match runtime_attr { + RuntimeAttr::Runtime(span) if runtime_struct.is_none() => { + let p = runtime_struct::RuntimeStructDef::try_from(span, item)?; + runtime_struct = Some(p); + } + RuntimeAttr::Derive(_, types) if runtime_types.is_none() => { + runtime_types = Some(types); + } + RuntimeAttr::PalletIndex(span, index) => { + pallet_index = index; + pallet_item = if let syn::Item::Type(item) = item { + Some(item.clone()) + } else { + let msg = "Invalid runtime::pallet_index, expected type definition"; + return Err(syn::Error::new(span, msg)); + }; + } + RuntimeAttr::DisableCall(_) => disable_call = true, + RuntimeAttr::DisableUnsigned(_) => disable_unsigned = true, + attr => { + let msg = "Invalid duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)); + } + } + } - if let Some(pallet_item) = pallet_item { - match *pallet_item.ty.clone() { - syn::Type::Path(ref path) => { - let pallet_decl = - PalletDeclaration::try_from(item.span(), &pallet_item, path)?; + if let Some(pallet_item) = pallet_item { + match *pallet_item.ty.clone() { + syn::Type::Path(ref path) => { + let pallet_decl 
= + PalletDeclaration::try_from(item.span(), &pallet_item, path)?; - if let Some(used_pallet) = - names.insert(pallet_decl.name.clone(), pallet_decl.name.span()) - { - let msg = "Two pallets with the same name!"; + if let Some(used_pallet) = + names.insert(pallet_decl.name.clone(), pallet_decl.name.span()) + { + let msg = "Two pallets with the same name!"; - let mut err = syn::Error::new(used_pallet, &msg); - err.combine(syn::Error::new(pallet_decl.name.span(), &msg)); - return Err(err) - } + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet_decl.name.span(), &msg)); + return Err(err); + } - pallet_decls.push(pallet_decl); - }, - syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) => { - let pallet = Pallet::try_from( - item.span(), - &pallet_item, - pallet_index, - disable_call, - disable_unsigned, - &bounds, - )?; + pallet_decls.push(pallet_decl); + } + syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) => { + let pallet = Pallet::try_from( + item.span(), + &pallet_item, + pallet_index, + disable_call, + disable_unsigned, + &bounds, + )?; - if let Some(used_pallet) = indices.insert(pallet.index, pallet.name.clone()) - { - let msg = format!( + if let Some(used_pallet) = indices.insert(pallet.index, pallet.name.clone()) + { + let msg = format!( "Pallet indices are conflicting: Both pallets {} and {} are at index {}", used_pallet, pallet.name, pallet.index, ); - let mut err = syn::Error::new(used_pallet.span(), &msg); - err.combine(syn::Error::new(pallet.name.span(), msg)); - return Err(err) - } + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err); + } - pallets.push(pallet); - }, - _ => continue, - } - } - } + pallets.push(pallet); + } + _ => continue, + } + } + } - let name = item.ident.clone(); - let decl_count = pallet_decls.len(); - let pallets = if decl_count > 0 { - AllPalletsDeclaration::Implicit(ImplicitAllPalletsDeclaration 
{ - name, - pallet_decls, - pallet_count: decl_count.saturating_add(pallets.len()), - }) - } else { - AllPalletsDeclaration::Explicit(ExplicitAllPalletsDeclaration { name, pallets }) - }; + let name = item.ident.clone(); + let decl_count = pallet_decls.len(); + let pallets = if decl_count > 0 { + AllPalletsDeclaration::Implicit(ImplicitAllPalletsDeclaration { + name, + pallet_decls, + pallet_count: decl_count.saturating_add(pallets.len()), + }) + } else { + AllPalletsDeclaration::Explicit(ExplicitAllPalletsDeclaration { name, pallets }) + }; - let def = Def { + let def = Def { input, item, runtime_struct: runtime_struct.ok_or_else(|| { @@ -261,6 +270,6 @@ impl Def { })?, }; - Ok(def) - } + Ok(def) + } } diff --git a/support/procedural-fork/src/runtime/parse/pallet.rs b/support/procedural-fork/src/runtime/parse/pallet.rs index d2f1857fb..039e2631b 100644 --- a/support/procedural-fork/src/runtime/parse/pallet.rs +++ b/support/procedural-fork/src/runtime/parse/pallet.rs @@ -20,80 +20,88 @@ use quote::ToTokens; use syn::{punctuated::Punctuated, spanned::Spanned, token, Error, Ident, PathArguments}; impl Pallet { - pub fn try_from( - attr_span: proc_macro2::Span, - item: &syn::ItemType, - pallet_index: u8, - disable_call: bool, - disable_unsigned: bool, - bounds: &Punctuated, - ) -> syn::Result { - let name = item.ident.clone(); + pub fn try_from( + attr_span: proc_macro2::Span, + item: &syn::ItemType, + pallet_index: u8, + disable_call: bool, + disable_unsigned: bool, + bounds: &Punctuated, + ) -> syn::Result { + let name = item.ident.clone(); - let mut pallet_path = None; - let mut pallet_parts = vec![]; + let mut pallet_path = None; + let mut pallet_parts = vec![]; - for (index, bound) in bounds.into_iter().enumerate() { - if let syn::TypeParamBound::Trait(syn::TraitBound { path, .. 
}) = bound { - if index == 0 { - pallet_path = Some(PalletPath { inner: path.clone() }); - } else { - let pallet_part = syn::parse2::(bound.into_token_stream())?; - pallet_parts.push(pallet_part); - } - } else { - return Err(Error::new( - attr_span, - "Invalid pallet declaration, expected a path or a trait object", - )) - }; - } + for (index, bound) in bounds.into_iter().enumerate() { + if let syn::TypeParamBound::Trait(syn::TraitBound { path, .. }) = bound { + if index == 0 { + pallet_path = Some(PalletPath { + inner: path.clone(), + }); + } else { + let pallet_part = syn::parse2::(bound.into_token_stream())?; + pallet_parts.push(pallet_part); + } + } else { + return Err(Error::new( + attr_span, + "Invalid pallet declaration, expected a path or a trait object", + )); + }; + } - let mut path = pallet_path.ok_or(Error::new( - attr_span, - "Invalid pallet declaration, expected a path or a trait object", - ))?; + let mut path = pallet_path.ok_or(Error::new( + attr_span, + "Invalid pallet declaration, expected a path or a trait object", + ))?; - let mut instance = None; - if let Some(segment) = path.inner.segments.iter_mut().find(|seg| !seg.arguments.is_empty()) - { - if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { - args, .. - }) = segment.arguments.clone() - { - if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { - instance = - Some(Ident::new(&arg_path.to_token_stream().to_string(), arg_path.span())); - segment.arguments = PathArguments::None; - } - } - } + let mut instance = None; + if let Some(segment) = path + .inner + .segments + .iter_mut() + .find(|seg| !seg.arguments.is_empty()) + { + if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { + args, .. 
+ }) = segment.arguments.clone() + { + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { + instance = Some(Ident::new( + &arg_path.to_token_stream().to_string(), + arg_path.span(), + )); + segment.arguments = PathArguments::None; + } + } + } - pallet_parts = pallet_parts - .into_iter() - .filter(|part| { - if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { - false - } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = - (disable_unsigned, &part.keyword) - { - false - } else { - true - } - }) - .collect(); + pallet_parts = pallet_parts + .into_iter() + .filter(|part| { + if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { + false + } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = + (disable_unsigned, &part.keyword) + { + false + } else { + true + } + }) + .collect(); - let cfg_pattern = vec![]; + let cfg_pattern = vec![]; - Ok(Pallet { - is_expanded: true, - name, - index: pallet_index, - path, - instance, - cfg_pattern, - pallet_parts, - }) - } + Ok(Pallet { + is_expanded: true, + name, + index: pallet_index, + path, + instance, + cfg_pattern, + pallet_parts, + }) + } } diff --git a/support/procedural-fork/src/runtime/parse/pallet_decl.rs b/support/procedural-fork/src/runtime/parse/pallet_decl.rs index 437a163cf..bb1246606 100644 --- a/support/procedural-fork/src/runtime/parse/pallet_decl.rs +++ b/support/procedural-fork/src/runtime/parse/pallet_decl.rs @@ -21,40 +21,51 @@ use syn::{spanned::Spanned, Attribute, Ident, PathArguments}; /// The declaration of a pallet. #[derive(Debug, Clone)] pub struct PalletDeclaration { - /// The name of the pallet, e.g.`System` in `System: frame_system`. - pub name: Ident, - /// Optional attributes tagged right above a pallet declaration. - pub attrs: Vec, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: syn::Path, - /// The instance of the pallet, e.g. 
`Instance1` in `Council: pallet_collective::`. - pub instance: Option, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Optional attributes tagged right above a pallet declaration. + pub attrs: Vec, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: syn::Path, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, } impl PalletDeclaration { - pub fn try_from( - _attr_span: proc_macro2::Span, - item: &syn::ItemType, - path: &syn::TypePath, - ) -> syn::Result { - let name = item.ident.clone(); - - let mut path = path.path.clone(); - - let mut instance = None; - if let Some(segment) = path.segments.iter_mut().find(|seg| !seg.arguments.is_empty()) { - if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { - args, .. - }) = segment.arguments.clone() - { - if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { - instance = - Some(Ident::new(&arg_path.to_token_stream().to_string(), arg_path.span())); - segment.arguments = PathArguments::None; - } - } - } - - Ok(Self { name, path, instance, attrs: item.attrs.clone() }) - } + pub fn try_from( + _attr_span: proc_macro2::Span, + item: &syn::ItemType, + path: &syn::TypePath, + ) -> syn::Result { + let name = item.ident.clone(); + + let mut path = path.path.clone(); + + let mut instance = None; + if let Some(segment) = path + .segments + .iter_mut() + .find(|seg| !seg.arguments.is_empty()) + { + if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { + args, .. 
+ }) = segment.arguments.clone() + { + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { + instance = Some(Ident::new( + &arg_path.to_token_stream().to_string(), + arg_path.span(), + )); + segment.arguments = PathArguments::None; + } + } + } + + Ok(Self { + name, + path, + instance, + attrs: item.attrs.clone(), + }) + } } diff --git a/support/procedural-fork/src/runtime/parse/runtime_struct.rs b/support/procedural-fork/src/runtime/parse/runtime_struct.rs index 8fa746ee8..7ddbdcfeb 100644 --- a/support/procedural-fork/src/runtime/parse/runtime_struct.rs +++ b/support/procedural-fork/src/runtime/parse/runtime_struct.rs @@ -17,19 +17,22 @@ use syn::spanned::Spanned; pub struct RuntimeStructDef { - pub ident: syn::Ident, - pub attr_span: proc_macro2::Span, + pub ident: syn::Ident, + pub attr_span: proc_macro2::Span, } impl RuntimeStructDef { - pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Struct(item) = item { - item - } else { - let msg = "Invalid runtime::runtime, expected struct definition"; - return Err(syn::Error::new(item.span(), msg)) - }; + pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Struct(item) = item { + item + } else { + let msg = "Invalid runtime::runtime, expected struct definition"; + return Err(syn::Error::new(item.span(), msg)); + }; - Ok(Self { ident: item.ident.clone(), attr_span }) - } + Ok(Self { + ident: item.ident.clone(), + attr_span, + }) + } } diff --git a/support/procedural-fork/src/runtime/parse/runtime_types.rs b/support/procedural-fork/src/runtime/parse/runtime_types.rs index a4480e2a1..4d8c8358c 100644 --- a/support/procedural-fork/src/runtime/parse/runtime_types.rs +++ b/support/procedural-fork/src/runtime/parse/runtime_types.rs @@ -16,61 +16,61 @@ // limitations under the License. 
use syn::{ - parse::{Parse, ParseStream}, - Result, + parse::{Parse, ParseStream}, + Result, }; mod keyword { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(RuntimeCall); - custom_keyword!(RuntimeEvent); - custom_keyword!(RuntimeError); - custom_keyword!(RuntimeOrigin); - custom_keyword!(RuntimeFreezeReason); - custom_keyword!(RuntimeHoldReason); - custom_keyword!(RuntimeSlashReason); - custom_keyword!(RuntimeLockId); - custom_keyword!(RuntimeTask); + custom_keyword!(RuntimeCall); + custom_keyword!(RuntimeEvent); + custom_keyword!(RuntimeError); + custom_keyword!(RuntimeOrigin); + custom_keyword!(RuntimeFreezeReason); + custom_keyword!(RuntimeHoldReason); + custom_keyword!(RuntimeSlashReason); + custom_keyword!(RuntimeLockId); + custom_keyword!(RuntimeTask); } #[derive(Debug, Clone, PartialEq)] pub enum RuntimeType { - RuntimeCall(keyword::RuntimeCall), - RuntimeEvent(keyword::RuntimeEvent), - RuntimeError(keyword::RuntimeError), - RuntimeOrigin(keyword::RuntimeOrigin), - RuntimeFreezeReason(keyword::RuntimeFreezeReason), - RuntimeHoldReason(keyword::RuntimeHoldReason), - RuntimeSlashReason(keyword::RuntimeSlashReason), - RuntimeLockId(keyword::RuntimeLockId), - RuntimeTask(keyword::RuntimeTask), + RuntimeCall(keyword::RuntimeCall), + RuntimeEvent(keyword::RuntimeEvent), + RuntimeError(keyword::RuntimeError), + RuntimeOrigin(keyword::RuntimeOrigin), + RuntimeFreezeReason(keyword::RuntimeFreezeReason), + RuntimeHoldReason(keyword::RuntimeHoldReason), + RuntimeSlashReason(keyword::RuntimeSlashReason), + RuntimeLockId(keyword::RuntimeLockId), + RuntimeTask(keyword::RuntimeTask), } impl Parse for RuntimeType { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); - if lookahead.peek(keyword::RuntimeCall) { - Ok(Self::RuntimeCall(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeEvent) { - Ok(Self::RuntimeEvent(input.parse()?)) - 
} else if lookahead.peek(keyword::RuntimeError) { - Ok(Self::RuntimeError(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeOrigin) { - Ok(Self::RuntimeOrigin(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeFreezeReason) { - Ok(Self::RuntimeFreezeReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeHoldReason) { - Ok(Self::RuntimeHoldReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeSlashReason) { - Ok(Self::RuntimeSlashReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeLockId) { - Ok(Self::RuntimeLockId(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeTask) { - Ok(Self::RuntimeTask(input.parse()?)) - } else { - Err(lookahead.error()) - } - } + if lookahead.peek(keyword::RuntimeCall) { + Ok(Self::RuntimeCall(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeEvent) { + Ok(Self::RuntimeEvent(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeError) { + Ok(Self::RuntimeError(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeOrigin) { + Ok(Self::RuntimeOrigin(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeFreezeReason) { + Ok(Self::RuntimeFreezeReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeHoldReason) { + Ok(Self::RuntimeHoldReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeSlashReason) { + Ok(Self::RuntimeSlashReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeLockId) { + Ok(Self::RuntimeLockId(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeTask) { + Ok(Self::RuntimeTask(input.parse()?)) + } else { + Err(lookahead.error()) + } + } } diff --git a/support/procedural-fork/src/storage_alias.rs b/support/procedural-fork/src/storage_alias.rs index 06f62768f..7099239f9 100644 --- a/support/procedural-fork/src/storage_alias.rs +++ b/support/procedural-fork/src/storage_alias.rs @@ -22,655 +22,688 @@ use frame_support_procedural_tools::generate_access_from_frame_or_crate; use 
proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens}; use syn::{ - parenthesized, - parse::{Parse, ParseStream}, - punctuated::Punctuated, - spanned::Spanned, - token, - visit::Visit, - Attribute, Error, Ident, Result, Token, Type, TypeParam, Visibility, WhereClause, + parenthesized, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned, + token, + visit::Visit, + Attribute, Error, Ident, Result, Token, Type, TypeParam, Visibility, WhereClause, }; /// Extension trait for [`Type`]. trait TypeExt { - fn get_ident(&self) -> Option<&Ident>; - fn contains_ident(&self, ident: &Ident) -> bool; + fn get_ident(&self) -> Option<&Ident>; + fn contains_ident(&self, ident: &Ident) -> bool; } impl TypeExt for Type { - fn get_ident(&self) -> Option<&Ident> { - match self { - Type::Path(p) => match &p.qself { - Some(qself) => qself.ty.get_ident(), - None => p.path.get_ident(), - }, - _ => None, - } - } - - fn contains_ident(&self, ident: &Ident) -> bool { - struct ContainsIdent<'a> { - ident: &'a Ident, - found: bool, - } - impl<'a, 'ast> Visit<'ast> for ContainsIdent<'a> { - fn visit_ident(&mut self, i: &'ast Ident) { - if i == self.ident { - self.found = true; - } - } - } - - let mut visitor = ContainsIdent { ident, found: false }; - syn::visit::visit_type(&mut visitor, self); - visitor.found - } + fn get_ident(&self) -> Option<&Ident> { + match self { + Type::Path(p) => match &p.qself { + Some(qself) => qself.ty.get_ident(), + None => p.path.get_ident(), + }, + _ => None, + } + } + + fn contains_ident(&self, ident: &Ident) -> bool { + struct ContainsIdent<'a> { + ident: &'a Ident, + found: bool, + } + impl<'a, 'ast> Visit<'ast> for ContainsIdent<'a> { + fn visit_ident(&mut self, i: &'ast Ident) { + if i == self.ident { + self.found = true; + } + } + } + + let mut visitor = ContainsIdent { + ident, + found: false, + }; + syn::visit::visit_type(&mut visitor, self); + visitor.found + } } /// Represents generics which only support [`TypeParam`] 
separated by commas. struct SimpleGenerics { - lt_token: Token![<], - params: Punctuated, - gt_token: Token![>], + lt_token: Token![<], + params: Punctuated, + gt_token: Token![>], } impl SimpleGenerics { - /// Returns the generics for types declarations etc. - fn type_generics(&self) -> impl Iterator { - self.params.iter().map(|p| &p.ident) - } - - /// Returns the generics for the `impl` block. - fn impl_generics(&self) -> impl Iterator { - self.params.iter() - } + /// Returns the generics for types declarations etc. + fn type_generics(&self) -> impl Iterator { + self.params.iter().map(|p| &p.ident) + } + + /// Returns the generics for the `impl` block. + fn impl_generics(&self) -> impl Iterator { + self.params.iter() + } } impl Parse for SimpleGenerics { - fn parse(input: ParseStream<'_>) -> Result { - Ok(Self { - lt_token: input.parse()?, - params: Punctuated::parse_separated_nonempty(input)?, - gt_token: input.parse()?, - }) - } + fn parse(input: ParseStream<'_>) -> Result { + Ok(Self { + lt_token: input.parse()?, + params: Punctuated::parse_separated_nonempty(input)?, + gt_token: input.parse()?, + }) + } } impl ToTokens for SimpleGenerics { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.lt_token.to_tokens(tokens); - self.params.to_tokens(tokens); - self.gt_token.to_tokens(tokens); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lt_token.to_tokens(tokens); + self.params.to_tokens(tokens); + self.gt_token.to_tokens(tokens); + } } mod storage_types { - syn::custom_keyword!(StorageValue); - syn::custom_keyword!(StorageMap); - syn::custom_keyword!(CountedStorageMap); - syn::custom_keyword!(StorageDoubleMap); - syn::custom_keyword!(StorageNMap); + syn::custom_keyword!(StorageValue); + syn::custom_keyword!(StorageMap); + syn::custom_keyword!(CountedStorageMap); + syn::custom_keyword!(StorageDoubleMap); + syn::custom_keyword!(StorageNMap); } /// The types of prefixes the storage alias macro supports. 
mod prefix_types { - // Use the verbatim/unmodified input name as the prefix. - syn::custom_keyword!(verbatim); - // The input type is a pallet and its pallet name should be used as the prefix. - syn::custom_keyword!(pallet_name); - // The input type implements `Get<'static str>` and this `str` should be used as the prefix. - syn::custom_keyword!(dynamic); + // Use the verbatim/unmodified input name as the prefix. + syn::custom_keyword!(verbatim); + // The input type is a pallet and its pallet name should be used as the prefix. + syn::custom_keyword!(pallet_name); + // The input type implements `Get<'static str>` and this `str` should be used as the prefix. + syn::custom_keyword!(dynamic); } /// The supported storage types enum StorageType { - Value { - _kw: storage_types::StorageValue, - _lt_token: Token![<], - prefix: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - Map { - _kw: storage_types::StorageMap, - _lt_token: Token![<], - prefix: Type, - _hasher_comma: Token![,], - hasher_ty: Type, - _key_comma: Token![,], - key_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - CountedMap { - _kw: storage_types::CountedStorageMap, - _lt_token: Token![<], - prefix: Type, - _hasher_comma: Token![,], - hasher_ty: Type, - _key_comma: Token![,], - key_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - DoubleMap { - _kw: storage_types::StorageDoubleMap, - _lt_token: Token![<], - prefix: Type, - _hasher1_comma: Token![,], - hasher1_ty: Type, - _key1_comma: Token![,], - key1_ty: Type, - _hasher2_comma: Token![,], - hasher2_ty: Type, - _key2_comma: Token![,], - key2_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - 
_trailing_comma: Option, - _gt_token: Token![>], - }, - NMap { - _kw: storage_types::StorageNMap, - _lt_token: Token![<], - prefix: Type, - _paren_comma: Token![,], - _paren_token: token::Paren, - key_types: Punctuated, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, + Value { + _kw: storage_types::StorageValue, + _lt_token: Token![<], + prefix: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + Map { + _kw: storage_types::StorageMap, + _lt_token: Token![<], + prefix: Type, + _hasher_comma: Token![,], + hasher_ty: Type, + _key_comma: Token![,], + key_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + CountedMap { + _kw: storage_types::CountedStorageMap, + _lt_token: Token![<], + prefix: Type, + _hasher_comma: Token![,], + hasher_ty: Type, + _key_comma: Token![,], + key_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + DoubleMap { + _kw: storage_types::StorageDoubleMap, + _lt_token: Token![<], + prefix: Type, + _hasher1_comma: Token![,], + hasher1_ty: Type, + _key1_comma: Token![,], + key1_ty: Type, + _hasher2_comma: Token![,], + hasher2_ty: Type, + _key2_comma: Token![,], + key2_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + NMap { + _kw: storage_types::StorageNMap, + _lt_token: Token![<], + prefix: Type, + _paren_comma: Token![,], + _paren_token: token::Paren, + key_types: Punctuated, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, } impl StorageType { - /// Generate the 
actual type declaration. - fn generate_type_declaration( - &self, - crate_: &syn::Path, - storage_instance: &StorageInstance, - storage_name: &Ident, - storage_generics: Option<&SimpleGenerics>, - visibility: &Visibility, - attributes: &[Attribute], - ) -> TokenStream { - let storage_instance_generics = &storage_instance.generics; - let storage_instance = &storage_instance.name; - let attributes = attributes.iter(); - let storage_generics = storage_generics.map(|g| { - let generics = g.type_generics(); - - quote!( < #( #generics ),* > ) - }); - - match self { - Self::Value { value_ty, query_type, .. } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageValue< - #storage_instance #storage_instance_generics, - #value_ty - #query_type - >; - } - }, - Self::CountedMap { value_ty, query_type, hasher_ty, key_ty, .. } | - Self::Map { value_ty, query_type, hasher_ty, key_ty, .. } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - let map_type = Ident::new( - match self { - Self::Map { .. } => "StorageMap", - _ => "CountedStorageMap", - }, - Span::call_site(), - ); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::#map_type< - #storage_instance #storage_instance_generics, - #hasher_ty, - #key_ty, - #value_ty - #query_type - >; - } - }, - Self::DoubleMap { - value_ty, - query_type, - hasher1_ty, - key1_ty, - hasher2_ty, - key2_ty, - .. - } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageDoubleMap< - #storage_instance #storage_instance_generics, - #hasher1_ty, - #key1_ty, - #hasher2_ty, - #key2_ty, - #value_ty - #query_type - >; - } - }, - Self::NMap { value_ty, query_type, key_types, .. 
} => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - let key_types = key_types.iter(); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageNMap< - #storage_instance #storage_instance_generics, - ( #( #key_types ),* ), - #value_ty - #query_type - >; - } - }, - } - } - - /// The prefix for this storage type. - fn prefix(&self) -> &Type { - match self { - Self::Value { prefix, .. } | - Self::Map { prefix, .. } | - Self::CountedMap { prefix, .. } | - Self::NMap { prefix, .. } | - Self::DoubleMap { prefix, .. } => prefix, - } - } + /// Generate the actual type declaration. + fn generate_type_declaration( + &self, + crate_: &syn::Path, + storage_instance: &StorageInstance, + storage_name: &Ident, + storage_generics: Option<&SimpleGenerics>, + visibility: &Visibility, + attributes: &[Attribute], + ) -> TokenStream { + let storage_instance_generics = &storage_instance.generics; + let storage_instance = &storage_instance.name; + let attributes = attributes.iter(); + let storage_generics = storage_generics.map(|g| { + let generics = g.type_generics(); + + quote!( < #( #generics ),* > ) + }); + + match self { + Self::Value { + value_ty, + query_type, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageValue< + #storage_instance #storage_instance_generics, + #value_ty + #query_type + >; + } + } + Self::CountedMap { + value_ty, + query_type, + hasher_ty, + key_ty, + .. + } + | Self::Map { + value_ty, + query_type, + hasher_ty, + key_ty, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + let map_type = Ident::new( + match self { + Self::Map { .. } => "StorageMap", + _ => "CountedStorageMap", + }, + Span::call_site(), + ); + + quote! 
{ + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::#map_type< + #storage_instance #storage_instance_generics, + #hasher_ty, + #key_ty, + #value_ty + #query_type + >; + } + } + Self::DoubleMap { + value_ty, + query_type, + hasher1_ty, + key1_ty, + hasher2_ty, + key2_ty, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageDoubleMap< + #storage_instance #storage_instance_generics, + #hasher1_ty, + #key1_ty, + #hasher2_ty, + #key2_ty, + #value_ty + #query_type + >; + } + } + Self::NMap { + value_ty, + query_type, + key_types, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + let key_types = key_types.iter(); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageNMap< + #storage_instance #storage_instance_generics, + ( #( #key_types ),* ), + #value_ty + #query_type + >; + } + } + } + } + + /// The prefix for this storage type. + fn prefix(&self) -> &Type { + match self { + Self::Value { prefix, .. } + | Self::Map { prefix, .. } + | Self::CountedMap { prefix, .. } + | Self::NMap { prefix, .. } + | Self::DoubleMap { prefix, .. 
} => prefix, + } + } } impl Parse for StorageType { - fn parse(input: ParseStream<'_>) -> Result { - let lookahead = input.lookahead1(); - - let parse_query_type = |input: ParseStream<'_>| -> Result> { - if input.peek(Token![,]) && !input.peek2(Token![>]) { - Ok(Some((input.parse()?, input.parse()?))) - } else { - Ok(None) - } - }; - - if lookahead.peek(storage_types::StorageValue) { - Ok(Self::Value { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageMap) { - Ok(Self::Map { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher_comma: input.parse()?, - hasher_ty: input.parse()?, - _key_comma: input.parse()?, - key_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::CountedStorageMap) { - Ok(Self::CountedMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher_comma: input.parse()?, - hasher_ty: input.parse()?, - _key_comma: input.parse()?, - key_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageDoubleMap) { - Ok(Self::DoubleMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher1_comma: input.parse()?, - hasher1_ty: input.parse()?, - _key1_comma: input.parse()?, - key1_ty: input.parse()?, - _hasher2_comma: input.parse()?, - 
hasher2_ty: input.parse()?, - _key2_comma: input.parse()?, - key2_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageNMap) { - let content; - Ok(Self::NMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _paren_comma: input.parse()?, - _paren_token: parenthesized!(content in input), - key_types: Punctuated::parse_terminated(&content)?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream<'_>) -> Result { + let lookahead = input.lookahead1(); + + let parse_query_type = |input: ParseStream<'_>| -> Result> { + if input.peek(Token![,]) && !input.peek2(Token![>]) { + Ok(Some((input.parse()?, input.parse()?))) + } else { + Ok(None) + } + }; + + if lookahead.peek(storage_types::StorageValue) { + Ok(Self::Value { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageMap) { + Ok(Self::Map { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher_comma: input.parse()?, + hasher_ty: input.parse()?, + _key_comma: input.parse()?, + key_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if 
lookahead.peek(storage_types::CountedStorageMap) { + Ok(Self::CountedMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher_comma: input.parse()?, + hasher_ty: input.parse()?, + _key_comma: input.parse()?, + key_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageDoubleMap) { + Ok(Self::DoubleMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher1_comma: input.parse()?, + hasher1_ty: input.parse()?, + _key1_comma: input.parse()?, + key1_ty: input.parse()?, + _hasher2_comma: input.parse()?, + hasher2_ty: input.parse()?, + _key2_comma: input.parse()?, + key2_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageNMap) { + let content; + Ok(Self::NMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _paren_comma: input.parse()?, + _paren_token: parenthesized!(content in input), + key_types: Punctuated::parse_terminated(&content)?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else { + Err(lookahead.error()) + } + } } /// The input expected by this macro. 
struct Input { - attributes: Vec, - visibility: Visibility, - _type: Token![type], - storage_name: Ident, - storage_generics: Option, - where_clause: Option, - _equal: Token![=], - storage_type: StorageType, - _semicolon: Token![;], + attributes: Vec, + visibility: Visibility, + _type: Token![type], + storage_name: Ident, + storage_generics: Option, + where_clause: Option, + _equal: Token![=], + storage_type: StorageType, + _semicolon: Token![;], } impl Parse for Input { - fn parse(input: ParseStream<'_>) -> Result { - let attributes = input.call(Attribute::parse_outer)?; - let visibility = input.parse()?; - let _type = input.parse()?; - let storage_name = input.parse()?; - - let lookahead = input.lookahead1(); - let storage_generics = if lookahead.peek(Token![<]) { - Some(input.parse()?) - } else if lookahead.peek(Token![=]) { - None - } else { - return Err(lookahead.error()) - }; - - let lookahead = input.lookahead1(); - let where_clause = if lookahead.peek(Token![where]) { - Some(input.parse()?) - } else if lookahead.peek(Token![=]) { - None - } else { - return Err(lookahead.error()) - }; - - let _equal = input.parse()?; - - let storage_type = input.parse()?; - - let _semicolon = input.parse()?; - - Ok(Self { - attributes, - visibility, - _type, - storage_name, - storage_generics, - _equal, - storage_type, - where_clause, - _semicolon, - }) - } + fn parse(input: ParseStream<'_>) -> Result { + let attributes = input.call(Attribute::parse_outer)?; + let visibility = input.parse()?; + let _type = input.parse()?; + let storage_name = input.parse()?; + + let lookahead = input.lookahead1(); + let storage_generics = if lookahead.peek(Token![<]) { + Some(input.parse()?) + } else if lookahead.peek(Token![=]) { + None + } else { + return Err(lookahead.error()); + }; + + let lookahead = input.lookahead1(); + let where_clause = if lookahead.peek(Token![where]) { + Some(input.parse()?) 
+ } else if lookahead.peek(Token![=]) { + None + } else { + return Err(lookahead.error()); + }; + + let _equal = input.parse()?; + + let storage_type = input.parse()?; + + let _semicolon = input.parse()?; + + Ok(Self { + attributes, + visibility, + _type, + storage_name, + storage_generics, + _equal, + storage_type, + where_clause, + _semicolon, + }) + } } /// Defines which type of prefix the storage alias is using. #[derive(Clone, Copy)] enum PrefixType { - /// An appropriate prefix will be determined automatically. - /// - /// If generics are passed, this is assumed to be a pallet and the pallet name should be used. - /// Otherwise use the verbatim passed name as prefix. - Compatibility, - /// The provided ident/name will be used as the prefix. - Verbatim, - /// The provided type will be used to determine the prefix. This type must - /// implement `PalletInfoAccess` which specifies the proper name. This - /// name is then used as the prefix. - PalletName, - /// Uses the provided type implementing `Get<'static str>` to determine the prefix. - Dynamic, + /// An appropriate prefix will be determined automatically. + /// + /// If generics are passed, this is assumed to be a pallet and the pallet name should be used. + /// Otherwise use the verbatim passed name as prefix. + Compatibility, + /// The provided ident/name will be used as the prefix. + Verbatim, + /// The provided type will be used to determine the prefix. This type must + /// implement `PalletInfoAccess` which specifies the proper name. This + /// name is then used as the prefix. + PalletName, + /// Uses the provided type implementing `Get<'static str>` to determine the prefix. + Dynamic, } /// Implementation of the `storage_alias` attribute macro. 
pub fn storage_alias(attributes: TokenStream, input: TokenStream) -> Result { - let input = syn::parse2::(input)?; - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - - let prefix_type = if attributes.is_empty() { - PrefixType::Compatibility - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::Verbatim - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::PalletName - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::Dynamic - } else { - return Err(Error::new(attributes.span(), "Unknown attributes")) - }; - - let storage_instance = generate_storage_instance( - &crate_, - &input.storage_name, - input.storage_generics.as_ref(), - input.where_clause.as_ref(), - input.storage_type.prefix(), - &input.visibility, - matches!(input.storage_type, StorageType::CountedMap { .. }), - prefix_type, - )?; - - let definition = input.storage_type.generate_type_declaration( - &crate_, - &storage_instance, - &input.storage_name, - input.storage_generics.as_ref(), - &input.visibility, - &input.attributes, - ); - - let storage_instance_code = storage_instance.code; - - Ok(quote! { - #storage_instance_code - - #definition - }) + let input = syn::parse2::(input)?; + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + + let prefix_type = if attributes.is_empty() { + PrefixType::Compatibility + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::Verbatim + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::PalletName + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::Dynamic + } else { + return Err(Error::new(attributes.span(), "Unknown attributes")); + }; + + let storage_instance = generate_storage_instance( + &crate_, + &input.storage_name, + input.storage_generics.as_ref(), + input.where_clause.as_ref(), + input.storage_type.prefix(), + &input.visibility, + matches!(input.storage_type, StorageType::CountedMap { .. 
}), + prefix_type, + )?; + + let definition = input.storage_type.generate_type_declaration( + &crate_, + &storage_instance, + &input.storage_name, + input.storage_generics.as_ref(), + &input.visibility, + &input.attributes, + ); + + let storage_instance_code = storage_instance.code; + + Ok(quote! { + #storage_instance_code + + #definition + }) } /// The storage instance to use for the storage alias. struct StorageInstance { - name: Ident, - generics: TokenStream, - code: TokenStream, + name: Ident, + generics: TokenStream, + code: TokenStream, } /// Generate the [`StorageInstance`] for the storage alias. fn generate_storage_instance( - crate_: &syn::Path, - storage_name: &Ident, - storage_generics: Option<&SimpleGenerics>, - storage_where_clause: Option<&WhereClause>, - prefix: &Type, - visibility: &Visibility, - is_counted_map: bool, - prefix_type: PrefixType, + crate_: &syn::Path, + storage_name: &Ident, + storage_generics: Option<&SimpleGenerics>, + storage_where_clause: Option<&WhereClause>, + prefix: &Type, + visibility: &Visibility, + is_counted_map: bool, + prefix_type: PrefixType, ) -> Result { - if let Type::Infer(_) = prefix { - return Err(Error::new(prefix.span(), "`_` is not allowed as prefix by `storage_alias`.")) - } - - let impl_generics_used_by_prefix = storage_generics - .as_ref() - .map(|g| { - g.impl_generics() - .filter(|g| prefix.contains_ident(&g.ident)) - .collect::>() - }) - .unwrap_or_default(); - - let (pallet_prefix, impl_generics, type_generics) = match prefix_type { - PrefixType::Compatibility => - if !impl_generics_used_by_prefix.is_empty() { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! 
{ - < #prefix as #crate_::traits::PalletInfoAccess>::name() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - } else if let Some(prefix) = prefix.get_ident() { - let prefix_str = prefix.to_string(); - - (quote!(#prefix_str), quote!(), quote!()) - } else { - return Err(Error::new_spanned( - prefix, - "If there are no generics, the prefix is only allowed to be an identifier.", - )) - }, - PrefixType::Verbatim => { - let prefix_str = match prefix.get_ident() { - Some(p) => p.to_string(), - None => - return Err(Error::new_spanned( - prefix, - "Prefix type `verbatim` requires that the prefix is an ident.", - )), - }; - - (quote!(#prefix_str), quote!(), quote!()) - }, - PrefixType::PalletName => { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! { - <#prefix as #crate_::traits::PalletInfoAccess>::name() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - }, - PrefixType::Dynamic => { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! { - <#prefix as #crate_::traits::Get<_>>::get() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - }, - }; - - let where_clause = storage_where_clause.map(|w| quote!(#w)).unwrap_or_default(); - - let name_str = format!("{}_Storage_Instance", storage_name); - let name = Ident::new(&name_str, Span::call_site()); - let storage_name_str = storage_name.to_string(); - - let counter_code = is_counted_map.then(|| { - let counter_name = Ident::new(&counter_prefix(&name_str), Span::call_site()); - let counter_storage_name_str = counter_prefix(&storage_name_str); - let storage_prefix_hash = helper::two128_str(&counter_storage_name_str); - - quote! 
{ - #visibility struct #counter_name< #impl_generics >( - ::core::marker::PhantomData<(#type_generics)> - ) #where_clause; - - impl<#impl_generics> #crate_::traits::StorageInstance - for #counter_name< #type_generics > #where_clause - { - fn pallet_prefix() -> &'static str { - #pallet_prefix - } - - const STORAGE_PREFIX: &'static str = #counter_storage_name_str; - fn storage_prefix_hash() -> [u8; 16] { - #storage_prefix_hash - } - } - - impl<#impl_generics> #crate_::storage::types::CountedStorageMapInstance - for #name< #type_generics > #where_clause - { - type CounterPrefix = #counter_name < #type_generics >; - } - } - }); - - let storage_prefix_hash = helper::two128_str(&storage_name_str); - - // Implement `StorageInstance` trait. - let code = quote! { - #[allow(non_camel_case_types)] - #visibility struct #name< #impl_generics >( - ::core::marker::PhantomData<(#type_generics)> - ) #where_clause; - - impl<#impl_generics> #crate_::traits::StorageInstance - for #name< #type_generics > #where_clause - { - fn pallet_prefix() -> &'static str { - #pallet_prefix - } - - const STORAGE_PREFIX: &'static str = #storage_name_str; - fn storage_prefix_hash() -> [u8; 16] { - #storage_prefix_hash - } - } - - #counter_code - }; - - Ok(StorageInstance { name, code, generics: quote!( < #type_generics > ) }) + if let Type::Infer(_) = prefix { + return Err(Error::new( + prefix.span(), + "`_` is not allowed as prefix by `storage_alias`.", + )); + } + + let impl_generics_used_by_prefix = storage_generics + .as_ref() + .map(|g| { + g.impl_generics() + .filter(|g| prefix.contains_ident(&g.ident)) + .collect::>() + }) + .unwrap_or_default(); + + let (pallet_prefix, impl_generics, type_generics) = match prefix_type { + PrefixType::Compatibility => { + if !impl_generics_used_by_prefix.is_empty() { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! 
{ + < #prefix as #crate_::traits::PalletInfoAccess>::name() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + } else if let Some(prefix) = prefix.get_ident() { + let prefix_str = prefix.to_string(); + + (quote!(#prefix_str), quote!(), quote!()) + } else { + return Err(Error::new_spanned( + prefix, + "If there are no generics, the prefix is only allowed to be an identifier.", + )); + } + } + PrefixType::Verbatim => { + let prefix_str = match prefix.get_ident() { + Some(p) => p.to_string(), + None => { + return Err(Error::new_spanned( + prefix, + "Prefix type `verbatim` requires that the prefix is an ident.", + )) + } + }; + + (quote!(#prefix_str), quote!(), quote!()) + } + PrefixType::PalletName => { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! { + <#prefix as #crate_::traits::PalletInfoAccess>::name() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + } + PrefixType::Dynamic => { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! { + <#prefix as #crate_::traits::Get<_>>::get() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + } + }; + + let where_clause = storage_where_clause.map(|w| quote!(#w)).unwrap_or_default(); + + let name_str = format!("{}_Storage_Instance", storage_name); + let name = Ident::new(&name_str, Span::call_site()); + let storage_name_str = storage_name.to_string(); + + let counter_code = is_counted_map.then(|| { + let counter_name = Ident::new(&counter_prefix(&name_str), Span::call_site()); + let counter_storage_name_str = counter_prefix(&storage_name_str); + let storage_prefix_hash = helper::two128_str(&counter_storage_name_str); + + quote! 
{ + #visibility struct #counter_name< #impl_generics >( + ::core::marker::PhantomData<(#type_generics)> + ) #where_clause; + + impl<#impl_generics> #crate_::traits::StorageInstance + for #counter_name< #type_generics > #where_clause + { + fn pallet_prefix() -> &'static str { + #pallet_prefix + } + + const STORAGE_PREFIX: &'static str = #counter_storage_name_str; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + + impl<#impl_generics> #crate_::storage::types::CountedStorageMapInstance + for #name< #type_generics > #where_clause + { + type CounterPrefix = #counter_name < #type_generics >; + } + } + }); + + let storage_prefix_hash = helper::two128_str(&storage_name_str); + + // Implement `StorageInstance` trait. + let code = quote! { + #[allow(non_camel_case_types)] + #visibility struct #name< #impl_generics >( + ::core::marker::PhantomData<(#type_generics)> + ) #where_clause; + + impl<#impl_generics> #crate_::traits::StorageInstance + for #name< #type_generics > #where_clause + { + fn pallet_prefix() -> &'static str { + #pallet_prefix + } + + const STORAGE_PREFIX: &'static str = #storage_name_str; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + + #counter_code + }; + + Ok(StorageInstance { + name, + code, + generics: quote!( < #type_generics > ), + }) } diff --git a/support/procedural-fork/src/transactional.rs b/support/procedural-fork/src/transactional.rs index e9d4f84b7..73a841d9b 100644 --- a/support/procedural-fork/src/transactional.rs +++ b/support/procedural-fork/src/transactional.rs @@ -21,40 +21,50 @@ use quote::quote; use syn::{ItemFn, Result}; pub fn transactional(_attr: TokenStream, input: TokenStream) -> Result { - let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; - - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - let output = quote! 
{ - #(#attrs)* - #vis #sig { - use #crate_::storage::{with_transaction, TransactionOutcome}; - with_transaction(|| { - let r = (|| { #block })(); - if r.is_ok() { - TransactionOutcome::Commit(r) - } else { - TransactionOutcome::Rollback(r) - } - }) - } - }; - - Ok(output.into()) + let ItemFn { + attrs, + vis, + sig, + block, + } = syn::parse(input)?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let output = quote! { + #(#attrs)* + #vis #sig { + use #crate_::storage::{with_transaction, TransactionOutcome}; + with_transaction(|| { + let r = (|| { #block })(); + if r.is_ok() { + TransactionOutcome::Commit(r) + } else { + TransactionOutcome::Rollback(r) + } + }) + } + }; + + Ok(output.into()) } pub fn require_transactional(_attr: TokenStream, input: TokenStream) -> Result { - let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; - - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - let output = quote! { - #(#attrs)* - #vis #sig { - if !#crate_::storage::transactional::is_transactional() { - return Err(#crate_::sp_runtime::TransactionalError::NoLayer.into()); - } - #block - } - }; - - Ok(output.into()) + let ItemFn { + attrs, + vis, + sig, + block, + } = syn::parse(input)?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let output = quote! 
{ + #(#attrs)* + #vis #sig { + if !#crate_::storage::transactional::is_transactional() { + return Err(#crate_::sp_runtime::TransactionalError::NoLayer.into()); + } + #block + } + }; + + Ok(output.into()) } diff --git a/support/procedural-fork/src/tt_macro.rs b/support/procedural-fork/src/tt_macro.rs index d37127421..3f280013f 100644 --- a/support/procedural-fork/src/tt_macro.rs +++ b/support/procedural-fork/src/tt_macro.rs @@ -22,29 +22,29 @@ use proc_macro2::{Ident, TokenStream}; use quote::format_ident; struct CreateTtReturnMacroDef { - name: Ident, - args: Vec<(Ident, TokenStream)>, + name: Ident, + args: Vec<(Ident, TokenStream)>, } impl syn::parse::Parse for CreateTtReturnMacroDef { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let name = input.parse()?; - let _ = input.parse::()?; + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let name = input.parse()?; + let _ = input.parse::()?; - let mut args = Vec::new(); - while !input.is_empty() { - let mut value; - let key: Ident = input.parse()?; - let _ = input.parse::()?; - let _: syn::token::Bracket = syn::bracketed!(value in input); - let _: syn::token::Brace = syn::braced!(value in value); - let value: TokenStream = value.parse()?; + let mut args = Vec::new(); + while !input.is_empty() { + let mut value; + let key: Ident = input.parse()?; + let _ = input.parse::()?; + let _: syn::token::Bracket = syn::bracketed!(value in input); + let _: syn::token::Brace = syn::braced!(value in value); + let value: TokenStream = value.parse()?; - args.push((key, value)) - } + args.push((key, value)) + } - Ok(Self { name, args }) - } + Ok(Self { name, args }) + } } /// A proc macro that accepts a name and any number of key-value pairs, to be used to create a @@ -74,32 +74,32 @@ impl syn::parse::Parse for CreateTtReturnMacroDef { /// } /// ``` pub fn create_tt_return_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let CreateTtReturnMacroDef { name, args } = - 
syn::parse_macro_input!(input as CreateTtReturnMacroDef); + let CreateTtReturnMacroDef { name, args } = + syn::parse_macro_input!(input as CreateTtReturnMacroDef); - let (keys, values): (Vec<_>, Vec<_>) = args.into_iter().unzip(); - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let unique_name = format_ident!("{}_{}", name, count); + let (keys, values): (Vec<_>, Vec<_>) = args.into_iter().unzip(); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let unique_name = format_ident!("{}_{}", name, count); - let decl_macro = quote::quote! { - #[macro_export] - #[doc(hidden)] - macro_rules! #unique_name { - { - $caller:tt - $(your_tt_return = [{ $my_tt_macro:path }])? - } => { - $my_tt_return! { - $caller - #( - #keys = [{ #values }] - )* - } - } - } + let decl_macro = quote::quote! { + #[macro_export] + #[doc(hidden)] + macro_rules! #unique_name { + { + $caller:tt + $(your_tt_return = [{ $my_tt_macro:path }])? + } => { + $my_tt_return! { + $caller + #( + #keys = [{ #values }] + )* + } + } + } - pub use #unique_name as #name; - }; + pub use #unique_name as #name; + }; - decl_macro.into() + decl_macro.into() } From b998f4bffe80f2ab219dd5f585f914ca4c5ae319 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 01:57:44 -0400 Subject: [PATCH 086/213] suppress warnings in test mode --- support/procedural-fork/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index 2ac076636..71e8ecba8 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -11,6 +11,7 @@ //! version/tag name. 
#![recursion_limit = "512"] #![deny(rustdoc::broken_intra_doc_links)] +#![allow(unused)] extern crate proc_macro; From a1b057c2fd2974e0e269d9237a72891b4bf0a28b Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:06:38 -0400 Subject: [PATCH 087/213] cargo clippy --fix --workspace --all-features --- support/procedural-fork/src/benchmark.rs | 28 +++++++++---------- .../construct_runtime/expand/freeze_reason.rs | 2 +- .../construct_runtime/expand/hold_reason.rs | 2 +- .../src/construct_runtime/expand/lock_id.rs | 2 +- .../construct_runtime/expand/slash_reason.rs | 2 +- .../src/construct_runtime/expand/task.rs | 2 +- .../src/construct_runtime/mod.rs | 20 ++++--------- .../src/construct_runtime/parse.rs | 6 ++-- support/procedural-fork/src/derive_impl.rs | 4 +-- support/procedural-fork/src/dynamic_params.rs | 10 ++----- support/procedural-fork/src/lib.rs | 2 +- .../procedural-fork/src/no_bound/default.rs | 2 +- .../src/pallet/expand/config.rs | 2 +- .../src/pallet/expand/documentation.rs | 4 +-- .../src/pallet/expand/storage.rs | 2 +- .../src/pallet/expand/tasks.rs | 2 +- .../src/pallet/expand/warnings.rs | 2 +- support/procedural-fork/src/pallet/mod.rs | 2 +- .../procedural-fork/src/pallet/parse/call.rs | 6 ++-- .../src/pallet/parse/config.rs | 2 +- .../src/pallet/parse/helper.rs | 7 ++--- .../procedural-fork/src/pallet/parse/mod.rs | 12 ++++---- .../src/pallet/parse/storage.rs | 4 +-- .../procedural-fork/src/pallet/parse/tasks.rs | 12 ++++---- support/procedural-fork/src/pallet_error.rs | 4 +-- .../procedural-fork/src/runtime/expand/mod.rs | 19 +++++-------- support/procedural-fork/src/runtime/mod.rs | 2 +- .../procedural-fork/src/runtime/parse/mod.rs | 6 ++-- .../src/runtime/parse/pallet.rs | 7 ++--- 29 files changed, 76 insertions(+), 101 deletions(-) diff --git a/support/procedural-fork/src/benchmark.rs b/support/procedural-fork/src/benchmark.rs index 376200d6e..a47d175af 100644 --- a/support/procedural-fork/src/benchmark.rs +++ 
b/support/procedural-fork/src/benchmark.rs @@ -166,7 +166,7 @@ impl syn::parse::Parse for PovEstimationMode { let lookahead = input.lookahead1(); if lookahead.peek(keywords::MaxEncodedLen) { let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; - return Ok(PovEstimationMode::MaxEncodedLen); + Ok(PovEstimationMode::MaxEncodedLen) } else if lookahead.peek(keywords::Measured) { let _measured: keywords::Measured = input.parse()?; return Ok(PovEstimationMode::Measured); @@ -204,7 +204,7 @@ impl syn::parse::Parse for BenchmarkAttrs { let mut extra = false; let mut skip_meta = false; let mut pov_mode = None; - let args = Punctuated::::parse_terminated(&input)?; + let args = Punctuated::::parse_terminated(input)?; for arg in args.into_iter() { match arg { @@ -294,7 +294,7 @@ struct ResultDef { /// Ensures that `ReturnType` is a `Result<(), BenchmarkError>`, if specified fn ensure_valid_return_type(item_fn: &ItemFn) -> Result<()> { if let ReturnType::Type(_, typ) = &item_fn.sig.output { - let non_unit = |span| return Err(Error::new(span, "expected `()`")); + let non_unit = |span| Err(Error::new(span, "expected `()`")); let Type::Path(TypePath { path, qself: _ }) = &**typ else { return Err(Error::new( typ.span(), @@ -328,10 +328,10 @@ fn parse_params(item_fn: &ItemFn) -> Result> { let mut params: Vec = Vec::new(); for arg in &item_fn.sig.inputs { let invalid_param = |span| { - return Err(Error::new( + Err(Error::new( span, "Invalid benchmark function param. 
A valid example would be `x: Linear<5, 10>`.", - )); + )) }; let FnArg::Typed(arg) = arg else { @@ -344,10 +344,10 @@ fn parse_params(item_fn: &ItemFn) -> Result> { // check param name let var_span = ident.span(); let invalid_param_name = || { - return Err(Error::new( + Err(Error::new( var_span, "Benchmark parameter names must consist of a single lowercase letter (a-z) and no other characters.", - )); + )) }; let name = ident.ident.to_token_stream().to_string(); if name.len() > 1 { @@ -385,10 +385,10 @@ fn parse_params(item_fn: &ItemFn) -> Result> { /// Used in several places where the `#[extrinsic_call]` or `#[body]` annotation is missing fn missing_call(item_fn: &ItemFn) -> Result { - return Err(Error::new( + Err(Error::new( item_fn.block.brace_token.span.join(), "No valid #[extrinsic_call] or #[block] annotation could be found in benchmark function body." - )); + )) } /// Finds the `BenchmarkCallDef` and its index (within the list of stmts for the fn) and @@ -447,7 +447,7 @@ impl BenchmarkDef { pub fn from(item_fn: &ItemFn) -> Result { let params = parse_params(item_fn)?; ensure_valid_return_type(item_fn)?; - let (i, call_def) = parse_call_def(&item_fn)?; + let (i, call_def) = parse_call_def(item_fn)?; let (verify_stmts, last_stmt) = match item_fn.sig.output { ReturnType::Default => @@ -961,11 +961,11 @@ fn expand_benchmark( // set up variables needed during quoting let krate = match generate_access_from_frame_or_crate("frame-benchmarking") { Ok(ident) => ident, - Err(err) => return err.to_compile_error().into(), + Err(err) => return err.to_compile_error(), }; let frame_system = match generate_access_from_frame_or_crate("frame-system") { Ok(path) => path, - Err(err) => return err.to_compile_error().into(), + Err(err) => return err.to_compile_error(), }; let codec = quote!(#krate::__private::codec); let traits = quote!(#krate::__private::traits); @@ -973,7 +973,7 @@ fn expand_benchmark( let verify_stmts = benchmark_def.verify_stmts; let last_stmt = 
benchmark_def.last_stmt; let test_ident = Ident::new( - format!("test_benchmark_{}", name.to_string()).as_str(), + format!("test_benchmark_{}", name).as_str(), Span::call_site(), ); @@ -1106,7 +1106,7 @@ fn expand_benchmark( sig.generics.where_clause = parse_quote!(where #where_clause); } sig.ident = Ident::new( - format!("_{}", name.to_token_stream().to_string()).as_str(), + format!("_{}", name.to_token_stream()).as_str(), Span::call_site(), ); let mut fn_param_inputs: Vec = diff --git a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs index 131c919ef..f00269085 100644 --- a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs @@ -25,7 +25,7 @@ pub fn expand_outer_freeze_reason(pallet_decls: &[Pallet], scrate: &TokenStream) let mut freeze_reason_variants = Vec::new(); let mut freeze_reason_variants_count = Vec::new(); for decl in pallet_decls { - if let Some(_) = decl.find_part("FreezeReason") { + if decl.find_part("FreezeReason").is_some() { let variant_name = &decl.name; let path = &decl.path; let index = decl.index; diff --git a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs index 58870a321..5fc2ed1ee 100644 --- a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs @@ -25,7 +25,7 @@ pub fn expand_outer_hold_reason(pallet_decls: &[Pallet], scrate: &TokenStream) - let mut hold_reason_variants = Vec::new(); let mut hold_reason_variants_count = Vec::new(); for decl in pallet_decls { - if let Some(_) = decl.find_part("HoldReason") { + if decl.find_part("HoldReason").is_some() { let variant_name = &decl.name; let path = &decl.path; let index = decl.index; diff --git 
a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs index 67c2fb933..732fb7ac4 100644 --- a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs +++ b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs @@ -24,7 +24,7 @@ pub fn expand_outer_lock_id(pallet_decls: &[Pallet], scrate: &TokenStream) -> To let mut conversion_fns = Vec::new(); let mut lock_id_variants = Vec::new(); for decl in pallet_decls { - if let Some(_) = decl.find_part("LockId") { + if decl.find_part("LockId").is_some() { let variant_name = &decl.name; let path = &decl.path; let index = decl.index; diff --git a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs index 0695d8102..d9e9e9320 100644 --- a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs @@ -24,7 +24,7 @@ pub fn expand_outer_slash_reason(pallet_decls: &[Pallet], scrate: &TokenStream) let mut conversion_fns = Vec::new(); let mut slash_reason_variants = Vec::new(); for decl in pallet_decls { - if let Some(_) = decl.find_part("SlashReason") { + if decl.find_part("SlashReason").is_some() { let variant_name = &decl.name; let path = &decl.path; let index = decl.index; diff --git a/support/procedural-fork/src/construct_runtime/expand/task.rs b/support/procedural-fork/src/construct_runtime/expand/task.rs index 94a5f52bb..dd8d93c27 100644 --- a/support/procedural-fork/src/construct_runtime/expand/task.rs +++ b/support/procedural-fork/src/construct_runtime/expand/task.rs @@ -69,7 +69,7 @@ pub fn expand_outer_task( let prelude = quote!(#scrate::traits::tasks::__private); - const INCOMPLETE_MATCH_QED: &'static str = + const INCOMPLETE_MATCH_QED: &str = "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; let output = quote! 
{ diff --git a/support/procedural-fork/src/construct_runtime/mod.rs b/support/procedural-fork/src/construct_runtime/mod.rs index de688b3d6..8d0933a51 100644 --- a/support/procedural-fork/src/construct_runtime/mod.rs +++ b/support/procedural-fork/src/construct_runtime/mod.rs @@ -388,18 +388,13 @@ fn construct_runtime_final_expansion( let features = pallets .iter() - .filter_map(|decl| { - (!decl.cfg_pattern.is_empty()).then(|| { - decl.cfg_pattern.iter().flat_map(|attr| { + .filter(|&decl| (!decl.cfg_pattern.is_empty())).flat_map(|decl| decl.cfg_pattern.iter().flat_map(|attr| { attr.predicates().filter_map(|pred| match pred { Predicate::Feature(feat) => Some(feat), Predicate::Test => Some("test"), _ => None, }) - }) - }) - }) - .flatten() + })) .collect::>(); let hidden_crate_name = "construct_runtime"; @@ -439,9 +434,7 @@ fn construct_runtime_final_expansion( let integrity_test = decl_integrity_test(&scrate); let static_assertions = decl_static_assertions(&name, &pallets, &scrate); - let warning = where_section.map_or(None, |where_section| { - Some( - proc_macro_warning::Warning::new_deprecated("WhereSection") + let warning = where_section.map(|where_section| proc_macro_warning::Warning::new_deprecated("WhereSection") .old("use a `where` clause in `construct_runtime`") .new( "use `frame_system::Config` to set the `Block` type and delete this clause. @@ -449,9 +442,7 @@ fn construct_runtime_final_expansion( ) .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) .span(where_section.span) - .build_or_panic(), - ) - }); + .build_or_panic()); let res = quote!( #warning @@ -545,8 +536,7 @@ pub(crate) fn decl_all_pallets<'a>( // Every feature set to the pallet names that should be included by this feature set. 
let mut features_to_names = features - .iter() - .map(|f| *f) + .iter().copied() .powerset() .map(|feat| (HashSet::from_iter(feat), Vec::new())) .collect::, Vec<_>)>>(); diff --git a/support/procedural-fork/src/construct_runtime/parse.rs b/support/procedural-fork/src/construct_runtime/parse.rs index 173a8dd12..26fbb4dee 100644 --- a/support/procedural-fork/src/construct_runtime/parse.rs +++ b/support/procedural-fork/src/construct_runtime/parse.rs @@ -298,7 +298,7 @@ impl Parse for PalletDeclaration { let pallet_parts = if input.peek(Token![::]) && input.peek3(token::Brace) { let _: Token![::] = input.parse()?; let mut parts = parse_pallet_parts(input)?; - parts.extend(extra_parts.into_iter()); + parts.extend(extra_parts); Some(parts) } else if !input.peek(keyword::exclude_parts) && !input.peek(keyword::use_parts) @@ -740,8 +740,8 @@ fn convert_pallets(pallets: Vec) -> syn::Result { - assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params") - } - _ => (), + if let syn::Meta::Path(path) = &attr.meta { + assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params") } let runtime_params = &self.runtime_params; @@ -184,8 +181,7 @@ impl VisitMut for MacroInjectArgs { attr.meta = syn::parse2::(quote! { dynamic_pallet_params(#runtime_params, #params_pallet) }) - .unwrap() - .into(); + .unwrap(); } visit_mut::visit_item_mod_mut(self, item); diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index 71e8ecba8..570e14727 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -41,7 +41,7 @@ pub(crate) const NUMBER_OF_INSTANCE: u8 = 16; thread_local! { /// A global counter, can be used to generate a relatively unique identifier. - static COUNTER: RefCell = RefCell::new(Counter(0)); + static COUNTER: RefCell = const { RefCell::new(Counter(0)) }; } /// Counter to generate a relatively unique identifier for macros. 
This is necessary because diff --git a/support/procedural-fork/src/no_bound/default.rs b/support/procedural-fork/src/no_bound/default.rs index 1c0d90531..6776d84d2 100644 --- a/support/procedural-fork/src/no_bound/default.rs +++ b/support/procedural-fork/src/no_bound/default.rs @@ -136,7 +136,7 @@ pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::To err.extend( additional - .into_iter() + .iter() .map(|variant| syn::Error::new_spanned(variant, "additional default")), ); diff --git a/support/procedural-fork/src/pallet/expand/config.rs b/support/procedural-fork/src/pallet/expand/config.rs index 836c74ae7..55ac72537 100644 --- a/support/procedural-fork/src/pallet/expand/config.rs +++ b/support/procedural-fork/src/pallet/expand/config.rs @@ -51,7 +51,7 @@ Consequently, a runtime that wants to include this pallet must implement this tr // we only emit `DefaultConfig` if there are trait items, so an empty `DefaultConfig` is // impossible consequently. match &config.default_sub_trait { - Some(default_sub_trait) if default_sub_trait.items.len() > 0 => { + Some(default_sub_trait) if !default_sub_trait.items.is_empty() => { let trait_items = &default_sub_trait .items .iter() diff --git a/support/procedural-fork/src/pallet/expand/documentation.rs b/support/procedural-fork/src/pallet/expand/documentation.rs index adc4f7ce9..42891dab6 100644 --- a/support/procedural-fork/src/pallet/expand/documentation.rs +++ b/support/procedural-fork/src/pallet/expand/documentation.rs @@ -20,8 +20,8 @@ use proc_macro2::TokenStream; use quote::ToTokens; use syn::{spanned::Spanned, Attribute, Lit, LitStr}; -const DOC: &'static str = "doc"; -const PALLET_DOC: &'static str = "pallet_doc"; +const DOC: &str = "doc"; +const PALLET_DOC: &str = "pallet_doc"; /// Get the documentation file path from the `pallet_doc` attribute. 
/// diff --git a/support/procedural-fork/src/pallet/expand/storage.rs b/support/procedural-fork/src/pallet/expand/storage.rs index b77e9846b..32752dc52 100644 --- a/support/procedural-fork/src/pallet/expand/storage.rs +++ b/support/procedural-fork/src/pallet/expand/storage.rs @@ -183,7 +183,7 @@ pub fn process_generics(def: &mut Def) -> syn::Result TokenStream2 { if let Some(tasks_def) = &def.tasks { if def.task_enum.is_none() { def.task_enum = Some(TaskEnumDef::generate( - &tasks_def, + tasks_def, def.type_decl_bounded_generics(tasks_def.item_impl.span()), def.type_use_generics(tasks_def.item_impl.span()), )); diff --git a/support/procedural-fork/src/pallet/expand/warnings.rs b/support/procedural-fork/src/pallet/expand/warnings.rs index 3d71b83af..ece03a13a 100644 --- a/support/procedural-fork/src/pallet/expand/warnings.rs +++ b/support/procedural-fork/src/pallet/expand/warnings.rs @@ -43,7 +43,7 @@ pub(crate) fn weight_witness_warning( .help_link("https://github.com/paritytech/polkadot-sdk/pull/1818"); for (_, arg_ident, _) in method.args.iter() { - if !arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), &arg_ident) { + if !arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), arg_ident) { continue; } diff --git a/support/procedural-fork/src/pallet/mod.rs b/support/procedural-fork/src/pallet/mod.rs index d3796662f..5b9bc621b 100644 --- a/support/procedural-fork/src/pallet/mod.rs +++ b/support/procedural-fork/src/pallet/mod.rs @@ -41,7 +41,7 @@ pub fn pallet( ) -> proc_macro::TokenStream { let mut dev_mode = false; if !attr.is_empty() { - if let Ok(_) = syn::parse::(attr.clone()) { + if syn::parse::(attr.clone()).is_ok() { dev_mode = true; } else { let msg = "Invalid pallet macro call: unexpected attribute. 
Macro call must be \ diff --git a/support/procedural-fork/src/pallet/parse/call.rs b/support/procedural-fork/src/pallet/parse/call.rs index 865c63473..0bcf38a4e 100644 --- a/support/procedural-fork/src/pallet/parse/call.rs +++ b/support/procedural-fork/src/pallet/parse/call.rs @@ -189,7 +189,7 @@ pub fn check_dispatchable_first_arg_type(ty: &syn::Type, is_ref: bool) -> syn::R let result_origin_for = syn::parse2::(ty.to_token_stream()); let result_runtime_origin = syn::parse2::(ty.to_token_stream()); - return match (result_origin_for, result_runtime_origin) { + match (result_origin_for, result_runtime_origin) { (Ok(CheckOriginFor(has_ref)), _) if is_ref == has_ref => Ok(()), (_, Ok(_)) => Ok(()), (_, _) => { @@ -198,9 +198,9 @@ pub fn check_dispatchable_first_arg_type(ty: &syn::Type, is_ref: bool) -> syn::R } else { "Invalid type: expected `OriginFor` or `T::RuntimeOrigin`" }; - return Err(syn::Error::new(ty.span(), msg)); + Err(syn::Error::new(ty.span(), msg)) } - }; + } } impl CallDef { diff --git a/support/procedural-fork/src/pallet/parse/config.rs b/support/procedural-fork/src/pallet/parse/config.rs index cde565245..9ecdbddc3 100644 --- a/support/procedural-fork/src/pallet/parse/config.rs +++ b/support/procedural-fork/src/pallet/parse/config.rs @@ -290,7 +290,7 @@ fn has_expected_system_config(path: syn::Path, frame_system: &syn::Path) -> bool let mut expected_system_config = match ( is_using_frame_crate(&path), - is_using_frame_crate(&frame_system), + is_using_frame_crate(frame_system), ) { (true, false) => // We can't use the path to `frame_system` from `frame` if `frame_system` is not being diff --git a/support/procedural-fork/src/pallet/parse/helper.rs b/support/procedural-fork/src/pallet/parse/helper.rs index f58c8d81c..3d39e0aa0 100644 --- a/support/procedural-fork/src/pallet/parse/helper.rs +++ b/support/procedural-fork/src/pallet/parse/helper.rs @@ -547,10 +547,10 @@ pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result()?; } 
input.parse::]>()?; - return Ok(Self(Some(instance_usage))); + Ok(Self(Some(instance_usage))) } else { input.parse::()?; - return Ok(Self(None)); + Ok(Self(None)) } } } @@ -652,7 +652,7 @@ pub fn check_pallet_call_return_type(type_: &syn::Type) -> syn::Result<()> { } pub(crate) fn two128_str(s: &str) -> TokenStream { - bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes()).into_iter()) + bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes())) } pub(crate) fn bytes_to_array(bytes: impl IntoIterator) -> TokenStream { @@ -661,5 +661,4 @@ pub(crate) fn bytes_to_array(bytes: impl IntoIterator) -> TokenStream quote!( [ #( #bytes ),* ] ) - .into() } diff --git a/support/procedural-fork/src/pallet/parse/mod.rs b/support/procedural-fork/src/pallet/parse/mod.rs index 57c252473..c0f9eca20 100644 --- a/support/procedural-fork/src/pallet/parse/mod.rs +++ b/support/procedural-fork/src/pallet/parse/mod.rs @@ -287,8 +287,7 @@ impl Def { "A `#[pallet::tasks_experimental]` attribute must be attached to your `Task` impl if the \ task enum has been omitted", )); - } else { - } + } } _ => (), } @@ -310,7 +309,7 @@ impl Def { return Ok(()); }; let type_path = type_path.path.segments.iter().collect::>(); - let (Some(seg), None) = (type_path.get(0), type_path.get(1)) else { + let (Some(seg), None) = (type_path.first(), type_path.get(1)) else { return Ok(()); }; let mut result = None; @@ -356,7 +355,7 @@ impl Def { continue; }; let target_path = target_path.path.segments.iter().collect::>(); - let (Some(target_ident), None) = (target_path.get(0), target_path.get(1)) else { + let (Some(target_ident), None) = (target_path.first(), target_path.get(1)) else { continue; }; let matches_task_enum = match task_enum { @@ -420,10 +419,9 @@ impl Def { instances.extend_from_slice(&genesis_config.instances[..]); } if let Some(genesis_build) = &self.genesis_build { - genesis_build + if let Some(i) = genesis_build .instances - .as_ref() - .map(|i| instances.extend_from_slice(&i)); + .as_ref() { 
instances.extend_from_slice(i) } } if let Some(extra_constants) = &self.extra_constants { instances.extend_from_slice(&extra_constants.instances[..]); diff --git a/support/procedural-fork/src/pallet/parse/storage.rs b/support/procedural-fork/src/pallet/parse/storage.rs index 811832427..dac0782bd 100644 --- a/support/procedural-fork/src/pallet/parse/storage.rs +++ b/support/procedural-fork/src/pallet/parse/storage.rs @@ -955,7 +955,7 @@ impl StorageDef { let msg = format!( "Invalid pallet::storage, unexpected generic args for ResultQuery, \ expected angle-bracketed arguments, found `{}`", - args.to_token_stream().to_string() + args.to_token_stream() ); return Err(syn::Error::new(args.span(), msg)); } @@ -1005,7 +1005,7 @@ impl StorageDef { let msg = format!( "Invalid pallet::storage, unexpected generic argument kind, expected a \ type path to a `PalletError` enum variant, found `{}`", - gen_arg.to_token_stream().to_string(), + gen_arg.to_token_stream(), ); Err(syn::Error::new(gen_arg.span(), msg)) } diff --git a/support/procedural-fork/src/pallet/parse/tasks.rs b/support/procedural-fork/src/pallet/parse/tasks.rs index f1728f824..4d9ad9d0d 100644 --- a/support/procedural-fork/src/pallet/parse/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tasks.rs @@ -104,7 +104,7 @@ impl syn::parse::Parse for TasksDef { // we require the path on the impl to be a TypePath let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; let segments = enum_path.path.segments.iter().collect::>(); - let (Some(last_seg), None) = (segments.get(0), segments.get(1)) else { + let (Some(last_seg), None) = (segments.first(), segments.get(1)) else { return Err(Error::new( enum_path.span(), "if specified manually, the task enum must be defined locally in this \ @@ -456,7 +456,7 @@ impl TryFrom> for TaskIndexAttr { match value.meta { TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), _ => { - return Err(Error::new( + Err(Error::new( value.span(), format!( 
"`{:?}` cannot be converted to a `TaskIndexAttr`", @@ -478,7 +478,7 @@ impl TryFrom> for TaskConditionAttr { match value.meta { TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), _ => { - return Err(Error::new( + Err(Error::new( value.span(), format!( "`{:?}` cannot be converted to a `TaskConditionAttr`", @@ -500,7 +500,7 @@ impl TryFrom> for TaskWeightAttr { match value.meta { TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), _ => { - return Err(Error::new( + Err(Error::new( value.span(), format!( "`{:?}` cannot be converted to a `TaskWeightAttr`", @@ -522,7 +522,7 @@ impl TryFrom> for TaskListAttr { match value.meta { TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), _ => { - return Err(Error::new( + Err(Error::new( value.span(), format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), )) @@ -544,7 +544,7 @@ fn extract_pallet_attr(item_enum: &mut ItemEnum) -> Result> .iter() .map(|seg| seg.ident.clone()) .collect::>(); - let (Some(seg1), Some(_), None) = (segs.get(0), segs.get(1), segs.get(2)) else { + let (Some(seg1), Some(_), None) = (segs.first(), segs.get(1), segs.get(2)) else { return true; }; if seg1 != "pallet" { diff --git a/support/procedural-fork/src/pallet_error.rs b/support/procedural-fork/src/pallet_error.rs index bdf8330cd..e78844c63 100644 --- a/support/procedural-fork/src/pallet_error.rs +++ b/support/procedural-fork/src/pallet_error.rs @@ -45,7 +45,7 @@ pub fn derive_pallet_error(input: proc_macro::TokenStream) -> proc_macro::TokenS }) => { let maybe_field_tys = fields .iter() - .map(|f| generate_field_types(f, &frame_support)) + .map(|f| generate_field_types(f, frame_support)) .collect::>>(); let field_tys = match maybe_field_tys { Ok(tys) => tys.into_iter().flatten(), @@ -65,7 +65,7 @@ pub fn derive_pallet_error(input: proc_macro::TokenStream) -> proc_macro::TokenS syn::Data::Enum(syn::DataEnum { variants, .. 
}) => { let field_tys = variants .iter() - .map(|variant| generate_variant_field_types(variant, &frame_support)) + .map(|variant| generate_variant_field_types(variant, frame_support)) .collect::>>, syn::Error>>(); let field_tys = match field_tys { diff --git a/support/procedural-fork/src/runtime/expand/mod.rs b/support/procedural-fork/src/runtime/expand/mod.rs index c26cbccb7..4ba2b0a74 100644 --- a/support/procedural-fork/src/runtime/expand/mod.rs +++ b/support/procedural-fork/src/runtime/expand/mod.rs @@ -46,7 +46,7 @@ pub fn expand(def: Def, legacy_ordering: bool) -> TokenStream2 { let (check_pallet_number_res, res) = match def.pallets { AllPalletsDeclaration::Implicit(ref decl) => ( check_pallet_number(input.clone(), decl.pallet_count), - construct_runtime_implicit_to_explicit(input.into(), decl.clone(), legacy_ordering), + construct_runtime_implicit_to_explicit(input, decl.clone(), legacy_ordering), ), AllPalletsDeclaration::Explicit(ref decl) => ( check_pallet_number(input, decl.pallets.len()), @@ -76,13 +76,13 @@ pub fn expand(def: Def, legacy_ordering: bool) -> TokenStream2 { res }; - let res = expander::Expander::new("construct_runtime") + + + expander::Expander::new("construct_runtime") .dry(std::env::var("FRAME_EXPAND").is_err()) .verbose(true) .write_to_out_dir(res) - .expect("Does not fail because of IO in OUT_DIR; qed"); - - res.into() + .expect("Does not fail because of IO in OUT_DIR; qed") } fn construct_runtime_implicit_to_explicit( @@ -157,18 +157,13 @@ fn construct_runtime_final_expansion( let features = pallets .iter() - .filter_map(|decl| { - (!decl.cfg_pattern.is_empty()).then(|| { - decl.cfg_pattern.iter().flat_map(|attr| { + .filter(|&decl| (!decl.cfg_pattern.is_empty())).flat_map(|decl| decl.cfg_pattern.iter().flat_map(|attr| { attr.predicates().filter_map(|pred| match pred { Predicate::Feature(feat) => Some(feat), Predicate::Test => Some("test"), _ => None, }) - }) - }) - }) - .flatten() + })) .collect::>(); let hidden_crate_name = 
"construct_runtime"; diff --git a/support/procedural-fork/src/runtime/mod.rs b/support/procedural-fork/src/runtime/mod.rs index 589acff6c..888a15e11 100644 --- a/support/procedural-fork/src/runtime/mod.rs +++ b/support/procedural-fork/src/runtime/mod.rs @@ -216,7 +216,7 @@ mod keyword { pub fn runtime(attr: TokenStream, tokens: TokenStream) -> TokenStream { let mut legacy_ordering = false; if !attr.is_empty() { - if let Ok(_) = syn::parse::(attr.clone()) { + if syn::parse::(attr.clone()).is_ok() { legacy_ordering = true; } else { let msg = "Invalid runtime macro call: unexpected attribute. Macro call must be \ diff --git a/support/procedural-fork/src/runtime/parse/mod.rs b/support/procedural-fork/src/runtime/parse/mod.rs index 01245187f..c2b33fd76 100644 --- a/support/procedural-fork/src/runtime/parse/mod.rs +++ b/support/procedural-fork/src/runtime/parse/mod.rs @@ -140,7 +140,7 @@ pub struct Def { impl Def { pub fn try_from(mut item: syn::ItemMod) -> syn::Result { - let input: TokenStream2 = item.to_token_stream().into(); + let input: TokenStream2 = item.to_token_stream(); let item_span = item.span(); let items = &mut item .content @@ -207,8 +207,8 @@ impl Def { { let msg = "Two pallets with the same name!"; - let mut err = syn::Error::new(used_pallet, &msg); - err.combine(syn::Error::new(pallet_decl.name.span(), &msg)); + let mut err = syn::Error::new(used_pallet, msg); + err.combine(syn::Error::new(pallet_decl.name.span(), msg)); return Err(err); } diff --git a/support/procedural-fork/src/runtime/parse/pallet.rs b/support/procedural-fork/src/runtime/parse/pallet.rs index 039e2631b..54662bfd7 100644 --- a/support/procedural-fork/src/runtime/parse/pallet.rs +++ b/support/procedural-fork/src/runtime/parse/pallet.rs @@ -77,9 +77,7 @@ impl Pallet { } } - pallet_parts = pallet_parts - .into_iter() - .filter(|part| { + pallet_parts.retain(|part| { if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { false } else if let (true, 
&PalletPartKeyword::ValidateUnsigned(_)) = @@ -89,8 +87,7 @@ impl Pallet { } else { true } - }) - .collect(); + }); let cfg_pattern = vec![]; From f8f2b952b642d6bac18b4577bbfa579d00dd704e Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:09:31 -0400 Subject: [PATCH 088/213] supporess procedural-fork clippy warnings --- support/procedural-fork/src/lib.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index 570e14727..4680ba09a 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -10,8 +10,7 @@ //! on an as-needed, ad-hoc basis, and versions will matched the corresponding `polkadot-sdk` //! version/tag name. #![recursion_limit = "512"] -#![deny(rustdoc::broken_intra_doc_links)] -#![allow(unused)] +#![allow(warnings)] extern crate proc_macro; From 4a194d3aa00d76a0d2414bacf92a42a0bd3f35b7 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:11:00 -0400 Subject: [PATCH 089/213] allow more in procedural-fork --- support/procedural-fork/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index 4680ba09a..ce493859c 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -11,6 +11,7 @@ //! version/tag name. #![recursion_limit = "512"] #![allow(warnings)] +#![allow(all)] extern crate proc_macro; From 6985939af0ce46ddcac65b6a842a22f2163dd1db Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:13:17 -0400 Subject: [PATCH 090/213] tweak --- support/procedural-fork/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index ce493859c..576f4b421 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -11,7 +11,7 @@ //! version/tag name. 
#![recursion_limit = "512"] #![allow(warnings)] -#![allow(all)] +#![allow(clippy)] extern crate proc_macro; From 28b74feb8737765283ef53d03226509ec49f8c8c Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:15:42 -0400 Subject: [PATCH 091/213] allow clippy::all in procedural-fork --- support/procedural-fork/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index 576f4b421..cef3891a0 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -11,7 +11,7 @@ //! version/tag name. #![recursion_limit = "512"] #![allow(warnings)] -#![allow(clippy)] +#![allow(clippy::all)] extern crate proc_macro; From 2a35d7839c0dec79e6d9b9d8c13a72b8abca943f Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:17:58 -0400 Subject: [PATCH 092/213] successfully suppress clippy in procedural-fork --- support/procedural-fork/Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/support/procedural-fork/Cargo.toml b/support/procedural-fork/Cargo.toml index 61221ead8..503c81f1f 100644 --- a/support/procedural-fork/Cargo.toml +++ b/support/procedural-fork/Cargo.toml @@ -3,8 +3,8 @@ name = "procedural-fork" version = "1.10.0-rc3" edition = "2021" -[lints] -workspace = true +[lints.clippy] +all = "allow" [dependencies] derive-syn-parse = "0.2" From 3ffcdec175daa5f914910fa35fb82115f3b750ee Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:22:39 -0400 Subject: [PATCH 093/213] cargo fmt --all --- .../src/construct_runtime/mod.rs | 38 +-- .../procedural-fork/src/no_bound/default.rs | 242 +++++++++--------- .../procedural-fork/src/pallet/parse/mod.rs | 8 +- .../procedural-fork/src/pallet/parse/tasks.rs | 58 ++--- .../procedural-fork/src/runtime/expand/mod.rs | 19 +- .../src/runtime/parse/pallet.rs | 20 +- 6 files changed, 193 insertions(+), 192 deletions(-) diff --git 
a/support/procedural-fork/src/construct_runtime/mod.rs b/support/procedural-fork/src/construct_runtime/mod.rs index 8d0933a51..f01ebe0dd 100644 --- a/support/procedural-fork/src/construct_runtime/mod.rs +++ b/support/procedural-fork/src/construct_runtime/mod.rs @@ -388,13 +388,16 @@ fn construct_runtime_final_expansion( let features = pallets .iter() - .filter(|&decl| (!decl.cfg_pattern.is_empty())).flat_map(|decl| decl.cfg_pattern.iter().flat_map(|attr| { - attr.predicates().filter_map(|pred| match pred { - Predicate::Feature(feat) => Some(feat), - Predicate::Test => Some("test"), - _ => None, - }) - })) + .filter(|&decl| (!decl.cfg_pattern.is_empty())) + .flat_map(|decl| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) .collect::>(); let hidden_crate_name = "construct_runtime"; @@ -434,15 +437,17 @@ fn construct_runtime_final_expansion( let integrity_test = decl_integrity_test(&scrate); let static_assertions = decl_static_assertions(&name, &pallets, &scrate); - let warning = where_section.map(|where_section| proc_macro_warning::Warning::new_deprecated("WhereSection") - .old("use a `where` clause in `construct_runtime`") - .new( - "use `frame_system::Config` to set the `Block` type and delete this clause. + let warning = where_section.map(|where_section| { + proc_macro_warning::Warning::new_deprecated("WhereSection") + .old("use a `where` clause in `construct_runtime`") + .new( + "use `frame_system::Config` to set the `Block` type and delete this clause. 
It is planned to be removed in December 2023", - ) - .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) - .span(where_section.span) - .build_or_panic()); + ) + .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) + .span(where_section.span) + .build_or_panic() + }); let res = quote!( #warning @@ -536,7 +541,8 @@ pub(crate) fn decl_all_pallets<'a>( // Every feature set to the pallet names that should be included by this feature set. let mut features_to_names = features - .iter().copied() + .iter() + .copied() .powerset() .map(|feat| (HashSet::from_iter(feat), Vec::new())) .collect::, Vec<_>)>>(); diff --git a/support/procedural-fork/src/no_bound/default.rs b/support/procedural-fork/src/no_bound/default.rs index 6776d84d2..cb054878d 100644 --- a/support/procedural-fork/src/no_bound/default.rs +++ b/support/procedural-fork/src/no_bound/default.rs @@ -27,132 +27,134 @@ pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::To let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = - match input.data { - Data::Struct(struct_) => match struct_.fields { - Fields::Named(named) => { - let fields = named.named.iter().map(|field| &field.ident).map(|ident| { - quote_spanned! {ident.span() => - #ident: ::core::default::Default::default() - } - }); + let impl_ = match input.data { + Data::Struct(struct_) => match struct_.fields { + Fields::Named(named) => { + let fields = named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! {ident.span() => + #ident: ::core::default::Default::default() + } + }); + + quote!(Self { #( #fields, )* }) + } + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! 
{field.span()=> + ::core::default::Default::default() + } + }); + + quote!(Self( #( #fields, )* )) + } + Fields::Unit => { + quote!(Self) + } + }, + Data::Enum(enum_) => { + if enum_.variants.is_empty() { + return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") + .to_compile_error() + .into(); + } - quote!(Self { #( #fields, )* }) - } - Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().map(|field| { - quote_spanned! {field.span()=> - ::core::default::Default::default() + // all #[default] attrs with the variant they're on; i.e. a var + let default_variants = enum_ + .variants + .into_iter() + .filter(|variant| { + variant + .attrs + .iter() + .any(|attr| attr.path().is_ident("default")) + }) + .collect::>(); + + match &*default_variants { + [] => return syn::Error::new( + name.clone().span(), + "no default declared, make a variant default by placing `#[default]` above it", + ) + .into_compile_error() + .into(), + // only one variant with the #[default] attribute set + [default_variant] => { + let variant_attrs = default_variant + .attrs + .iter() + .filter(|a| a.path().is_ident("default")) + .collect::>(); + + // check that there is only one #[default] attribute on the variant + if let [first_attr, second_attr, additional_attrs @ ..] = &*variant_attrs { + let mut err = + syn::Error::new(Span::call_site(), "multiple `#[default]` attributes"); + + err.combine(syn::Error::new_spanned( + first_attr, + "`#[default]` used here", + )); + + err.extend([second_attr].into_iter().chain(additional_attrs).map( + |variant| { + syn::Error::new_spanned(variant, "`#[default]` used again here") + }, + )); + + return err.into_compile_error().into(); + } + + let variant_ident = &default_variant.ident; + + let fully_qualified_variant_path = quote!(Self::#variant_ident); + + match &default_variant.fields { + Fields::Named(named) => { + let fields = + named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! 
{ident.span()=> + #ident: ::core::default::Default::default() + } + }); + + quote!(#fully_qualified_variant_path { #( #fields, )* }) } - }); - - quote!(Self( #( #fields, )* )) - } - Fields::Unit => { - quote!(Self) - } - }, - Data::Enum(enum_) => { - if enum_.variants.is_empty() { - return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") - .to_compile_error() - .into(); + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! {field.span()=> + ::core::default::Default::default() + } + }); + + quote!(#fully_qualified_variant_path( #( #fields, )* )) + } + Fields::Unit => fully_qualified_variant_path, + } } + [first, additional @ ..] => { + let mut err = syn::Error::new(Span::call_site(), "multiple declared defaults"); - // all #[default] attrs with the variant they're on; i.e. a var - let default_variants = enum_ - .variants - .into_iter() - .filter(|variant| { - variant - .attrs + err.combine(syn::Error::new_spanned(first, "first default")); + + err.extend( + additional .iter() - .any(|attr| attr.path().is_ident("default")) - }) - .collect::>(); - - match &*default_variants { - [] => return syn::Error::new( - name.clone().span(), - "no default declared, make a variant default by placing `#[default]` above it", - ) - .into_compile_error() - .into(), - // only one variant with the #[default] attribute set - [default_variant] => { - let variant_attrs = default_variant - .attrs - .iter() - .filter(|a| a.path().is_ident("default")) - .collect::>(); - - // check that there is only one #[default] attribute on the variant - if let [first_attr, second_attr, additional_attrs @ ..] 
= &*variant_attrs { - let mut err = - syn::Error::new(Span::call_site(), "multiple `#[default]` attributes"); - - err.combine(syn::Error::new_spanned(first_attr, "`#[default]` used here")); - - err.extend([second_attr].into_iter().chain(additional_attrs).map( - |variant| { - syn::Error::new_spanned(variant, "`#[default]` used again here") - }, - )); - - return err.into_compile_error().into() - } - - let variant_ident = &default_variant.ident; - - let fully_qualified_variant_path = quote!(Self::#variant_ident); - - match &default_variant.fields { - Fields::Named(named) => { - let fields = - named.named.iter().map(|field| &field.ident).map(|ident| { - quote_spanned! {ident.span()=> - #ident: ::core::default::Default::default() - } - }); - - quote!(#fully_qualified_variant_path { #( #fields, )* }) - }, - Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().map(|field| { - quote_spanned! {field.span()=> - ::core::default::Default::default() - } - }); - - quote!(#fully_qualified_variant_path( #( #fields, )* )) - }, - Fields::Unit => fully_qualified_variant_path, - } - }, - [first, additional @ ..] 
=> { - let mut err = syn::Error::new(Span::call_site(), "multiple declared defaults"); - - err.combine(syn::Error::new_spanned(first, "first default")); - - err.extend( - additional - .iter() - .map(|variant| syn::Error::new_spanned(variant, "additional default")), - ); - - return err.into_compile_error().into() - }, - } - } - Data::Union(union_) => { - return syn::Error::new_spanned( - union_.union_token, - "Union type not supported by `derive(DefaultNoBound)`", - ) - .to_compile_error() - .into() + .map(|variant| syn::Error::new_spanned(variant, "additional default")), + ); + + return err.into_compile_error().into(); + } } - }; + } + Data::Union(union_) => { + return syn::Error::new_spanned( + union_.union_token, + "Union type not supported by `derive(DefaultNoBound)`", + ) + .to_compile_error() + .into() + } + }; quote!( const _: () = { diff --git a/support/procedural-fork/src/pallet/parse/mod.rs b/support/procedural-fork/src/pallet/parse/mod.rs index c0f9eca20..53e65fd12 100644 --- a/support/procedural-fork/src/pallet/parse/mod.rs +++ b/support/procedural-fork/src/pallet/parse/mod.rs @@ -287,7 +287,7 @@ impl Def { "A `#[pallet::tasks_experimental]` attribute must be attached to your `Task` impl if the \ task enum has been omitted", )); - } + } } _ => (), } @@ -419,9 +419,9 @@ impl Def { instances.extend_from_slice(&genesis_config.instances[..]); } if let Some(genesis_build) = &self.genesis_build { - if let Some(i) = genesis_build - .instances - .as_ref() { instances.extend_from_slice(i) } + if let Some(i) = genesis_build.instances.as_ref() { + instances.extend_from_slice(i) + } } if let Some(extra_constants) = &self.extra_constants { instances.extend_from_slice(&extra_constants.instances[..]); diff --git a/support/procedural-fork/src/pallet/parse/tasks.rs b/support/procedural-fork/src/pallet/parse/tasks.rs index 4d9ad9d0d..66ee1a7ef 100644 --- a/support/procedural-fork/src/pallet/parse/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tasks.rs @@ -455,15 
+455,13 @@ impl TryFrom> for TaskIndexAttr { let colons = value.colons; match value.meta { TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => { - Err(Error::new( - value.span(), - format!( - "`{:?}` cannot be converted to a `TaskIndexAttr`", - value.meta - ), - )) - } + _ => Err(Error::new( + value.span(), + format!( + "`{:?}` cannot be converted to a `TaskIndexAttr`", + value.meta + ), + )), } } } @@ -477,15 +475,13 @@ impl TryFrom> for TaskConditionAttr { let colons = value.colons; match value.meta { TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => { - Err(Error::new( - value.span(), - format!( - "`{:?}` cannot be converted to a `TaskConditionAttr`", - value.meta - ), - )) - } + _ => Err(Error::new( + value.span(), + format!( + "`{:?}` cannot be converted to a `TaskConditionAttr`", + value.meta + ), + )), } } } @@ -499,15 +495,13 @@ impl TryFrom> for TaskWeightAttr { let colons = value.colons; match value.meta { TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => { - Err(Error::new( - value.span(), - format!( - "`{:?}` cannot be converted to a `TaskWeightAttr`", - value.meta - ), - )) - } + _ => Err(Error::new( + value.span(), + format!( + "`{:?}` cannot be converted to a `TaskWeightAttr`", + value.meta + ), + )), } } } @@ -521,12 +515,10 @@ impl TryFrom> for TaskListAttr { let colons = value.colons; match value.meta { TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => { - Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), - )) - } + _ => Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), + )), } } } diff --git a/support/procedural-fork/src/runtime/expand/mod.rs b/support/procedural-fork/src/runtime/expand/mod.rs index 4ba2b0a74..38d40964b 100644 --- a/support/procedural-fork/src/runtime/expand/mod.rs +++ 
b/support/procedural-fork/src/runtime/expand/mod.rs @@ -76,8 +76,6 @@ pub fn expand(def: Def, legacy_ordering: bool) -> TokenStream2 { res }; - - expander::Expander::new("construct_runtime") .dry(std::env::var("FRAME_EXPAND").is_err()) .verbose(true) @@ -157,13 +155,16 @@ fn construct_runtime_final_expansion( let features = pallets .iter() - .filter(|&decl| (!decl.cfg_pattern.is_empty())).flat_map(|decl| decl.cfg_pattern.iter().flat_map(|attr| { - attr.predicates().filter_map(|pred| match pred { - Predicate::Feature(feat) => Some(feat), - Predicate::Test => Some("test"), - _ => None, - }) - })) + .filter(|&decl| (!decl.cfg_pattern.is_empty())) + .flat_map(|decl| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) .collect::>(); let hidden_crate_name = "construct_runtime"; diff --git a/support/procedural-fork/src/runtime/parse/pallet.rs b/support/procedural-fork/src/runtime/parse/pallet.rs index 54662bfd7..976aba764 100644 --- a/support/procedural-fork/src/runtime/parse/pallet.rs +++ b/support/procedural-fork/src/runtime/parse/pallet.rs @@ -78,16 +78,16 @@ impl Pallet { } pallet_parts.retain(|part| { - if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { - false - } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = - (disable_unsigned, &part.keyword) - { - false - } else { - true - } - }); + if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { + false + } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = + (disable_unsigned, &part.keyword) + { + false + } else { + true + } + }); let cfg_pattern = vec![]; From 47db25a6ac236b43f9ecf85dd51287c0821a97df Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:33:37 -0400 Subject: [PATCH 094/213] allow unwrap in pallet index tests --- support/linting/src/pallet_index.rs | 1 + 1 file changed, 
1 insertion(+) diff --git a/support/linting/src/pallet_index.rs b/support/linting/src/pallet_index.rs index b74e5a62c..8ed3627d5 100644 --- a/support/linting/src/pallet_index.rs +++ b/support/linting/src/pallet_index.rs @@ -106,6 +106,7 @@ impl ConstructRuntimeVisitor { } #[cfg(test)] +#[allow(clippy::unwrap_used)] mod tests { use super::*; use quote::quote; From 22ba77c818d37e6c4338f4d2caada7f74be6a540 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:44:00 -0400 Subject: [PATCH 095/213] fix lint check in CI --- .github/workflows/check-rust.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index b088744cb..95901979d 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -157,9 +157,9 @@ jobs: - name: check lints run: | - set -o pipefail # Ensure the pipeline fails if any command in the pipeline fails - cargo check 2>&1 | sed -r "s/\x1B\[[0-9;]*[mK]//g" | tee /dev/tty | grep -q "^warning:" && \ - (echo "Build emitted the following warnings:" >&2 && exit 1) || echo "No warnings found." + set -o pipefail + cargo check 2>&1 | sed -r "s/\x1B\[[0-9;]*[mK]//g" | tee /dev/tty | grep "warning:" && exit 1 + echo "No warnings found." 
cargo-clippy-all-features: name: cargo clippy --all-features From 3ea6cf6a570fc8f0db80e4308cbcc86c0d7c231d Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:46:54 -0400 Subject: [PATCH 096/213] fix lint check again --- .github/workflows/check-rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 95901979d..d1796364b 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -158,7 +158,7 @@ jobs: - name: check lints run: | set -o pipefail - cargo check 2>&1 | sed -r "s/\x1B\[[0-9;]*[mK]//g" | tee /dev/tty | grep "warning:" && exit 1 + cargo check 2>&1 | sed -r "s/\x1B\[[0-9;]*[mK]//g" | grep "warning:" && exit 1 echo "No warnings found." cargo-clippy-all-features: From bbab17a07b7fa19a7b90c30fc90379ce1bf6a934 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 02:50:05 -0400 Subject: [PATCH 097/213] lint check confirmed working in CI, fixing canary! 
--- pallets/subtensor/tests/mock.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pallets/subtensor/tests/mock.rs b/pallets/subtensor/tests/mock.rs index 9555833de..6f3b44383 100644 --- a/pallets/subtensor/tests/mock.rs +++ b/pallets/subtensor/tests/mock.rs @@ -25,7 +25,7 @@ frame_support::construct_runtime!( pub enum Test { System: frame_system::{Pallet, Call, Config, Storage, Event} = 1, - Balances: pallet_balances::{Pallet, Call, Config, Storage, Event}, + Balances: pallet_balances::{Pallet, Call, Config, Storage, Event} = 2, Triumvirate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config} = 3, TriumvirateMembers: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 4, Senate: pallet_collective::::{Pallet, Call, Storage, Origin, Event, Config} = 5, From b4b71756cb6ef5c8e45f5f847fae611de5324978 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 09:56:17 -0400 Subject: [PATCH 098/213] undo clippy changes to procedural-fork since we have silenced --- support/procedural-fork/src/benchmark.rs | 2121 ++++++++--------- .../src/construct_runtime/expand/call.rs | 393 ++- .../expand/composite_helper.rs | 132 +- .../src/construct_runtime/expand/config.rs | 208 +- .../construct_runtime/expand/freeze_reason.rs | 90 +- .../construct_runtime/expand/hold_reason.rs | 90 +- .../src/construct_runtime/expand/inherent.rs | 459 ++-- .../src/construct_runtime/expand/lock_id.rs | 72 +- .../src/construct_runtime/expand/metadata.rs | 399 ++-- .../src/construct_runtime/expand/origin.rs | 846 ++++--- .../construct_runtime/expand/outer_enums.rs | 379 ++- .../construct_runtime/expand/slash_reason.rs | 72 +- .../src/construct_runtime/expand/task.rs | 212 +- .../src/construct_runtime/expand/unsigned.rs | 113 +- .../src/construct_runtime/mod.rs | 1006 ++++---- .../src/construct_runtime/parse.rs | 1257 +++++----- support/procedural-fork/src/crate_version.rs | 36 +- support/procedural-fork/src/derive_impl.rs | 386 ++- 
.../procedural-fork/src/dummy_part_checker.rs | 98 +- support/procedural-fork/src/dynamic_params.rs | 418 ++-- support/procedural-fork/src/key_prefix.rs | 142 +- .../procedural-fork/src/match_and_insert.rs | 244 +- support/procedural-fork/src/no_bound/clone.rs | 162 +- support/procedural-fork/src/no_bound/debug.rs | 186 +- .../procedural-fork/src/no_bound/default.rs | 283 ++- support/procedural-fork/src/no_bound/ord.rs | 96 +- .../src/no_bound/partial_eq.rs | 214 +- .../src/no_bound/partial_ord.rs | 119 +- .../procedural-fork/src/pallet/expand/call.rs | 874 ++++--- .../src/pallet/expand/composite.rs | 20 +- .../src/pallet/expand/config.rs | 120 +- .../src/pallet/expand/constants.rs | 172 +- .../src/pallet/expand/doc_only.rs | 152 +- .../src/pallet/expand/documentation.rs | 193 +- .../src/pallet/expand/error.rs | 298 ++- .../src/pallet/expand/event.rs | 303 ++- .../src/pallet/expand/genesis_build.rs | 50 +- .../src/pallet/expand/genesis_config.rs | 239 +- .../src/pallet/expand/hooks.rs | 588 +++-- .../src/pallet/expand/inherent.rs | 59 +- .../src/pallet/expand/instances.rs | 32 +- .../procedural-fork/src/pallet/expand/mod.rs | 124 +- .../src/pallet/expand/origin.rs | 59 +- .../src/pallet/expand/pallet_struct.rs | 524 ++-- .../src/pallet/expand/storage.rs | 1415 ++++++----- .../src/pallet/expand/tasks.rs | 308 ++- .../src/pallet/expand/tt_default_parts.rs | 394 ++- .../src/pallet/expand/type_value.rs | 90 +- .../src/pallet/expand/validate_unsigned.rs | 60 +- .../src/pallet/expand/warnings.rs | 111 +- support/procedural-fork/src/pallet/mod.rs | 38 +- .../procedural-fork/src/pallet/parse/call.rs | 791 +++--- .../src/pallet/parse/composite.rs | 330 ++- .../src/pallet/parse/config.rs | 983 ++++---- .../procedural-fork/src/pallet/parse/error.rs | 148 +- .../procedural-fork/src/pallet/parse/event.rs | 206 +- .../src/pallet/parse/extra_constants.rs | 240 +- .../src/pallet/parse/genesis_build.rs | 69 +- .../src/pallet/parse/genesis_config.rs | 81 +- 
.../src/pallet/parse/helper.rs | 932 ++++---- .../procedural-fork/src/pallet/parse/hooks.rs | 104 +- .../src/pallet/parse/inherent.rs | 68 +- .../procedural-fork/src/pallet/parse/mod.rs | 1158 +++++---- .../src/pallet/parse/origin.rs | 80 +- .../src/pallet/parse/pallet_struct.rs | 220 +- .../src/pallet/parse/storage.rs | 1741 +++++++------- .../procedural-fork/src/pallet/parse/tasks.rs | 1481 ++++++------ .../src/pallet/parse/tests/mod.rs | 146 +- .../src/pallet/parse/tests/tasks.rs | 372 +-- .../src/pallet/parse/type_value.rs | 176 +- .../src/pallet/parse/validate_unsigned.rs | 64 +- support/procedural-fork/src/pallet_error.rs | 307 ++- .../procedural-fork/src/runtime/expand/mod.rs | 570 +++-- support/procedural-fork/src/runtime/mod.rs | 32 +- .../src/runtime/parse/helper.rs | 29 +- .../procedural-fork/src/runtime/parse/mod.rs | 345 ++- .../src/runtime/parse/pallet.rs | 143 +- .../src/runtime/parse/pallet_decl.rs | 77 +- .../src/runtime/parse/runtime_struct.rs | 25 +- .../src/runtime/parse/runtime_types.rs | 90 +- support/procedural-fork/src/storage_alias.rs | 1211 +++++----- support/procedural-fork/src/transactional.rs | 76 +- support/procedural-fork/src/tt_macro.rs | 82 +- 83 files changed, 13983 insertions(+), 14850 deletions(-) diff --git a/support/procedural-fork/src/benchmark.rs b/support/procedural-fork/src/benchmark.rs index a47d175af..0a62c3f92 100644 --- a/support/procedural-fork/src/benchmark.rs +++ b/support/procedural-fork/src/benchmark.rs @@ -23,380 +23,343 @@ use proc_macro::TokenStream; use proc_macro2::{Ident, Span, TokenStream as TokenStream2}; use quote::{quote, ToTokens}; use syn::{ - parse::{Nothing, ParseStream}, - parse_quote, - punctuated::Punctuated, - spanned::Spanned, - token::{Comma, Gt, Lt, PathSep}, - Attribute, Error, Expr, ExprBlock, ExprCall, ExprPath, FnArg, Item, ItemFn, ItemMod, Pat, Path, - PathArguments, PathSegment, Result, ReturnType, Signature, Stmt, Token, Type, TypePath, - Visibility, WhereClause, + parse::{Nothing, 
ParseStream}, + parse_quote, + punctuated::Punctuated, + spanned::Spanned, + token::{Comma, Gt, Lt, PathSep}, + Attribute, Error, Expr, ExprBlock, ExprCall, ExprPath, FnArg, Item, ItemFn, ItemMod, Pat, Path, + PathArguments, PathSegment, Result, ReturnType, Signature, Stmt, Token, Type, TypePath, + Visibility, WhereClause, }; mod keywords { - use syn::custom_keyword; - - custom_keyword!(benchmark); - custom_keyword!(benchmarks); - custom_keyword!(block); - custom_keyword!(extra); - custom_keyword!(pov_mode); - custom_keyword!(extrinsic_call); - custom_keyword!(skip_meta); - custom_keyword!(BenchmarkError); - custom_keyword!(Result); - custom_keyword!(MaxEncodedLen); - custom_keyword!(Measured); - custom_keyword!(Ignored); - - pub const BENCHMARK_TOKEN: &str = stringify!(benchmark); - pub const BENCHMARKS_TOKEN: &str = stringify!(benchmarks); + use syn::custom_keyword; + + custom_keyword!(benchmark); + custom_keyword!(benchmarks); + custom_keyword!(block); + custom_keyword!(extra); + custom_keyword!(pov_mode); + custom_keyword!(extrinsic_call); + custom_keyword!(skip_meta); + custom_keyword!(BenchmarkError); + custom_keyword!(Result); + custom_keyword!(MaxEncodedLen); + custom_keyword!(Measured); + custom_keyword!(Ignored); + + pub const BENCHMARK_TOKEN: &str = stringify!(benchmark); + pub const BENCHMARKS_TOKEN: &str = stringify!(benchmarks); } /// This represents the raw parsed data for a param definition such as `x: Linear<10, 20>`. #[derive(Clone)] struct ParamDef { - name: String, - _typ: Type, - start: syn::GenericArgument, - end: syn::GenericArgument, + name: String, + _typ: Type, + start: syn::GenericArgument, + end: syn::GenericArgument, } /// Allows easy parsing of the `<10, 20>` component of `x: Linear<10, 20>`. 
#[derive(Parse)] struct RangeArgs { - _lt_token: Lt, - start: syn::GenericArgument, - _comma: Comma, - end: syn::GenericArgument, - _trailing_comma: Option, - _gt_token: Gt, + _lt_token: Lt, + start: syn::GenericArgument, + _comma: Comma, + end: syn::GenericArgument, + _trailing_comma: Option, + _gt_token: Gt, } #[derive(Clone, Debug)] struct BenchmarkAttrs { - skip_meta: bool, - extra: bool, - pov_mode: Option, + skip_meta: bool, + extra: bool, + pov_mode: Option, } /// Represents a single benchmark option enum BenchmarkAttr { - Extra, - SkipMeta, - /// How the PoV should be measured. - PoV(PovModeAttr), + Extra, + SkipMeta, + /// How the PoV should be measured. + PoV(PovModeAttr), } impl syn::parse::Parse for PovModeAttr { - fn parse(input: ParseStream) -> Result { - let _pov: keywords::pov_mode = input.parse()?; - let _eq: Token![=] = input.parse()?; - let root = PovEstimationMode::parse(input)?; - - let mut maybe_content = None; - let _ = || -> Result<()> { - let content; - syn::braced!(content in input); - maybe_content = Some(content); - Ok(()) - }(); - - let per_key = match maybe_content { - Some(content) => { - let per_key = Punctuated::::parse_terminated(&content)?; - per_key.into_iter().collect() - } - None => Vec::new(), - }; - - Ok(Self { root, per_key }) - } + fn parse(input: ParseStream) -> Result { + let _pov: keywords::pov_mode = input.parse()?; + let _eq: Token![=] = input.parse()?; + let root = PovEstimationMode::parse(input)?; + + let mut maybe_content = None; + let _ = || -> Result<()> { + let content; + syn::braced!(content in input); + maybe_content = Some(content); + Ok(()) + }(); + + let per_key = match maybe_content { + Some(content) => { + let per_key = Punctuated::::parse_terminated(&content)?; + per_key.into_iter().collect() + }, + None => Vec::new(), + }; + + Ok(Self { root, per_key }) + } } impl syn::parse::Parse for BenchmarkAttr { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - if 
lookahead.peek(keywords::extra) { - let _extra: keywords::extra = input.parse()?; - Ok(BenchmarkAttr::Extra) - } else if lookahead.peek(keywords::skip_meta) { - let _skip_meta: keywords::skip_meta = input.parse()?; - Ok(BenchmarkAttr::SkipMeta) - } else if lookahead.peek(keywords::pov_mode) { - PovModeAttr::parse(input).map(BenchmarkAttr::PoV) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::extra) { + let _extra: keywords::extra = input.parse()?; + Ok(BenchmarkAttr::Extra) + } else if lookahead.peek(keywords::skip_meta) { + let _skip_meta: keywords::skip_meta = input.parse()?; + Ok(BenchmarkAttr::SkipMeta) + } else if lookahead.peek(keywords::pov_mode) { + PovModeAttr::parse(input).map(BenchmarkAttr::PoV) + } else { + Err(lookahead.error()) + } + } } /// A `#[pov_mode = .. { .. }]` attribute. #[derive(Debug, Clone)] struct PovModeAttr { - /// The root mode for this benchmarks. - root: PovEstimationMode, - /// The pov-mode for a specific key. This overwrites `root` for this key. - per_key: Vec, + /// The root mode for this benchmarks. + root: PovEstimationMode, + /// The pov-mode for a specific key. This overwrites `root` for this key. + per_key: Vec, } /// A single key-value pair inside the `{}` of a `#[pov_mode = .. { .. }]` attribute. #[derive(Debug, Clone, derive_syn_parse::Parse)] struct PovModeKeyAttr { - /// A specific storage key for which to set the PoV mode. - key: Path, - _underscore: Token![:], - /// The PoV mode for this key. - mode: PovEstimationMode, + /// A specific storage key for which to set the PoV mode. + key: Path, + _underscore: Token![:], + /// The PoV mode for this key. + mode: PovEstimationMode, } /// How the PoV should be estimated. #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum PovEstimationMode { - /// Use the maximal encoded length as provided by [`codec::MaxEncodedLen`]. 
- MaxEncodedLen, - /// Measure the accessed value size in the pallet benchmarking and add some trie overhead. - Measured, - /// Do not estimate the PoV size for this storage item or benchmark. - Ignored, + /// Use the maximal encoded length as provided by [`codec::MaxEncodedLen`]. + MaxEncodedLen, + /// Measure the accessed value size in the pallet benchmarking and add some trie overhead. + Measured, + /// Do not estimate the PoV size for this storage item or benchmark. + Ignored, } impl syn::parse::Parse for PovEstimationMode { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - if lookahead.peek(keywords::MaxEncodedLen) { - let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; - Ok(PovEstimationMode::MaxEncodedLen) - } else if lookahead.peek(keywords::Measured) { - let _measured: keywords::Measured = input.parse()?; - return Ok(PovEstimationMode::Measured); - } else if lookahead.peek(keywords::Ignored) { - let _ignored: keywords::Ignored = input.parse()?; - return Ok(PovEstimationMode::Ignored); - } else { - return Err(lookahead.error()); - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::MaxEncodedLen) { + let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; + return Ok(PovEstimationMode::MaxEncodedLen) + } else if lookahead.peek(keywords::Measured) { + let _measured: keywords::Measured = input.parse()?; + return Ok(PovEstimationMode::Measured) + } else if lookahead.peek(keywords::Ignored) { + let _ignored: keywords::Ignored = input.parse()?; + return Ok(PovEstimationMode::Ignored) + } else { + return Err(lookahead.error()) + } + } } impl ToString for PovEstimationMode { - fn to_string(&self) -> String { - match self { - PovEstimationMode::MaxEncodedLen => "MaxEncodedLen".into(), - PovEstimationMode::Measured => "Measured".into(), - PovEstimationMode::Ignored => "Ignored".into(), - } - } + fn to_string(&self) -> String { + match self { + 
PovEstimationMode::MaxEncodedLen => "MaxEncodedLen".into(), + PovEstimationMode::Measured => "Measured".into(), + PovEstimationMode::Ignored => "Ignored".into(), + } + } } impl quote::ToTokens for PovEstimationMode { - fn to_tokens(&self, tokens: &mut TokenStream2) { - match self { - PovEstimationMode::MaxEncodedLen => tokens.extend(quote!(MaxEncodedLen)), - PovEstimationMode::Measured => tokens.extend(quote!(Measured)), - PovEstimationMode::Ignored => tokens.extend(quote!(Ignored)), - } - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + PovEstimationMode::MaxEncodedLen => tokens.extend(quote!(MaxEncodedLen)), + PovEstimationMode::Measured => tokens.extend(quote!(Measured)), + PovEstimationMode::Ignored => tokens.extend(quote!(Ignored)), + } + } } impl syn::parse::Parse for BenchmarkAttrs { - fn parse(input: ParseStream) -> syn::Result { - let mut extra = false; - let mut skip_meta = false; - let mut pov_mode = None; - let args = Punctuated::::parse_terminated(input)?; - - for arg in args.into_iter() { - match arg { - BenchmarkAttr::Extra => { - if extra { - return Err(input.error("`extra` can only be specified once")); - } - extra = true; - } - BenchmarkAttr::SkipMeta => { - if skip_meta { - return Err(input.error("`skip_meta` can only be specified once")); - } - skip_meta = true; - } - BenchmarkAttr::PoV(mode) => { - if pov_mode.is_some() { - return Err(input.error("`pov_mode` can only be specified once")); - } - pov_mode = Some(mode); - } - } - } - Ok(BenchmarkAttrs { - extra, - skip_meta, - pov_mode, - }) - } + fn parse(input: ParseStream) -> syn::Result { + let mut extra = false; + let mut skip_meta = false; + let mut pov_mode = None; + let args = Punctuated::::parse_terminated(&input)?; + + for arg in args.into_iter() { + match arg { + BenchmarkAttr::Extra => { + if extra { + return Err(input.error("`extra` can only be specified once")) + } + extra = true; + }, + BenchmarkAttr::SkipMeta => { + if skip_meta { + return 
Err(input.error("`skip_meta` can only be specified once")) + } + skip_meta = true; + }, + BenchmarkAttr::PoV(mode) => { + if pov_mode.is_some() { + return Err(input.error("`pov_mode` can only be specified once")) + } + pov_mode = Some(mode); + }, + } + } + Ok(BenchmarkAttrs { extra, skip_meta, pov_mode }) + } } /// Represents the parsed extrinsic call for a benchmark #[derive(Clone)] enum BenchmarkCallDef { - ExtrinsicCall { - origin: Expr, - expr_call: ExprCall, - attr_span: Span, - }, // #[extrinsic_call] - Block { - block: ExprBlock, - attr_span: Span, - }, // #[block] + ExtrinsicCall { origin: Expr, expr_call: ExprCall, attr_span: Span }, // #[extrinsic_call] + Block { block: ExprBlock, attr_span: Span }, // #[block] } impl BenchmarkCallDef { - /// Returns the `span()` for attribute - fn attr_span(&self) -> Span { - match self { - BenchmarkCallDef::ExtrinsicCall { - origin: _, - expr_call: _, - attr_span, - } => *attr_span, - BenchmarkCallDef::Block { - block: _, - attr_span, - } => *attr_span, - } - } + /// Returns the `span()` for attribute + fn attr_span(&self) -> Span { + match self { + BenchmarkCallDef::ExtrinsicCall { origin: _, expr_call: _, attr_span } => *attr_span, + BenchmarkCallDef::Block { block: _, attr_span } => *attr_span, + } + } } /// Represents a parsed `#[benchmark]` or `#[instance_benchmark]` item. 
#[derive(Clone)] struct BenchmarkDef { - params: Vec, - setup_stmts: Vec, - call_def: BenchmarkCallDef, - verify_stmts: Vec, - last_stmt: Option, - fn_sig: Signature, - fn_vis: Visibility, - fn_attrs: Vec, + params: Vec, + setup_stmts: Vec, + call_def: BenchmarkCallDef, + verify_stmts: Vec, + last_stmt: Option, + fn_sig: Signature, + fn_vis: Visibility, + fn_attrs: Vec, } /// used to parse something compatible with `Result` #[derive(Parse)] struct ResultDef { - _result_kw: keywords::Result, - _lt: Token![<], - unit: Type, - _comma: Comma, - e_type: TypePath, - _gt: Token![>], + _result_kw: keywords::Result, + _lt: Token![<], + unit: Type, + _comma: Comma, + e_type: TypePath, + _gt: Token![>], } /// Ensures that `ReturnType` is a `Result<(), BenchmarkError>`, if specified fn ensure_valid_return_type(item_fn: &ItemFn) -> Result<()> { - if let ReturnType::Type(_, typ) = &item_fn.sig.output { - let non_unit = |span| Err(Error::new(span, "expected `()`")); - let Type::Path(TypePath { path, qself: _ }) = &**typ else { - return Err(Error::new( + if let ReturnType::Type(_, typ) = &item_fn.sig.output { + let non_unit = |span| return Err(Error::new(span, "expected `()`")); + let Type::Path(TypePath { path, qself: _ }) = &**typ else { + return Err(Error::new( typ.span(), "Only `Result<(), BenchmarkError>` or a blank return type is allowed on benchmark function definitions", - )); - }; - let seg = path - .segments - .last() - .expect("to be parsed as a TypePath, it must have at least one segment; qed"); - let res: ResultDef = syn::parse2(seg.to_token_stream())?; - // ensure T in Result is () - let Type::Tuple(tup) = res.unit else { - return non_unit(res.unit.span()); - }; - if !tup.elems.is_empty() { - return non_unit(tup.span()); - } - let TypePath { path, qself: _ } = res.e_type; - let seg = path - .segments - .last() - .expect("to be parsed as a TypePath, it must have at least one segment; qed"); - syn::parse2::(seg.to_token_stream())?; - } - Ok(()) + )) + }; + let seg = 
path + .segments + .last() + .expect("to be parsed as a TypePath, it must have at least one segment; qed"); + let res: ResultDef = syn::parse2(seg.to_token_stream())?; + // ensure T in Result is () + let Type::Tuple(tup) = res.unit else { return non_unit(res.unit.span()) }; + if !tup.elems.is_empty() { + return non_unit(tup.span()) + } + let TypePath { path, qself: _ } = res.e_type; + let seg = path + .segments + .last() + .expect("to be parsed as a TypePath, it must have at least one segment; qed"); + syn::parse2::(seg.to_token_stream())?; + } + Ok(()) } /// Parses params such as `x: Linear<0, 1>` fn parse_params(item_fn: &ItemFn) -> Result> { - let mut params: Vec = Vec::new(); - for arg in &item_fn.sig.inputs { - let invalid_param = |span| { - Err(Error::new( - span, - "Invalid benchmark function param. A valid example would be `x: Linear<5, 10>`.", - )) - }; - - let FnArg::Typed(arg) = arg else { - return invalid_param(arg.span()); - }; - let Pat::Ident(ident) = &*arg.pat else { - return invalid_param(arg.span()); - }; - - // check param name - let var_span = ident.span(); - let invalid_param_name = || { - Err(Error::new( + let mut params: Vec = Vec::new(); + for arg in &item_fn.sig.inputs { + let invalid_param = |span| { + return Err(Error::new( + span, + "Invalid benchmark function param. 
A valid example would be `x: Linear<5, 10>`.", + )) + }; + + let FnArg::Typed(arg) = arg else { return invalid_param(arg.span()) }; + let Pat::Ident(ident) = &*arg.pat else { return invalid_param(arg.span()) }; + + // check param name + let var_span = ident.span(); + let invalid_param_name = || { + return Err(Error::new( var_span, "Benchmark parameter names must consist of a single lowercase letter (a-z) and no other characters.", - )) - }; - let name = ident.ident.to_token_stream().to_string(); - if name.len() > 1 { - return invalid_param_name(); - }; - let Some(name_char) = name.chars().next() else { - return invalid_param_name(); - }; - if !name_char.is_alphabetic() || !name_char.is_lowercase() { - return invalid_param_name(); - } - - // parse type - let typ = &*arg.ty; - let Type::Path(tpath) = typ else { - return invalid_param(typ.span()); - }; - let Some(segment) = tpath.path.segments.last() else { - return invalid_param(typ.span()); - }; - let args = segment.arguments.to_token_stream().into(); - let Ok(args) = syn::parse::(args) else { - return invalid_param(typ.span()); - }; - - params.push(ParamDef { - name, - _typ: typ.clone(), - start: args.start, - end: args.end, - }); - } - Ok(params) + )); + }; + let name = ident.ident.to_token_stream().to_string(); + if name.len() > 1 { + return invalid_param_name() + }; + let Some(name_char) = name.chars().next() else { return invalid_param_name() }; + if !name_char.is_alphabetic() || !name_char.is_lowercase() { + return invalid_param_name() + } + + // parse type + let typ = &*arg.ty; + let Type::Path(tpath) = typ else { return invalid_param(typ.span()) }; + let Some(segment) = tpath.path.segments.last() else { return invalid_param(typ.span()) }; + let args = segment.arguments.to_token_stream().into(); + let Ok(args) = syn::parse::(args) else { return invalid_param(typ.span()) }; + + params.push(ParamDef { name, _typ: typ.clone(), start: args.start, end: args.end }); + } + Ok(params) } /// Used in several places 
where the `#[extrinsic_call]` or `#[body]` annotation is missing fn missing_call(item_fn: &ItemFn) -> Result { - Err(Error::new( + return Err(Error::new( item_fn.block.brace_token.span.join(), "No valid #[extrinsic_call] or #[block] annotation could be found in benchmark function body." - )) + )); } /// Finds the `BenchmarkCallDef` and its index (within the list of stmts for the fn) and /// returns them. Also handles parsing errors for invalid / extra call defs. AKA this is /// general handling for `#[extrinsic_call]` and `#[block]` fn parse_call_def(item_fn: &ItemFn) -> Result<(usize, BenchmarkCallDef)> { - // #[extrinsic_call] / #[block] handling - let call_defs = item_fn.block.stmts.iter().enumerate().filter_map(|(i, child)| { + // #[extrinsic_call] / #[block] handling + let call_defs = item_fn.block.stmts.iter().enumerate().filter_map(|(i, child)| { if let Stmt::Expr(Expr::Call(expr_call), _semi) = child { // #[extrinsic_call] case expr_call.attrs.iter().enumerate().find_map(|(k, attr)| { @@ -430,850 +393,810 @@ fn parse_call_def(item_fn: &ItemFn) -> Result<(usize, BenchmarkCallDef)> { None } }).collect::>>()?; - Ok(match &call_defs[..] { - [(i, call_def)] => (*i, call_def.clone()), // = 1 - [] => return missing_call(item_fn), - _ => { - return Err(Error::new( - call_defs[1].1.attr_span(), - "Only one #[extrinsic_call] or #[block] attribute is allowed per benchmark.", - )) - } - }) + Ok(match &call_defs[..] { + [(i, call_def)] => (*i, call_def.clone()), // = 1 + [] => return missing_call(item_fn), + _ => + return Err(Error::new( + call_defs[1].1.attr_span(), + "Only one #[extrinsic_call] or #[block] attribute is allowed per benchmark.", + )), + }) } impl BenchmarkDef { - /// Constructs a [`BenchmarkDef`] by traversing an existing [`ItemFn`] node. 
- pub fn from(item_fn: &ItemFn) -> Result { - let params = parse_params(item_fn)?; - ensure_valid_return_type(item_fn)?; - let (i, call_def) = parse_call_def(item_fn)?; - - let (verify_stmts, last_stmt) = match item_fn.sig.output { - ReturnType::Default => - // no return type, last_stmt should be None - { - ( - Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len()]), - None, - ) - } - ReturnType::Type(_, _) => { - // defined return type, last_stmt should be Result<(), BenchmarkError> - // compatible and should not be included in verify_stmts - if i + 1 >= item_fn.block.stmts.len() { - return Err(Error::new( - item_fn.block.span(), - "Benchmark `#[block]` or `#[extrinsic_call]` item cannot be the \ + /// Constructs a [`BenchmarkDef`] by traversing an existing [`ItemFn`] node. + pub fn from(item_fn: &ItemFn) -> Result { + let params = parse_params(item_fn)?; + ensure_valid_return_type(item_fn)?; + let (i, call_def) = parse_call_def(&item_fn)?; + + let (verify_stmts, last_stmt) = match item_fn.sig.output { + ReturnType::Default => + // no return type, last_stmt should be None + (Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len()]), None), + ReturnType::Type(_, _) => { + // defined return type, last_stmt should be Result<(), BenchmarkError> + // compatible and should not be included in verify_stmts + if i + 1 >= item_fn.block.stmts.len() { + return Err(Error::new( + item_fn.block.span(), + "Benchmark `#[block]` or `#[extrinsic_call]` item cannot be the \ last statement of your benchmark function definition if you have \ defined a return type. You should return something compatible \ with Result<(), BenchmarkError> (i.e. 
`Ok(())`) as the last statement \ or change your signature to a blank return type.", - )); - } - let Some(stmt) = item_fn.block.stmts.last() else { - return missing_call(item_fn); - }; - ( - Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len() - 1]), - Some(stmt.clone()), - ) - } - }; - - Ok(BenchmarkDef { - params, - setup_stmts: Vec::from(&item_fn.block.stmts[0..i]), - call_def, - verify_stmts, - last_stmt, - fn_sig: item_fn.sig.clone(), - fn_vis: item_fn.vis.clone(), - fn_attrs: item_fn.attrs.clone(), - }) - } + )) + } + let Some(stmt) = item_fn.block.stmts.last() else { return missing_call(item_fn) }; + ( + Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len() - 1]), + Some(stmt.clone()), + ) + }, + }; + + Ok(BenchmarkDef { + params, + setup_stmts: Vec::from(&item_fn.block.stmts[0..i]), + call_def, + verify_stmts, + last_stmt, + fn_sig: item_fn.sig.clone(), + fn_vis: item_fn.vis.clone(), + fn_attrs: item_fn.attrs.clone(), + }) + } } /// Parses and expands a `#[benchmarks]` or `#[instance_benchmarks]` invocation pub fn benchmarks( - attrs: TokenStream, - tokens: TokenStream, - instance: bool, + attrs: TokenStream, + tokens: TokenStream, + instance: bool, ) -> syn::Result { - let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; - // gather module info - let module: ItemMod = syn::parse(tokens)?; - let mod_span = module.span(); - let where_clause = match syn::parse::(attrs.clone()) { - Ok(_) => quote!(), - Err(_) => syn::parse::(attrs)? 
- .predicates - .to_token_stream(), - }; - let mod_vis = module.vis; - let mod_name = module.ident; - - // consume #[benchmarks] attribute by excluding it from mod_attrs - let mod_attrs: Vec<&Attribute> = module - .attrs - .iter() - .filter(|attr| !attr.path().is_ident(keywords::BENCHMARKS_TOKEN)) - .collect(); - - let mut benchmark_names: Vec = Vec::new(); - let mut extra_benchmark_names: Vec = Vec::new(); - let mut skip_meta_benchmark_names: Vec = Vec::new(); - // Map benchmarks to PoV modes. - let mut pov_modes = Vec::new(); - - let (_brace, mut content) = module - .content - .ok_or(syn::Error::new(mod_span, "Module cannot be empty!"))?; - - // find all function defs marked with #[benchmark] - let benchmark_fn_metas = content.iter_mut().filter_map(|stmt| { - // parse as a function def first - let Item::Fn(func) = stmt else { return None }; - - // find #[benchmark] attribute on function def - let benchmark_attr = func - .attrs - .iter() - .find(|attr| attr.path().is_ident(keywords::BENCHMARK_TOKEN))?; - - Some((benchmark_attr.clone(), func.clone(), stmt)) - }); - - // parse individual benchmark defs and args - for (benchmark_attr, func, stmt) in benchmark_fn_metas { - // parse benchmark def - let benchmark_def = BenchmarkDef::from(&func)?; - - // record benchmark name - let name = &func.sig.ident; - benchmark_names.push(name.clone()); - - // Check if we need to parse any args - if benchmark_attr.meta.require_path_only().is_err() { - // parse any args provided to #[benchmark] - let benchmark_attrs: BenchmarkAttrs = benchmark_attr.parse_args()?; - - // record name sets - if benchmark_attrs.extra { - extra_benchmark_names.push(name.clone()); - } else if benchmark_attrs.skip_meta { - skip_meta_benchmark_names.push(name.clone()); - } - - if let Some(mode) = benchmark_attrs.pov_mode { - let mut modes = Vec::new(); - // We cannot expand strings here since it is no-std, but syn does not expand bytes. 
- let name = name.to_string(); - let m = mode.root.to_string(); - modes.push(quote!(("ALL".as_bytes().to_vec(), #m.as_bytes().to_vec()))); - - for attr in mode.per_key.iter() { - // syn always puts spaces in quoted paths: - let key = attr - .key - .clone() - .into_token_stream() - .to_string() - .replace(" ", ""); - let mode = attr.mode.to_string(); - modes.push(quote!((#key.as_bytes().to_vec(), #mode.as_bytes().to_vec()))); - } - - pov_modes.push( - quote!((#name.as_bytes().to_vec(), #krate::__private::vec![#(#modes),*])), - ); - } - } - - // expand benchmark - let expanded = expand_benchmark(benchmark_def, name, instance, where_clause.clone()); - - // replace original function def with expanded code - *stmt = Item::Verbatim(expanded); - } - - // generics - let type_use_generics = match instance { - false => quote!(T), - true => quote!(T, I), - }; - let type_impl_generics = match instance { - false => quote!(T: Config), - true => quote!(T: Config, I: 'static), - }; - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - - // benchmark name variables - let benchmark_names_str: Vec = benchmark_names.iter().map(|n| n.to_string()).collect(); - let extra_benchmark_names_str: Vec = extra_benchmark_names - .iter() - .map(|n| n.to_string()) - .collect(); - let skip_meta_benchmark_names_str: Vec = skip_meta_benchmark_names - .iter() - .map(|n| n.to_string()) - .collect(); - let mut selected_benchmark_mappings: Vec = Vec::new(); - let mut benchmarks_by_name_mappings: Vec = Vec::new(); - let test_idents: Vec = benchmark_names_str - .iter() - .map(|n| Ident::new(format!("test_benchmark_{}", n).as_str(), Span::call_site())) - .collect(); - for i in 0..benchmark_names.len() { - let name_ident = &benchmark_names[i]; - let name_str = &benchmark_names_str[i]; - let test_ident = &test_idents[i]; - selected_benchmark_mappings.push(quote!(#name_str => SelectedBenchmark::#name_ident)); - benchmarks_by_name_mappings.push(quote!(#name_str => Self::#test_ident())) 
- } - - let impl_test_function = content - .iter_mut() - .find_map(|item| { - let Item::Macro(item_macro) = item else { - return None; - }; - - if !item_macro - .mac - .path - .segments - .iter() - .any(|s| s.ident == "impl_benchmark_test_suite") - { - return None; - } - - let tokens = item_macro.mac.tokens.clone(); - *item = Item::Verbatim(quote! {}); - - Some(quote! { - impl_test_function!( - (#( {} #benchmark_names )*) - (#( #extra_benchmark_names )*) - (#( #skip_meta_benchmark_names )*) - #tokens - ); - }) - }) - .unwrap_or(quote! {}); - - // emit final quoted tokens - let res = quote! { - #(#mod_attrs) - * - #mod_vis mod #mod_name { - #(#content) - * - - #[allow(non_camel_case_types)] - enum SelectedBenchmark { - #(#benchmark_names), - * - } - - impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> for SelectedBenchmark where #where_clause { - fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { - match self { - #( - Self::#benchmark_names => { - <#benchmark_names as #krate::BenchmarkingSetup<#type_use_generics>>::components(&#benchmark_names) - } - ) - * - } - } - - fn instance( - &self, - components: &[(#krate::BenchmarkParameter, u32)], - verify: bool, - ) -> Result< - #krate::__private::Box Result<(), #krate::BenchmarkError>>, - #krate::BenchmarkError, - > { - match self { - #( - Self::#benchmark_names => { - <#benchmark_names as #krate::BenchmarkingSetup< - #type_use_generics - >>::instance(&#benchmark_names, components, verify) - } - ) - * - } - } - } - #[cfg(any(feature = "runtime-benchmarks", test))] - impl<#type_impl_generics> #krate::Benchmarking for Pallet<#type_use_generics> - where T: #frame_system::Config, #where_clause - { - fn benchmarks( - extra: bool, - ) -> #krate::__private::Vec<#krate::BenchmarkMetadata> { - let mut all_names = #krate::__private::vec![ - #(#benchmark_names_str), - * - ]; - if !extra { - let extra = [ - #(#extra_benchmark_names_str), - * - ]; - all_names.retain(|x| 
!extra.contains(x)); - } - let pov_modes: - #krate::__private::Vec<( - #krate::__private::Vec, - #krate::__private::Vec<( - #krate::__private::Vec, - #krate::__private::Vec - )>, - )> = #krate::__private::vec![ - #( #pov_modes ),* - ]; - all_names.into_iter().map(|benchmark| { - let selected_benchmark = match benchmark { - #(#selected_benchmark_mappings), - *, - _ => panic!("all benchmarks should be selectable") - }; - let components = >::components(&selected_benchmark); - let name = benchmark.as_bytes().to_vec(); - let modes = pov_modes.iter().find(|p| p.0 == name).map(|p| p.1.clone()); - - #krate::BenchmarkMetadata { - name: benchmark.as_bytes().to_vec(), - components, - pov_modes: modes.unwrap_or_default(), - } - }).collect::<#krate::__private::Vec<_>>() - } - - fn run_benchmark( - extrinsic: &[u8], - c: &[(#krate::BenchmarkParameter, u32)], - whitelist: &[#krate::__private::TrackedStorageKey], - verify: bool, - internal_repeats: u32, - ) -> Result<#krate::__private::Vec<#krate::BenchmarkResult>, #krate::BenchmarkError> { - let extrinsic = #krate::__private::str::from_utf8(extrinsic).map_err(|_| "`extrinsic` is not a valid utf-8 string!")?; - let selected_benchmark = match extrinsic { - #(#selected_benchmark_mappings), - *, - _ => return Err("Could not find extrinsic.".into()), - }; - let mut whitelist = whitelist.to_vec(); - let whitelisted_caller_key = <#frame_system::Account< - T, - > as #krate::__private::storage::StorageMap<_, _,>>::hashed_key_for( - #krate::whitelisted_caller::() - ); - whitelist.push(whitelisted_caller_key.into()); - let transactional_layer_key = #krate::__private::TrackedStorageKey::new( - #krate::__private::storage::transactional::TRANSACTION_LEVEL_KEY.into(), - ); - whitelist.push(transactional_layer_key); - // Whitelist the `:extrinsic_index`. 
- let extrinsic_index = #krate::__private::TrackedStorageKey::new( - #krate::__private::well_known_keys::EXTRINSIC_INDEX.into() - ); - whitelist.push(extrinsic_index); - // Whitelist the `:intrablock_entropy`. - let intrablock_entropy = #krate::__private::TrackedStorageKey::new( - #krate::__private::well_known_keys::INTRABLOCK_ENTROPY.into() - ); - whitelist.push(intrablock_entropy); - - #krate::benchmarking::set_whitelist(whitelist.clone()); - let mut results: #krate::__private::Vec<#krate::BenchmarkResult> = #krate::__private::Vec::new(); - - // Always do at least one internal repeat... - for _ in 0 .. internal_repeats.max(1) { - // Always reset the state after the benchmark. - #krate::__private::defer!(#krate::benchmarking::wipe_db()); - - // Set up the externalities environment for the setup we want to - // benchmark. - let closure_to_benchmark = < - SelectedBenchmark as #krate::BenchmarkingSetup<#type_use_generics> - >::instance(&selected_benchmark, c, verify)?; - - // Set the block number to at least 1 so events are deposited. - if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { - #frame_system::Pallet::::set_block_number(1u32.into()); - } - - // Commit the externalities to the database, flushing the DB cache. - // This will enable worst case scenario for reading from the database. - #krate::benchmarking::commit_db(); - - // Access all whitelisted keys to get them into the proof recorder since the - // recorder does now have a whitelist. - for key in &whitelist { - #krate::__private::storage::unhashed::get_raw(&key.key); - } - - // Reset the read/write counter so we don't count operations in the setup process. - #krate::benchmarking::reset_read_write_count(); - - // Time the extrinsic logic. 
- #krate::__private::log::trace!( - target: "benchmark", - "Start Benchmark: {} ({:?})", - extrinsic, - c - ); - - let start_pov = #krate::benchmarking::proof_size(); - let start_extrinsic = #krate::benchmarking::current_time(); - - closure_to_benchmark()?; - - let finish_extrinsic = #krate::benchmarking::current_time(); - let end_pov = #krate::benchmarking::proof_size(); - - // Calculate the diff caused by the benchmark. - let elapsed_extrinsic = finish_extrinsic.saturating_sub(start_extrinsic); - let diff_pov = match (start_pov, end_pov) { - (Some(start), Some(end)) => end.saturating_sub(start), - _ => Default::default(), - }; - - // Commit the changes to get proper write count - #krate::benchmarking::commit_db(); - #krate::__private::log::trace!( - target: "benchmark", - "End Benchmark: {} ns", elapsed_extrinsic - ); - let read_write_count = #krate::benchmarking::read_write_count(); - #krate::__private::log::trace!( - target: "benchmark", - "Read/Write Count {:?}", read_write_count - ); - - // Time the storage root recalculation. 
- let start_storage_root = #krate::benchmarking::current_time(); - #krate::__private::storage_root(#krate::__private::StateVersion::V1); - let finish_storage_root = #krate::benchmarking::current_time(); - let elapsed_storage_root = finish_storage_root - start_storage_root; - - let skip_meta = [ #(#skip_meta_benchmark_names_str),* ]; - let read_and_written_keys = if skip_meta.contains(&extrinsic) { - #krate::__private::vec![(b"Skipped Metadata".to_vec(), 0, 0, false)] - } else { - #krate::benchmarking::get_read_and_written_keys() - }; - - results.push(#krate::BenchmarkResult { - components: c.to_vec(), - extrinsic_time: elapsed_extrinsic, - storage_root_time: elapsed_storage_root, - reads: read_write_count.0, - repeat_reads: read_write_count.1, - writes: read_write_count.2, - repeat_writes: read_write_count.3, - proof_size: diff_pov, - keys: read_and_written_keys, - }); - } - - return Ok(results); - } - } - - #[cfg(test)] - impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { - /// Test a particular benchmark by name. - /// - /// This isn't called `test_benchmark_by_name` just in case some end-user eventually - /// writes a benchmark, itself called `by_name`; the function would be shadowed in - /// that case. - /// - /// This is generally intended to be used by child test modules such as those created - /// by the `impl_benchmark_test_suite` macro. However, it is not an error if a pallet - /// author chooses not to implement benchmarks. 
- #[allow(unused)] - fn test_bench_by_name(name: &[u8]) -> Result<(), #krate::BenchmarkError> { - let name = #krate::__private::str::from_utf8(name) - .map_err(|_| -> #krate::BenchmarkError { "`name` is not a valid utf8 string!".into() })?; - match name { - #(#benchmarks_by_name_mappings), - *, - _ => Err("Could not find test for requested benchmark.".into()), - } - } - } - - #impl_test_function - } - #mod_vis use #mod_name::*; - }; - Ok(res.into()) + let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; + // gather module info + let module: ItemMod = syn::parse(tokens)?; + let mod_span = module.span(); + let where_clause = match syn::parse::(attrs.clone()) { + Ok(_) => quote!(), + Err(_) => syn::parse::(attrs)?.predicates.to_token_stream(), + }; + let mod_vis = module.vis; + let mod_name = module.ident; + + // consume #[benchmarks] attribute by excluding it from mod_attrs + let mod_attrs: Vec<&Attribute> = module + .attrs + .iter() + .filter(|attr| !attr.path().is_ident(keywords::BENCHMARKS_TOKEN)) + .collect(); + + let mut benchmark_names: Vec = Vec::new(); + let mut extra_benchmark_names: Vec = Vec::new(); + let mut skip_meta_benchmark_names: Vec = Vec::new(); + // Map benchmarks to PoV modes. 
+ let mut pov_modes = Vec::new(); + + let (_brace, mut content) = + module.content.ok_or(syn::Error::new(mod_span, "Module cannot be empty!"))?; + + // find all function defs marked with #[benchmark] + let benchmark_fn_metas = content.iter_mut().filter_map(|stmt| { + // parse as a function def first + let Item::Fn(func) = stmt else { return None }; + + // find #[benchmark] attribute on function def + let benchmark_attr = + func.attrs.iter().find(|attr| attr.path().is_ident(keywords::BENCHMARK_TOKEN))?; + + Some((benchmark_attr.clone(), func.clone(), stmt)) + }); + + // parse individual benchmark defs and args + for (benchmark_attr, func, stmt) in benchmark_fn_metas { + // parse benchmark def + let benchmark_def = BenchmarkDef::from(&func)?; + + // record benchmark name + let name = &func.sig.ident; + benchmark_names.push(name.clone()); + + // Check if we need to parse any args + if benchmark_attr.meta.require_path_only().is_err() { + // parse any args provided to #[benchmark] + let benchmark_attrs: BenchmarkAttrs = benchmark_attr.parse_args()?; + + // record name sets + if benchmark_attrs.extra { + extra_benchmark_names.push(name.clone()); + } else if benchmark_attrs.skip_meta { + skip_meta_benchmark_names.push(name.clone()); + } + + if let Some(mode) = benchmark_attrs.pov_mode { + let mut modes = Vec::new(); + // We cannot expand strings here since it is no-std, but syn does not expand bytes. 
+ let name = name.to_string(); + let m = mode.root.to_string(); + modes.push(quote!(("ALL".as_bytes().to_vec(), #m.as_bytes().to_vec()))); + + for attr in mode.per_key.iter() { + // syn always puts spaces in quoted paths: + let key = attr.key.clone().into_token_stream().to_string().replace(" ", ""); + let mode = attr.mode.to_string(); + modes.push(quote!((#key.as_bytes().to_vec(), #mode.as_bytes().to_vec()))); + } + + pov_modes.push( + quote!((#name.as_bytes().to_vec(), #krate::__private::vec![#(#modes),*])), + ); + } + } + + // expand benchmark + let expanded = expand_benchmark(benchmark_def, name, instance, where_clause.clone()); + + // replace original function def with expanded code + *stmt = Item::Verbatim(expanded); + } + + // generics + let type_use_generics = match instance { + false => quote!(T), + true => quote!(T, I), + }; + let type_impl_generics = match instance { + false => quote!(T: Config), + true => quote!(T: Config, I: 'static), + }; + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + + // benchmark name variables + let benchmark_names_str: Vec = benchmark_names.iter().map(|n| n.to_string()).collect(); + let extra_benchmark_names_str: Vec = + extra_benchmark_names.iter().map(|n| n.to_string()).collect(); + let skip_meta_benchmark_names_str: Vec = + skip_meta_benchmark_names.iter().map(|n| n.to_string()).collect(); + let mut selected_benchmark_mappings: Vec = Vec::new(); + let mut benchmarks_by_name_mappings: Vec = Vec::new(); + let test_idents: Vec = benchmark_names_str + .iter() + .map(|n| Ident::new(format!("test_benchmark_{}", n).as_str(), Span::call_site())) + .collect(); + for i in 0..benchmark_names.len() { + let name_ident = &benchmark_names[i]; + let name_str = &benchmark_names_str[i]; + let test_ident = &test_idents[i]; + selected_benchmark_mappings.push(quote!(#name_str => SelectedBenchmark::#name_ident)); + benchmarks_by_name_mappings.push(quote!(#name_str => Self::#test_ident())) + } + + let 
impl_test_function = content + .iter_mut() + .find_map(|item| { + let Item::Macro(item_macro) = item else { + return None; + }; + + if !item_macro + .mac + .path + .segments + .iter() + .any(|s| s.ident == "impl_benchmark_test_suite") + { + return None; + } + + let tokens = item_macro.mac.tokens.clone(); + *item = Item::Verbatim(quote! {}); + + Some(quote! { + impl_test_function!( + (#( {} #benchmark_names )*) + (#( #extra_benchmark_names )*) + (#( #skip_meta_benchmark_names )*) + #tokens + ); + }) + }) + .unwrap_or(quote! {}); + + // emit final quoted tokens + let res = quote! { + #(#mod_attrs) + * + #mod_vis mod #mod_name { + #(#content) + * + + #[allow(non_camel_case_types)] + enum SelectedBenchmark { + #(#benchmark_names), + * + } + + impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> for SelectedBenchmark where #where_clause { + fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { + match self { + #( + Self::#benchmark_names => { + <#benchmark_names as #krate::BenchmarkingSetup<#type_use_generics>>::components(&#benchmark_names) + } + ) + * + } + } + + fn instance( + &self, + components: &[(#krate::BenchmarkParameter, u32)], + verify: bool, + ) -> Result< + #krate::__private::Box Result<(), #krate::BenchmarkError>>, + #krate::BenchmarkError, + > { + match self { + #( + Self::#benchmark_names => { + <#benchmark_names as #krate::BenchmarkingSetup< + #type_use_generics + >>::instance(&#benchmark_names, components, verify) + } + ) + * + } + } + } + #[cfg(any(feature = "runtime-benchmarks", test))] + impl<#type_impl_generics> #krate::Benchmarking for Pallet<#type_use_generics> + where T: #frame_system::Config, #where_clause + { + fn benchmarks( + extra: bool, + ) -> #krate::__private::Vec<#krate::BenchmarkMetadata> { + let mut all_names = #krate::__private::vec![ + #(#benchmark_names_str), + * + ]; + if !extra { + let extra = [ + #(#extra_benchmark_names_str), + * + ]; + all_names.retain(|x| 
!extra.contains(x)); + } + let pov_modes: + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec + )>, + )> = #krate::__private::vec![ + #( #pov_modes ),* + ]; + all_names.into_iter().map(|benchmark| { + let selected_benchmark = match benchmark { + #(#selected_benchmark_mappings), + *, + _ => panic!("all benchmarks should be selectable") + }; + let components = >::components(&selected_benchmark); + let name = benchmark.as_bytes().to_vec(); + let modes = pov_modes.iter().find(|p| p.0 == name).map(|p| p.1.clone()); + + #krate::BenchmarkMetadata { + name: benchmark.as_bytes().to_vec(), + components, + pov_modes: modes.unwrap_or_default(), + } + }).collect::<#krate::__private::Vec<_>>() + } + + fn run_benchmark( + extrinsic: &[u8], + c: &[(#krate::BenchmarkParameter, u32)], + whitelist: &[#krate::__private::TrackedStorageKey], + verify: bool, + internal_repeats: u32, + ) -> Result<#krate::__private::Vec<#krate::BenchmarkResult>, #krate::BenchmarkError> { + let extrinsic = #krate::__private::str::from_utf8(extrinsic).map_err(|_| "`extrinsic` is not a valid utf-8 string!")?; + let selected_benchmark = match extrinsic { + #(#selected_benchmark_mappings), + *, + _ => return Err("Could not find extrinsic.".into()), + }; + let mut whitelist = whitelist.to_vec(); + let whitelisted_caller_key = <#frame_system::Account< + T, + > as #krate::__private::storage::StorageMap<_, _,>>::hashed_key_for( + #krate::whitelisted_caller::() + ); + whitelist.push(whitelisted_caller_key.into()); + let transactional_layer_key = #krate::__private::TrackedStorageKey::new( + #krate::__private::storage::transactional::TRANSACTION_LEVEL_KEY.into(), + ); + whitelist.push(transactional_layer_key); + // Whitelist the `:extrinsic_index`. 
+ let extrinsic_index = #krate::__private::TrackedStorageKey::new( + #krate::__private::well_known_keys::EXTRINSIC_INDEX.into() + ); + whitelist.push(extrinsic_index); + // Whitelist the `:intrablock_entropy`. + let intrablock_entropy = #krate::__private::TrackedStorageKey::new( + #krate::__private::well_known_keys::INTRABLOCK_ENTROPY.into() + ); + whitelist.push(intrablock_entropy); + + #krate::benchmarking::set_whitelist(whitelist.clone()); + let mut results: #krate::__private::Vec<#krate::BenchmarkResult> = #krate::__private::Vec::new(); + + // Always do at least one internal repeat... + for _ in 0 .. internal_repeats.max(1) { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); + + // Set up the externalities environment for the setup we want to + // benchmark. + let closure_to_benchmark = < + SelectedBenchmark as #krate::BenchmarkingSetup<#type_use_generics> + >::instance(&selected_benchmark, c, verify)?; + + // Set the block number to at least 1 so events are deposited. + if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + + // Commit the externalities to the database, flushing the DB cache. + // This will enable worst case scenario for reading from the database. + #krate::benchmarking::commit_db(); + + // Access all whitelisted keys to get them into the proof recorder since the + // recorder does now have a whitelist. + for key in &whitelist { + #krate::__private::storage::unhashed::get_raw(&key.key); + } + + // Reset the read/write counter so we don't count operations in the setup process. + #krate::benchmarking::reset_read_write_count(); + + // Time the extrinsic logic. 
+ #krate::__private::log::trace!( + target: "benchmark", + "Start Benchmark: {} ({:?})", + extrinsic, + c + ); + + let start_pov = #krate::benchmarking::proof_size(); + let start_extrinsic = #krate::benchmarking::current_time(); + + closure_to_benchmark()?; + + let finish_extrinsic = #krate::benchmarking::current_time(); + let end_pov = #krate::benchmarking::proof_size(); + + // Calculate the diff caused by the benchmark. + let elapsed_extrinsic = finish_extrinsic.saturating_sub(start_extrinsic); + let diff_pov = match (start_pov, end_pov) { + (Some(start), Some(end)) => end.saturating_sub(start), + _ => Default::default(), + }; + + // Commit the changes to get proper write count + #krate::benchmarking::commit_db(); + #krate::__private::log::trace!( + target: "benchmark", + "End Benchmark: {} ns", elapsed_extrinsic + ); + let read_write_count = #krate::benchmarking::read_write_count(); + #krate::__private::log::trace!( + target: "benchmark", + "Read/Write Count {:?}", read_write_count + ); + + // Time the storage root recalculation. 
+ let start_storage_root = #krate::benchmarking::current_time(); + #krate::__private::storage_root(#krate::__private::StateVersion::V1); + let finish_storage_root = #krate::benchmarking::current_time(); + let elapsed_storage_root = finish_storage_root - start_storage_root; + + let skip_meta = [ #(#skip_meta_benchmark_names_str),* ]; + let read_and_written_keys = if skip_meta.contains(&extrinsic) { + #krate::__private::vec![(b"Skipped Metadata".to_vec(), 0, 0, false)] + } else { + #krate::benchmarking::get_read_and_written_keys() + }; + + results.push(#krate::BenchmarkResult { + components: c.to_vec(), + extrinsic_time: elapsed_extrinsic, + storage_root_time: elapsed_storage_root, + reads: read_write_count.0, + repeat_reads: read_write_count.1, + writes: read_write_count.2, + repeat_writes: read_write_count.3, + proof_size: diff_pov, + keys: read_and_written_keys, + }); + } + + return Ok(results); + } + } + + #[cfg(test)] + impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { + /// Test a particular benchmark by name. + /// + /// This isn't called `test_benchmark_by_name` just in case some end-user eventually + /// writes a benchmark, itself called `by_name`; the function would be shadowed in + /// that case. + /// + /// This is generally intended to be used by child test modules such as those created + /// by the `impl_benchmark_test_suite` macro. However, it is not an error if a pallet + /// author chooses not to implement benchmarks. 
+ #[allow(unused)] + fn test_bench_by_name(name: &[u8]) -> Result<(), #krate::BenchmarkError> { + let name = #krate::__private::str::from_utf8(name) + .map_err(|_| -> #krate::BenchmarkError { "`name` is not a valid utf8 string!".into() })?; + match name { + #(#benchmarks_by_name_mappings), + *, + _ => Err("Could not find test for requested benchmark.".into()), + } + } + } + + #impl_test_function + } + #mod_vis use #mod_name::*; + }; + Ok(res.into()) } /// Prepares a [`Vec`] to be interpolated by [`quote!`] by creating easily-iterable /// arrays formatted in such a way that they can be interpolated directly. struct UnrolledParams { - param_ranges: Vec, - param_names: Vec, + param_ranges: Vec, + param_names: Vec, } impl UnrolledParams { - /// Constructs an [`UnrolledParams`] from a [`Vec`] - fn from(params: &Vec) -> UnrolledParams { - let param_ranges: Vec = params - .iter() - .map(|p| { - let name = Ident::new(&p.name, Span::call_site()); - let start = &p.start; - let end = &p.end; - quote!(#name, #start, #end) - }) - .collect(); - let param_names: Vec = params - .iter() - .map(|p| { - let name = Ident::new(&p.name, Span::call_site()); - quote!(#name) - }) - .collect(); - UnrolledParams { - param_ranges, - param_names, - } - } + /// Constructs an [`UnrolledParams`] from a [`Vec`] + fn from(params: &Vec) -> UnrolledParams { + let param_ranges: Vec = params + .iter() + .map(|p| { + let name = Ident::new(&p.name, Span::call_site()); + let start = &p.start; + let end = &p.end; + quote!(#name, #start, #end) + }) + .collect(); + let param_names: Vec = params + .iter() + .map(|p| { + let name = Ident::new(&p.name, Span::call_site()); + quote!(#name) + }) + .collect(); + UnrolledParams { param_ranges, param_names } + } } /// Performs expansion of an already-parsed [`BenchmarkDef`]. 
fn expand_benchmark( - benchmark_def: BenchmarkDef, - name: &Ident, - is_instance: bool, - where_clause: TokenStream2, + benchmark_def: BenchmarkDef, + name: &Ident, + is_instance: bool, + where_clause: TokenStream2, ) -> TokenStream2 { - // set up variables needed during quoting - let krate = match generate_access_from_frame_or_crate("frame-benchmarking") { - Ok(ident) => ident, - Err(err) => return err.to_compile_error(), - }; - let frame_system = match generate_access_from_frame_or_crate("frame-system") { - Ok(path) => path, - Err(err) => return err.to_compile_error(), - }; - let codec = quote!(#krate::__private::codec); - let traits = quote!(#krate::__private::traits); - let setup_stmts = benchmark_def.setup_stmts; - let verify_stmts = benchmark_def.verify_stmts; - let last_stmt = benchmark_def.last_stmt; - let test_ident = Ident::new( - format!("test_benchmark_{}", name).as_str(), - Span::call_site(), - ); - - // unroll params (prepare for quoting) - let unrolled = UnrolledParams::from(&benchmark_def.params); - let param_names = unrolled.param_names; - let param_ranges = unrolled.param_ranges; - - let type_use_generics = match is_instance { - false => quote!(T), - true => quote!(T, I), - }; - - let type_impl_generics = match is_instance { - false => quote!(T: Config), - true => quote!(T: Config, I: 'static), - }; - - // used in the benchmarking impls - let (pre_call, post_call, fn_call_body) = match &benchmark_def.call_def { - BenchmarkCallDef::ExtrinsicCall { - origin, - expr_call, - attr_span: _, - } => { - let mut expr_call = expr_call.clone(); - - // remove first arg from expr_call - let mut final_args = Punctuated::::new(); - let args: Vec<&Expr> = expr_call.args.iter().collect(); - for arg in &args[1..] { - final_args.push((*(*arg)).clone()); - } - expr_call.args = final_args; - - let origin = match origin { - Expr::Cast(t) => { - let ty = t.ty.clone(); - quote! { - <::RuntimeOrigin as From<#ty>>::from(#origin); - } - } - _ => quote! 
{ - #origin.into(); - }, - }; - - // determine call name (handles `_` and normal call syntax) - let expr_span = expr_call.span(); - let call_err = || { - syn::Error::new(expr_span, "Extrinsic call must be a function call or `_`") - .to_compile_error() - }; - let call_name = match *expr_call.func { - Expr::Path(expr_path) => { - // normal function call - let Some(segment) = expr_path.path.segments.last() else { - return call_err(); - }; - segment.ident.to_string() - } - Expr::Infer(_) => { - // `_` style - // replace `_` with fn name - name.to_string() - } - _ => return call_err(), - }; - - // modify extrinsic call to be prefixed with "new_call_variant" - let call_name = format!("new_call_variant_{}", call_name); - let mut punct: Punctuated = Punctuated::new(); - punct.push(PathSegment { - arguments: PathArguments::None, - ident: Ident::new(call_name.as_str(), Span::call_site()), - }); - *expr_call.func = Expr::Path(ExprPath { - attrs: vec![], - qself: None, - path: Path { - leading_colon: None, - segments: punct, - }, - }); - let pre_call = quote! { - let __call = Call::<#type_use_generics>::#expr_call; - let __benchmarked_call_encoded = #codec::Encode::encode(&__call); - }; - let post_call = quote! { - let __call_decoded = as #codec::Decode> - ::decode(&mut &__benchmarked_call_encoded[..]) - .expect("call is encoded above, encoding must be correct"); - let __origin = #origin; - as #traits::UnfilteredDispatchable>::dispatch_bypass_filter( - __call_decoded, - __origin, - ) - }; - ( - // (pre_call, post_call, fn_call_body): - pre_call.clone(), - quote!(#post_call?;), - quote! 
{ - #pre_call - #post_call.unwrap(); - }, - ) - } - BenchmarkCallDef::Block { - block, - attr_span: _, - } => (quote!(), quote!(#block), quote!(#block)), - }; - - let vis = benchmark_def.fn_vis; - - // remove #[benchmark] attribute - let fn_attrs = benchmark_def - .fn_attrs - .iter() - .filter(|attr| !attr.path().is_ident(keywords::BENCHMARK_TOKEN)); - - // modify signature generics, ident, and inputs, e.g: - // before: `fn bench(u: Linear<1, 100>) -> Result<(), BenchmarkError>` - // after: `fn _bench , I: 'static>(u: u32, verify: bool) -> Result<(), - // BenchmarkError>` - let mut sig = benchmark_def.fn_sig; - sig.generics = parse_quote!(<#type_impl_generics>); - if !where_clause.is_empty() { - sig.generics.where_clause = parse_quote!(where #where_clause); - } - sig.ident = Ident::new( - format!("_{}", name.to_token_stream()).as_str(), - Span::call_site(), - ); - let mut fn_param_inputs: Vec = - param_names.iter().map(|name| quote!(#name: u32)).collect(); - fn_param_inputs.push(quote!(verify: bool)); - sig.inputs = parse_quote!(#(#fn_param_inputs),*); - - // used in instance() impl - let impl_last_stmt = match &last_stmt { - Some(stmt) => quote!(#stmt), - None => quote!(Ok(())), - }; - let fn_attrs_clone = fn_attrs.clone(); - - let fn_def = quote! { - #( - #fn_attrs_clone - )* - #vis #sig { - #( - #setup_stmts - )* - #fn_call_body - if verify { - #( - #verify_stmts - )* - } - #last_stmt - } - }; - - // generate final quoted tokens - let res = quote! { - // benchmark function definition - #fn_def - - #[allow(non_camel_case_types)] - #( - #fn_attrs - )* - struct #name; - - #[allow(unused_variables)] - impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> - for #name where #where_clause { - fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { - #krate::__private::vec! 
[ - #( - (#krate::BenchmarkParameter::#param_ranges) - ),* - ] - } - - fn instance( - &self, - components: &[(#krate::BenchmarkParameter, u32)], - verify: bool - ) -> Result<#krate::__private::Box Result<(), #krate::BenchmarkError>>, #krate::BenchmarkError> { - #( - // prepare instance #param_names - let #param_names = components.iter() - .find(|&c| c.0 == #krate::BenchmarkParameter::#param_names) - .ok_or("Could not find component during benchmark preparation.")? - .1; - )* - - // benchmark setup code - #( - #setup_stmts - )* - #pre_call - Ok(#krate::__private::Box::new(move || -> Result<(), #krate::BenchmarkError> { - #post_call - if verify { - #( - #verify_stmts - )* - } - #impl_last_stmt - })) - } - } - - #[cfg(test)] - impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { - #[allow(unused)] - fn #test_ident() -> Result<(), #krate::BenchmarkError> { - let selected_benchmark = SelectedBenchmark::#name; - let components = < - SelectedBenchmark as #krate::BenchmarkingSetup - >::components(&selected_benchmark); - let execute_benchmark = | - c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> - | -> Result<(), #krate::BenchmarkError> { - // Always reset the state after the benchmark. - #krate::__private::defer!(#krate::benchmarking::wipe_db()); - - // Set up the benchmark, return execution + verification function. - let closure_to_verify = < - SelectedBenchmark as #krate::BenchmarkingSetup - >::instance(&selected_benchmark, &c, true)?; - - // Set the block number to at least 1 so events are deposited. 
- if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { - #frame_system::Pallet::::set_block_number(1u32.into()); - } - - // Run execution + verification - closure_to_verify() - }; - - if components.is_empty() { - execute_benchmark(Default::default())?; - } else { - let num_values: u32 = if let Ok(ev) = std::env::var("VALUES_PER_COMPONENT") { - ev.parse().map_err(|_| { - #krate::BenchmarkError::Stop( - "Could not parse env var `VALUES_PER_COMPONENT` as u32." - ) - })? - } else { - 6 - }; - - if num_values < 2 { - return Err("`VALUES_PER_COMPONENT` must be at least 2".into()); - } - - for (name, low, high) in components.clone().into_iter() { - // Test the lowest, highest (if its different from the lowest) - // and up to num_values-2 more equidistant values in between. - // For 0..10 and num_values=6 this would mean: [0, 2, 4, 6, 8, 10] - if high < low { - return Err("The start of a `ParamRange` must be less than or equal to the end".into()); - } - - let mut values = #krate::__private::vec![low]; - let diff = (high - low).min(num_values - 1); - let slope = (high - low) as f32 / diff as f32; - - for i in 1..=diff { - let value = ((low as f32 + slope * i as f32) as u32) - .clamp(low, high); - values.push(value); - } - - for component_value in values { - // Select the max value for all the other components. 
- let c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> = components - .iter() - .map(|(n, _, h)| - if *n == name { - (*n, component_value) - } else { - (*n, *h) - } - ) - .collect(); - - execute_benchmark(c)?; - } - } - } - return Ok(()); - } - } - }; - res + // set up variables needed during quoting + let krate = match generate_access_from_frame_or_crate("frame-benchmarking") { + Ok(ident) => ident, + Err(err) => return err.to_compile_error().into(), + }; + let frame_system = match generate_access_from_frame_or_crate("frame-system") { + Ok(path) => path, + Err(err) => return err.to_compile_error().into(), + }; + let codec = quote!(#krate::__private::codec); + let traits = quote!(#krate::__private::traits); + let setup_stmts = benchmark_def.setup_stmts; + let verify_stmts = benchmark_def.verify_stmts; + let last_stmt = benchmark_def.last_stmt; + let test_ident = + Ident::new(format!("test_benchmark_{}", name.to_string()).as_str(), Span::call_site()); + + // unroll params (prepare for quoting) + let unrolled = UnrolledParams::from(&benchmark_def.params); + let param_names = unrolled.param_names; + let param_ranges = unrolled.param_ranges; + + let type_use_generics = match is_instance { + false => quote!(T), + true => quote!(T, I), + }; + + let type_impl_generics = match is_instance { + false => quote!(T: Config), + true => quote!(T: Config, I: 'static), + }; + + // used in the benchmarking impls + let (pre_call, post_call, fn_call_body) = match &benchmark_def.call_def { + BenchmarkCallDef::ExtrinsicCall { origin, expr_call, attr_span: _ } => { + let mut expr_call = expr_call.clone(); + + // remove first arg from expr_call + let mut final_args = Punctuated::::new(); + let args: Vec<&Expr> = expr_call.args.iter().collect(); + for arg in &args[1..] { + final_args.push((*(*arg)).clone()); + } + expr_call.args = final_args; + + let origin = match origin { + Expr::Cast(t) => { + let ty = t.ty.clone(); + quote! 
{ + <::RuntimeOrigin as From<#ty>>::from(#origin); + } + }, + _ => quote! { + #origin.into(); + }, + }; + + // determine call name (handles `_` and normal call syntax) + let expr_span = expr_call.span(); + let call_err = || { + syn::Error::new(expr_span, "Extrinsic call must be a function call or `_`") + .to_compile_error() + }; + let call_name = match *expr_call.func { + Expr::Path(expr_path) => { + // normal function call + let Some(segment) = expr_path.path.segments.last() else { return call_err() }; + segment.ident.to_string() + }, + Expr::Infer(_) => { + // `_` style + // replace `_` with fn name + name.to_string() + }, + _ => return call_err(), + }; + + // modify extrinsic call to be prefixed with "new_call_variant" + let call_name = format!("new_call_variant_{}", call_name); + let mut punct: Punctuated = Punctuated::new(); + punct.push(PathSegment { + arguments: PathArguments::None, + ident: Ident::new(call_name.as_str(), Span::call_site()), + }); + *expr_call.func = Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: Path { leading_colon: None, segments: punct }, + }); + let pre_call = quote! { + let __call = Call::<#type_use_generics>::#expr_call; + let __benchmarked_call_encoded = #codec::Encode::encode(&__call); + }; + let post_call = quote! { + let __call_decoded = as #codec::Decode> + ::decode(&mut &__benchmarked_call_encoded[..]) + .expect("call is encoded above, encoding must be correct"); + let __origin = #origin; + as #traits::UnfilteredDispatchable>::dispatch_bypass_filter( + __call_decoded, + __origin, + ) + }; + ( + // (pre_call, post_call, fn_call_body): + pre_call.clone(), + quote!(#post_call?;), + quote! 
{ + #pre_call + #post_call.unwrap(); + }, + ) + }, + BenchmarkCallDef::Block { block, attr_span: _ } => + (quote!(), quote!(#block), quote!(#block)), + }; + + let vis = benchmark_def.fn_vis; + + // remove #[benchmark] attribute + let fn_attrs = benchmark_def + .fn_attrs + .iter() + .filter(|attr| !attr.path().is_ident(keywords::BENCHMARK_TOKEN)); + + // modify signature generics, ident, and inputs, e.g: + // before: `fn bench(u: Linear<1, 100>) -> Result<(), BenchmarkError>` + // after: `fn _bench , I: 'static>(u: u32, verify: bool) -> Result<(), + // BenchmarkError>` + let mut sig = benchmark_def.fn_sig; + sig.generics = parse_quote!(<#type_impl_generics>); + if !where_clause.is_empty() { + sig.generics.where_clause = parse_quote!(where #where_clause); + } + sig.ident = + Ident::new(format!("_{}", name.to_token_stream().to_string()).as_str(), Span::call_site()); + let mut fn_param_inputs: Vec = + param_names.iter().map(|name| quote!(#name: u32)).collect(); + fn_param_inputs.push(quote!(verify: bool)); + sig.inputs = parse_quote!(#(#fn_param_inputs),*); + + // used in instance() impl + let impl_last_stmt = match &last_stmt { + Some(stmt) => quote!(#stmt), + None => quote!(Ok(())), + }; + let fn_attrs_clone = fn_attrs.clone(); + + let fn_def = quote! { + #( + #fn_attrs_clone + )* + #vis #sig { + #( + #setup_stmts + )* + #fn_call_body + if verify { + #( + #verify_stmts + )* + } + #last_stmt + } + }; + + // generate final quoted tokens + let res = quote! { + // benchmark function definition + #fn_def + + #[allow(non_camel_case_types)] + #( + #fn_attrs + )* + struct #name; + + #[allow(unused_variables)] + impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> + for #name where #where_clause { + fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { + #krate::__private::vec! 
[ + #( + (#krate::BenchmarkParameter::#param_ranges) + ),* + ] + } + + fn instance( + &self, + components: &[(#krate::BenchmarkParameter, u32)], + verify: bool + ) -> Result<#krate::__private::Box Result<(), #krate::BenchmarkError>>, #krate::BenchmarkError> { + #( + // prepare instance #param_names + let #param_names = components.iter() + .find(|&c| c.0 == #krate::BenchmarkParameter::#param_names) + .ok_or("Could not find component during benchmark preparation.")? + .1; + )* + + // benchmark setup code + #( + #setup_stmts + )* + #pre_call + Ok(#krate::__private::Box::new(move || -> Result<(), #krate::BenchmarkError> { + #post_call + if verify { + #( + #verify_stmts + )* + } + #impl_last_stmt + })) + } + } + + #[cfg(test)] + impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { + #[allow(unused)] + fn #test_ident() -> Result<(), #krate::BenchmarkError> { + let selected_benchmark = SelectedBenchmark::#name; + let components = < + SelectedBenchmark as #krate::BenchmarkingSetup + >::components(&selected_benchmark); + let execute_benchmark = | + c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> + | -> Result<(), #krate::BenchmarkError> { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); + + // Set up the benchmark, return execution + verification function. + let closure_to_verify = < + SelectedBenchmark as #krate::BenchmarkingSetup + >::instance(&selected_benchmark, &c, true)?; + + // Set the block number to at least 1 so events are deposited. 
+ if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + + // Run execution + verification + closure_to_verify() + }; + + if components.is_empty() { + execute_benchmark(Default::default())?; + } else { + let num_values: u32 = if let Ok(ev) = std::env::var("VALUES_PER_COMPONENT") { + ev.parse().map_err(|_| { + #krate::BenchmarkError::Stop( + "Could not parse env var `VALUES_PER_COMPONENT` as u32." + ) + })? + } else { + 6 + }; + + if num_values < 2 { + return Err("`VALUES_PER_COMPONENT` must be at least 2".into()); + } + + for (name, low, high) in components.clone().into_iter() { + // Test the lowest, highest (if its different from the lowest) + // and up to num_values-2 more equidistant values in between. + // For 0..10 and num_values=6 this would mean: [0, 2, 4, 6, 8, 10] + if high < low { + return Err("The start of a `ParamRange` must be less than or equal to the end".into()); + } + + let mut values = #krate::__private::vec![low]; + let diff = (high - low).min(num_values - 1); + let slope = (high - low) as f32 / diff as f32; + + for i in 1..=diff { + let value = ((low as f32 + slope * i as f32) as u32) + .clamp(low, high); + values.push(value); + } + + for component_value in values { + // Select the max value for all the other components. 
+ let c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> = components + .iter() + .map(|(n, _, h)| + if *n == name { + (*n, component_value) + } else { + (*n, *h) + } + ) + .collect(); + + execute_benchmark(c)?; + } + } + } + return Ok(()); + } + } + }; + res } diff --git a/support/procedural-fork/src/construct_runtime/expand/call.rs b/support/procedural-fork/src/construct_runtime/expand/call.rs index 7e8c2e856..b0041ccc0 100644 --- a/support/procedural-fork/src/construct_runtime/expand/call.rs +++ b/support/procedural-fork/src/construct_runtime/expand/call.rs @@ -22,205 +22,202 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_dispatch( - runtime: &Ident, - system_pallet: &Pallet, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + system_pallet: &Pallet, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut variant_defs = TokenStream::new(); - let mut variant_patterns = Vec::new(); - let mut query_call_part_macros = Vec::new(); - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let system_path = &system_pallet.path; - - let pallets_with_call = pallet_decls.iter().filter(|decl| decl.exists_part("Call")); - - for pallet_declaration in pallets_with_call { - let name = &pallet_declaration.name; - let path = &pallet_declaration.path; - let index = pallet_declaration.index; - let attr = - pallet_declaration - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - variant_defs.extend(quote! { - #attr - #[codec(index = #index)] - #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ), - }); - variant_patterns.push(quote!(RuntimeCall::#name(call))); - pallet_names.push(name); - pallet_attrs.push(attr); - query_call_part_macros.push(quote! 
{ - #path::__substrate_call_check::is_call_part_defined!(#name); - }); - } - - quote! { - #( #query_call_part_macros )* - - #[derive( - Clone, PartialEq, Eq, - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeCall { - #variant_defs - } - #[cfg(test)] - impl RuntimeCall { - /// Return a list of the module names together with their size in memory. - pub const fn sizes() -> &'static [( &'static str, usize )] { - use #scrate::dispatch::Callable; - use core::mem::size_of; - &[#( - #pallet_attrs - ( - stringify!(#pallet_names), - size_of::< <#pallet_names as Callable<#runtime>>::RuntimeCall >(), - ), - )*] - } - - /// Panics with diagnostic information if the size is greater than the given `limit`. - pub fn assert_size_under(limit: usize) { - let size = core::mem::size_of::(); - let call_oversize = size > limit; - if call_oversize { - println!("Size of `Call` is {} bytes (provided limit is {} bytes)", size, limit); - let mut sizes = Self::sizes().to_vec(); - sizes.sort_by_key(|x| -(x.1 as isize)); - for (i, &(name, size)) in sizes.iter().enumerate().take(5) { - println!("Offender #{}: {} at {} bytes", i + 1, name, size); - } - if let Some((_, next_size)) = sizes.get(5) { - println!("{} others of size {} bytes or less", sizes.len() - 5, next_size); - } - panic!( - "Size of `Call` is more than limit; use `Box` on complex parameter types to reduce the + let mut variant_defs = TokenStream::new(); + let mut variant_patterns = Vec::new(); + let mut query_call_part_macros = Vec::new(); + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let system_path = &system_pallet.path; + + let pallets_with_call = pallet_decls.iter().filter(|decl| decl.exists_part("Call")); + + for pallet_declaration in pallets_with_call { + let name = &pallet_declaration.name; + let path = &pallet_declaration.path; + let index = pallet_declaration.index; + 
let attr = + pallet_declaration.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + variant_defs.extend(quote! { + #attr + #[codec(index = #index)] + #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ), + }); + variant_patterns.push(quote!(RuntimeCall::#name(call))); + pallet_names.push(name); + pallet_attrs.push(attr); + query_call_part_macros.push(quote! { + #path::__substrate_call_check::is_call_part_defined!(#name); + }); + } + + quote! { + #( #query_call_part_macros )* + + #[derive( + Clone, PartialEq, Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeCall { + #variant_defs + } + #[cfg(test)] + impl RuntimeCall { + /// Return a list of the module names together with their size in memory. + pub const fn sizes() -> &'static [( &'static str, usize )] { + use #scrate::dispatch::Callable; + use core::mem::size_of; + &[#( + #pallet_attrs + ( + stringify!(#pallet_names), + size_of::< <#pallet_names as Callable<#runtime>>::RuntimeCall >(), + ), + )*] + } + + /// Panics with diagnostic information if the size is greater than the given `limit`. 
+ pub fn assert_size_under(limit: usize) { + let size = core::mem::size_of::(); + let call_oversize = size > limit; + if call_oversize { + println!("Size of `Call` is {} bytes (provided limit is {} bytes)", size, limit); + let mut sizes = Self::sizes().to_vec(); + sizes.sort_by_key(|x| -(x.1 as isize)); + for (i, &(name, size)) in sizes.iter().enumerate().take(5) { + println!("Offender #{}: {} at {} bytes", i + 1, name, size); + } + if let Some((_, next_size)) = sizes.get(5) { + println!("{} others of size {} bytes or less", sizes.len() - 5, next_size); + } + panic!( + "Size of `Call` is more than limit; use `Box` on complex parameter types to reduce the size of `Call`. If the limit is too strong, maybe consider providing a higher limit." - ); - } - } - } - impl #scrate::dispatch::GetDispatchInfo for RuntimeCall { - fn get_dispatch_info(&self) -> #scrate::dispatch::DispatchInfo { - match self { - #( - #pallet_attrs - #variant_patterns => call.get_dispatch_info(), - )* - } - } - } - - impl #scrate::dispatch::CheckIfFeeless for RuntimeCall { - type Origin = #system_path::pallet_prelude::OriginFor<#runtime>; - fn is_feeless(&self, origin: &Self::Origin) -> bool { - match self { - #( - #pallet_attrs - #variant_patterns => call.is_feeless(origin), - )* - } - } - } - - impl #scrate::traits::GetCallMetadata for RuntimeCall { - fn get_call_metadata(&self) -> #scrate::traits::CallMetadata { - use #scrate::traits::GetCallName; - match self { - #( - #pallet_attrs - #variant_patterns => { - let function_name = call.get_call_name(); - let pallet_name = stringify!(#pallet_names); - #scrate::traits::CallMetadata { function_name, pallet_name } - } - )* - } - } - - fn get_module_names() -> &'static [&'static str] { - &[#( - #pallet_attrs - stringify!(#pallet_names), - )*] - } - - fn get_call_names(module: &str) -> &'static [&'static str] { - use #scrate::{dispatch::Callable, traits::GetCallName}; - match module { - #( - #pallet_attrs - stringify!(#pallet_names) => - <<#pallet_names 
as Callable<#runtime>>::RuntimeCall - as GetCallName>::get_call_names(), - )* - _ => unreachable!(), - } - } - } - impl #scrate::__private::Dispatchable for RuntimeCall { - type RuntimeOrigin = RuntimeOrigin; - type Config = RuntimeCall; - type Info = #scrate::dispatch::DispatchInfo; - type PostInfo = #scrate::dispatch::PostDispatchInfo; - fn dispatch(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { - if !::filter_call(&origin, &self) { - return ::core::result::Result::Err( - #system_path::Error::<#runtime>::CallFiltered.into() - ); - } - - #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(self, origin) - } - } - impl #scrate::traits::UnfilteredDispatchable for RuntimeCall { - type RuntimeOrigin = RuntimeOrigin; - fn dispatch_bypass_filter(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { - match self { - #( - #pallet_attrs - #variant_patterns => - #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(call, origin), - )* - } - } - } - - #( - #pallet_attrs - impl #scrate::traits::IsSubType<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { - #[allow(unreachable_patterns)] - fn is_sub_type(&self) -> Option<&#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> { - match self { - #variant_patterns => Some(call), - // May be unreachable - _ => None, - } - } - } - - #pallet_attrs - impl From<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { - fn from(call: #scrate::dispatch::CallableCallFor<#pallet_names, #runtime>) -> Self { - #variant_patterns - } - } - )* - } + ); + } + } + } + impl #scrate::dispatch::GetDispatchInfo for RuntimeCall { + fn get_dispatch_info(&self) -> #scrate::dispatch::DispatchInfo { + match self { + #( + #pallet_attrs + #variant_patterns => call.get_dispatch_info(), + )* + } + } + } + + impl #scrate::dispatch::CheckIfFeeless for RuntimeCall { + type Origin = 
#system_path::pallet_prelude::OriginFor<#runtime>; + fn is_feeless(&self, origin: &Self::Origin) -> bool { + match self { + #( + #pallet_attrs + #variant_patterns => call.is_feeless(origin), + )* + } + } + } + + impl #scrate::traits::GetCallMetadata for RuntimeCall { + fn get_call_metadata(&self) -> #scrate::traits::CallMetadata { + use #scrate::traits::GetCallName; + match self { + #( + #pallet_attrs + #variant_patterns => { + let function_name = call.get_call_name(); + let pallet_name = stringify!(#pallet_names); + #scrate::traits::CallMetadata { function_name, pallet_name } + } + )* + } + } + + fn get_module_names() -> &'static [&'static str] { + &[#( + #pallet_attrs + stringify!(#pallet_names), + )*] + } + + fn get_call_names(module: &str) -> &'static [&'static str] { + use #scrate::{dispatch::Callable, traits::GetCallName}; + match module { + #( + #pallet_attrs + stringify!(#pallet_names) => + <<#pallet_names as Callable<#runtime>>::RuntimeCall + as GetCallName>::get_call_names(), + )* + _ => unreachable!(), + } + } + } + impl #scrate::__private::Dispatchable for RuntimeCall { + type RuntimeOrigin = RuntimeOrigin; + type Config = RuntimeCall; + type Info = #scrate::dispatch::DispatchInfo; + type PostInfo = #scrate::dispatch::PostDispatchInfo; + fn dispatch(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { + if !::filter_call(&origin, &self) { + return ::core::result::Result::Err( + #system_path::Error::<#runtime>::CallFiltered.into() + ); + } + + #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(self, origin) + } + } + impl #scrate::traits::UnfilteredDispatchable for RuntimeCall { + type RuntimeOrigin = RuntimeOrigin; + fn dispatch_bypass_filter(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { + match self { + #( + #pallet_attrs + #variant_patterns => + #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(call, origin), + )* + } + } + } + + #( + #pallet_attrs + impl 
#scrate::traits::IsSubType<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { + #[allow(unreachable_patterns)] + fn is_sub_type(&self) -> Option<&#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> { + match self { + #variant_patterns => Some(call), + // May be unreachable + _ => None, + } + } + } + + #pallet_attrs + impl From<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { + fn from(call: #scrate::dispatch::CallableCallFor<#pallet_names, #runtime>) -> Self { + #variant_patterns + } + } + )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs b/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs index be6b2f085..101a476fb 100644 --- a/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs +++ b/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs @@ -20,82 +20,82 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; pub(crate) fn expand_conversion_fn( - composite_name: &str, - path: &PalletPath, - instance: Option<&Ident>, - variant_name: &Ident, + composite_name: &str, + path: &PalletPath, + instance: Option<&Ident>, + variant_name: &Ident, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); - let runtime_composite_name = quote::format_ident!("Runtime{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); + let runtime_composite_name = quote::format_ident!("Runtime{}", composite_name); - if let Some(inst) = instance { - quote! { - impl From<#path::#composite_name<#path::#inst>> for #runtime_composite_name { - fn from(hr: #path::#composite_name<#path::#inst>) -> Self { - #runtime_composite_name::#variant_name(hr) - } - } - } - } else { - quote! 
{ - impl From<#path::#composite_name> for #runtime_composite_name { - fn from(hr: #path::#composite_name) -> Self { - #runtime_composite_name::#variant_name(hr) - } - } - } - } + if let Some(inst) = instance { + quote! { + impl From<#path::#composite_name<#path::#inst>> for #runtime_composite_name { + fn from(hr: #path::#composite_name<#path::#inst>) -> Self { + #runtime_composite_name::#variant_name(hr) + } + } + } + } else { + quote! { + impl From<#path::#composite_name> for #runtime_composite_name { + fn from(hr: #path::#composite_name) -> Self { + #runtime_composite_name::#variant_name(hr) + } + } + } + } } pub(crate) fn expand_variant( - composite_name: &str, - index: u8, - path: &PalletPath, - instance: Option<&Ident>, - variant_name: &Ident, + composite_name: &str, + index: u8, + path: &PalletPath, + instance: Option<&Ident>, + variant_name: &Ident, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); - if let Some(inst) = instance { - quote! { - #[codec(index = #index)] - #variant_name(#path::#composite_name<#path::#inst>), - } - } else { - quote! { - #[codec(index = #index)] - #variant_name(#path::#composite_name), - } - } + if let Some(inst) = instance { + quote! { + #[codec(index = #index)] + #variant_name(#path::#composite_name<#path::#inst>), + } + } else { + quote! { + #[codec(index = #index)] + #variant_name(#path::#composite_name), + } + } } pub(crate) fn expand_variant_count( - composite_name: &str, - path: &PalletPath, - instance: Option<&Ident>, + composite_name: &str, + path: &PalletPath, + instance: Option<&Ident>, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); - if let Some(inst) = instance { - quote! { - #path::#composite_name::<#path::#inst>::VARIANT_COUNT - } - } else { - // Wrapped `<`..`>` means: use default type parameter for enum. 
- // - // This is used for pallets without instance support or pallets with instance support when - // we don't specify instance: - // - // ``` - // pub struct Pallet{..} - // - // #[pallet::composite_enum] - // pub enum HoldReason {..} - // - // Pallet1: pallet_x, // <- default type parameter - // ``` - quote! { - <#path::#composite_name>::VARIANT_COUNT - } - } + if let Some(inst) = instance { + quote! { + #path::#composite_name::<#path::#inst>::VARIANT_COUNT + } + } else { + // Wrapped `<`..`>` means: use default type parameter for enum. + // + // This is used for pallets without instance support or pallets with instance support when + // we don't specify instance: + // + // ``` + // pub struct Pallet{..} + // + // #[pallet::composite_enum] + // pub enum HoldReason {..} + // + // Pallet1: pallet_x, // <- default type parameter + // ``` + quote! { + <#path::#composite_name>::VARIANT_COUNT + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/config.rs b/support/procedural-fork/src/construct_runtime/expand/config.rs index ff715e584..dbbe6ba6e 100644 --- a/support/procedural-fork/src/construct_runtime/expand/config.rs +++ b/support/procedural-fork/src/construct_runtime/expand/config.rs @@ -23,135 +23,125 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_config( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut types = TokenStream::new(); - let mut fields = TokenStream::new(); - let mut genesis_build_calls = TokenStream::new(); - let mut query_genesis_config_part_macros = Vec::new(); + let mut types = TokenStream::new(); + let mut fields = TokenStream::new(); + let mut genesis_build_calls = TokenStream::new(); + let mut query_genesis_config_part_macros = Vec::new(); - for decl in pallet_decls { - if let Some(pallet_entry) = decl.find_part("Config") { - let path = &decl.path; - let pallet_name = &decl.name; - let 
path_str = path.into_token_stream().to_string(); - let config = format_ident!("{}Config", pallet_name); - let field_name = - &Ident::new(&pallet_name.to_string().to_snake_case(), decl.name.span()); - let part_is_generic = !pallet_entry.generics.params.is_empty(); - let attr = &decl - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + for decl in pallet_decls { + if let Some(pallet_entry) = decl.find_part("Config") { + let path = &decl.path; + let pallet_name = &decl.name; + let path_str = path.into_token_stream().to_string(); + let config = format_ident!("{}Config", pallet_name); + let field_name = + &Ident::new(&pallet_name.to_string().to_snake_case(), decl.name.span()); + let part_is_generic = !pallet_entry.generics.params.is_empty(); + let attr = &decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - types.extend(expand_config_types( - attr, - runtime, - decl, - &config, - part_is_generic, - )); - fields.extend(quote!(#attr pub #field_name: #config,)); - genesis_build_calls.extend(expand_config_build_storage_call( - scrate, &config, attr, field_name, - )); - query_genesis_config_part_macros.push(quote! { + types.extend(expand_config_types(attr, runtime, decl, &config, part_is_generic)); + fields.extend(quote!(#attr pub #field_name: #config,)); + genesis_build_calls + .extend(expand_config_build_storage_call(scrate, &config, attr, field_name)); + query_genesis_config_part_macros.push(quote! 
{ #path::__substrate_genesis_config_check::is_genesis_config_defined!(#pallet_name); #[cfg(feature = "std")] #path::__substrate_genesis_config_check::is_std_enabled_for_genesis!(#pallet_name, #path_str); }); - } - } + } + } - quote! { - #( #query_genesis_config_part_macros )* + quote! { + #( #query_genesis_config_part_macros )* - #types + #types - use #scrate::__private::serde as __genesis_config_serde_import__; - #[derive(#scrate::__private::serde::Serialize, #scrate::__private::serde::Deserialize, Default)] - #[serde(rename_all = "camelCase")] - #[serde(deny_unknown_fields)] - #[serde(crate = "__genesis_config_serde_import__")] - pub struct RuntimeGenesisConfig { - #fields - } + use #scrate::__private::serde as __genesis_config_serde_import__; + #[derive(#scrate::__private::serde::Serialize, #scrate::__private::serde::Deserialize, Default)] + #[serde(rename_all = "camelCase")] + #[serde(deny_unknown_fields)] + #[serde(crate = "__genesis_config_serde_import__")] + pub struct RuntimeGenesisConfig { + #fields + } - #[cfg(any(feature = "std", test))] - impl #scrate::sp_runtime::BuildStorage for RuntimeGenesisConfig { - fn assimilate_storage( - &self, - storage: &mut #scrate::sp_runtime::Storage, - ) -> std::result::Result<(), String> { - #scrate::__private::BasicExternalities::execute_with_storage(storage, || { - ::build(&self); - Ok(()) - }) - } - } + #[cfg(any(feature = "std", test))] + impl #scrate::sp_runtime::BuildStorage for RuntimeGenesisConfig { + fn assimilate_storage( + &self, + storage: &mut #scrate::sp_runtime::Storage, + ) -> std::result::Result<(), String> { + #scrate::__private::BasicExternalities::execute_with_storage(storage, || { + ::build(&self); + Ok(()) + }) + } + } - impl #scrate::traits::BuildGenesisConfig for RuntimeGenesisConfig { - fn build(&self) { - #genesis_build_calls - ::on_genesis(); - } - } + impl #scrate::traits::BuildGenesisConfig for RuntimeGenesisConfig { + fn build(&self) { + #genesis_build_calls + ::on_genesis(); + } + } - /// 
Test the `Default` derive impl of the `RuntimeGenesisConfig`. - #[cfg(test)] - #[test] - fn test_genesis_config_builds() { - #scrate::__private::sp_io::TestExternalities::default().execute_with(|| { - ::build( - &RuntimeGenesisConfig::default() - ); - }); - } - } + /// Test the `Default` derive impl of the `RuntimeGenesisConfig`. + #[cfg(test)] + #[test] + fn test_genesis_config_builds() { + #scrate::__private::sp_io::TestExternalities::default().execute_with(|| { + ::build( + &RuntimeGenesisConfig::default() + ); + }); + } + } } fn expand_config_types( - attr: &TokenStream, - runtime: &Ident, - decl: &Pallet, - config: &Ident, - part_is_generic: bool, + attr: &TokenStream, + runtime: &Ident, + decl: &Pallet, + config: &Ident, + part_is_generic: bool, ) -> TokenStream { - let path = &decl.path; + let path = &decl.path; - match (decl.instance.as_ref(), part_is_generic) { - (Some(inst), true) => quote! { - #attr - pub type #config = #path::GenesisConfig<#runtime, #path::#inst>; - }, - (None, true) => quote! { - #attr - pub type #config = #path::GenesisConfig<#runtime>; - }, - (_, false) => quote! { - #attr - pub type #config = #path::GenesisConfig; - }, - } + match (decl.instance.as_ref(), part_is_generic) { + (Some(inst), true) => quote! { + #attr + pub type #config = #path::GenesisConfig<#runtime, #path::#inst>; + }, + (None, true) => quote! { + #attr + pub type #config = #path::GenesisConfig<#runtime>; + }, + (_, false) => quote! { + #attr + pub type #config = #path::GenesisConfig; + }, + } } fn expand_config_build_storage_call( - scrate: &TokenStream, - pallet_genesis_config: &Ident, - attr: &TokenStream, - field_name: &Ident, + scrate: &TokenStream, + pallet_genesis_config: &Ident, + attr: &TokenStream, + field_name: &Ident, ) -> TokenStream { - quote! { - #attr - <#pallet_genesis_config as #scrate::traits::BuildGenesisConfig>::build(&self.#field_name); - } + quote! 
{ + #attr + <#pallet_genesis_config as #scrate::traits::BuildGenesisConfig>::build(&self.#field_name); + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs index f00269085..f12f99526 100644 --- a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs @@ -21,55 +21,55 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_freeze_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut freeze_reason_variants = Vec::new(); - let mut freeze_reason_variants_count = Vec::new(); - for decl in pallet_decls { - if decl.find_part("FreezeReason").is_some() { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut freeze_reason_variants = Vec::new(); + let mut freeze_reason_variants_count = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("FreezeReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "FreezeReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "FreezeReason", + path, + instance, + variant_name, + )); - freeze_reason_variants.push(composite_helper::expand_variant( - "FreezeReason", - index, - path, - instance, - variant_name, - )); + freeze_reason_variants.push(composite_helper::expand_variant( + "FreezeReason", + index, + path, + instance, + variant_name, + )); - freeze_reason_variants_count.push(composite_helper::expand_variant_count( - "FreezeReason", - path, - instance, - )); - } - } + 
freeze_reason_variants_count.push(composite_helper::expand_variant_count( + "FreezeReason", + path, + instance, + )); + } + } - quote! { - /// A reason for placing a freeze on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeFreezeReason { - #( #freeze_reason_variants )* - } + quote! { + /// A reason for placing a freeze on funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeFreezeReason { + #( #freeze_reason_variants )* + } - impl #scrate::traits::VariantCount for RuntimeFreezeReason { - const VARIANT_COUNT: u32 = 0 #( + #freeze_reason_variants_count )*; - } + impl #scrate::traits::VariantCount for RuntimeFreezeReason { + const VARIANT_COUNT: u32 = 0 #( + #freeze_reason_variants_count )*; + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs index 5fc2ed1ee..cdab92712 100644 --- a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs @@ -21,55 +21,55 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_hold_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut hold_reason_variants = Vec::new(); - let mut hold_reason_variants_count = Vec::new(); - for decl in pallet_decls { - if decl.find_part("HoldReason").is_some() { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = 
decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut hold_reason_variants = Vec::new(); + let mut hold_reason_variants_count = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("HoldReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "HoldReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "HoldReason", + path, + instance, + variant_name, + )); - hold_reason_variants.push(composite_helper::expand_variant( - "HoldReason", - index, - path, - instance, - variant_name, - )); + hold_reason_variants.push(composite_helper::expand_variant( + "HoldReason", + index, + path, + instance, + variant_name, + )); - hold_reason_variants_count.push(composite_helper::expand_variant_count( - "HoldReason", - path, - instance, - )); - } - } + hold_reason_variants_count.push(composite_helper::expand_variant_count( + "HoldReason", + path, + instance, + )); + } + } - quote! { - /// A reason for placing a hold on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeHoldReason { - #( #hold_reason_variants )* - } + quote! { + /// A reason for placing a hold on funds. 
+ #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeHoldReason { + #( #hold_reason_variants )* + } - impl #scrate::traits::VariantCount for RuntimeHoldReason { - const VARIANT_COUNT: u32 = 0 #( + #hold_reason_variants_count )*; - } + impl #scrate::traits::VariantCount for RuntimeHoldReason { + const VARIANT_COUNT: u32 = 0 #( + #hold_reason_variants_count )*; + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/inherent.rs b/support/procedural-fork/src/construct_runtime/expand/inherent.rs index b58d540fe..da483fa6c 100644 --- a/support/procedural-fork/src/construct_runtime/expand/inherent.rs +++ b/support/procedural-fork/src/construct_runtime/expand/inherent.rs @@ -22,236 +22,233 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_inherent( - runtime: &Ident, - block: &TokenStream, - unchecked_extrinsic: &TokenStream, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + block: &TokenStream, + unchecked_extrinsic: &TokenStream, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let mut query_inherent_part_macros = Vec::new(); - - for pallet_decl in pallet_decls { - if pallet_decl.exists_part("Inherent") { - let name = &pallet_decl.name; - let path = &pallet_decl.path; - let attr = pallet_decl - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - pallet_names.push(name); - pallet_attrs.push(attr); - query_inherent_part_macros.push(quote! 
{ - #path::__substrate_inherent_check::is_inherent_part_defined!(#name); - }); - } - } - - quote! { - #( #query_inherent_part_macros )* - - trait InherentDataExt { - fn create_extrinsics(&self) -> - #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>; - fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult; - } - - impl InherentDataExt for #scrate::inherent::InherentData { - fn create_extrinsics(&self) -> - #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> - { - use #scrate::inherent::ProvideInherent; - - let mut inherents = #scrate::__private::sp_std::vec::Vec::new(); - - #( - #pallet_attrs - if let Some(inherent) = #pallet_names::create_inherent(self) { - let inherent = <#unchecked_extrinsic as #scrate::sp_runtime::traits::Extrinsic>::new( - inherent.into(), - None, - ).expect("Runtime UncheckedExtrinsic is not Opaque, so it has to return \ - `Some`; qed"); - - inherents.push(inherent); - } - )* - - inherents - } - - fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult { - use #scrate::inherent::{ProvideInherent, IsFatalError}; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - use #scrate::sp_runtime::traits::Block as _; - use #scrate::__private::{sp_inherents::Error, log}; - - let mut result = #scrate::inherent::CheckInherentsResult::new(); - - // This handle assume we abort on the first fatal error. 
- fn handle_put_error_result(res: Result<(), Error>) { - const LOG_TARGET: &str = "runtime::inherent"; - match res { - Ok(()) => (), - Err(Error::InherentDataExists(id)) => - log::debug!( - target: LOG_TARGET, - "Some error already reported for inherent {:?}, new non fatal \ - error is ignored", - id - ), - Err(Error::FatalErrorReported) => - log::error!( - target: LOG_TARGET, - "Fatal error already reported, unexpected considering there is \ - only one fatal error", - ), - Err(_) => - log::error!( - target: LOG_TARGET, - "Unexpected error from `put_error` operation", - ), - } - } - - for xt in block.extrinsics() { - // Inherents are before any other extrinsics. - // And signed extrinsics are not inherents. - if #scrate::sp_runtime::traits::Extrinsic::is_signed(xt).unwrap_or(false) { - break - } - - let mut is_inherent = false; - - #( - #pallet_attrs - { - let call = <#unchecked_extrinsic as ExtrinsicCall>::call(xt); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - if #pallet_names::is_inherent(call) { - is_inherent = true; - if let Err(e) = #pallet_names::check_inherent(call, self) { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - } - } - } - } - )* - - // Inherents are before any other extrinsics. - // No module marked it as inherent thus it is not. - if !is_inherent { - break - } - } - - #( - #pallet_attrs - match #pallet_names::is_inherent_required(self) { - Ok(Some(e)) => { - let found = block.extrinsics().iter().any(|xt| { - let is_signed = #scrate::sp_runtime::traits::Extrinsic::is_signed(xt) - .unwrap_or(false); - - if !is_signed { - let call = < - #unchecked_extrinsic as ExtrinsicCall - >::call(xt); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - #pallet_names::is_inherent(&call) - } else { - false - } - } else { - // Signed extrinsics are not inherents. 
- false - } - }); - - if !found { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - } - }, - Ok(None) => (), - Err(e) => { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - }, - } - )* - - result - } - } - - impl #scrate::traits::IsInherent<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> for #runtime { - fn is_inherent(ext: &<#block as #scrate::sp_runtime::traits::Block>::Extrinsic) -> bool { - use #scrate::inherent::ProvideInherent; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - - if #scrate::sp_runtime::traits::Extrinsic::is_signed(ext).unwrap_or(false) { - // Signed extrinsics are never inherents. - return false - } - - #( - #pallet_attrs - { - let call = <#unchecked_extrinsic as ExtrinsicCall>::call(ext); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - if <#pallet_names as ProvideInherent>::is_inherent(&call) { - return true; - } - } - } - )* - false - } - } - - impl #scrate::traits::EnsureInherentsAreFirst<#block> for #runtime { - fn ensure_inherents_are_first(block: &#block) -> Result { - use #scrate::inherent::ProvideInherent; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - use #scrate::sp_runtime::traits::Block as _; - - let mut num_inherents = 0u32; - - for (i, xt) in block.extrinsics().iter().enumerate() { - if >::is_inherent(xt) { - if num_inherents != i as u32 { - return Err(i as u32); - } - - num_inherents += 1; // Safe since we are in an `enumerate` loop. 
- } - } - - Ok(num_inherents) - } - } - } + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let mut query_inherent_part_macros = Vec::new(); + + for pallet_decl in pallet_decls { + if pallet_decl.exists_part("Inherent") { + let name = &pallet_decl.name; + let path = &pallet_decl.path; + let attr = pallet_decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + pallet_names.push(name); + pallet_attrs.push(attr); + query_inherent_part_macros.push(quote! { + #path::__substrate_inherent_check::is_inherent_part_defined!(#name); + }); + } + } + + quote! { + #( #query_inherent_part_macros )* + + trait InherentDataExt { + fn create_extrinsics(&self) -> + #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>; + fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult; + } + + impl InherentDataExt for #scrate::inherent::InherentData { + fn create_extrinsics(&self) -> + #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> + { + use #scrate::inherent::ProvideInherent; + + let mut inherents = #scrate::__private::sp_std::vec::Vec::new(); + + #( + #pallet_attrs + if let Some(inherent) = #pallet_names::create_inherent(self) { + let inherent = <#unchecked_extrinsic as #scrate::sp_runtime::traits::Extrinsic>::new( + inherent.into(), + None, + ).expect("Runtime UncheckedExtrinsic is not Opaque, so it has to return \ + `Some`; qed"); + + inherents.push(inherent); + } + )* + + inherents + } + + fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult { + use #scrate::inherent::{ProvideInherent, IsFatalError}; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + use #scrate::sp_runtime::traits::Block as _; + use 
#scrate::__private::{sp_inherents::Error, log}; + + let mut result = #scrate::inherent::CheckInherentsResult::new(); + + // This handle assume we abort on the first fatal error. + fn handle_put_error_result(res: Result<(), Error>) { + const LOG_TARGET: &str = "runtime::inherent"; + match res { + Ok(()) => (), + Err(Error::InherentDataExists(id)) => + log::debug!( + target: LOG_TARGET, + "Some error already reported for inherent {:?}, new non fatal \ + error is ignored", + id + ), + Err(Error::FatalErrorReported) => + log::error!( + target: LOG_TARGET, + "Fatal error already reported, unexpected considering there is \ + only one fatal error", + ), + Err(_) => + log::error!( + target: LOG_TARGET, + "Unexpected error from `put_error` operation", + ), + } + } + + for xt in block.extrinsics() { + // Inherents are before any other extrinsics. + // And signed extrinsics are not inherents. + if #scrate::sp_runtime::traits::Extrinsic::is_signed(xt).unwrap_or(false) { + break + } + + let mut is_inherent = false; + + #( + #pallet_attrs + { + let call = <#unchecked_extrinsic as ExtrinsicCall>::call(xt); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + if #pallet_names::is_inherent(call) { + is_inherent = true; + if let Err(e) = #pallet_names::check_inherent(call, self) { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + } + } + } + } + )* + + // Inherents are before any other extrinsics. + // No module marked it as inherent thus it is not. 
+ if !is_inherent { + break + } + } + + #( + #pallet_attrs + match #pallet_names::is_inherent_required(self) { + Ok(Some(e)) => { + let found = block.extrinsics().iter().any(|xt| { + let is_signed = #scrate::sp_runtime::traits::Extrinsic::is_signed(xt) + .unwrap_or(false); + + if !is_signed { + let call = < + #unchecked_extrinsic as ExtrinsicCall + >::call(xt); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + #pallet_names::is_inherent(&call) + } else { + false + } + } else { + // Signed extrinsics are not inherents. + false + } + }); + + if !found { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + } + }, + Ok(None) => (), + Err(e) => { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + }, + } + )* + + result + } + } + + impl #scrate::traits::IsInherent<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> for #runtime { + fn is_inherent(ext: &<#block as #scrate::sp_runtime::traits::Block>::Extrinsic) -> bool { + use #scrate::inherent::ProvideInherent; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + + if #scrate::sp_runtime::traits::Extrinsic::is_signed(ext).unwrap_or(false) { + // Signed extrinsics are never inherents. 
+ return false + } + + #( + #pallet_attrs + { + let call = <#unchecked_extrinsic as ExtrinsicCall>::call(ext); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + if <#pallet_names as ProvideInherent>::is_inherent(&call) { + return true; + } + } + } + )* + false + } + } + + impl #scrate::traits::EnsureInherentsAreFirst<#block> for #runtime { + fn ensure_inherents_are_first(block: &#block) -> Result { + use #scrate::inherent::ProvideInherent; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + use #scrate::sp_runtime::traits::Block as _; + + let mut num_inherents = 0u32; + + for (i, xt) in block.extrinsics().iter().enumerate() { + if >::is_inherent(xt) { + if num_inherents != i as u32 { + return Err(i as u32); + } + + num_inherents += 1; // Safe since we are in an `enumerate` loop. + } + } + + Ok(num_inherents) + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs index 732fb7ac4..e67c0da00 100644 --- a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs +++ b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs @@ -21,44 +21,44 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_lock_id(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut lock_id_variants = Vec::new(); - for decl in pallet_decls { - if decl.find_part("LockId").is_some() { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut lock_id_variants = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("LockId") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "LockId", - path, - instance, - variant_name, - )); + 
conversion_fns.push(composite_helper::expand_conversion_fn( + "LockId", + path, + instance, + variant_name, + )); - lock_id_variants.push(composite_helper::expand_variant( - "LockId", - index, - path, - instance, - variant_name, - )); - } - } + lock_id_variants.push(composite_helper::expand_variant( + "LockId", + index, + path, + instance, + variant_name, + )); + } + } - quote! { - /// An identifier for each lock placed on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeLockId { - #( #lock_id_variants )* - } + quote! { + /// An identifier for each lock placed on funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeLockId { + #( #lock_id_variants )* + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/metadata.rs b/support/procedural-fork/src/construct_runtime/expand/metadata.rs index f98c719ca..0e76f9a92 100644 --- a/support/procedural-fork/src/construct_runtime/expand/metadata.rs +++ b/support/procedural-fork/src/construct_runtime/expand/metadata.rs @@ -22,240 +22,237 @@ use std::str::FromStr; use syn::Ident; pub fn expand_runtime_metadata( - runtime: &Ident, - pallet_declarations: &[Pallet], - scrate: &TokenStream, - extrinsic: &TokenStream, - system_path: &PalletPath, + runtime: &Ident, + pallet_declarations: &[Pallet], + scrate: &TokenStream, + extrinsic: &TokenStream, + system_path: &PalletPath, ) -> TokenStream { - let pallets = pallet_declarations - .iter() - .filter_map(|pallet_declaration| { - pallet_declaration.find_part("Pallet").map(|_| { - let 
filtered_names: Vec<_> = pallet_declaration - .pallet_parts() - .iter() - .filter(|part| part.name() != "Pallet") - .map(|part| part.name()) - .collect(); - (pallet_declaration, filtered_names) - }) - }) - .map(|(decl, filtered_names)| { - let name = &decl.name; - let index = &decl.index; - let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); - let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); - let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); - let constants = expand_pallet_metadata_constants(runtime, decl); - let errors = expand_pallet_metadata_errors(runtime, decl); - let docs = expand_pallet_metadata_docs(runtime, decl); - let attr = decl - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + let pallets = pallet_declarations + .iter() + .filter_map(|pallet_declaration| { + pallet_declaration.find_part("Pallet").map(|_| { + let filtered_names: Vec<_> = pallet_declaration + .pallet_parts() + .iter() + .filter(|part| part.name() != "Pallet") + .map(|part| part.name()) + .collect(); + (pallet_declaration, filtered_names) + }) + }) + .map(|(decl, filtered_names)| { + let name = &decl.name; + let index = &decl.index; + let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); + let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); + let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); + let constants = expand_pallet_metadata_constants(runtime, decl); + let errors = expand_pallet_metadata_errors(runtime, decl); + let docs = expand_pallet_metadata_docs(runtime, decl); + let attr = decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was 
successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - quote! { - #attr - #scrate::__private::metadata_ir::PalletMetadataIR { - name: stringify!(#name), - index: #index, - storage: #storage, - calls: #calls, - event: #event, - constants: #constants, - error: #errors, - docs: #docs, - } - } - }) - .collect::>(); + quote! { + #attr + #scrate::__private::metadata_ir::PalletMetadataIR { + name: stringify!(#name), + index: #index, + storage: #storage, + calls: #calls, + event: #event, + constants: #constants, + error: #errors, + docs: #docs, + } + } + }) + .collect::>(); - quote! { - impl #runtime { - fn metadata_ir() -> #scrate::__private::metadata_ir::MetadataIR { - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. - // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. - // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` - // is called. - // - // `Deref` needs a reference for resolving the function call. - let rt = #runtime; + quote! { + impl #runtime { + fn metadata_ir() -> #scrate::__private::metadata_ir::MetadataIR { + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. 
+ // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. + // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` + // is called. + // + // `Deref` needs a reference for resolving the function call. + let rt = #runtime; - let ty = #scrate::__private::scale_info::meta_type::<#extrinsic>(); - let address_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureAddress - >(); - let call_ty = #scrate::__private::scale_info::meta_type::< - <#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::Call - >(); - let signature_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::Signature - >(); - let extra_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureExtra - >(); + let ty = #scrate::__private::scale_info::meta_type::<#extrinsic>(); + let address_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as 
#scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureAddress + >(); + let call_ty = #scrate::__private::scale_info::meta_type::< + <#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::Call + >(); + let signature_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::Signature + >(); + let extra_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureExtra + >(); - #scrate::__private::metadata_ir::MetadataIR { - pallets: #scrate::__private::sp_std::vec![ #(#pallets),* ], - extrinsic: #scrate::__private::metadata_ir::ExtrinsicMetadataIR { - ty, - version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - address_ty, - call_ty, - signature_ty, - extra_ty, - signed_extensions: < - < - #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata - >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension - >::metadata() - .into_iter() - .map(|meta| #scrate::__private::metadata_ir::SignedExtensionMetadataIR { - identifier: meta.identifier, - ty: meta.ty, - additional_signed: meta.additional_signed, - }) - .collect(), - }, - ty: #scrate::__private::scale_info::meta_type::<#runtime>(), - apis: (&rt).runtime_metadata(), - outer_enums: #scrate::__private::metadata_ir::OuterEnumsIR { - call_enum_ty: #scrate::__private::scale_info::meta_type::< - <#runtime as #system_path::Config>::RuntimeCall - >(), - event_enum_ty: #scrate::__private::scale_info::meta_type::(), - error_enum_ty: #scrate::__private::scale_info::meta_type::(), - } - } - } + #scrate::__private::metadata_ir::MetadataIR { + pallets: #scrate::__private::sp_std::vec![ #(#pallets),* ], + extrinsic: #scrate::__private::metadata_ir::ExtrinsicMetadataIR { + ty, + version: 
<#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + address_ty, + call_ty, + signature_ty, + extra_ty, + signed_extensions: < + < + #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata + >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension + >::metadata() + .into_iter() + .map(|meta| #scrate::__private::metadata_ir::SignedExtensionMetadataIR { + identifier: meta.identifier, + ty: meta.ty, + additional_signed: meta.additional_signed, + }) + .collect(), + }, + ty: #scrate::__private::scale_info::meta_type::<#runtime>(), + apis: (&rt).runtime_metadata(), + outer_enums: #scrate::__private::metadata_ir::OuterEnumsIR { + call_enum_ty: #scrate::__private::scale_info::meta_type::< + <#runtime as #system_path::Config>::RuntimeCall + >(), + event_enum_ty: #scrate::__private::scale_info::meta_type::(), + error_enum_ty: #scrate::__private::scale_info::meta_type::(), + } + } + } - pub fn metadata() -> #scrate::__private::metadata::RuntimeMetadataPrefixed { - // Note: this always returns the V14 version. The runtime API function - // must be deprecated. - #scrate::__private::metadata_ir::into_v14(#runtime::metadata_ir()) - } + pub fn metadata() -> #scrate::__private::metadata::RuntimeMetadataPrefixed { + // Note: this always returns the V14 version. The runtime API function + // must be deprecated. 
+ #scrate::__private::metadata_ir::into_v14(#runtime::metadata_ir()) + } - pub fn metadata_at_version(version: u32) -> Option<#scrate::__private::OpaqueMetadata> { - #scrate::__private::metadata_ir::into_version(#runtime::metadata_ir(), version).map(|prefixed| { - #scrate::__private::OpaqueMetadata::new(prefixed.into()) - }) - } + pub fn metadata_at_version(version: u32) -> Option<#scrate::__private::OpaqueMetadata> { + #scrate::__private::metadata_ir::into_version(#runtime::metadata_ir(), version).map(|prefixed| { + #scrate::__private::OpaqueMetadata::new(prefixed.into()) + }) + } - pub fn metadata_versions() -> #scrate::__private::sp_std::vec::Vec { - #scrate::__private::metadata_ir::supported_versions() - } - } - } + pub fn metadata_versions() -> #scrate::__private::sp_std::vec::Vec { + #scrate::__private::metadata_ir::supported_versions() + } + } + } } fn expand_pallet_metadata_storage( - filtered_names: &[&'static str], - runtime: &Ident, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Storage") { - let instance = decl.instance.as_ref().into_iter(); - let path = &decl.path; + if filtered_names.contains(&"Storage") { + let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; - quote! { - Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) - } - } else { - quote!(None) - } + quote! { + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_calls( - filtered_names: &[&'static str], - runtime: &Ident, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Call") { - let instance = decl.instance.as_ref().into_iter(); - let path = &decl.path; + if filtered_names.contains(&"Call") { + let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; - quote! 
{ - Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) - } - } else { - quote!(None) - } + quote! { + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_events( - filtered_names: &[&'static str], - runtime: &Ident, - scrate: &TokenStream, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + scrate: &TokenStream, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Event") { - let path = &decl.path; - let part_is_generic = !decl - .find_part("Event") - .expect("Event part exists; qed") - .generics - .params - .is_empty(); - let pallet_event = match (decl.instance.as_ref(), part_is_generic) { - (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), - (Some(inst), false) => quote!(#path::Event::<#path::#inst>), - (None, true) => quote!(#path::Event::<#runtime>), - (None, false) => quote!(#path::Event), - }; + if filtered_names.contains(&"Event") { + let path = &decl.path; + let part_is_generic = !decl + .find_part("Event") + .expect("Event part exists; qed") + .generics + .params + .is_empty(); + let pallet_event = match (decl.instance.as_ref(), part_is_generic) { + (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), + (Some(inst), false) => quote!(#path::Event::<#path::#inst>), + (None, true) => quote!(#path::Event::<#runtime>), + (None, false) => quote!(#path::Event), + }; - quote! { - Some( - #scrate::__private::metadata_ir::PalletEventMetadataIR { - ty: #scrate::__private::scale_info::meta_type::<#pallet_event>() - } - ) - } - } else { - quote!(None) - } + quote! 
{ + Some( + #scrate::__private::metadata_ir::PalletEventMetadataIR { + ty: #scrate::__private::scale_info::meta_type::<#pallet_event>() + } + ) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_constants(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() - } + quote! { + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() + } } fn expand_pallet_metadata_errors(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() - } + quote! { + #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() + } } fn expand_pallet_metadata_docs(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_documentation_metadata() - } + quote! 
{ + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_documentation_metadata() + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/origin.rs b/support/procedural-fork/src/construct_runtime/expand/origin.rs index 2d50777bf..83049919d 100644 --- a/support/procedural-fork/src/construct_runtime/expand/origin.rs +++ b/support/procedural-fork/src/construct_runtime/expand/origin.rs @@ -22,448 +22,434 @@ use std::str::FromStr; use syn::{Generics, Ident}; pub fn expand_outer_origin( - runtime: &Ident, - system_pallet: &Pallet, - pallets: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + system_pallet: &Pallet, + pallets: &[Pallet], + scrate: &TokenStream, ) -> syn::Result { - let mut caller_variants = TokenStream::new(); - let mut pallet_conversions = TokenStream::new(); - let mut query_origin_part_macros = Vec::new(); - - for pallet_decl in pallets - .iter() - .filter(|pallet| pallet.name != SYSTEM_PALLET_NAME) - { - if let Some(pallet_entry) = pallet_decl.find_part("Origin") { - let instance = pallet_decl.instance.as_ref(); - let index = pallet_decl.index; - let generics = &pallet_entry.generics; - let name = &pallet_decl.name; - let path = &pallet_decl.path; - - if instance.is_some() && generics.params.is_empty() { - let msg = format!( - "Instantiable pallet with no generic `Origin` cannot \ + let mut caller_variants = TokenStream::new(); + let mut pallet_conversions = TokenStream::new(); + let mut query_origin_part_macros = Vec::new(); + + for pallet_decl in pallets.iter().filter(|pallet| pallet.name != SYSTEM_PALLET_NAME) { + if let Some(pallet_entry) = pallet_decl.find_part("Origin") { + let instance = pallet_decl.instance.as_ref(); + let index = pallet_decl.index; + let generics = &pallet_entry.generics; + let name = &pallet_decl.name; + let path = &pallet_decl.path; + + if instance.is_some() && generics.params.is_empty() { + let msg = format!( + "Instantiable pallet with no generic `Origin` cannot \ be constructed: pallet `{}` must 
have generic `Origin`", - name - ); - return Err(syn::Error::new(name.span(), msg)); - } - - caller_variants.extend(expand_origin_caller_variant( - runtime, - pallet_decl, - index, - instance, - generics, - )); - pallet_conversions.extend(expand_origin_pallet_conversions( - scrate, - runtime, - pallet_decl, - instance, - generics, - )); - query_origin_part_macros.push(quote! { - #path::__substrate_origin_check::is_origin_part_defined!(#name); - }); - } - } - - let system_path = &system_pallet.path; - - let system_index = system_pallet.index; - - let system_path_name = system_path.module_name(); - - let doc_string = get_intra_doc_string( - "Origin is always created with the base filter configured in", - &system_path_name, - ); - - let doc_string_none_origin = - get_intra_doc_string("Create with system none origin and", &system_path_name); - - let doc_string_root_origin = - get_intra_doc_string("Create with system root origin and", &system_path_name); - - let doc_string_signed_origin = - get_intra_doc_string("Create with system signed origin and", &system_path_name); - - let doc_string_runtime_origin = get_intra_doc_string( - "Convert to runtime origin, using as filter:", - &system_path_name, - ); - - let doc_string_runtime_origin_with_caller = get_intra_doc_string( - "Convert to runtime origin with caller being system signed or none and use filter", - &system_path_name, - ); - - Ok(quote! { - #( #query_origin_part_macros )* - - /// The runtime origin type representing the origin of a call. 
- /// - #[doc = #doc_string] - #[derive(Clone)] - pub struct RuntimeOrigin { - pub caller: OriginCaller, - filter: #scrate::__private::sp_std::rc::Rc::RuntimeCall) -> bool>>, - } - - #[cfg(not(feature = "std"))] - impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { - fn fmt( - &self, - fmt: &mut #scrate::__private::sp_std::fmt::Formatter, - ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { - fmt.write_str("") - } - } - - #[cfg(feature = "std")] - impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { - fn fmt( - &self, - fmt: &mut #scrate::__private::sp_std::fmt::Formatter, - ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { - fmt.debug_struct("Origin") - .field("caller", &self.caller) - .field("filter", &"[function ptr]") - .finish() - } - } - - impl #scrate::traits::OriginTrait for RuntimeOrigin { - type Call = <#runtime as #system_path::Config>::RuntimeCall; - type PalletsOrigin = OriginCaller; - type AccountId = <#runtime as #system_path::Config>::AccountId; - - fn add_filter(&mut self, filter: impl Fn(&Self::Call) -> bool + 'static) { - let f = self.filter.clone(); - - self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(move |call| { - f(call) && filter(call) - })); - } - - fn reset_filter(&mut self) { - let filter = < - <#runtime as #system_path::Config>::BaseCallFilter - as #scrate::traits::Contains<<#runtime as #system_path::Config>::RuntimeCall> - >::contains; - - self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(filter)); - } - - fn set_caller_from(&mut self, other: impl Into) { - self.caller = other.into().caller; - } - - fn filter_call(&self, call: &Self::Call) -> bool { - match self.caller { - // Root bypasses all filters - OriginCaller::system(#system_path::Origin::<#runtime>::Root) => true, - _ => (self.filter)(call), - } - } - - fn caller(&self) -> &Self::PalletsOrigin { - &self.caller - } - - fn into_caller(self) -> 
Self::PalletsOrigin { - self.caller - } - - fn try_with_caller( - mut self, - f: impl FnOnce(Self::PalletsOrigin) -> Result, - ) -> Result { - match f(self.caller) { - Ok(r) => Ok(r), - Err(caller) => { self.caller = caller; Err(self) } - } - } - - fn none() -> Self { - #system_path::RawOrigin::None.into() - } - - fn root() -> Self { - #system_path::RawOrigin::Root.into() - } - - fn signed(by: Self::AccountId) -> Self { - #system_path::RawOrigin::Signed(by).into() - } - } - - #[derive( - Clone, PartialEq, Eq, #scrate::__private::RuntimeDebug, #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, #scrate::__private::scale_info::TypeInfo, #scrate::__private::codec::MaxEncodedLen, - )] - #[allow(non_camel_case_types)] - pub enum OriginCaller { - #[codec(index = #system_index)] - system(#system_path::Origin<#runtime>), - #caller_variants - #[allow(dead_code)] - Void(#scrate::__private::Void) - } - - // For backwards compatibility and ease of accessing these functions. - #[allow(dead_code)] - impl RuntimeOrigin { - #[doc = #doc_string_none_origin] - pub fn none() -> Self { - ::none() - } - - #[doc = #doc_string_root_origin] - pub fn root() -> Self { - ::root() - } - - #[doc = #doc_string_signed_origin] - pub fn signed(by: <#runtime as #system_path::Config>::AccountId) -> Self { - ::signed(by) - } - } - - impl From<#system_path::Origin<#runtime>> for OriginCaller { - fn from(x: #system_path::Origin<#runtime>) -> Self { - OriginCaller::system(x) - } - } - - impl #scrate::traits::CallerTrait<<#runtime as #system_path::Config>::AccountId> for OriginCaller { - fn into_system(self) -> Option<#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { - match self { - OriginCaller::system(x) => Some(x), - _ => None, - } - } - fn as_system_ref(&self) -> Option<&#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { - match &self { - OriginCaller::system(o) => Some(o), - _ => None, - } - } - } - - impl TryFrom for 
#system_path::Origin<#runtime> { - type Error = OriginCaller; - fn try_from(x: OriginCaller) - -> #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, OriginCaller> - { - if let OriginCaller::system(l) = x { - Ok(l) - } else { - Err(x) - } - } - } - - impl From<#system_path::Origin<#runtime>> for RuntimeOrigin { - - #[doc = #doc_string_runtime_origin] - fn from(x: #system_path::Origin<#runtime>) -> Self { - let o: OriginCaller = x.into(); - o.into() - } - } - - impl From for RuntimeOrigin { - fn from(x: OriginCaller) -> Self { - let mut o = RuntimeOrigin { - caller: x, - filter: #scrate::__private::sp_std::rc::Rc::new(Box::new(|_| true)), - }; - - #scrate::traits::OriginTrait::reset_filter(&mut o); - - o - } - } - - impl From for #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { - /// NOTE: converting to pallet origin loses the origin filter information. - fn from(val: RuntimeOrigin) -> Self { - if let OriginCaller::system(l) = val.caller { - Ok(l) - } else { - Err(val) - } - } - } - impl From::AccountId>> for RuntimeOrigin { - #[doc = #doc_string_runtime_origin_with_caller] - fn from(x: Option<<#runtime as #system_path::Config>::AccountId>) -> Self { - <#system_path::Origin<#runtime>>::from(x).into() - } - } - - #pallet_conversions - }) + name + ); + return Err(syn::Error::new(name.span(), msg)) + } + + caller_variants.extend(expand_origin_caller_variant( + runtime, + pallet_decl, + index, + instance, + generics, + )); + pallet_conversions.extend(expand_origin_pallet_conversions( + scrate, + runtime, + pallet_decl, + instance, + generics, + )); + query_origin_part_macros.push(quote! 
{ + #path::__substrate_origin_check::is_origin_part_defined!(#name); + }); + } + } + + let system_path = &system_pallet.path; + + let system_index = system_pallet.index; + + let system_path_name = system_path.module_name(); + + let doc_string = get_intra_doc_string( + "Origin is always created with the base filter configured in", + &system_path_name, + ); + + let doc_string_none_origin = + get_intra_doc_string("Create with system none origin and", &system_path_name); + + let doc_string_root_origin = + get_intra_doc_string("Create with system root origin and", &system_path_name); + + let doc_string_signed_origin = + get_intra_doc_string("Create with system signed origin and", &system_path_name); + + let doc_string_runtime_origin = + get_intra_doc_string("Convert to runtime origin, using as filter:", &system_path_name); + + let doc_string_runtime_origin_with_caller = get_intra_doc_string( + "Convert to runtime origin with caller being system signed or none and use filter", + &system_path_name, + ); + + Ok(quote! { + #( #query_origin_part_macros )* + + /// The runtime origin type representing the origin of a call. 
+ /// + #[doc = #doc_string] + #[derive(Clone)] + pub struct RuntimeOrigin { + pub caller: OriginCaller, + filter: #scrate::__private::sp_std::rc::Rc::RuntimeCall) -> bool>>, + } + + #[cfg(not(feature = "std"))] + impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + fn fmt( + &self, + fmt: &mut #scrate::__private::sp_std::fmt::Formatter, + ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt.write_str("") + } + } + + #[cfg(feature = "std")] + impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + fn fmt( + &self, + fmt: &mut #scrate::__private::sp_std::fmt::Formatter, + ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt.debug_struct("Origin") + .field("caller", &self.caller) + .field("filter", &"[function ptr]") + .finish() + } + } + + impl #scrate::traits::OriginTrait for RuntimeOrigin { + type Call = <#runtime as #system_path::Config>::RuntimeCall; + type PalletsOrigin = OriginCaller; + type AccountId = <#runtime as #system_path::Config>::AccountId; + + fn add_filter(&mut self, filter: impl Fn(&Self::Call) -> bool + 'static) { + let f = self.filter.clone(); + + self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(move |call| { + f(call) && filter(call) + })); + } + + fn reset_filter(&mut self) { + let filter = < + <#runtime as #system_path::Config>::BaseCallFilter + as #scrate::traits::Contains<<#runtime as #system_path::Config>::RuntimeCall> + >::contains; + + self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(filter)); + } + + fn set_caller_from(&mut self, other: impl Into) { + self.caller = other.into().caller; + } + + fn filter_call(&self, call: &Self::Call) -> bool { + match self.caller { + // Root bypasses all filters + OriginCaller::system(#system_path::Origin::<#runtime>::Root) => true, + _ => (self.filter)(call), + } + } + + fn caller(&self) -> &Self::PalletsOrigin { + &self.caller + } + + fn into_caller(self) -> 
Self::PalletsOrigin { + self.caller + } + + fn try_with_caller( + mut self, + f: impl FnOnce(Self::PalletsOrigin) -> Result, + ) -> Result { + match f(self.caller) { + Ok(r) => Ok(r), + Err(caller) => { self.caller = caller; Err(self) } + } + } + + fn none() -> Self { + #system_path::RawOrigin::None.into() + } + + fn root() -> Self { + #system_path::RawOrigin::Root.into() + } + + fn signed(by: Self::AccountId) -> Self { + #system_path::RawOrigin::Signed(by).into() + } + } + + #[derive( + Clone, PartialEq, Eq, #scrate::__private::RuntimeDebug, #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, #scrate::__private::scale_info::TypeInfo, #scrate::__private::codec::MaxEncodedLen, + )] + #[allow(non_camel_case_types)] + pub enum OriginCaller { + #[codec(index = #system_index)] + system(#system_path::Origin<#runtime>), + #caller_variants + #[allow(dead_code)] + Void(#scrate::__private::Void) + } + + // For backwards compatibility and ease of accessing these functions. + #[allow(dead_code)] + impl RuntimeOrigin { + #[doc = #doc_string_none_origin] + pub fn none() -> Self { + ::none() + } + + #[doc = #doc_string_root_origin] + pub fn root() -> Self { + ::root() + } + + #[doc = #doc_string_signed_origin] + pub fn signed(by: <#runtime as #system_path::Config>::AccountId) -> Self { + ::signed(by) + } + } + + impl From<#system_path::Origin<#runtime>> for OriginCaller { + fn from(x: #system_path::Origin<#runtime>) -> Self { + OriginCaller::system(x) + } + } + + impl #scrate::traits::CallerTrait<<#runtime as #system_path::Config>::AccountId> for OriginCaller { + fn into_system(self) -> Option<#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { + match self { + OriginCaller::system(x) => Some(x), + _ => None, + } + } + fn as_system_ref(&self) -> Option<&#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { + match &self { + OriginCaller::system(o) => Some(o), + _ => None, + } + } + } + + impl TryFrom for 
#system_path::Origin<#runtime> { + type Error = OriginCaller; + fn try_from(x: OriginCaller) + -> #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, OriginCaller> + { + if let OriginCaller::system(l) = x { + Ok(l) + } else { + Err(x) + } + } + } + + impl From<#system_path::Origin<#runtime>> for RuntimeOrigin { + + #[doc = #doc_string_runtime_origin] + fn from(x: #system_path::Origin<#runtime>) -> Self { + let o: OriginCaller = x.into(); + o.into() + } + } + + impl From for RuntimeOrigin { + fn from(x: OriginCaller) -> Self { + let mut o = RuntimeOrigin { + caller: x, + filter: #scrate::__private::sp_std::rc::Rc::new(Box::new(|_| true)), + }; + + #scrate::traits::OriginTrait::reset_filter(&mut o); + + o + } + } + + impl From for #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { + /// NOTE: converting to pallet origin loses the origin filter information. + fn from(val: RuntimeOrigin) -> Self { + if let OriginCaller::system(l) = val.caller { + Ok(l) + } else { + Err(val) + } + } + } + impl From::AccountId>> for RuntimeOrigin { + #[doc = #doc_string_runtime_origin_with_caller] + fn from(x: Option<<#runtime as #system_path::Config>::AccountId>) -> Self { + <#system_path::Origin<#runtime>>::from(x).into() + } + } + + #pallet_conversions + }) } fn expand_origin_caller_variant( - runtime: &Ident, - pallet: &Pallet, - index: u8, - instance: Option<&Ident>, - generics: &Generics, + runtime: &Ident, + pallet: &Pallet, + index: u8, + instance: Option<&Ident>, + generics: &Generics, ) -> TokenStream { - let part_is_generic = !generics.params.is_empty(); - let variant_name = &pallet.name; - let path = &pallet.path; - let attr = pallet - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); - - match instance { - Some(inst) if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#runtime, #path::#inst>), - }, - Some(inst) => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#path::#inst>), - }, - None if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#runtime>), - }, - None => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin), - }, - } + let part_is_generic = !generics.params.is_empty(); + let variant_name = &pallet.name; + let path = &pallet.path; + let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + match instance { + Some(inst) if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#runtime, #path::#inst>), + }, + Some(inst) => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#path::#inst>), + }, + None if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#runtime>), + }, + None => quote! 
{ + #attr + #[codec(index = #index)] + #variant_name(#path::Origin), + }, + } } fn expand_origin_pallet_conversions( - scrate: &TokenStream, - runtime: &Ident, - pallet: &Pallet, - instance: Option<&Ident>, - generics: &Generics, + scrate: &TokenStream, + runtime: &Ident, + pallet: &Pallet, + instance: Option<&Ident>, + generics: &Generics, ) -> TokenStream { - let path = &pallet.path; - let variant_name = &pallet.name; - - let part_is_generic = !generics.params.is_empty(); - let pallet_origin = match instance { - Some(inst) if part_is_generic => quote!(#path::Origin<#runtime, #path::#inst>), - Some(inst) => quote!(#path::Origin<#path::#inst>), - None if part_is_generic => quote!(#path::Origin<#runtime>), - None => quote!(#path::Origin), - }; - - let doc_string = get_intra_doc_string(" Convert to runtime origin using", &path.module_name()); - let attr = pallet - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - quote! { - #attr - impl From<#pallet_origin> for OriginCaller { - fn from(x: #pallet_origin) -> Self { - OriginCaller::#variant_name(x) - } - } - - #attr - impl From<#pallet_origin> for RuntimeOrigin { - #[doc = #doc_string] - fn from(x: #pallet_origin) -> Self { - let x: OriginCaller = x.into(); - x.into() - } - } - - #attr - impl From for #scrate::__private::sp_std::result::Result<#pallet_origin, RuntimeOrigin> { - /// NOTE: converting to pallet origin loses the origin filter information. 
- fn from(val: RuntimeOrigin) -> Self { - if let OriginCaller::#variant_name(l) = val.caller { - Ok(l) - } else { - Err(val) - } - } - } - - #attr - impl TryFrom for #pallet_origin { - type Error = OriginCaller; - fn try_from( - x: OriginCaller, - ) -> #scrate::__private::sp_std::result::Result<#pallet_origin, OriginCaller> { - if let OriginCaller::#variant_name(l) = x { - Ok(l) - } else { - Err(x) - } - } - } - - #attr - impl<'a> TryFrom<&'a OriginCaller> for &'a #pallet_origin { - type Error = (); - fn try_from( - x: &'a OriginCaller, - ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { - if let OriginCaller::#variant_name(l) = x { - Ok(&l) - } else { - Err(()) - } - } - } - - #attr - impl<'a> TryFrom<&'a RuntimeOrigin> for &'a #pallet_origin { - type Error = (); - fn try_from( - x: &'a RuntimeOrigin, - ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { - if let OriginCaller::#variant_name(l) = &x.caller { - Ok(&l) - } else { - Err(()) - } - } - } - } + let path = &pallet.path; + let variant_name = &pallet.name; + + let part_is_generic = !generics.params.is_empty(); + let pallet_origin = match instance { + Some(inst) if part_is_generic => quote!(#path::Origin<#runtime, #path::#inst>), + Some(inst) => quote!(#path::Origin<#path::#inst>), + None if part_is_generic => quote!(#path::Origin<#runtime>), + None => quote!(#path::Origin), + }; + + let doc_string = get_intra_doc_string(" Convert to runtime origin using", &path.module_name()); + let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + quote! 
{ + #attr + impl From<#pallet_origin> for OriginCaller { + fn from(x: #pallet_origin) -> Self { + OriginCaller::#variant_name(x) + } + } + + #attr + impl From<#pallet_origin> for RuntimeOrigin { + #[doc = #doc_string] + fn from(x: #pallet_origin) -> Self { + let x: OriginCaller = x.into(); + x.into() + } + } + + #attr + impl From for #scrate::__private::sp_std::result::Result<#pallet_origin, RuntimeOrigin> { + /// NOTE: converting to pallet origin loses the origin filter information. + fn from(val: RuntimeOrigin) -> Self { + if let OriginCaller::#variant_name(l) = val.caller { + Ok(l) + } else { + Err(val) + } + } + } + + #attr + impl TryFrom for #pallet_origin { + type Error = OriginCaller; + fn try_from( + x: OriginCaller, + ) -> #scrate::__private::sp_std::result::Result<#pallet_origin, OriginCaller> { + if let OriginCaller::#variant_name(l) = x { + Ok(l) + } else { + Err(x) + } + } + } + + #attr + impl<'a> TryFrom<&'a OriginCaller> for &'a #pallet_origin { + type Error = (); + fn try_from( + x: &'a OriginCaller, + ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + if let OriginCaller::#variant_name(l) = x { + Ok(&l) + } else { + Err(()) + } + } + } + + #attr + impl<'a> TryFrom<&'a RuntimeOrigin> for &'a #pallet_origin { + type Error = (); + fn try_from( + x: &'a RuntimeOrigin, + ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + if let OriginCaller::#variant_name(l) = &x.caller { + Ok(&l) + } else { + Err(()) + } + } + } + } } // Get the actual documentation using the doc information and system path name fn get_intra_doc_string(doc_info: &str, system_path_name: &String) -> String { - format!( - " {} [`{}::Config::BaseCallFilter`].", - doc_info, system_path_name - ) + format!(" {} [`{}::Config::BaseCallFilter`].", doc_info, system_path_name) } diff --git a/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs b/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs index 
28e39c7a2..80b242ccb 100644 --- a/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs +++ b/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs @@ -24,37 +24,37 @@ use syn::{Generics, Ident}; /// Represents the types supported for creating an outer enum. #[derive(Clone, Copy, PartialEq)] pub enum OuterEnumType { - /// Collects the Event enums from all pallets. - Event, - /// Collects the Error enums from all pallets. - Error, + /// Collects the Event enums from all pallets. + Event, + /// Collects the Error enums from all pallets. + Error, } impl OuterEnumType { - /// The name of the structure this enum represents. - fn struct_name(&self) -> &str { - match self { - OuterEnumType::Event => "RuntimeEvent", - OuterEnumType::Error => "RuntimeError", - } - } + /// The name of the structure this enum represents. + fn struct_name(&self) -> &str { + match self { + OuterEnumType::Event => "RuntimeEvent", + OuterEnumType::Error => "RuntimeError", + } + } - /// The name of the variant (ie `Event` or `Error`). - fn variant_name(&self) -> &str { - match self { - OuterEnumType::Event => "Event", - OuterEnumType::Error => "Error", - } - } + /// The name of the variant (ie `Event` or `Error`). + fn variant_name(&self) -> &str { + match self { + OuterEnumType::Event => "Event", + OuterEnumType::Error => "Error", + } + } } impl ToTokens for OuterEnumType { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - OuterEnumType::Event => quote!(Event).to_tokens(tokens), - OuterEnumType::Error => quote!(Error).to_tokens(tokens), - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + OuterEnumType::Event => quote!(Event).to_tokens(tokens), + OuterEnumType::Error => quote!(Error).to_tokens(tokens), + } + } } /// Create an outer enum that encapsulates all pallets as variants. @@ -84,207 +84,196 @@ impl ToTokens for OuterEnumType { /// /// Notice that the pallet index is preserved using the `#[codec(index = ..)]` attribute. 
pub fn expand_outer_enum( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, - enum_ty: OuterEnumType, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, + enum_ty: OuterEnumType, ) -> syn::Result { - // Stores all pallet variants. - let mut enum_variants = TokenStream::new(); - // Generates the enum conversion between the `Runtime` outer enum and the pallet's enum. - let mut enum_conversions = TokenStream::new(); - // Specific for events to query via `is_event_part_defined!`. - let mut query_enum_part_macros = Vec::new(); + // Stores all pallet variants. + let mut enum_variants = TokenStream::new(); + // Generates the enum conversion between the `Runtime` outer enum and the pallet's enum. + let mut enum_conversions = TokenStream::new(); + // Specific for events to query via `is_event_part_defined!`. + let mut query_enum_part_macros = Vec::new(); - let enum_name_str = enum_ty.variant_name(); - let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); + let enum_name_str = enum_ty.variant_name(); + let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); - for pallet_decl in pallet_decls { - let Some(pallet_entry) = pallet_decl.find_part(enum_name_str) else { - continue; - }; + for pallet_decl in pallet_decls { + let Some(pallet_entry) = pallet_decl.find_part(enum_name_str) else { continue }; - let path = &pallet_decl.path; - let pallet_name = &pallet_decl.name; - let index = pallet_decl.index; - let instance = pallet_decl.instance.as_ref(); - let generics = &pallet_entry.generics; + let path = &pallet_decl.path; + let pallet_name = &pallet_decl.name; + let index = pallet_decl.index; + let instance = pallet_decl.instance.as_ref(); + let generics = &pallet_entry.generics; - if instance.is_some() && generics.params.is_empty() { - let msg = format!( - "Instantiable pallet with no generic `{}` cannot \ + if instance.is_some() && generics.params.is_empty() { + let msg = format!( + "Instantiable 
pallet with no generic `{}` cannot \ be constructed: pallet `{}` must have generic `{}`", - enum_name_str, pallet_name, enum_name_str, - ); - return Err(syn::Error::new(pallet_name.span(), msg)); - } + enum_name_str, pallet_name, enum_name_str, + ); + return Err(syn::Error::new(pallet_name.span(), msg)) + } - let part_is_generic = !generics.params.is_empty(); - let pallet_enum = match (instance, part_is_generic) { - (Some(inst), true) => quote!(#path::#enum_ty::<#runtime, #path::#inst>), - (Some(inst), false) => quote!(#path::#enum_ty::<#path::#inst>), - (None, true) => quote!(#path::#enum_ty::<#runtime>), - (None, false) => quote!(#path::#enum_ty), - }; + let part_is_generic = !generics.params.is_empty(); + let pallet_enum = match (instance, part_is_generic) { + (Some(inst), true) => quote!(#path::#enum_ty::<#runtime, #path::#inst>), + (Some(inst), false) => quote!(#path::#enum_ty::<#path::#inst>), + (None, true) => quote!(#path::#enum_ty::<#runtime>), + (None, false) => quote!(#path::#enum_ty), + }; - enum_variants.extend(expand_enum_variant( - runtime, - pallet_decl, - index, - instance, - generics, - enum_ty, - )); - enum_conversions.extend(expand_enum_conversion( - pallet_decl, - &pallet_enum, - &enum_name_ident, - )); + enum_variants.extend(expand_enum_variant( + runtime, + pallet_decl, + index, + instance, + generics, + enum_ty, + )); + enum_conversions.extend(expand_enum_conversion( + pallet_decl, + &pallet_enum, + &enum_name_ident, + )); - if enum_ty == OuterEnumType::Event { - query_enum_part_macros.push(quote! { - #path::__substrate_event_check::is_event_part_defined!(#pallet_name); - }); - } - } + if enum_ty == OuterEnumType::Event { + query_enum_part_macros.push(quote! { + #path::__substrate_event_check::is_event_part_defined!(#pallet_name); + }); + } + } - // Derives specific for the event. - let event_custom_derives = if enum_ty == OuterEnumType::Event { - quote!(Clone, PartialEq, Eq,) - } else { - quote!() - }; + // Derives specific for the event. 
+ let event_custom_derives = + if enum_ty == OuterEnumType::Event { quote!(Clone, PartialEq, Eq,) } else { quote!() }; - // Implementation specific for errors. - let error_custom_impl = generate_error_impl(scrate, enum_ty); + // Implementation specific for errors. + let error_custom_impl = generate_error_impl(scrate, enum_ty); - Ok(quote! { - #( #query_enum_part_macros )* + Ok(quote! { + #( #query_enum_part_macros )* - #[derive( - #event_custom_derives - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - #[allow(non_camel_case_types)] - pub enum #enum_name_ident { - #enum_variants - } + #[derive( + #event_custom_derives + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + #[allow(non_camel_case_types)] + pub enum #enum_name_ident { + #enum_variants + } - #enum_conversions + #enum_conversions - #error_custom_impl - }) + #error_custom_impl + }) } fn expand_enum_variant( - runtime: &Ident, - pallet: &Pallet, - index: u8, - instance: Option<&Ident>, - generics: &Generics, - enum_ty: OuterEnumType, + runtime: &Ident, + pallet: &Pallet, + index: u8, + instance: Option<&Ident>, + generics: &Generics, + enum_ty: OuterEnumType, ) -> TokenStream { - let path = &pallet.path; - let variant_name = &pallet.name; - let part_is_generic = !generics.params.is_empty(); - let attr = pallet - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); + let path = &pallet.path; + let variant_name = &pallet.name; + let part_is_generic = !generics.params.is_empty(); + let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - match instance { - Some(inst) if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#runtime, #path::#inst>), - }, - Some(inst) => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#path::#inst>), - }, - None if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#runtime>), - }, - None => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty), - }, - } + match instance { + Some(inst) if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#runtime, #path::#inst>), + }, + Some(inst) => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#path::#inst>), + }, + None if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#runtime>), + }, + None => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty), + }, + } } fn expand_enum_conversion( - pallet: &Pallet, - pallet_enum: &TokenStream, - enum_name_ident: &Ident, + pallet: &Pallet, + pallet_enum: &TokenStream, + enum_name_ident: &Ident, ) -> TokenStream { - let variant_name = &pallet.name; - let attr = pallet - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); + let variant_name = &pallet.name; + let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - quote! { - #attr - impl From<#pallet_enum> for #enum_name_ident { - fn from(x: #pallet_enum) -> Self { - #enum_name_ident - ::#variant_name(x) - } - } - #attr - impl TryInto<#pallet_enum> for #enum_name_ident { - type Error = (); + quote! { + #attr + impl From<#pallet_enum> for #enum_name_ident { + fn from(x: #pallet_enum) -> Self { + #enum_name_ident + ::#variant_name(x) + } + } + #attr + impl TryInto<#pallet_enum> for #enum_name_ident { + type Error = (); - fn try_into(self) -> ::core::result::Result<#pallet_enum, Self::Error> { - match self { - Self::#variant_name(evt) => Ok(evt), - _ => Err(()), - } - } - } - } + fn try_into(self) -> ::core::result::Result<#pallet_enum, Self::Error> { + match self { + Self::#variant_name(evt) => Ok(evt), + _ => Err(()), + } + } + } + } } fn generate_error_impl(scrate: &TokenStream, enum_ty: OuterEnumType) -> TokenStream { - // Implementation is specific to `Error`s. - if enum_ty == OuterEnumType::Event { - return quote! {}; - } + // Implementation is specific to `Error`s. + if enum_ty == OuterEnumType::Event { + return quote! {} + } - let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); + let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); - quote! { - impl #enum_name_ident { - /// Optionally convert the `DispatchError` into the `RuntimeError`. - /// - /// Returns `Some` if the error matches the `DispatchError::Module` variant, otherwise `None`. - pub fn from_dispatch_error(err: #scrate::sp_runtime::DispatchError) -> Option { - let #scrate::sp_runtime::DispatchError::Module(module_error) = err else { return None }; + quote! 
{ + impl #enum_name_ident { + /// Optionally convert the `DispatchError` into the `RuntimeError`. + /// + /// Returns `Some` if the error matches the `DispatchError::Module` variant, otherwise `None`. + pub fn from_dispatch_error(err: #scrate::sp_runtime::DispatchError) -> Option { + let #scrate::sp_runtime::DispatchError::Module(module_error) = err else { return None }; - let bytes = #scrate::__private::codec::Encode::encode(&module_error); - #scrate::__private::codec::Decode::decode(&mut &bytes[..]).ok() - } - } - } + let bytes = #scrate::__private::codec::Encode::encode(&module_error); + #scrate::__private::codec::Decode::decode(&mut &bytes[..]).ok() + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs index d9e9e9320..892b842b1 100644 --- a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs @@ -21,44 +21,44 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_slash_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut slash_reason_variants = Vec::new(); - for decl in pallet_decls { - if decl.find_part("SlashReason").is_some() { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut slash_reason_variants = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("SlashReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "SlashReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "SlashReason", + path, + instance, + variant_name, + )); - 
slash_reason_variants.push(composite_helper::expand_variant( - "SlashReason", - index, - path, - instance, - variant_name, - )); - } - } + slash_reason_variants.push(composite_helper::expand_variant( + "SlashReason", + index, + path, + instance, + variant_name, + )); + } + } - quote! { - /// A reason for slashing funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeSlashReason { - #( #slash_reason_variants )* - } + quote! { + /// A reason for slashing funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeSlashReason { + #( #slash_reason_variants )* + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/task.rs b/support/procedural-fork/src/construct_runtime/expand/task.rs index dd8d93c27..6531c0e9e 100644 --- a/support/procedural-fork/src/construct_runtime/expand/task.rs +++ b/support/procedural-fork/src/construct_runtime/expand/task.rs @@ -21,111 +21,111 @@ use quote::quote; /// Expands aggregate `RuntimeTask` enum. pub fn expand_outer_task( - runtime_name: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream2, + runtime_name: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let mut from_impls = Vec::new(); - let mut task_variants = Vec::new(); - let mut variant_names = Vec::new(); - let mut task_paths = Vec::new(); - for decl in pallet_decls { - if decl.find_part("Task").is_none() { - continue; - } - - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - - from_impls.push(quote! 
{ - impl From<#path::Task<#runtime_name>> for RuntimeTask { - fn from(hr: #path::Task<#runtime_name>) -> Self { - RuntimeTask::#variant_name(hr) - } - } - - impl TryInto<#path::Task<#runtime_name>> for RuntimeTask { - type Error = (); - - fn try_into(self) -> Result<#path::Task<#runtime_name>, Self::Error> { - match self { - RuntimeTask::#variant_name(hr) => Ok(hr), - _ => Err(()), - } - } - } - }); - - task_variants.push(quote! { - #[codec(index = #index)] - #variant_name(#path::Task<#runtime_name>), - }); - - variant_names.push(quote!(#variant_name)); - - task_paths.push(quote!(#path::Task)); - } - - let prelude = quote!(#scrate::traits::tasks::__private); - - const INCOMPLETE_MATCH_QED: &str = - "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; - - let output = quote! { - /// An aggregation of all `Task` enums across all pallets included in the current runtime. - #[derive( - Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeTask { - #( #task_variants )* - } - - #[automatically_derived] - impl #scrate::traits::Task for RuntimeTask { - type Enumeration = #prelude::IntoIter; - - fn is_valid(&self) -> bool { - match self { - #(RuntimeTask::#variant_names(val) => val.is_valid(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn run(&self) -> Result<(), #scrate::traits::tasks::__private::DispatchError> { - match self { - #(RuntimeTask::#variant_names(val) => val.run(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn weight(&self) -> #scrate::pallet_prelude::Weight { - match self { - #(RuntimeTask::#variant_names(val) => val.weight(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn task_index(&self) -> u32 { - match self { - #(RuntimeTask::#variant_names(val) => val.task_index(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn iter() 
-> Self::Enumeration { - let mut all_tasks = Vec::new(); - #(all_tasks.extend(#task_paths::iter().map(RuntimeTask::from).collect::>());)* - all_tasks.into_iter() - } - } - - #( #from_impls )* - }; - - output + let mut from_impls = Vec::new(); + let mut task_variants = Vec::new(); + let mut variant_names = Vec::new(); + let mut task_paths = Vec::new(); + for decl in pallet_decls { + if decl.find_part("Task").is_none() { + continue + } + + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + + from_impls.push(quote! { + impl From<#path::Task<#runtime_name>> for RuntimeTask { + fn from(hr: #path::Task<#runtime_name>) -> Self { + RuntimeTask::#variant_name(hr) + } + } + + impl TryInto<#path::Task<#runtime_name>> for RuntimeTask { + type Error = (); + + fn try_into(self) -> Result<#path::Task<#runtime_name>, Self::Error> { + match self { + RuntimeTask::#variant_name(hr) => Ok(hr), + _ => Err(()), + } + } + } + }); + + task_variants.push(quote! { + #[codec(index = #index)] + #variant_name(#path::Task<#runtime_name>), + }); + + variant_names.push(quote!(#variant_name)); + + task_paths.push(quote!(#path::Task)); + } + + let prelude = quote!(#scrate::traits::tasks::__private); + + const INCOMPLETE_MATCH_QED: &'static str = + "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; + + let output = quote! { + /// An aggregation of all `Task` enums across all pallets included in the current runtime. 
+ #[derive( + Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeTask { + #( #task_variants )* + } + + #[automatically_derived] + impl #scrate::traits::Task for RuntimeTask { + type Enumeration = #prelude::IntoIter; + + fn is_valid(&self) -> bool { + match self { + #(RuntimeTask::#variant_names(val) => val.is_valid(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn run(&self) -> Result<(), #scrate::traits::tasks::__private::DispatchError> { + match self { + #(RuntimeTask::#variant_names(val) => val.run(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn weight(&self) -> #scrate::pallet_prelude::Weight { + match self { + #(RuntimeTask::#variant_names(val) => val.weight(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn task_index(&self) -> u32 { + match self { + #(RuntimeTask::#variant_names(val) => val.task_index(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn iter() -> Self::Enumeration { + let mut all_tasks = Vec::new(); + #(all_tasks.extend(#task_paths::iter().map(RuntimeTask::from).collect::>());)* + all_tasks.into_iter() + } + } + + #( #from_impls )* + }; + + output } diff --git a/support/procedural-fork/src/construct_runtime/expand/unsigned.rs b/support/procedural-fork/src/construct_runtime/expand/unsigned.rs index 109f7081c..33aadba0d 100644 --- a/support/procedural-fork/src/construct_runtime/expand/unsigned.rs +++ b/support/procedural-fork/src/construct_runtime/expand/unsigned.rs @@ -22,71 +22,68 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_validate_unsigned( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let mut query_validate_unsigned_part_macros = 
Vec::new(); + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let mut query_validate_unsigned_part_macros = Vec::new(); - for pallet_decl in pallet_decls { - if pallet_decl.exists_part("ValidateUnsigned") { - let name = &pallet_decl.name; - let path = &pallet_decl.path; - let attr = pallet_decl - .cfg_pattern - .iter() - .fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + for pallet_decl in pallet_decls { + if pallet_decl.exists_part("ValidateUnsigned") { + let name = &pallet_decl.name; + let path = &pallet_decl.path; + let attr = pallet_decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - pallet_names.push(name); - pallet_attrs.push(attr); - query_validate_unsigned_part_macros.push(quote! { + pallet_names.push(name); + pallet_attrs.push(attr); + query_validate_unsigned_part_macros.push(quote! { #path::__substrate_validate_unsigned_check::is_validate_unsigned_part_defined!(#name); }); - } - } + } + } - quote! { - #( #query_validate_unsigned_part_macros )* + quote! { + #( #query_validate_unsigned_part_macros )* - impl #scrate::unsigned::ValidateUnsigned for #runtime { - type Call = RuntimeCall; + impl #scrate::unsigned::ValidateUnsigned for #runtime { + type Call = RuntimeCall; - fn pre_dispatch(call: &Self::Call) -> Result<(), #scrate::unsigned::TransactionValidityError> { - #[allow(unreachable_patterns)] - match call { - #( - #pallet_attrs - RuntimeCall::#pallet_names(inner_call) => #pallet_names::pre_dispatch(inner_call), - )* - // pre-dispatch should not stop inherent extrinsics, validation should prevent - // including arbitrary (non-inherent) extrinsics to blocks. 
- _ => Ok(()), - } - } + fn pre_dispatch(call: &Self::Call) -> Result<(), #scrate::unsigned::TransactionValidityError> { + #[allow(unreachable_patterns)] + match call { + #( + #pallet_attrs + RuntimeCall::#pallet_names(inner_call) => #pallet_names::pre_dispatch(inner_call), + )* + // pre-dispatch should not stop inherent extrinsics, validation should prevent + // including arbitrary (non-inherent) extrinsics to blocks. + _ => Ok(()), + } + } - fn validate_unsigned( - #[allow(unused_variables)] - source: #scrate::unsigned::TransactionSource, - call: &Self::Call, - ) -> #scrate::unsigned::TransactionValidity { - #[allow(unreachable_patterns)] - match call { - #( - #pallet_attrs - RuntimeCall::#pallet_names(inner_call) => #pallet_names::validate_unsigned(source, inner_call), - )* - _ => #scrate::unsigned::UnknownTransaction::NoUnsignedValidator.into(), - } - } - } - } + fn validate_unsigned( + #[allow(unused_variables)] + source: #scrate::unsigned::TransactionSource, + call: &Self::Call, + ) -> #scrate::unsigned::TransactionValidity { + #[allow(unreachable_patterns)] + match call { + #( + #pallet_attrs + RuntimeCall::#pallet_names(inner_call) => #pallet_names::validate_unsigned(source, inner_call), + )* + _ => #scrate::unsigned::UnknownTransaction::NoUnsignedValidator.into(), + } + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/mod.rs b/support/procedural-fork/src/construct_runtime/mod.rs index f01ebe0dd..b083abbb2 100644 --- a/support/procedural-fork/src/construct_runtime/mod.rs +++ b/support/procedural-fork/src/construct_runtime/mod.rs @@ -214,7 +214,7 @@ pub(crate) mod parse; use crate::pallet::parse::helper::two128_str; use cfg_expr::Predicate; use frame_support_procedural_tools::{ - generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, + generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, }; use itertools::Itertools; use parse::{ExplicitRuntimeDeclaration, 
ImplicitRuntimeDeclaration, Pallet, RuntimeDeclaration}; @@ -230,48 +230,48 @@ const SYSTEM_PALLET_NAME: &str = "System"; /// Implementation of `construct_runtime` macro. Either expand to some code which will call /// `construct_runtime` again, or expand to the final runtime definition. pub fn construct_runtime(input: TokenStream) -> TokenStream { - let input_copy = input.clone(); - let definition = syn::parse_macro_input!(input as RuntimeDeclaration); - - let (check_pallet_number_res, res) = match definition { - RuntimeDeclaration::Implicit(implicit_def) => ( - check_pallet_number(input_copy.clone().into(), implicit_def.pallets.len()), - construct_runtime_implicit_to_explicit(input_copy.into(), implicit_def), - ), - RuntimeDeclaration::Explicit(explicit_decl) => ( - check_pallet_number(input_copy.clone().into(), explicit_decl.pallets.len()), - construct_runtime_explicit_to_explicit_expanded(input_copy.into(), explicit_decl), - ), - RuntimeDeclaration::ExplicitExpanded(explicit_decl) => ( - check_pallet_number(input_copy.into(), explicit_decl.pallets.len()), - construct_runtime_final_expansion(explicit_decl), - ), - }; - - let res = res.unwrap_or_else(|e| e.to_compile_error()); - - // We want to provide better error messages to the user and thus, handle the error here - // separately. If there is an error, we print the error and still generate all of the code to - // get in overall less errors for the user. - let res = if let Err(error) = check_pallet_number_res { - let error = error.to_compile_error(); - - quote! 
{ - #error - - #res - } - } else { - res - }; - - let res = expander::Expander::new("construct_runtime") - .dry(std::env::var("EXPAND_MACROS").is_err()) - .verbose(true) - .write_to_out_dir(res) - .expect("Does not fail because of IO in OUT_DIR; qed"); - - res.into() + let input_copy = input.clone(); + let definition = syn::parse_macro_input!(input as RuntimeDeclaration); + + let (check_pallet_number_res, res) = match definition { + RuntimeDeclaration::Implicit(implicit_def) => ( + check_pallet_number(input_copy.clone().into(), implicit_def.pallets.len()), + construct_runtime_implicit_to_explicit(input_copy.into(), implicit_def), + ), + RuntimeDeclaration::Explicit(explicit_decl) => ( + check_pallet_number(input_copy.clone().into(), explicit_decl.pallets.len()), + construct_runtime_explicit_to_explicit_expanded(input_copy.into(), explicit_decl), + ), + RuntimeDeclaration::ExplicitExpanded(explicit_decl) => ( + check_pallet_number(input_copy.into(), explicit_decl.pallets.len()), + construct_runtime_final_expansion(explicit_decl), + ), + }; + + let res = res.unwrap_or_else(|e| e.to_compile_error()); + + // We want to provide better error messages to the user and thus, handle the error here + // separately. If there is an error, we print the error and still generate all of the code to + // get in overall less errors for the user. + let res = if let Err(error) = check_pallet_number_res { + let error = error.to_compile_error(); + + quote! { + #error + + #res + } + } else { + res + }; + + let res = expander::Expander::new("construct_runtime") + .dry(std::env::var("EXPAND_MACROS").is_err()) + .verbose(true) + .write_to_out_dir(res) + .expect("Does not fail because of IO in OUT_DIR; qed"); + + res.into() } /// All pallets that have implicit pallet parts (ie `System: frame_system`) are @@ -282,37 +282,30 @@ pub fn construct_runtime(input: TokenStream) -> TokenStream { /// /// For more details, please refer to the root documentation. 
fn construct_runtime_implicit_to_explicit( - input: TokenStream2, - definition: ImplicitRuntimeDeclaration, + input: TokenStream2, + definition: ImplicitRuntimeDeclaration, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let mut expansion = quote::quote!( - #frame_support::construct_runtime! { #input } - ); - for pallet in definition - .pallets - .iter() - .filter(|pallet| pallet.pallet_parts.is_none()) - { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet - .instance - .as_ref() - .map(|instance| quote::quote!(::<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_default_parts }] - your_tt_return = [{ #frame_support::__private::tt_return }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name: #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let mut expansion = quote::quote!( + #frame_support::construct_runtime! { #input } + ); + for pallet in definition.pallets.iter().filter(|pallet| pallet.pallet_parts.is_none()) { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(::<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_default_parts }] + your_tt_return = [{ #frame_support::__private::tt_return }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name: #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } /// All pallets that have @@ -325,279 +318,264 @@ fn construct_runtime_implicit_to_explicit( /// /// For more details, please refer to the root documentation. 
fn construct_runtime_explicit_to_explicit_expanded( - input: TokenStream2, - definition: ExplicitRuntimeDeclaration, + input: TokenStream2, + definition: ExplicitRuntimeDeclaration, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let mut expansion = quote::quote!( - #frame_support::construct_runtime! { #input } - ); - for pallet in definition - .pallets - .iter() - .filter(|pallet| !pallet.is_expanded) - { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet - .instance - .as_ref() - .map(|instance| quote::quote!(::<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_extra_parts }] - your_tt_return = [{ #frame_support::__private::tt_return }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name: #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let mut expansion = quote::quote!( + #frame_support::construct_runtime! { #input } + ); + for pallet in definition.pallets.iter().filter(|pallet| !pallet.is_expanded) { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(::<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_extra_parts }] + your_tt_return = [{ #frame_support::__private::tt_return }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name: #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } /// All pallets have explicit definition of parts, this will expand to the runtime declaration. 
fn construct_runtime_final_expansion( - definition: ExplicitRuntimeDeclaration, + definition: ExplicitRuntimeDeclaration, ) -> Result { - let ExplicitRuntimeDeclaration { - name, - pallets, - pallets_token, - where_section, - } = definition; - - let system_pallet = pallets - .iter() - .find(|decl| decl.name == SYSTEM_PALLET_NAME) - .ok_or_else(|| { - syn::Error::new( - pallets_token.span.join(), - "`System` pallet declaration is missing. \ + let ExplicitRuntimeDeclaration { name, pallets, pallets_token, where_section } = definition; + + let system_pallet = + pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { + syn::Error::new( + pallets_token.span.join(), + "`System` pallet declaration is missing. \ Please add this line: `System: frame_system,`", - ) - })?; - if !system_pallet.cfg_pattern.is_empty() { - return Err(syn::Error::new( - system_pallet.name.span(), - "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", - )); - } - - let features = pallets - .iter() - .filter(|&decl| (!decl.cfg_pattern.is_empty())) - .flat_map(|decl| { - decl.cfg_pattern.iter().flat_map(|attr| { - attr.predicates().filter_map(|pred| match pred { - Predicate::Feature(feat) => Some(feat), - Predicate::Test => Some("test"), - _ => None, - }) - }) - }) - .collect::>(); - - let hidden_crate_name = "construct_runtime"; - let scrate = generate_crate_access(hidden_crate_name, "frame-support"); - let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let block = quote!(<#name as #frame_system::Config>::Block); - let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); - - let outer_event = - expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Event)?; - let outer_error = - expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Error)?; - - let outer_origin = 
expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?; - let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); - let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); - - let dispatch = expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate); - let tasks = expand::expand_outer_task(&name, &pallets, &scrate); - let metadata = expand::expand_runtime_metadata( - &name, - &pallets, - &scrate, - &unchecked_extrinsic, - &system_pallet.path, - ); - let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); - let inherent = - expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); - let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); - let freeze_reason = expand::expand_outer_freeze_reason(&pallets, &scrate); - let hold_reason = expand::expand_outer_hold_reason(&pallets, &scrate); - let lock_id = expand::expand_outer_lock_id(&pallets, &scrate); - let slash_reason = expand::expand_outer_slash_reason(&pallets, &scrate); - let integrity_test = decl_integrity_test(&scrate); - let static_assertions = decl_static_assertions(&name, &pallets, &scrate); - - let warning = where_section.map(|where_section| { - proc_macro_warning::Warning::new_deprecated("WhereSection") - .old("use a `where` clause in `construct_runtime`") - .new( - "use `frame_system::Config` to set the `Block` type and delete this clause. 
+ ) + })?; + if !system_pallet.cfg_pattern.is_empty() { + return Err(syn::Error::new( + system_pallet.name.span(), + "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", + )) + } + + let features = pallets + .iter() + .filter_map(|decl| { + (!decl.cfg_pattern.is_empty()).then(|| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) + }) + .flatten() + .collect::>(); + + let hidden_crate_name = "construct_runtime"; + let scrate = generate_crate_access(hidden_crate_name, "frame-support"); + let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let block = quote!(<#name as #frame_system::Config>::Block); + let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); + + let outer_event = + expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Event)?; + let outer_error = + expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Error)?; + + let outer_origin = expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?; + let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); + let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); + + let dispatch = expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate); + let tasks = expand::expand_outer_task(&name, &pallets, &scrate); + let metadata = expand::expand_runtime_metadata( + &name, + &pallets, + &scrate, + &unchecked_extrinsic, + &system_pallet.path, + ); + let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); + let inherent = + expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); + let validate_unsigned = expand::expand_outer_validate_unsigned(&name, 
&pallets, &scrate); + let freeze_reason = expand::expand_outer_freeze_reason(&pallets, &scrate); + let hold_reason = expand::expand_outer_hold_reason(&pallets, &scrate); + let lock_id = expand::expand_outer_lock_id(&pallets, &scrate); + let slash_reason = expand::expand_outer_slash_reason(&pallets, &scrate); + let integrity_test = decl_integrity_test(&scrate); + let static_assertions = decl_static_assertions(&name, &pallets, &scrate); + + let warning = where_section.map_or(None, |where_section| { + Some( + proc_macro_warning::Warning::new_deprecated("WhereSection") + .old("use a `where` clause in `construct_runtime`") + .new( + "use `frame_system::Config` to set the `Block` type and delete this clause. It is planned to be removed in December 2023", - ) - .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) - .span(where_section.span) - .build_or_panic() - }); - - let res = quote!( - #warning - - #scrate_decl - - // Prevent UncheckedExtrinsic to print unused warning. - const _: () = { - #[allow(unused)] - type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; - }; - - #[derive( - Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, - #scrate::__private::scale_info::TypeInfo - )] - pub struct #name; - impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { - type RuntimeBlock = #block; - } - - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. - // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. 
- // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` - // is called. - - #[doc(hidden)] - trait InternalConstructRuntime { - #[inline(always)] - fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { - Default::default() - } - } - #[doc(hidden)] - impl InternalConstructRuntime for &#name {} + ) + .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) + .span(where_section.span) + .build_or_panic(), + ) + }); + + let res = quote!( + #warning + + #scrate_decl + + // Prevent UncheckedExtrinsic to print unused warning. + const _: () = { + #[allow(unused)] + type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + }; + + #[derive( + Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + pub struct #name; + impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { + type RuntimeBlock = #block; + } + + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. + // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. 
+ // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` + // is called. + + #[doc(hidden)] + trait InternalConstructRuntime { + #[inline(always)] + fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + Default::default() + } + } + #[doc(hidden)] + impl InternalConstructRuntime for &#name {} - #outer_event + #outer_event - #outer_error + #outer_error - #outer_origin + #outer_origin - #all_pallets + #all_pallets - #pallet_to_index + #pallet_to_index - #dispatch + #dispatch - #tasks + #tasks - #metadata + #metadata - #outer_config + #outer_config - #inherent + #inherent - #validate_unsigned + #validate_unsigned - #freeze_reason + #freeze_reason - #hold_reason + #hold_reason - #lock_id + #lock_id - #slash_reason + #slash_reason - #integrity_test + #integrity_test - #static_assertions - ); + #static_assertions + ); - Ok(res) + Ok(res) } pub(crate) fn decl_all_pallets<'a>( - runtime: &'a Ident, - pallet_declarations: impl Iterator, - features: &HashSet<&str>, + runtime: &'a Ident, + pallet_declarations: impl Iterator, + features: &HashSet<&str>, ) -> TokenStream2 { - let mut types = TokenStream2::new(); - - // Every feature set to the pallet names that should be included by this feature set. 
- let mut features_to_names = features - .iter() - .copied() - .powerset() - .map(|feat| (HashSet::from_iter(feat), Vec::new())) - .collect::, Vec<_>)>>(); - - for pallet_declaration in pallet_declarations { - let type_name = &pallet_declaration.name; - let pallet = &pallet_declaration.path; - let mut generics = vec![quote!(#runtime)]; - generics.extend( - pallet_declaration - .instance - .iter() - .map(|name| quote!(#pallet::#name)), - ); - let mut attrs = Vec::new(); - for cfg in &pallet_declaration.cfg_pattern { - let feat = format!("#[cfg({})]\n", cfg.original()); - attrs.extend(TokenStream2::from_str(&feat).expect("was parsed successfully; qed")); - } - let type_decl = quote!( - #(#attrs)* - pub type #type_name = #pallet::Pallet <#(#generics),*>; - ); - types.extend(type_decl); - - if pallet_declaration.cfg_pattern.is_empty() { - for (_, names) in features_to_names.iter_mut() { - names.push(&pallet_declaration.name); - } - } else { - for (feature_set, names) in &mut features_to_names { - // Rust tidbit: if we have multiple `#[cfg]` feature on the same item, then the - // predicates listed in all `#[cfg]` attributes are effectively joined by `and()`, - // meaning that all of them must match in order to activate the item - let is_feature_active = pallet_declaration.cfg_pattern.iter().all(|expr| { - expr.eval(|pred| match pred { - Predicate::Feature(f) => feature_set.contains(f), - Predicate::Test => feature_set.contains(&"test"), - _ => false, - }) - }); - - if is_feature_active { - names.push(&pallet_declaration.name); - } - } - } - } - - // All possible features. This will be used below for the empty feature set. - let mut all_features = features_to_names - .iter() - .flat_map(|f| f.0.iter().cloned()) - .collect::>(); - let attribute_to_names = features_to_names + let mut types = TokenStream2::new(); + + // Every feature set to the pallet names that should be included by this feature set. 
+ let mut features_to_names = features + .iter() + .map(|f| *f) + .powerset() + .map(|feat| (HashSet::from_iter(feat), Vec::new())) + .collect::, Vec<_>)>>(); + + for pallet_declaration in pallet_declarations { + let type_name = &pallet_declaration.name; + let pallet = &pallet_declaration.path; + let mut generics = vec![quote!(#runtime)]; + generics.extend(pallet_declaration.instance.iter().map(|name| quote!(#pallet::#name))); + let mut attrs = Vec::new(); + for cfg in &pallet_declaration.cfg_pattern { + let feat = format!("#[cfg({})]\n", cfg.original()); + attrs.extend(TokenStream2::from_str(&feat).expect("was parsed successfully; qed")); + } + let type_decl = quote!( + #(#attrs)* + pub type #type_name = #pallet::Pallet <#(#generics),*>; + ); + types.extend(type_decl); + + if pallet_declaration.cfg_pattern.is_empty() { + for (_, names) in features_to_names.iter_mut() { + names.push(&pallet_declaration.name); + } + } else { + for (feature_set, names) in &mut features_to_names { + // Rust tidbit: if we have multiple `#[cfg]` feature on the same item, then the + // predicates listed in all `#[cfg]` attributes are effectively joined by `and()`, + // meaning that all of them must match in order to activate the item + let is_feature_active = pallet_declaration.cfg_pattern.iter().all(|expr| { + expr.eval(|pred| match pred { + Predicate::Feature(f) => feature_set.contains(f), + Predicate::Test => feature_set.contains(&"test"), + _ => false, + }) + }); + + if is_feature_active { + names.push(&pallet_declaration.name); + } + } + } + } + + // All possible features. This will be used below for the empty feature set. 
+ let mut all_features = features_to_names + .iter() + .flat_map(|f| f.0.iter().cloned()) + .collect::>(); + let attribute_to_names = features_to_names .into_iter() .map(|(mut features, names)| { // If this is the empty feature set, it needs to be changed to negate all available @@ -620,222 +598,212 @@ pub(crate) fn decl_all_pallets<'a>( }) .collect::>(); - let all_pallets_without_system = attribute_to_names.iter().map(|(attr, names)| { - let names = names.iter().filter(|n| **n != SYSTEM_PALLET_NAME); - quote! { - #attr - /// All pallets included in the runtime as a nested tuple of types. - /// Excludes the System pallet. - pub type AllPalletsWithoutSystem = ( #(#names,)* ); - } - }); - - let all_pallets_with_system = attribute_to_names.iter().map(|(attr, names)| { - quote! { - #attr - /// All pallets included in the runtime as a nested tuple of types. - pub type AllPalletsWithSystem = ( #(#names,)* ); - } - }); - - quote!( - #types - - #( #all_pallets_with_system )* - - #( #all_pallets_without_system )* - ) + let all_pallets_without_system = attribute_to_names.iter().map(|(attr, names)| { + let names = names.iter().filter(|n| **n != SYSTEM_PALLET_NAME); + quote! { + #attr + /// All pallets included in the runtime as a nested tuple of types. + /// Excludes the System pallet. + pub type AllPalletsWithoutSystem = ( #(#names,)* ); + } + }); + + let all_pallets_with_system = attribute_to_names.iter().map(|(attr, names)| { + quote! { + #attr + /// All pallets included in the runtime as a nested tuple of types. 
+ pub type AllPalletsWithSystem = ( #(#names,)* ); + } + }); + + quote!( + #types + + #( #all_pallets_with_system )* + + #( #all_pallets_without_system )* + ) } pub(crate) fn decl_pallet_runtime_setup( - runtime: &Ident, - pallet_declarations: &[Pallet], - scrate: &TokenStream2, + runtime: &Ident, + pallet_declarations: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let names = pallet_declarations - .iter() - .map(|d| &d.name) - .collect::>(); - let name_strings = pallet_declarations.iter().map(|d| d.name.to_string()); - let name_hashes = pallet_declarations - .iter() - .map(|d| two128_str(&d.name.to_string())); - let module_names = pallet_declarations.iter().map(|d| d.path.module_name()); - let indices = pallet_declarations - .iter() - .map(|pallet| pallet.index as usize); - let pallet_structs = pallet_declarations - .iter() - .map(|pallet| { - let path = &pallet.path; - match pallet.instance.as_ref() { - Some(inst) => quote!(#path::Pallet<#runtime, #path::#inst>), - None => quote!(#path::Pallet<#runtime>), - } - }) - .collect::>(); - let pallet_attrs = pallet_declarations - .iter() - .map(|pallet| { - pallet - .cfg_pattern - .iter() - .fold(TokenStream2::new(), |acc, pattern| { - let attr = TokenStream2::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }) - }) - .collect::>(); - - quote!( - /// Provides an implementation of `PalletInfo` to provide information - /// about the pallet setup in the runtime. - pub struct PalletInfo; - - impl #scrate::traits::PalletInfo for PalletInfo { - - fn index() -> Option { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#indices) - } - )* - - None - } - - fn name() -> Option<&'static str> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#name_strings) - } - )* - - None - } - - fn name_hash() -> Option<[u8; 16]> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#name_hashes) - } - )* - - None - } - - fn module_name() -> Option<&'static str> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#module_names) - } - )* - - None - } - - fn crate_version() -> Option<#scrate::traits::CrateVersion> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some( - <#pallet_structs as #scrate::traits::PalletInfoAccess>::crate_version() - ) - } - )* - - None - } - } - ) + let names = pallet_declarations.iter().map(|d| &d.name).collect::>(); + let name_strings = pallet_declarations.iter().map(|d| d.name.to_string()); + let name_hashes = pallet_declarations.iter().map(|d| two128_str(&d.name.to_string())); + let module_names = pallet_declarations.iter().map(|d| d.path.module_name()); + let indices = pallet_declarations.iter().map(|pallet| pallet.index as usize); + let pallet_structs = pallet_declarations + .iter() + .map(|pallet| { + let path = &pallet.path; + match pallet.instance.as_ref() { + Some(inst) => quote!(#path::Pallet<#runtime, #path::#inst>), + None => quote!(#path::Pallet<#runtime>), + } + }) + .collect::>(); + let pallet_attrs = pallet_declarations + .iter() + .map(|pallet| { + pallet.cfg_pattern.iter().fold(TokenStream2::new(), |acc, pattern| { + let attr = TokenStream2::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }) + }) + .collect::>(); + + quote!( + /// Provides an implementation of `PalletInfo` to provide information + /// about the pallet setup in the runtime. + pub struct PalletInfo; + + impl #scrate::traits::PalletInfo for PalletInfo { + + fn index() -> Option { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#indices) + } + )* + + None + } + + fn name() -> Option<&'static str> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#name_strings) + } + )* + + None + } + + fn name_hash() -> Option<[u8; 16]> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#name_hashes) + } + )* + + None + } + + fn module_name() -> Option<&'static str> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#module_names) + } + )* + + None + } + + fn crate_version() -> Option<#scrate::traits::CrateVersion> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some( + <#pallet_structs as #scrate::traits::PalletInfoAccess>::crate_version() + ) + } + )* + + None + } + } + ) } pub(crate) fn decl_integrity_test(scrate: &TokenStream2) -> TokenStream2 { - quote!( - #[cfg(test)] - mod __construct_runtime_integrity_test { - use super::*; - - #[test] - pub fn runtime_integrity_tests() { - #scrate::__private::sp_tracing::try_init_simple(); - ::integrity_test(); - } - } - ) + quote!( + #[cfg(test)] + mod __construct_runtime_integrity_test { + use super::*; + + #[test] + pub fn runtime_integrity_tests() { + #scrate::__private::sp_tracing::try_init_simple(); + ::integrity_test(); + } + } + ) } pub(crate) fn decl_static_assertions( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream2, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let error_encoded_size_check = pallet_decls.iter().map(|decl| { - let path = &decl.path; - let assert_message = format!( - "The maximum encoded size of the error type in the `{}` pallet exceeds \ + let error_encoded_size_check = pallet_decls.iter().map(|decl| { + let path = &decl.path; + let assert_message = format!( + "The maximum encoded size of the error type in the `{}` pallet exceeds \ `MAX_MODULE_ERROR_ENCODED_SIZE`", - decl.name, - ); - - quote! { - #scrate::__private::tt_call! { - macro = [{ #path::tt_error_token }] - your_tt_return = [{ #scrate::__private::tt_return }] - ~~> #scrate::assert_error_encoded_size! { - path = [{ #path }] - runtime = [{ #runtime }] - assert_message = [{ #assert_message }] - } - } - } - }); - - quote! { - #(#error_encoded_size_check)* - } + decl.name, + ); + + quote! { + #scrate::__private::tt_call! { + macro = [{ #path::tt_error_token }] + your_tt_return = [{ #scrate::__private::tt_return }] + ~~> #scrate::assert_error_encoded_size! 
{ + path = [{ #path }] + runtime = [{ #runtime }] + assert_message = [{ #assert_message }] + } + } + } + }); + + quote! { + #(#error_encoded_size_check)* + } } pub(crate) fn check_pallet_number(input: TokenStream2, pallet_num: usize) -> Result<()> { - let max_pallet_num = { - if cfg!(feature = "tuples-96") { - 96 - } else if cfg!(feature = "tuples-128") { - 128 - } else { - 64 - } - }; - - if pallet_num > max_pallet_num { - let no_feature = max_pallet_num == 128; - return Err(syn::Error::new( - input.span(), - format!( - "{} To increase this limit, enable the tuples-{} feature of [frame_support]. {}", - "The number of pallets exceeds the maximum number of tuple elements.", - max_pallet_num + 32, - if no_feature { - "If the feature does not exist - it needs to be implemented." - } else { - "" - }, - ), - )); - } - - Ok(()) + let max_pallet_num = { + if cfg!(feature = "tuples-96") { + 96 + } else if cfg!(feature = "tuples-128") { + 128 + } else { + 64 + } + }; + + if pallet_num > max_pallet_num { + let no_feature = max_pallet_num == 128; + return Err(syn::Error::new( + input.span(), + format!( + "{} To increase this limit, enable the tuples-{} feature of [frame_support]. {}", + "The number of pallets exceeds the maximum number of tuple elements.", + max_pallet_num + 32, + if no_feature { + "If the feature does not exist - it needs to be implemented." 
+ } else { + "" + }, + ), + )) + } + + Ok(()) } diff --git a/support/procedural-fork/src/construct_runtime/parse.rs b/support/procedural-fork/src/construct_runtime/parse.rs index 26fbb4dee..31866c787 100644 --- a/support/procedural-fork/src/construct_runtime/parse.rs +++ b/support/procedural-fork/src/construct_runtime/parse.rs @@ -20,34 +20,34 @@ use proc_macro2::{Span, TokenStream}; use quote::ToTokens; use std::collections::{HashMap, HashSet}; use syn::{ - ext::IdentExt, - parse::{Parse, ParseStream}, - punctuated::Punctuated, - spanned::Spanned, - token, Attribute, Error, Ident, Path, Result, Token, + ext::IdentExt, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned, + token, Attribute, Error, Ident, Path, Result, Token, }; mod keyword { - syn::custom_keyword!(Block); - syn::custom_keyword!(NodeBlock); - syn::custom_keyword!(UncheckedExtrinsic); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(Call); - syn::custom_keyword!(Storage); - syn::custom_keyword!(Event); - syn::custom_keyword!(Error); - syn::custom_keyword!(Config); - syn::custom_keyword!(Origin); - syn::custom_keyword!(Inherent); - syn::custom_keyword!(ValidateUnsigned); - syn::custom_keyword!(FreezeReason); - syn::custom_keyword!(HoldReason); - syn::custom_keyword!(Task); - syn::custom_keyword!(LockId); - syn::custom_keyword!(SlashReason); - syn::custom_keyword!(exclude_parts); - syn::custom_keyword!(use_parts); - syn::custom_keyword!(expanded); + syn::custom_keyword!(Block); + syn::custom_keyword!(NodeBlock); + syn::custom_keyword!(UncheckedExtrinsic); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(Call); + syn::custom_keyword!(Storage); + syn::custom_keyword!(Event); + syn::custom_keyword!(Error); + syn::custom_keyword!(Config); + syn::custom_keyword!(Origin); + syn::custom_keyword!(Inherent); + syn::custom_keyword!(ValidateUnsigned); + syn::custom_keyword!(FreezeReason); + syn::custom_keyword!(HoldReason); + syn::custom_keyword!(Task); + 
syn::custom_keyword!(LockId); + syn::custom_keyword!(SlashReason); + syn::custom_keyword!(exclude_parts); + syn::custom_keyword!(use_parts); + syn::custom_keyword!(expanded); } /// Declaration of a runtime. @@ -57,298 +57,266 @@ mod keyword { /// implicit. #[derive(Debug)] pub enum RuntimeDeclaration { - Implicit(ImplicitRuntimeDeclaration), - Explicit(ExplicitRuntimeDeclaration), - ExplicitExpanded(ExplicitRuntimeDeclaration), + Implicit(ImplicitRuntimeDeclaration), + Explicit(ExplicitRuntimeDeclaration), + ExplicitExpanded(ExplicitRuntimeDeclaration), } /// Declaration of a runtime with some pallet with implicit declaration of parts. #[derive(Debug)] pub struct ImplicitRuntimeDeclaration { - pub name: Ident, - pub where_section: Option, - pub pallets: Vec, + pub name: Ident, + pub where_section: Option, + pub pallets: Vec, } /// Declaration of a runtime with all pallet having explicit declaration of parts. #[derive(Debug)] pub struct ExplicitRuntimeDeclaration { - pub name: Ident, - pub where_section: Option, - pub pallets: Vec, - pub pallets_token: token::Brace, + pub name: Ident, + pub where_section: Option, + pub pallets: Vec, + pub pallets_token: token::Brace, } impl Parse for RuntimeDeclaration { - fn parse(input: ParseStream) -> Result { - input.parse::()?; - - // Support either `enum` or `struct`. - if input.peek(Token![struct]) { - input.parse::()?; - } else { - input.parse::()?; - } - - let name = input.parse::()?; - let where_section = if input.peek(token::Where) { - Some(input.parse()?) - } else { - None - }; - let pallets = - input.parse::>>()?; - let pallets_token = pallets.token; - - match convert_pallets(pallets.content.inner.into_iter().collect())? 
{ - PalletsConversion::Implicit(pallets) => { - Ok(RuntimeDeclaration::Implicit(ImplicitRuntimeDeclaration { - name, - where_section, - pallets, - })) - } - PalletsConversion::Explicit(pallets) => { - Ok(RuntimeDeclaration::Explicit(ExplicitRuntimeDeclaration { - name, - where_section, - pallets, - pallets_token, - })) - } - PalletsConversion::ExplicitExpanded(pallets) => Ok( - RuntimeDeclaration::ExplicitExpanded(ExplicitRuntimeDeclaration { - name, - where_section, - pallets, - pallets_token, - }), - ), - } - } + fn parse(input: ParseStream) -> Result { + input.parse::()?; + + // Support either `enum` or `struct`. + if input.peek(Token![struct]) { + input.parse::()?; + } else { + input.parse::()?; + } + + let name = input.parse::()?; + let where_section = if input.peek(token::Where) { Some(input.parse()?) } else { None }; + let pallets = + input.parse::>>()?; + let pallets_token = pallets.token; + + match convert_pallets(pallets.content.inner.into_iter().collect())? { + PalletsConversion::Implicit(pallets) => + Ok(RuntimeDeclaration::Implicit(ImplicitRuntimeDeclaration { + name, + where_section, + pallets, + })), + PalletsConversion::Explicit(pallets) => + Ok(RuntimeDeclaration::Explicit(ExplicitRuntimeDeclaration { + name, + where_section, + pallets, + pallets_token, + })), + PalletsConversion::ExplicitExpanded(pallets) => + Ok(RuntimeDeclaration::ExplicitExpanded(ExplicitRuntimeDeclaration { + name, + where_section, + pallets, + pallets_token, + })), + } + } } #[derive(Debug)] pub struct WhereSection { - pub span: Span, - pub block: syn::TypePath, - pub node_block: syn::TypePath, - pub unchecked_extrinsic: syn::TypePath, + pub span: Span, + pub block: syn::TypePath, + pub node_block: syn::TypePath, + pub unchecked_extrinsic: syn::TypePath, } impl Parse for WhereSection { - fn parse(input: ParseStream) -> Result { - input.parse::()?; - - let mut definitions = Vec::new(); - while !input.peek(token::Brace) { - let definition: WhereDefinition = input.parse()?; - 
definitions.push(definition); - if !input.peek(Token![,]) { - if !input.peek(token::Brace) { - return Err(input.error("Expected `,` or `{`")); - } - break; - } - input.parse::()?; - } - let block = remove_kind(input, WhereKind::Block, &mut definitions)?.value; - let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; - let unchecked_extrinsic = - remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?.value; - if let Some(WhereDefinition { - ref kind_span, - ref kind, - .. - }) = definitions.first() - { - let msg = format!( - "`{:?}` was declared above. Please use exactly one declaration for `{:?}`.", - kind, kind - ); - return Err(Error::new(*kind_span, msg)); - } - Ok(Self { - span: input.span(), - block, - node_block, - unchecked_extrinsic, - }) - } + fn parse(input: ParseStream) -> Result { + input.parse::()?; + + let mut definitions = Vec::new(); + while !input.peek(token::Brace) { + let definition: WhereDefinition = input.parse()?; + definitions.push(definition); + if !input.peek(Token![,]) { + if !input.peek(token::Brace) { + return Err(input.error("Expected `,` or `{`")) + } + break + } + input.parse::()?; + } + let block = remove_kind(input, WhereKind::Block, &mut definitions)?.value; + let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; + let unchecked_extrinsic = + remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?.value; + if let Some(WhereDefinition { ref kind_span, ref kind, .. }) = definitions.first() { + let msg = format!( + "`{:?}` was declared above. 
Please use exactly one declaration for `{:?}`.", + kind, kind + ); + return Err(Error::new(*kind_span, msg)) + } + Ok(Self { span: input.span(), block, node_block, unchecked_extrinsic }) + } } #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum WhereKind { - Block, - NodeBlock, - UncheckedExtrinsic, + Block, + NodeBlock, + UncheckedExtrinsic, } #[derive(Debug)] pub struct WhereDefinition { - pub kind_span: Span, - pub kind: WhereKind, - pub value: syn::TypePath, + pub kind_span: Span, + pub kind: WhereKind, + pub value: syn::TypePath, } impl Parse for WhereDefinition { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - let (kind_span, kind) = if lookahead.peek(keyword::Block) { - (input.parse::()?.span(), WhereKind::Block) - } else if lookahead.peek(keyword::NodeBlock) { - ( - input.parse::()?.span(), - WhereKind::NodeBlock, - ) - } else if lookahead.peek(keyword::UncheckedExtrinsic) { - ( - input.parse::()?.span(), - WhereKind::UncheckedExtrinsic, - ) - } else { - return Err(lookahead.error()); - }; - - Ok(Self { - kind_span, - kind, - value: { - let _: Token![=] = input.parse()?; - input.parse()? - }, - }) - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + let (kind_span, kind) = if lookahead.peek(keyword::Block) { + (input.parse::()?.span(), WhereKind::Block) + } else if lookahead.peek(keyword::NodeBlock) { + (input.parse::()?.span(), WhereKind::NodeBlock) + } else if lookahead.peek(keyword::UncheckedExtrinsic) { + (input.parse::()?.span(), WhereKind::UncheckedExtrinsic) + } else { + return Err(lookahead.error()) + }; + + Ok(Self { + kind_span, + kind, + value: { + let _: Token![=] = input.parse()?; + input.parse()? + }, + }) + } } /// The declaration of a pallet. #[derive(Debug, Clone)] pub struct PalletDeclaration { - /// Is this pallet fully expanded? - pub is_expanded: bool, - /// The name of the pallet, e.g.`System` in `System: frame_system`. 
- pub name: Ident, - /// Optional attributes tagged right above a pallet declaration. - pub attrs: Vec, - /// Optional fixed index, e.g. `MyPallet ... = 3,`. - pub index: Option, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: PalletPath, - /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. - pub instance: Option, - /// The declared pallet parts, - /// e.g. `Some([Pallet, Call])` for `System: system::{Pallet, Call}` - /// or `None` for `System: system`. - pub pallet_parts: Option>, - /// The specified parts, either use_parts or exclude_parts. - pub specified_parts: SpecifiedParts, + /// Is this pallet fully expanded? + pub is_expanded: bool, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Optional attributes tagged right above a pallet declaration. + pub attrs: Vec, + /// Optional fixed index, e.g. `MyPallet ... = 3,`. + pub index: Option, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: PalletPath, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, + /// The declared pallet parts, + /// e.g. `Some([Pallet, Call])` for `System: system::{Pallet, Call}` + /// or `None` for `System: system`. + pub pallet_parts: Option>, + /// The specified parts, either use_parts or exclude_parts. + pub specified_parts: SpecifiedParts, } /// The possible declaration of pallet parts to use. #[derive(Debug, Clone)] pub enum SpecifiedParts { - /// Use all the pallet parts except those specified. - Exclude(Vec), - /// Use only the specified pallet parts. - Use(Vec), - /// Use the all the pallet parts. - All, + /// Use all the pallet parts except those specified. + Exclude(Vec), + /// Use only the specified pallet parts. + Use(Vec), + /// Use the all the pallet parts. 
+ All, } impl Parse for PalletDeclaration { - fn parse(input: ParseStream) -> Result { - let attrs = input.call(Attribute::parse_outer)?; - - let name = input.parse()?; - let _: Token![:] = input.parse()?; - let path = input.parse()?; - - // Parse for instance. - let instance = if input.peek(Token![::]) && input.peek3(Token![<]) { - let _: Token![::] = input.parse()?; - let _: Token![<] = input.parse()?; - let res = Some(input.parse()?); - let _: Token![>] = input.parse()?; - res - } else if !(input.peek(Token![::]) && input.peek3(token::Brace)) - && !input.peek(keyword::expanded) - && !input.peek(keyword::exclude_parts) - && !input.peek(keyword::use_parts) - && !input.peek(Token![=]) - && !input.peek(Token![,]) - && !input.is_empty() - { - return Err(input.error( + fn parse(input: ParseStream) -> Result { + let attrs = input.call(Attribute::parse_outer)?; + + let name = input.parse()?; + let _: Token![:] = input.parse()?; + let path = input.parse()?; + + // Parse for instance. + let instance = if input.peek(Token![::]) && input.peek3(Token![<]) { + let _: Token![::] = input.parse()?; + let _: Token![<] = input.parse()?; + let res = Some(input.parse()?); + let _: Token![>] = input.parse()?; + res + } else if !(input.peek(Token![::]) && input.peek3(token::Brace)) && + !input.peek(keyword::expanded) && + !input.peek(keyword::exclude_parts) && + !input.peek(keyword::use_parts) && + !input.peek(Token![=]) && + !input.peek(Token![,]) && + !input.is_empty() + { + return Err(input.error( "Unexpected tokens, expected one of `::$ident` `::{`, `exclude_parts`, `use_parts`, `=`, `,`", )); - } else { - None - }; - - // Check if the pallet is fully expanded. - let (is_expanded, extra_parts) = if input.peek(keyword::expanded) { - let _: keyword::expanded = input.parse()?; - let _: Token![::] = input.parse()?; - (true, parse_pallet_parts(input)?) 
- } else { - (false, vec![]) - }; - - // Parse for explicit parts - let pallet_parts = if input.peek(Token![::]) && input.peek3(token::Brace) { - let _: Token![::] = input.parse()?; - let mut parts = parse_pallet_parts(input)?; - parts.extend(extra_parts); - Some(parts) - } else if !input.peek(keyword::exclude_parts) - && !input.peek(keyword::use_parts) - && !input.peek(Token![=]) - && !input.peek(Token![,]) - && !input.is_empty() - { - return Err(input.error( - "Unexpected tokens, expected one of `::{`, `exclude_parts`, `use_parts`, `=`, `,`", - )); - } else { - is_expanded.then_some(extra_parts) - }; - - // Parse for specified parts - let specified_parts = if input.peek(keyword::exclude_parts) { - let _: keyword::exclude_parts = input.parse()?; - SpecifiedParts::Exclude(parse_pallet_parts_no_generic(input)?) - } else if input.peek(keyword::use_parts) { - let _: keyword::use_parts = input.parse()?; - SpecifiedParts::Use(parse_pallet_parts_no_generic(input)?) - } else if !input.peek(Token![=]) && !input.peek(Token![,]) && !input.is_empty() { - return Err(input.error("Unexpected tokens, expected one of `exclude_parts`, `=`, `,`")); - } else { - SpecifiedParts::All - }; - - // Parse for pallet index - let index = if input.peek(Token![=]) { - input.parse::()?; - let index = input.parse::()?; - let index = index.base10_parse::()?; - Some(index) - } else if !input.peek(Token![,]) && !input.is_empty() { - return Err(input.error("Unexpected tokens, expected one of `=`, `,`")); - } else { - None - }; - - Ok(Self { - is_expanded, - attrs, - name, - path, - instance, - pallet_parts, - specified_parts, - index, - }) - } + } else { + None + }; + + // Check if the pallet is fully expanded. + let (is_expanded, extra_parts) = if input.peek(keyword::expanded) { + let _: keyword::expanded = input.parse()?; + let _: Token![::] = input.parse()?; + (true, parse_pallet_parts(input)?) 
+ } else { + (false, vec![]) + }; + + // Parse for explicit parts + let pallet_parts = if input.peek(Token![::]) && input.peek3(token::Brace) { + let _: Token![::] = input.parse()?; + let mut parts = parse_pallet_parts(input)?; + parts.extend(extra_parts.into_iter()); + Some(parts) + } else if !input.peek(keyword::exclude_parts) && + !input.peek(keyword::use_parts) && + !input.peek(Token![=]) && + !input.peek(Token![,]) && + !input.is_empty() + { + return Err(input.error( + "Unexpected tokens, expected one of `::{`, `exclude_parts`, `use_parts`, `=`, `,`", + )) + } else { + is_expanded.then_some(extra_parts) + }; + + // Parse for specified parts + let specified_parts = if input.peek(keyword::exclude_parts) { + let _: keyword::exclude_parts = input.parse()?; + SpecifiedParts::Exclude(parse_pallet_parts_no_generic(input)?) + } else if input.peek(keyword::use_parts) { + let _: keyword::use_parts = input.parse()?; + SpecifiedParts::Use(parse_pallet_parts_no_generic(input)?) + } else if !input.peek(Token![=]) && !input.peek(Token![,]) && !input.is_empty() { + return Err(input.error("Unexpected tokens, expected one of `exclude_parts`, `=`, `,`")) + } else { + SpecifiedParts::All + }; + + // Parse for pallet index + let index = if input.peek(Token![=]) { + input.parse::()?; + let index = input.parse::()?; + let index = index.base10_parse::()?; + Some(index) + } else if !input.peek(Token![,]) && !input.is_empty() { + return Err(input.error("Unexpected tokens, expected one of `=`, `,`")) + } else { + None + }; + + Ok(Self { is_expanded, attrs, name, path, instance, pallet_parts, specified_parts, index }) + } } /// A struct representing a path to a pallet. 
`PalletPath` is almost identical to the standard @@ -357,312 +325,303 @@ impl Parse for PalletDeclaration { /// - Path segments can only consist of identifiers separated by colons #[derive(Debug, Clone)] pub struct PalletPath { - pub inner: Path, + pub inner: Path, } impl PalletPath { - pub fn module_name(&self) -> String { - self.inner - .segments - .iter() - .fold(String::new(), |mut acc, segment| { - if !acc.is_empty() { - acc.push_str("::"); - } - acc.push_str(&segment.ident.to_string()); - acc - }) - } + pub fn module_name(&self) -> String { + self.inner.segments.iter().fold(String::new(), |mut acc, segment| { + if !acc.is_empty() { + acc.push_str("::"); + } + acc.push_str(&segment.ident.to_string()); + acc + }) + } } impl Parse for PalletPath { - fn parse(input: ParseStream) -> Result { - let mut res = PalletPath { - inner: Path { - leading_colon: None, - segments: Punctuated::new(), - }, - }; - - let lookahead = input.lookahead1(); - if lookahead.peek(Token![crate]) - || lookahead.peek(Token![self]) - || lookahead.peek(Token![super]) - || lookahead.peek(Ident) - { - let ident = input.call(Ident::parse_any)?; - res.inner.segments.push(ident.into()); - } else { - return Err(lookahead.error()); - } - - while input.peek(Token![::]) && input.peek3(Ident) { - input.parse::()?; - let ident = input.parse::()?; - res.inner.segments.push(ident.into()); - } - Ok(res) - } + fn parse(input: ParseStream) -> Result { + let mut res = + PalletPath { inner: Path { leading_colon: None, segments: Punctuated::new() } }; + + let lookahead = input.lookahead1(); + if lookahead.peek(Token![crate]) || + lookahead.peek(Token![self]) || + lookahead.peek(Token![super]) || + lookahead.peek(Ident) + { + let ident = input.call(Ident::parse_any)?; + res.inner.segments.push(ident.into()); + } else { + return Err(lookahead.error()) + } + + while input.peek(Token![::]) && input.peek3(Ident) { + input.parse::()?; + let ident = input.parse::()?; + res.inner.segments.push(ident.into()); + } + 
Ok(res) + } } impl quote::ToTokens for PalletPath { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.inner.to_tokens(tokens); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + self.inner.to_tokens(tokens); + } } /// Parse [`PalletPart`]'s from a braces enclosed list that is split by commas, e.g. /// /// `{ Call, Event }` fn parse_pallet_parts(input: ParseStream) -> Result> { - let pallet_parts: ext::Braces> = input.parse()?; + let pallet_parts: ext::Braces> = input.parse()?; - let mut resolved = HashSet::new(); - for part in pallet_parts.content.inner.iter() { - if !resolved.insert(part.name()) { - let msg = format!( - "`{}` was already declared before. Please remove the duplicate declaration", - part.name(), - ); - return Err(Error::new(part.keyword.span(), msg)); - } - } + let mut resolved = HashSet::new(); + for part in pallet_parts.content.inner.iter() { + if !resolved.insert(part.name()) { + let msg = format!( + "`{}` was already declared before. Please remove the duplicate declaration", + part.name(), + ); + return Err(Error::new(part.keyword.span(), msg)) + } + } - Ok(pallet_parts.content.inner.into_iter().collect()) + Ok(pallet_parts.content.inner.into_iter().collect()) } #[derive(Debug, Clone)] pub enum PalletPartKeyword { - Pallet(keyword::Pallet), - Call(keyword::Call), - Storage(keyword::Storage), - Event(keyword::Event), - Error(keyword::Error), - Config(keyword::Config), - Origin(keyword::Origin), - Inherent(keyword::Inherent), - ValidateUnsigned(keyword::ValidateUnsigned), - FreezeReason(keyword::FreezeReason), - HoldReason(keyword::HoldReason), - Task(keyword::Task), - LockId(keyword::LockId), - SlashReason(keyword::SlashReason), + Pallet(keyword::Pallet), + Call(keyword::Call), + Storage(keyword::Storage), + Event(keyword::Event), + Error(keyword::Error), + Config(keyword::Config), + Origin(keyword::Origin), + Inherent(keyword::Inherent), + ValidateUnsigned(keyword::ValidateUnsigned), + FreezeReason(keyword::FreezeReason), + 
HoldReason(keyword::HoldReason), + Task(keyword::Task), + LockId(keyword::LockId), + SlashReason(keyword::SlashReason), } impl Parse for PalletPartKeyword { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - - if lookahead.peek(keyword::Pallet) { - Ok(Self::Pallet(input.parse()?)) - } else if lookahead.peek(keyword::Call) { - Ok(Self::Call(input.parse()?)) - } else if lookahead.peek(keyword::Storage) { - Ok(Self::Storage(input.parse()?)) - } else if lookahead.peek(keyword::Event) { - Ok(Self::Event(input.parse()?)) - } else if lookahead.peek(keyword::Error) { - Ok(Self::Error(input.parse()?)) - } else if lookahead.peek(keyword::Config) { - Ok(Self::Config(input.parse()?)) - } else if lookahead.peek(keyword::Origin) { - Ok(Self::Origin(input.parse()?)) - } else if lookahead.peek(keyword::Inherent) { - Ok(Self::Inherent(input.parse()?)) - } else if lookahead.peek(keyword::ValidateUnsigned) { - Ok(Self::ValidateUnsigned(input.parse()?)) - } else if lookahead.peek(keyword::FreezeReason) { - Ok(Self::FreezeReason(input.parse()?)) - } else if lookahead.peek(keyword::HoldReason) { - Ok(Self::HoldReason(input.parse()?)) - } else if lookahead.peek(keyword::Task) { - Ok(Self::Task(input.parse()?)) - } else if lookahead.peek(keyword::LockId) { - Ok(Self::LockId(input.parse()?)) - } else if lookahead.peek(keyword::SlashReason) { - Ok(Self::SlashReason(input.parse()?)) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(keyword::Pallet) { + Ok(Self::Pallet(input.parse()?)) + } else if lookahead.peek(keyword::Call) { + Ok(Self::Call(input.parse()?)) + } else if lookahead.peek(keyword::Storage) { + Ok(Self::Storage(input.parse()?)) + } else if lookahead.peek(keyword::Event) { + Ok(Self::Event(input.parse()?)) + } else if lookahead.peek(keyword::Error) { + Ok(Self::Error(input.parse()?)) + } else if lookahead.peek(keyword::Config) { + 
Ok(Self::Config(input.parse()?)) + } else if lookahead.peek(keyword::Origin) { + Ok(Self::Origin(input.parse()?)) + } else if lookahead.peek(keyword::Inherent) { + Ok(Self::Inherent(input.parse()?)) + } else if lookahead.peek(keyword::ValidateUnsigned) { + Ok(Self::ValidateUnsigned(input.parse()?)) + } else if lookahead.peek(keyword::FreezeReason) { + Ok(Self::FreezeReason(input.parse()?)) + } else if lookahead.peek(keyword::HoldReason) { + Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(keyword::Task) { + Ok(Self::Task(input.parse()?)) + } else if lookahead.peek(keyword::LockId) { + Ok(Self::LockId(input.parse()?)) + } else if lookahead.peek(keyword::SlashReason) { + Ok(Self::SlashReason(input.parse()?)) + } else { + Err(lookahead.error()) + } + } } impl PalletPartKeyword { - /// Returns the name of `Self`. - fn name(&self) -> &'static str { - match self { - Self::Pallet(_) => "Pallet", - Self::Call(_) => "Call", - Self::Storage(_) => "Storage", - Self::Event(_) => "Event", - Self::Error(_) => "Error", - Self::Config(_) => "Config", - Self::Origin(_) => "Origin", - Self::Inherent(_) => "Inherent", - Self::ValidateUnsigned(_) => "ValidateUnsigned", - Self::FreezeReason(_) => "FreezeReason", - Self::HoldReason(_) => "HoldReason", - Self::Task(_) => "Task", - Self::LockId(_) => "LockId", - Self::SlashReason(_) => "SlashReason", - } - } - - /// Returns `true` if this pallet part is allowed to have generic arguments. - fn allows_generic(&self) -> bool { - Self::all_generic_arg().iter().any(|n| *n == self.name()) - } - - /// Returns the names of all pallet parts that allow to have a generic argument. - fn all_generic_arg() -> &'static [&'static str] { - &["Event", "Error", "Origin", "Config", "Task"] - } + /// Returns the name of `Self`. 
+ fn name(&self) -> &'static str { + match self { + Self::Pallet(_) => "Pallet", + Self::Call(_) => "Call", + Self::Storage(_) => "Storage", + Self::Event(_) => "Event", + Self::Error(_) => "Error", + Self::Config(_) => "Config", + Self::Origin(_) => "Origin", + Self::Inherent(_) => "Inherent", + Self::ValidateUnsigned(_) => "ValidateUnsigned", + Self::FreezeReason(_) => "FreezeReason", + Self::HoldReason(_) => "HoldReason", + Self::Task(_) => "Task", + Self::LockId(_) => "LockId", + Self::SlashReason(_) => "SlashReason", + } + } + + /// Returns `true` if this pallet part is allowed to have generic arguments. + fn allows_generic(&self) -> bool { + Self::all_generic_arg().iter().any(|n| *n == self.name()) + } + + /// Returns the names of all pallet parts that allow to have a generic argument. + fn all_generic_arg() -> &'static [&'static str] { + &["Event", "Error", "Origin", "Config", "Task"] + } } impl ToTokens for PalletPartKeyword { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - Self::Pallet(inner) => inner.to_tokens(tokens), - Self::Call(inner) => inner.to_tokens(tokens), - Self::Storage(inner) => inner.to_tokens(tokens), - Self::Event(inner) => inner.to_tokens(tokens), - Self::Error(inner) => inner.to_tokens(tokens), - Self::Config(inner) => inner.to_tokens(tokens), - Self::Origin(inner) => inner.to_tokens(tokens), - Self::Inherent(inner) => inner.to_tokens(tokens), - Self::ValidateUnsigned(inner) => inner.to_tokens(tokens), - Self::FreezeReason(inner) => inner.to_tokens(tokens), - Self::HoldReason(inner) => inner.to_tokens(tokens), - Self::Task(inner) => inner.to_tokens(tokens), - Self::LockId(inner) => inner.to_tokens(tokens), - Self::SlashReason(inner) => inner.to_tokens(tokens), - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Pallet(inner) => inner.to_tokens(tokens), + Self::Call(inner) => inner.to_tokens(tokens), + Self::Storage(inner) => inner.to_tokens(tokens), + Self::Event(inner) => 
inner.to_tokens(tokens), + Self::Error(inner) => inner.to_tokens(tokens), + Self::Config(inner) => inner.to_tokens(tokens), + Self::Origin(inner) => inner.to_tokens(tokens), + Self::Inherent(inner) => inner.to_tokens(tokens), + Self::ValidateUnsigned(inner) => inner.to_tokens(tokens), + Self::FreezeReason(inner) => inner.to_tokens(tokens), + Self::HoldReason(inner) => inner.to_tokens(tokens), + Self::Task(inner) => inner.to_tokens(tokens), + Self::LockId(inner) => inner.to_tokens(tokens), + Self::SlashReason(inner) => inner.to_tokens(tokens), + } + } } #[derive(Debug, Clone)] pub struct PalletPart { - pub keyword: PalletPartKeyword, - pub generics: syn::Generics, + pub keyword: PalletPartKeyword, + pub generics: syn::Generics, } impl Parse for PalletPart { - fn parse(input: ParseStream) -> Result { - let keyword: PalletPartKeyword = input.parse()?; - - let generics: syn::Generics = input.parse()?; - if !generics.params.is_empty() && !keyword.allows_generic() { - let valid_generics = PalletPart::format_names(PalletPartKeyword::all_generic_arg()); - let msg = format!( - "`{}` is not allowed to have generics. \ + fn parse(input: ParseStream) -> Result { + let keyword: PalletPartKeyword = input.parse()?; + + let generics: syn::Generics = input.parse()?; + if !generics.params.is_empty() && !keyword.allows_generic() { + let valid_generics = PalletPart::format_names(PalletPartKeyword::all_generic_arg()); + let msg = format!( + "`{}` is not allowed to have generics. 
\ Only the following pallets are allowed to have generics: {}.", - keyword.name(), - valid_generics, - ); - return Err(syn::Error::new(keyword.span(), msg)); - } + keyword.name(), + valid_generics, + ); + return Err(syn::Error::new(keyword.span(), msg)) + } - Ok(Self { keyword, generics }) - } + Ok(Self { keyword, generics }) + } } impl PalletPart { - pub fn format_names(names: &[&'static str]) -> String { - let res: Vec<_> = names.iter().map(|s| format!("`{}`", s)).collect(); - res.join(", ") - } + pub fn format_names(names: &[&'static str]) -> String { + let res: Vec<_> = names.iter().map(|s| format!("`{}`", s)).collect(); + res.join(", ") + } - /// The name of this pallet part. - pub fn name(&self) -> &'static str { - self.keyword.name() - } + /// The name of this pallet part. + pub fn name(&self) -> &'static str { + self.keyword.name() + } } fn remove_kind( - input: ParseStream, - kind: WhereKind, - definitions: &mut Vec, + input: ParseStream, + kind: WhereKind, + definitions: &mut Vec, ) -> Result { - if let Some(pos) = definitions.iter().position(|d| d.kind == kind) { - Ok(definitions.remove(pos)) - } else { - let msg = format!( - "Missing associated type for `{:?}`. Add `{:?}` = ... to where section.", - kind, kind - ); - Err(input.error(msg)) - } + if let Some(pos) = definitions.iter().position(|d| d.kind == kind) { + Ok(definitions.remove(pos)) + } else { + let msg = format!( + "Missing associated type for `{:?}`. Add `{:?}` = ... to where section.", + kind, kind + ); + Err(input.error(msg)) + } } /// The declaration of a part without its generics #[derive(Debug, Clone)] pub struct PalletPartNoGeneric { - keyword: PalletPartKeyword, + keyword: PalletPartKeyword, } impl Parse for PalletPartNoGeneric { - fn parse(input: ParseStream) -> Result { - Ok(Self { - keyword: input.parse()?, - }) - } + fn parse(input: ParseStream) -> Result { + Ok(Self { keyword: input.parse()? 
}) + } } /// Parse [`PalletPartNoGeneric`]'s from a braces enclosed list that is split by commas, e.g. /// /// `{ Call, Event }` fn parse_pallet_parts_no_generic(input: ParseStream) -> Result> { - let pallet_parts: ext::Braces> = - input.parse()?; + let pallet_parts: ext::Braces> = + input.parse()?; - let mut resolved = HashSet::new(); - for part in pallet_parts.content.inner.iter() { - if !resolved.insert(part.keyword.name()) { - let msg = format!( - "`{}` was already declared before. Please remove the duplicate declaration", - part.keyword.name(), - ); - return Err(Error::new(part.keyword.span(), msg)); - } - } + let mut resolved = HashSet::new(); + for part in pallet_parts.content.inner.iter() { + if !resolved.insert(part.keyword.name()) { + let msg = format!( + "`{}` was already declared before. Please remove the duplicate declaration", + part.keyword.name(), + ); + return Err(Error::new(part.keyword.span(), msg)) + } + } - Ok(pallet_parts.content.inner.into_iter().collect()) + Ok(pallet_parts.content.inner.into_iter().collect()) } /// The final definition of a pallet with the resulting fixed index and explicit parts. #[derive(Debug, Clone)] pub struct Pallet { - /// Is this pallet fully expanded? - pub is_expanded: bool, - /// The name of the pallet, e.g.`System` in `System: frame_system`. - pub name: Ident, - /// Either automatically inferred, or defined (e.g. `MyPallet ... = 3,`). - pub index: u8, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: PalletPath, - /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. - pub instance: Option, - /// The pallet parts to use for the pallet. - pub pallet_parts: Vec, - /// Expressions specified inside of a #[cfg] attribute. - pub cfg_pattern: Vec, + /// Is this pallet fully expanded? + pub is_expanded: bool, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Either automatically inferred, or defined (e.g. 
`MyPallet ... = 3,`). + pub index: u8, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: PalletPath, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, + /// The pallet parts to use for the pallet. + pub pallet_parts: Vec, + /// Expressions specified inside of a #[cfg] attribute. + pub cfg_pattern: Vec, } impl Pallet { - /// Get resolved pallet parts - pub fn pallet_parts(&self) -> &[PalletPart] { - &self.pallet_parts - } + /// Get resolved pallet parts + pub fn pallet_parts(&self) -> &[PalletPart] { + &self.pallet_parts + } - /// Find matching parts - pub fn find_part(&self, name: &str) -> Option<&PalletPart> { - self.pallet_parts.iter().find(|part| part.name() == name) - } + /// Find matching parts + pub fn find_part(&self, name: &str) -> Option<&PalletPart> { + self.pallet_parts.iter().find(|part| part.name() == name) + } - /// Return whether pallet contains part - pub fn exists_part(&self, name: &str) -> bool { - self.find_part(name).is_some() - } + /// Return whether pallet contains part + pub fn exists_part(&self, name: &str) -> bool { + self.find_part(name).is_some() + } } /// Result of a conversion of a declaration of pallets. @@ -675,26 +634,26 @@ impl Pallet { /// +----------+ +----------+ +------------------+ /// ``` enum PalletsConversion { - /// Pallets implicitly declare parts. - /// - /// `System: frame_system`. - Implicit(Vec), - /// Pallets explicitly declare parts. - /// - /// `System: frame_system::{Pallet, Call}` - /// - /// However, for backwards compatibility with Polkadot/Kusama - /// we must propagate some other parts to the pallet by default. - Explicit(Vec), - /// Pallets explicitly declare parts that are fully expanded. - /// - /// This is the end state that contains extra parts included by - /// default by Substrate. 
- /// - /// `System: frame_system expanded::{Error} ::{Pallet, Call}` - /// - /// For this example, the `Pallet`, `Call` and `Error` parts are collected. - ExplicitExpanded(Vec), + /// Pallets implicitly declare parts. + /// + /// `System: frame_system`. + Implicit(Vec), + /// Pallets explicitly declare parts. + /// + /// `System: frame_system::{Pallet, Call}` + /// + /// However, for backwards compatibility with Polkadot/Kusama + /// we must propagate some other parts to the pallet by default. + Explicit(Vec), + /// Pallets explicitly declare parts that are fully expanded. + /// + /// This is the end state that contains extra parts included by + /// default by Substrate. + /// + /// `System: frame_system expanded::{Error} ::{Pallet, Call}` + /// + /// For this example, the `Pallet`, `Call` and `Error` parts are collected. + ExplicitExpanded(Vec), } /// Convert from the parsed pallet declaration to their final information. @@ -703,137 +662,125 @@ enum PalletsConversion { /// pallet using same rules as rust for fieldless enum. I.e. implicit are assigned number /// incrementally from last explicit or 0. 
fn convert_pallets(pallets: Vec) -> syn::Result { - if pallets.iter().any(|pallet| pallet.pallet_parts.is_none()) { - return Ok(PalletsConversion::Implicit(pallets)); - } - - let mut indices = HashMap::new(); - let mut last_index: Option = None; - let mut names = HashMap::new(); - let mut is_expanded = true; - - let pallets = pallets - .into_iter() - .map(|pallet| { - let final_index = match pallet.index { - Some(i) => i, - None => last_index - .map_or(Some(0), |i| i.checked_add(1)) - .ok_or_else(|| { - let msg = "Pallet index doesn't fit into u8, index is 256"; - syn::Error::new(pallet.name.span(), msg) - })?, - }; - - last_index = Some(final_index); - - if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { - let msg = format!( - "Pallet indices are conflicting: Both pallets {} and {} are at index {}", - used_pallet, pallet.name, final_index, - ); - let mut err = syn::Error::new(used_pallet.span(), &msg); - err.combine(syn::Error::new(pallet.name.span(), msg)); - return Err(err); - } - - if let Some(used_pallet) = names.insert(pallet.name.clone(), pallet.name.span()) { - let msg = "Two pallets with the same name!"; - - let mut err = syn::Error::new(used_pallet, msg); - err.combine(syn::Error::new(pallet.name.span(), msg)); - return Err(err); - } - - let mut pallet_parts = pallet.pallet_parts.expect("Checked above"); - - let available_parts = pallet_parts - .iter() - .map(|part| part.keyword.name()) - .collect::>(); - - // Check parts are correctly specified - match &pallet.specified_parts { - SpecifiedParts::Exclude(parts) | SpecifiedParts::Use(parts) => { - for part in parts { - if !available_parts.contains(part.keyword.name()) { - let msg = format!( - "Invalid pallet part specified, the pallet `{}` doesn't have the \ + if pallets.iter().any(|pallet| pallet.pallet_parts.is_none()) { + return Ok(PalletsConversion::Implicit(pallets)) + } + + let mut indices = HashMap::new(); + let mut last_index: Option = None; + let mut names = 
HashMap::new(); + let mut is_expanded = true; + + let pallets = pallets + .into_iter() + .map(|pallet| { + let final_index = match pallet.index { + Some(i) => i, + None => last_index.map_or(Some(0), |i| i.checked_add(1)).ok_or_else(|| { + let msg = "Pallet index doesn't fit into u8, index is 256"; + syn::Error::new(pallet.name.span(), msg) + })?, + }; + + last_index = Some(final_index); + + if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { + let msg = format!( + "Pallet indices are conflicting: Both pallets {} and {} are at index {}", + used_pallet, pallet.name, final_index, + ); + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err) + } + + if let Some(used_pallet) = names.insert(pallet.name.clone(), pallet.name.span()) { + let msg = "Two pallets with the same name!"; + + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet.name.span(), &msg)); + return Err(err) + } + + let mut pallet_parts = pallet.pallet_parts.expect("Checked above"); + + let available_parts = + pallet_parts.iter().map(|part| part.keyword.name()).collect::>(); + + // Check parts are correctly specified + match &pallet.specified_parts { + SpecifiedParts::Exclude(parts) | SpecifiedParts::Use(parts) => + for part in parts { + if !available_parts.contains(part.keyword.name()) { + let msg = format!( + "Invalid pallet part specified, the pallet `{}` doesn't have the \ `{}` part. Available parts are: {}.", - pallet.name, - part.keyword.name(), - pallet_parts.iter().fold(String::new(), |fold, part| { - if fold.is_empty() { - format!("`{}`", part.keyword.name()) - } else { - format!("{}, `{}`", fold, part.keyword.name()) - } - }) - ); - return Err(syn::Error::new(part.keyword.span(), msg)); - } - } - } - SpecifiedParts::All => (), - } - - // Set only specified parts. 
- match pallet.specified_parts { - SpecifiedParts::Exclude(excluded_parts) => pallet_parts.retain(|part| { - !excluded_parts - .iter() - .any(|excluded_part| excluded_part.keyword.name() == part.keyword.name()) - }), - SpecifiedParts::Use(used_parts) => pallet_parts.retain(|part| { - used_parts - .iter() - .any(|use_part| use_part.keyword.name() == part.keyword.name()) - }), - SpecifiedParts::All => (), - } - - let cfg_pattern = pallet - .attrs - .iter() - .map(|attr| { - if attr - .path() - .segments - .first() - .map_or(false, |s| s.ident != "cfg") - { - let msg = "Unsupported attribute, only #[cfg] is supported on pallet \ + pallet.name, + part.keyword.name(), + pallet_parts.iter().fold(String::new(), |fold, part| { + if fold.is_empty() { + format!("`{}`", part.keyword.name()) + } else { + format!("{}, `{}`", fold, part.keyword.name()) + } + }) + ); + return Err(syn::Error::new(part.keyword.span(), msg)) + } + }, + SpecifiedParts::All => (), + } + + // Set only specified parts. + match pallet.specified_parts { + SpecifiedParts::Exclude(excluded_parts) => pallet_parts.retain(|part| { + !excluded_parts + .iter() + .any(|excluded_part| excluded_part.keyword.name() == part.keyword.name()) + }), + SpecifiedParts::Use(used_parts) => pallet_parts.retain(|part| { + used_parts.iter().any(|use_part| use_part.keyword.name() == part.keyword.name()) + }), + SpecifiedParts::All => (), + } + + let cfg_pattern = pallet + .attrs + .iter() + .map(|attr| { + if attr.path().segments.first().map_or(false, |s| s.ident != "cfg") { + let msg = "Unsupported attribute, only #[cfg] is supported on pallet \ declarations in `construct_runtime`"; - return Err(syn::Error::new(attr.span(), msg)); - } - - attr.parse_args_with(|input: syn::parse::ParseStream| { - // Required, otherwise the parse stream doesn't advance and will result in - // an error. 
- let input = input.parse::()?; - cfg_expr::Expression::parse(&input.to_string()) - .map_err(|e| syn::Error::new(attr.span(), e.to_string())) - }) - }) - .collect::>>()?; - - is_expanded &= pallet.is_expanded; - - Ok(Pallet { - is_expanded: pallet.is_expanded, - name: pallet.name, - index: final_index, - path: pallet.path, - instance: pallet.instance, - cfg_pattern, - pallet_parts, - }) - }) - .collect::>>()?; - - if is_expanded { - Ok(PalletsConversion::ExplicitExpanded(pallets)) - } else { - Ok(PalletsConversion::Explicit(pallets)) - } + return Err(syn::Error::new(attr.span(), msg)) + } + + attr.parse_args_with(|input: syn::parse::ParseStream| { + // Required, otherwise the parse stream doesn't advance and will result in + // an error. + let input = input.parse::()?; + cfg_expr::Expression::parse(&input.to_string()) + .map_err(|e| syn::Error::new(attr.span(), e.to_string())) + }) + }) + .collect::>>()?; + + is_expanded &= pallet.is_expanded; + + Ok(Pallet { + is_expanded: pallet.is_expanded, + name: pallet.name, + index: final_index, + path: pallet.path, + instance: pallet.instance, + cfg_pattern, + pallet_parts, + }) + }) + .collect::>>()?; + + if is_expanded { + Ok(PalletsConversion::ExplicitExpanded(pallets)) + } else { + Ok(PalletsConversion::Explicit(pallets)) + } } diff --git a/support/procedural-fork/src/crate_version.rs b/support/procedural-fork/src/crate_version.rs index 63e7c7279..8c8975a42 100644 --- a/support/procedural-fork/src/crate_version.rs +++ b/support/procedural-fork/src/crate_version.rs @@ -24,31 +24,31 @@ use syn::{Error, Result}; /// Create an error that will be shown by rustc at the call site of the macro. fn create_error(message: &str) -> Error { - Error::new(Span::call_site(), message) + Error::new(Span::call_site(), message) } /// Implementation of the `crate_to_crate_version!` macro. 
pub fn crate_to_crate_version(input: proc_macro::TokenStream) -> Result { - if !input.is_empty() { - return Err(create_error("No arguments expected!")); - } + if !input.is_empty() { + return Err(create_error("No arguments expected!")) + } - let major_version = get_cargo_env_var::("CARGO_PKG_VERSION_MAJOR") - .map_err(|_| create_error("Major version needs to fit into `u16`"))?; + let major_version = get_cargo_env_var::("CARGO_PKG_VERSION_MAJOR") + .map_err(|_| create_error("Major version needs to fit into `u16`"))?; - let minor_version = get_cargo_env_var::("CARGO_PKG_VERSION_MINOR") - .map_err(|_| create_error("Minor version needs to fit into `u8`"))?; + let minor_version = get_cargo_env_var::("CARGO_PKG_VERSION_MINOR") + .map_err(|_| create_error("Minor version needs to fit into `u8`"))?; - let patch_version = get_cargo_env_var::("CARGO_PKG_VERSION_PATCH") - .map_err(|_| create_error("Patch version needs to fit into `u8`"))?; + let patch_version = get_cargo_env_var::("CARGO_PKG_VERSION_PATCH") + .map_err(|_| create_error("Patch version needs to fit into `u8`"))?; - let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let crate_ = generate_access_from_frame_or_crate("frame-support")?; - Ok(quote::quote! { - #crate_::traits::CrateVersion { - major: #major_version, - minor: #minor_version, - patch: #patch_version, - } - }) + Ok(quote::quote! 
{ + #crate_::traits::CrateVersion { + major: #major_version, + minor: #minor_version, + patch: #patch_version, + } + }) } diff --git a/support/procedural-fork/src/derive_impl.rs b/support/procedural-fork/src/derive_impl.rs index 177a95e70..54755f116 100644 --- a/support/procedural-fork/src/derive_impl.rs +++ b/support/procedural-fork/src/derive_impl.rs @@ -23,67 +23,65 @@ use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens}; use std::collections::HashSet; use syn::{ - parse2, parse_quote, spanned::Spanned, token, Ident, ImplItem, ItemImpl, Path, Result, Token, + parse2, parse_quote, spanned::Spanned, token, Ident, ImplItem, ItemImpl, Path, Result, Token, }; mod keyword { - syn::custom_keyword!(inject_runtime_type); - syn::custom_keyword!(no_aggregated_types); + syn::custom_keyword!(inject_runtime_type); + syn::custom_keyword!(no_aggregated_types); } #[derive(derive_syn_parse::Parse, PartialEq, Eq)] pub enum PalletAttrType { - #[peek(keyword::inject_runtime_type, name = "inject_runtime_type")] - RuntimeType(keyword::inject_runtime_type), + #[peek(keyword::inject_runtime_type, name = "inject_runtime_type")] + RuntimeType(keyword::inject_runtime_type), } #[derive(derive_syn_parse::Parse)] pub struct PalletAttr { - _pound: Token![#], - #[bracket] - _bracket: token::Bracket, - #[inside(_bracket)] - typ: PalletAttrType, + _pound: Token![#], + #[bracket] + _bracket: token::Bracket, + #[inside(_bracket)] + typ: PalletAttrType, } fn is_runtime_type(item: &syn::ImplItemType) -> bool { - item.attrs.iter().any(|attr| { - if let Ok(PalletAttr { - typ: PalletAttrType::RuntimeType(_), - .. - }) = parse2::(attr.into_token_stream()) - { - return true; - } - false - }) + item.attrs.iter().any(|attr| { + if let Ok(PalletAttr { typ: PalletAttrType::RuntimeType(_), .. 
}) = + parse2::(attr.into_token_stream()) + { + return true + } + false + }) } #[derive(Parse, Debug)] pub struct DeriveImplAttrArgs { - pub default_impl_path: Path, - _as: Option, - #[parse_if(_as.is_some())] - pub disambiguation_path: Option, - _comma: Option, - #[parse_if(_comma.is_some())] - pub no_aggregated_types: Option, + pub default_impl_path: Path, + _as: Option, + #[parse_if(_as.is_some())] + pub disambiguation_path: Option, + _comma: Option, + #[parse_if(_comma.is_some())] + pub no_aggregated_types: Option, } impl ForeignPath for DeriveImplAttrArgs { - fn foreign_path(&self) -> &Path { - &self.default_impl_path - } + fn foreign_path(&self) -> &Path { + &self.default_impl_path + } } impl ToTokens for DeriveImplAttrArgs { - fn to_tokens(&self, tokens: &mut TokenStream2) { - tokens.extend(self.default_impl_path.to_token_stream()); - tokens.extend(self._as.to_token_stream()); - tokens.extend(self.disambiguation_path.to_token_stream()); - tokens.extend(self._comma.to_token_stream()); - tokens.extend(self.no_aggregated_types.to_token_stream()); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + tokens.extend(self.default_impl_path.to_token_stream()); + tokens.extend(self._as.to_token_stream()); + tokens.extend(self.disambiguation_path.to_token_stream()); + tokens.extend(self._comma.to_token_stream()); + tokens.extend(self.no_aggregated_types.to_token_stream()); + } } /// Gets the [`Ident`] representation of the given [`ImplItem`], if one exists. Otherwise @@ -92,13 +90,13 @@ impl ToTokens for DeriveImplAttrArgs { /// Used by [`combine_impls`] to determine whether we can compare [`ImplItem`]s by [`Ident`] /// or not. 
fn impl_item_ident(impl_item: &ImplItem) -> Option<&Ident> { - match impl_item { - ImplItem::Const(item) => Some(&item.ident), - ImplItem::Fn(item) => Some(&item.sig.ident), - ImplItem::Type(item) => Some(&item.ident), - ImplItem::Macro(item) => item.mac.path.get_ident(), - _ => None, - } + match impl_item { + ImplItem::Const(item) => Some(&item.ident), + ImplItem::Fn(item) => Some(&item.sig.ident), + ImplItem::Type(item) => Some(&item.ident), + ImplItem::Macro(item) => item.mac.path.get_ident(), + _ => None, + } } /// The real meat behind `derive_impl`. Takes in a `local_impl`, which is the impl for which we @@ -114,68 +112,64 @@ fn impl_item_ident(impl_item: &ImplItem) -> Option<&Ident> { /// into `local_impl`. Items that lack an ident and also exist verbatim in `local_impl` are not /// copied over. fn combine_impls( - local_impl: ItemImpl, - foreign_impl: ItemImpl, - default_impl_path: Path, - disambiguation_path: Path, - inject_runtime_types: bool, + local_impl: ItemImpl, + foreign_impl: ItemImpl, + default_impl_path: Path, + disambiguation_path: Path, + inject_runtime_types: bool, ) -> ItemImpl { - let (existing_local_keys, existing_unsupported_items): (HashSet, HashSet) = - local_impl - .items - .iter() - .cloned() - .partition(|impl_item| impl_item_ident(impl_item).is_some()); - let existing_local_keys: HashSet = existing_local_keys - .into_iter() - .filter_map(|item| impl_item_ident(&item).cloned()) - .collect(); - let mut final_impl = local_impl; - let extended_items = foreign_impl.items.into_iter().filter_map(|item| { - if let Some(ident) = impl_item_ident(&item) { - if existing_local_keys.contains(ident) { - // do not copy colliding items that have an ident - return None; - } - if let ImplItem::Type(typ) = item.clone() { - let cfg_attrs = typ - .attrs - .iter() - .filter(|attr| { - attr.path() - .get_ident() - .map_or(false, |ident| ident == "cfg") - }) - .map(|attr| attr.to_token_stream()); - if is_runtime_type(&typ) { - let item: ImplItem = if 
inject_runtime_types { - parse_quote! { - #( #cfg_attrs )* - type #ident = #ident; - } - } else { - item - }; - return Some(item); - } - // modify and insert uncolliding type items - let modified_item: ImplItem = parse_quote! { - #( #cfg_attrs )* - type #ident = <#default_impl_path as #disambiguation_path>::#ident; - }; - return Some(modified_item); - } - // copy uncolliding non-type items that have an ident - Some(item) - } else { - // do not copy colliding items that lack an ident - (!existing_unsupported_items.contains(&item)) - // copy uncolliding items without an ident verbatim - .then_some(item) - } - }); - final_impl.items.extend(extended_items); - final_impl + let (existing_local_keys, existing_unsupported_items): (HashSet, HashSet) = + local_impl + .items + .iter() + .cloned() + .partition(|impl_item| impl_item_ident(impl_item).is_some()); + let existing_local_keys: HashSet = existing_local_keys + .into_iter() + .filter_map(|item| impl_item_ident(&item).cloned()) + .collect(); + let mut final_impl = local_impl; + let extended_items = foreign_impl.items.into_iter().filter_map(|item| { + if let Some(ident) = impl_item_ident(&item) { + if existing_local_keys.contains(&ident) { + // do not copy colliding items that have an ident + return None + } + if let ImplItem::Type(typ) = item.clone() { + let cfg_attrs = typ + .attrs + .iter() + .filter(|attr| attr.path().get_ident().map_or(false, |ident| ident == "cfg")) + .map(|attr| attr.to_token_stream()); + if is_runtime_type(&typ) { + let item: ImplItem = if inject_runtime_types { + parse_quote! { + #( #cfg_attrs )* + type #ident = #ident; + } + } else { + item + }; + return Some(item) + } + // modify and insert uncolliding type items + let modified_item: ImplItem = parse_quote! 
{ + #( #cfg_attrs )* + type #ident = <#default_impl_path as #disambiguation_path>::#ident; + }; + return Some(modified_item) + } + // copy uncolliding non-type items that have an ident + Some(item) + } else { + // do not copy colliding items that lack an ident + (!existing_unsupported_items.contains(&item)) + // copy uncolliding items without an ident verbatim + .then_some(item) + } + }); + final_impl.items.extend(extended_items); + final_impl } /// Computes the disambiguation path for the `derive_impl` attribute macro. @@ -184,26 +178,25 @@ fn combine_impls( /// disambiguation is used as is. If not, we infer the disambiguation path from the /// `foreign_impl_path` and the computed scope. fn compute_disambiguation_path( - disambiguation_path: Option, - foreign_impl: ItemImpl, - default_impl_path: Path, + disambiguation_path: Option, + foreign_impl: ItemImpl, + default_impl_path: Path, ) -> Result { - match (disambiguation_path, foreign_impl.clone().trait_) { - (Some(disambiguation_path), _) => Ok(disambiguation_path), - (None, Some((_, foreign_impl_path, _))) => { - if default_impl_path.segments.len() > 1 { - let scope = default_impl_path.segments.first(); - Ok(parse_quote!(#scope :: #foreign_impl_path)) - } else { - Ok(foreign_impl_path) - } - } - _ => Err(syn::Error::new( - default_impl_path.span(), - "Impl statement must have a defined type being implemented \ + match (disambiguation_path, foreign_impl.clone().trait_) { + (Some(disambiguation_path), _) => Ok(disambiguation_path), + (None, Some((_, foreign_impl_path, _))) => + if default_impl_path.segments.len() > 1 { + let scope = default_impl_path.segments.first(); + Ok(parse_quote!(#scope :: #foreign_impl_path)) + } else { + Ok(foreign_impl_path) + }, + _ => Err(syn::Error::new( + default_impl_path.span(), + "Impl statement must have a defined type being implemented \ for a defined type such as `impl A for B`", - )), - } + )), + } } /// Internal implementation behind 
[`#[derive_impl(..)]`](`macro@crate::derive_impl`). @@ -218,100 +211,93 @@ fn compute_disambiguation_path( /// `disambiguation_path`: the module path of the external trait we will use to qualify /// defaults imported from the external `impl` statement pub fn derive_impl( - default_impl_path: TokenStream2, - foreign_tokens: TokenStream2, - local_tokens: TokenStream2, - disambiguation_path: Option, - no_aggregated_types: Option, + default_impl_path: TokenStream2, + foreign_tokens: TokenStream2, + local_tokens: TokenStream2, + disambiguation_path: Option, + no_aggregated_types: Option, ) -> Result { - let local_impl = parse2::(local_tokens)?; - let foreign_impl = parse2::(foreign_tokens)?; - let default_impl_path = parse2::(default_impl_path)?; + let local_impl = parse2::(local_tokens)?; + let foreign_impl = parse2::(foreign_tokens)?; + let default_impl_path = parse2::(default_impl_path)?; - let disambiguation_path = compute_disambiguation_path( - disambiguation_path, - foreign_impl.clone(), - default_impl_path.clone(), - )?; + let disambiguation_path = compute_disambiguation_path( + disambiguation_path, + foreign_impl.clone(), + default_impl_path.clone(), + )?; - // generate the combined impl - let combined_impl = combine_impls( - local_impl, - foreign_impl, - default_impl_path, - disambiguation_path, - no_aggregated_types.is_none(), - ); + // generate the combined impl + let combined_impl = combine_impls( + local_impl, + foreign_impl, + default_impl_path, + disambiguation_path, + no_aggregated_types.is_none(), + ); - Ok(quote!(#combined_impl)) + Ok(quote!(#combined_impl)) } #[test] fn test_derive_impl_attr_args_parsing() { - parse2::(quote!( - some::path::TestDefaultConfig as some::path::DefaultConfig - )) - .unwrap(); - parse2::(quote!( - frame_system::prelude::testing::TestDefaultConfig as DefaultConfig - )) - .unwrap(); - parse2::(quote!(Something as some::path::DefaultConfig)).unwrap(); - parse2::(quote!(Something as DefaultConfig)).unwrap(); - 
parse2::(quote!(DefaultConfig)).unwrap(); - assert!(parse2::(quote!()).is_err()); - assert!(parse2::(quote!(Config Config)).is_err()); + parse2::(quote!( + some::path::TestDefaultConfig as some::path::DefaultConfig + )) + .unwrap(); + parse2::(quote!( + frame_system::prelude::testing::TestDefaultConfig as DefaultConfig + )) + .unwrap(); + parse2::(quote!(Something as some::path::DefaultConfig)).unwrap(); + parse2::(quote!(Something as DefaultConfig)).unwrap(); + parse2::(quote!(DefaultConfig)).unwrap(); + assert!(parse2::(quote!()).is_err()); + assert!(parse2::(quote!(Config Config)).is_err()); } #[test] fn test_runtime_type_with_doc() { - #[allow(unused)] - trait TestTrait { - type Test; - } - #[allow(unused)] - struct TestStruct; - let p = parse2::(quote!( - impl TestTrait for TestStruct { - /// Some doc - #[inject_runtime_type] - type Test = u32; - } - )) - .unwrap(); - for item in p.items { - if let ImplItem::Type(typ) = item { - assert!(is_runtime_type(&typ)); - } - } + trait TestTrait { + type Test; + } + #[allow(unused)] + struct TestStruct; + let p = parse2::(quote!( + impl TestTrait for TestStruct { + /// Some doc + #[inject_runtime_type] + type Test = u32; + } + )) + .unwrap(); + for item in p.items { + if let ImplItem::Type(typ) = item { + assert_eq!(is_runtime_type(&typ), true); + } + } } #[test] fn test_disambiguation_path() { - let foreign_impl: ItemImpl = parse_quote!(impl SomeTrait for SomeType {}); - let default_impl_path: Path = parse_quote!(SomeScope::SomeType); + let foreign_impl: ItemImpl = parse_quote!(impl SomeTrait for SomeType {}); + let default_impl_path: Path = parse_quote!(SomeScope::SomeType); - // disambiguation path is specified - let disambiguation_path = compute_disambiguation_path( - Some(parse_quote!(SomeScope::SomePath)), - foreign_impl.clone(), - default_impl_path.clone(), - ); - assert_eq!( - disambiguation_path.unwrap(), - parse_quote!(SomeScope::SomePath) - ); + // disambiguation path is specified + let disambiguation_path = 
compute_disambiguation_path( + Some(parse_quote!(SomeScope::SomePath)), + foreign_impl.clone(), + default_impl_path.clone(), + ); + assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeScope::SomePath)); - // disambiguation path is not specified and the default_impl_path has more than one segment - let disambiguation_path = - compute_disambiguation_path(None, foreign_impl.clone(), default_impl_path.clone()); - assert_eq!( - disambiguation_path.unwrap(), - parse_quote!(SomeScope::SomeTrait) - ); + // disambiguation path is not specified and the default_impl_path has more than one segment + let disambiguation_path = + compute_disambiguation_path(None, foreign_impl.clone(), default_impl_path.clone()); + assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeScope::SomeTrait)); - // disambiguation path is not specified and the default_impl_path has only one segment - let disambiguation_path = - compute_disambiguation_path(None, foreign_impl.clone(), parse_quote!(SomeType)); - assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeTrait)); + // disambiguation path is not specified and the default_impl_path has only one segment + let disambiguation_path = + compute_disambiguation_path(None, foreign_impl.clone(), parse_quote!(SomeType)); + assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeTrait)); } diff --git a/support/procedural-fork/src/dummy_part_checker.rs b/support/procedural-fork/src/dummy_part_checker.rs index 6bed541d1..34d9a3e23 100644 --- a/support/procedural-fork/src/dummy_part_checker.rs +++ b/support/procedural-fork/src/dummy_part_checker.rs @@ -19,63 +19,61 @@ use crate::COUNTER; use proc_macro::TokenStream; pub fn generate_dummy_part_checker(input: TokenStream) -> TokenStream { - if !input.is_empty() { - return syn::Error::new(proc_macro2::Span::call_site(), "No arguments expected") - .to_compile_error() - .into(); - } + if !input.is_empty() { + return syn::Error::new(proc_macro2::Span::call_site(), "No arguments expected") + 
.to_compile_error() + .into() + } - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let no_op_macro_ident = syn::Ident::new( - &format!("__dummy_part_checker_{}", count), - proc_macro2::Span::call_site(), - ); + let no_op_macro_ident = + syn::Ident::new(&format!("__dummy_part_checker_{}", count), proc_macro2::Span::call_site()); - quote::quote!( - #[macro_export] - #[doc(hidden)] - macro_rules! #no_op_macro_ident { - ( $( $tt:tt )* ) => {}; - } + quote::quote!( + #[macro_export] + #[doc(hidden)] + macro_rules! #no_op_macro_ident { + ( $( $tt:tt )* ) => {}; + } - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #no_op_macro_ident as is_std_enabled_for_genesis; - } + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #no_op_macro_ident as is_std_enabled_for_genesis; + } - #[doc(hidden)] - pub mod __substrate_event_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_event_part_defined; - } + #[doc(hidden)] + pub mod __substrate_event_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_event_part_defined; + } - #[doc(hidden)] - pub mod __substrate_inherent_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_inherent_part_defined; - } + #[doc(hidden)] + pub mod __substrate_inherent_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_inherent_part_defined; + } - #[doc(hidden)] - pub mod __substrate_validate_unsigned_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_validate_unsigned_part_defined; - } + #[doc(hidden)] + pub mod __substrate_validate_unsigned_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_validate_unsigned_part_defined; + } - #[doc(hidden)] - pub mod __substrate_call_check { - #[doc(hidden)] - pub 
use #no_op_macro_ident as is_call_part_defined; - } + #[doc(hidden)] + pub mod __substrate_call_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_call_part_defined; + } - #[doc(hidden)] - pub mod __substrate_origin_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_origin_part_defined; - } - ) - .into() + #[doc(hidden)] + pub mod __substrate_origin_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_origin_part_defined; + } + ) + .into() } diff --git a/support/procedural-fork/src/dynamic_params.rs b/support/procedural-fork/src/dynamic_params.rs index b302f2758..29399a885 100644 --- a/support/procedural-fork/src/dynamic_params.rs +++ b/support/procedural-fork/src/dynamic_params.rs @@ -26,132 +26,123 @@ use syn::{parse2, spanned::Spanned, visit_mut, visit_mut::VisitMut, Result, Toke /// Parse and expand a `#[dynamic_params(..)]` module. pub fn dynamic_params(attr: TokenStream, item: TokenStream) -> Result { - DynamicParamModAttr::parse(attr, item).map(ToTokens::into_token_stream) + DynamicParamModAttr::parse(attr, item).map(ToTokens::into_token_stream) } /// Parse and expand `#[dynamic_pallet_params(..)]` attribute. pub fn dynamic_pallet_params(attr: TokenStream, item: TokenStream) -> Result { - DynamicPalletParamAttr::parse(attr, item).map(ToTokens::into_token_stream) + DynamicPalletParamAttr::parse(attr, item).map(ToTokens::into_token_stream) } /// Parse and expand `#[dynamic_aggregated_params_internal]` attribute. pub fn dynamic_aggregated_params_internal( - _attr: TokenStream, - item: TokenStream, + _attr: TokenStream, + item: TokenStream, ) -> Result { - parse2::(item).map(ToTokens::into_token_stream) + parse2::(item).map(ToTokens::into_token_stream) } /// A top `#[dynamic_params(..)]` attribute together with a mod. 
#[derive(derive_syn_parse::Parse)] pub struct DynamicParamModAttr { - params_mod: syn::ItemMod, - meta: DynamicParamModAttrMeta, + params_mod: syn::ItemMod, + meta: DynamicParamModAttrMeta, } /// The inner meta of a `#[dynamic_params(..)]` attribute. #[derive(derive_syn_parse::Parse)] pub struct DynamicParamModAttrMeta { - name: syn::Ident, - _comma: Option, - #[parse_if(_comma.is_some())] - params_pallet: Option, + name: syn::Ident, + _comma: Option, + #[parse_if(_comma.is_some())] + params_pallet: Option, } impl DynamicParamModAttr { - pub fn parse(attr: TokenStream, item: TokenStream) -> Result { - let params_mod = parse2(item)?; - let meta = parse2(attr)?; - Ok(Self { params_mod, meta }) - } - - pub fn inner_mods(&self) -> Vec { - self.params_mod - .content - .as_ref() - .map_or(Vec::new(), |(_, items)| { - items - .iter() - .filter_map(|i| match i { - syn::Item::Mod(m) => Some(m), - _ => None, - }) - .cloned() - .collect() - }) - } + pub fn parse(attr: TokenStream, item: TokenStream) -> Result { + let params_mod = parse2(item)?; + let meta = parse2(attr)?; + Ok(Self { params_mod, meta }) + } + + pub fn inner_mods(&self) -> Vec { + self.params_mod.content.as_ref().map_or(Vec::new(), |(_, items)| { + items + .iter() + .filter_map(|i| match i { + syn::Item::Mod(m) => Some(m), + _ => None, + }) + .cloned() + .collect() + }) + } } impl ToTokens for DynamicParamModAttr { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let (mut params_mod, name) = (self.params_mod.clone(), &self.meta.name); - let dynam_params_ident = ¶ms_mod.ident; - - let mut quoted_enum = quote! 
{}; - for m in self.inner_mods() { - let aggregate_name = - syn::Ident::new(&m.ident.to_string().to_class_case(), m.ident.span()); - let mod_name = &m.ident; - - let mut attrs = m.attrs.clone(); - attrs.retain(|attr| !attr.path().is_ident("dynamic_pallet_params")); - if let Err(err) = ensure_codec_index(&attrs, m.span()) { - tokens.extend(err.into_compile_error()); - return; - } - - quoted_enum.extend(quote! { - #(#attrs)* - #aggregate_name(#dynam_params_ident::#mod_name::Parameters), - }); - } - - // Inject the outer args into the inner `#[dynamic_pallet_params(..)]` attribute. - if let Some(params_pallet) = &self.meta.params_pallet { - MacroInjectArgs { - runtime_params: name.clone(), - params_pallet: params_pallet.clone(), - } - .visit_item_mod_mut(&mut params_mod); - } - - tokens.extend(quote! { - #params_mod - - #[#scrate::dynamic_params::dynamic_aggregated_params_internal] - pub enum #name { - #quoted_enum - } - }); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let (mut params_mod, name) = (self.params_mod.clone(), &self.meta.name); + let dynam_params_ident = ¶ms_mod.ident; + + let mut quoted_enum = quote! {}; + for m in self.inner_mods() { + let aggregate_name = + syn::Ident::new(&m.ident.to_string().to_class_case(), m.ident.span()); + let mod_name = &m.ident; + + let mut attrs = m.attrs.clone(); + attrs.retain(|attr| !attr.path().is_ident("dynamic_pallet_params")); + if let Err(err) = ensure_codec_index(&attrs, m.span()) { + tokens.extend(err.into_compile_error()); + return + } + + quoted_enum.extend(quote! { + #(#attrs)* + #aggregate_name(#dynam_params_ident::#mod_name::Parameters), + }); + } + + // Inject the outer args into the inner `#[dynamic_pallet_params(..)]` attribute. 
+ if let Some(params_pallet) = &self.meta.params_pallet { + MacroInjectArgs { runtime_params: name.clone(), params_pallet: params_pallet.clone() } + .visit_item_mod_mut(&mut params_mod); + } + + tokens.extend(quote! { + #params_mod + + #[#scrate::dynamic_params::dynamic_aggregated_params_internal] + pub enum #name { + #quoted_enum + } + }); + } } /// Ensure there is a `#[codec(index = ..)]` attribute. fn ensure_codec_index(attrs: &Vec, span: Span) -> Result<()> { - let mut found = false; - - for attr in attrs.iter() { - if attr.path().is_ident("codec") { - let meta: syn::ExprAssign = attr.parse_args()?; - if meta.left.to_token_stream().to_string() == "index" { - found = true; - break; - } - } - } - - if !found { - Err(syn::Error::new( - span, - "Missing explicit `#[codec(index = ..)]` attribute", - )) - } else { - Ok(()) - } + let mut found = false; + + for attr in attrs.iter() { + if attr.path().is_ident("codec") { + let meta: syn::ExprAssign = attr.parse_args()?; + if meta.left.to_token_stream().to_string() == "index" { + found = true; + break + } + } + } + + if !found { + Err(syn::Error::new(span, "Missing explicit `#[codec(index = ..)]` attribute")) + } else { + Ok(()) + } } /// Used to inject arguments into the inner `#[dynamic_pallet_params(..)]` attribute. @@ -159,121 +150,110 @@ fn ensure_codec_index(attrs: &Vec, span: Span) -> Result<()> { /// This allows the outer `#[dynamic_params(..)]` attribute to specify some arguments that don't /// need to be repeated every time. struct MacroInjectArgs { - runtime_params: syn::Ident, - params_pallet: syn::Type, + runtime_params: syn::Ident, + params_pallet: syn::Type, } impl VisitMut for MacroInjectArgs { - fn visit_item_mod_mut(&mut self, item: &mut syn::ItemMod) { - // Check if the mod has a `#[dynamic_pallet_params(..)]` attribute. 
- let attr = item - .attrs - .iter_mut() - .find(|attr| attr.path().is_ident("dynamic_pallet_params")); - - if let Some(attr) = attr { - if let syn::Meta::Path(path) = &attr.meta { - assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params") - } - - let runtime_params = &self.runtime_params; - let params_pallet = &self.params_pallet; - - attr.meta = syn::parse2::(quote! { - dynamic_pallet_params(#runtime_params, #params_pallet) - }) - .unwrap(); - } - - visit_mut::visit_item_mod_mut(self, item); - } + fn visit_item_mod_mut(&mut self, item: &mut syn::ItemMod) { + // Check if the mod has a `#[dynamic_pallet_params(..)]` attribute. + let attr = item.attrs.iter_mut().find(|attr| attr.path().is_ident("dynamic_pallet_params")); + + if let Some(attr) = attr { + match &attr.meta { + syn::Meta::Path(path) => + assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params"), + _ => (), + } + + let runtime_params = &self.runtime_params; + let params_pallet = &self.params_pallet; + + attr.meta = syn::parse2::(quote! { + dynamic_pallet_params(#runtime_params, #params_pallet) + }) + .unwrap() + .into(); + } + + visit_mut::visit_item_mod_mut(self, item); + } } /// The helper attribute of a `#[dynamic_pallet_params(runtime_params, params_pallet)]` /// attribute. #[derive(derive_syn_parse::Parse)] pub struct DynamicPalletParamAttr { - inner_mod: syn::ItemMod, - meta: DynamicPalletParamAttrMeta, + inner_mod: syn::ItemMod, + meta: DynamicPalletParamAttrMeta, } /// The inner meta of a `#[dynamic_pallet_params(..)]` attribute. 
#[derive(derive_syn_parse::Parse)] pub struct DynamicPalletParamAttrMeta { - runtime_params: syn::Ident, - _comma: Token![,], - parameter_pallet: syn::Type, + runtime_params: syn::Ident, + _comma: Token![,], + parameter_pallet: syn::Type, } impl DynamicPalletParamAttr { - pub fn parse(attr: TokenStream, item: TokenStream) -> Result { - Ok(Self { - inner_mod: parse2(item)?, - meta: parse2(attr)?, - }) - } - - pub fn statics(&self) -> Vec { - self.inner_mod - .content - .as_ref() - .map_or(Vec::new(), |(_, items)| { - items - .iter() - .filter_map(|i| match i { - syn::Item::Static(s) => Some(s), - _ => None, - }) - .cloned() - .collect() - }) - } + pub fn parse(attr: TokenStream, item: TokenStream) -> Result { + Ok(Self { inner_mod: parse2(item)?, meta: parse2(attr)? }) + } + + pub fn statics(&self) -> Vec { + self.inner_mod.content.as_ref().map_or(Vec::new(), |(_, items)| { + items + .iter() + .filter_map(|i| match i { + syn::Item::Static(s) => Some(s), + _ => None, + }) + .cloned() + .collect() + }) + } } impl ToTokens for DynamicPalletParamAttr { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let (params_mod, parameter_pallet, runtime_params) = ( - &self.inner_mod, - &self.meta.parameter_pallet, - &self.meta.runtime_params, - ); - - let aggregate_name = syn::Ident::new( - ¶ms_mod.ident.to_string().to_class_case(), - params_mod.ident.span(), - ); - let (mod_name, vis) = (¶ms_mod.ident, ¶ms_mod.vis); - let statics = self.statics(); - - let (mut key_names, mut key_values, mut defaults, mut attrs, mut value_types): ( - Vec<_>, - Vec<_>, - Vec<_>, - Vec<_>, - Vec<_>, - ) = Default::default(); - - for s in statics.iter() { - if let Err(err) = ensure_codec_index(&s.attrs, s.span()) { - tokens.extend(err.into_compile_error()); - return; - } - - key_names.push(&s.ident); - key_values.push(format_ident!("{}Value", &s.ident)); - defaults.push(&s.expr); - 
attrs.push(&s.attrs); - value_types.push(&s.ty); - } - - let key_ident = syn::Ident::new("ParametersKey", params_mod.ident.span()); - let value_ident = syn::Ident::new("ParametersValue", params_mod.ident.span()); - let runtime_key_ident = format_ident!("{}Key", runtime_params); - let runtime_value_ident = format_ident!("{}Value", runtime_params); - - tokens.extend(quote! { + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let (params_mod, parameter_pallet, runtime_params) = + (&self.inner_mod, &self.meta.parameter_pallet, &self.meta.runtime_params); + + let aggregate_name = + syn::Ident::new(¶ms_mod.ident.to_string().to_class_case(), params_mod.ident.span()); + let (mod_name, vis) = (¶ms_mod.ident, ¶ms_mod.vis); + let statics = self.statics(); + + let (mut key_names, mut key_values, mut defaults, mut attrs, mut value_types): ( + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + ) = Default::default(); + + for s in statics.iter() { + if let Err(err) = ensure_codec_index(&s.attrs, s.span()) { + tokens.extend(err.into_compile_error()); + return + } + + key_names.push(&s.ident); + key_values.push(format_ident!("{}Value", &s.ident)); + defaults.push(&s.expr); + attrs.push(&s.attrs); + value_types.push(&s.ty); + } + + let key_ident = syn::Ident::new("ParametersKey", params_mod.ident.span()); + let value_ident = syn::Ident::new("ParametersValue", params_mod.ident.span()); + let runtime_key_ident = format_ident!("{}Key", runtime_params); + let runtime_value_ident = format_ident!("{}Value", runtime_params); + + tokens.extend(quote! 
{ pub mod #mod_name { use super::*; @@ -446,44 +426,44 @@ impl ToTokens for DynamicPalletParamAttr { )* } }); - } + } } #[derive(derive_syn_parse::Parse)] pub struct DynamicParamAggregatedEnum { - aggregated_enum: syn::ItemEnum, + aggregated_enum: syn::ItemEnum, } impl ToTokens for DynamicParamAggregatedEnum { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let params_enum = &self.aggregated_enum; - let (name, vis) = (¶ms_enum.ident, ¶ms_enum.vis); - - let (mut indices, mut param_names, mut param_types): (Vec<_>, Vec<_>, Vec<_>) = - Default::default(); - let mut attributes = Vec::new(); - for (i, variant) in params_enum.variants.iter().enumerate() { - indices.push(i); - param_names.push(&variant.ident); - attributes.push(&variant.attrs); - - param_types.push(match &variant.fields { + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let params_enum = &self.aggregated_enum; + let (name, vis) = (¶ms_enum.ident, ¶ms_enum.vis); + + let (mut indices, mut param_names, mut param_types): (Vec<_>, Vec<_>, Vec<_>) = + Default::default(); + let mut attributes = Vec::new(); + for (i, variant) in params_enum.variants.iter().enumerate() { + indices.push(i); + param_names.push(&variant.ident); + attributes.push(&variant.attrs); + + param_types.push(match &variant.fields { syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => &fields.unnamed[0].ty, _ => { *tokens = quote! 
{ compile_error!("Only unnamed enum variants with one inner item are supported") }; return }, }); - } + } - let params_key_ident = format_ident!("{}Key", params_enum.ident); - let params_value_ident = format_ident!("{}Value", params_enum.ident); + let params_key_ident = format_ident!("{}Key", params_enum.ident); + let params_value_ident = format_ident!("{}Value", params_enum.ident); - tokens.extend(quote! { + tokens.extend(quote! { #[doc(hidden)] #[derive( Clone, @@ -574,10 +554,10 @@ impl ToTokens for DynamicParamAggregatedEnum { } )* }); - } + } } /// Get access to the current crate and convert the error to a compile error. fn crate_access() -> core::result::Result { - generate_access_from_frame_or_crate("frame-support").map_err(|e| e.to_compile_error()) + generate_access_from_frame_or_crate("frame-support").map_err(|e| e.to_compile_error()) } diff --git a/support/procedural-fork/src/key_prefix.rs b/support/procedural-fork/src/key_prefix.rs index aea60ce3b..7f1ab6866 100644 --- a/support/procedural-fork/src/key_prefix.rs +++ b/support/procedural-fork/src/key_prefix.rs @@ -23,84 +23,82 @@ use syn::{Ident, Result}; const MAX_IDENTS: usize = 18; pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result { - if !input.is_empty() { - return Err(syn::Error::new(Span::call_site(), "No arguments expected")); - } - - let mut all_trait_impls = TokenStream::new(); - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - - for i in 2..=MAX_IDENTS { - let current_tuple = (0..i) - .map(|n| Ident::new(&format!("Tuple{}", n), Span::call_site())) - .collect::>(); - - for prefix_count in 1..i { - let (prefixes, suffixes) = current_tuple.split_at(prefix_count); - - let hashers = current_tuple - .iter() - .map(|ident| format_ident!("Hasher{}", ident)) - .collect::>(); - let kargs = prefixes - .iter() - .map(|ident| format_ident!("KArg{}", ident)) - .collect::>(); - let partial_keygen = generate_keygen(prefixes); - let suffix_keygen = 
generate_keygen(suffixes); - let suffix_tuple = generate_tuple(suffixes); - - let trait_impls = quote! { - impl< - #(#current_tuple: FullCodec + StaticTypeInfo,)* - #(#hashers: StorageHasher,)* - #(#kargs: EncodeLike<#prefixes>),* - > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { - type Suffix = #suffix_tuple; - - fn partial_key(prefix: ( #( #kargs, )* )) -> Vec { - <#partial_keygen>::final_key(prefix) - } - } - - impl< - #(#current_tuple: FullCodec + StaticTypeInfo,)* - #(#hashers: ReversibleStorageHasher,)* - #(#kargs: EncodeLike<#prefixes>),* - > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { - fn decode_partial_key(key_material: &[u8]) -> Result< - Self::Suffix, - #frame_support::__private::codec::Error, - > { - <#suffix_keygen>::decode_final_key(key_material).map(|k| k.0) - } - } - }; - - all_trait_impls.extend(trait_impls); - } - } - - Ok(all_trait_impls) + if !input.is_empty() { + return Err(syn::Error::new(Span::call_site(), "No arguments expected")) + } + + let mut all_trait_impls = TokenStream::new(); + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + + for i in 2..=MAX_IDENTS { + let current_tuple = (0..i) + .map(|n| Ident::new(&format!("Tuple{}", n), Span::call_site())) + .collect::>(); + + for prefix_count in 1..i { + let (prefixes, suffixes) = current_tuple.split_at(prefix_count); + + let hashers = current_tuple + .iter() + .map(|ident| format_ident!("Hasher{}", ident)) + .collect::>(); + let kargs = + prefixes.iter().map(|ident| format_ident!("KArg{}", ident)).collect::>(); + let partial_keygen = generate_keygen(prefixes); + let suffix_keygen = generate_keygen(suffixes); + let suffix_tuple = generate_tuple(suffixes); + + let trait_impls = quote! 
{ + impl< + #(#current_tuple: FullCodec + StaticTypeInfo,)* + #(#hashers: StorageHasher,)* + #(#kargs: EncodeLike<#prefixes>),* + > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { + type Suffix = #suffix_tuple; + + fn partial_key(prefix: ( #( #kargs, )* )) -> Vec { + <#partial_keygen>::final_key(prefix) + } + } + + impl< + #(#current_tuple: FullCodec + StaticTypeInfo,)* + #(#hashers: ReversibleStorageHasher,)* + #(#kargs: EncodeLike<#prefixes>),* + > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { + fn decode_partial_key(key_material: &[u8]) -> Result< + Self::Suffix, + #frame_support::__private::codec::Error, + > { + <#suffix_keygen>::decode_final_key(key_material).map(|k| k.0) + } + } + }; + + all_trait_impls.extend(trait_impls); + } + } + + Ok(all_trait_impls) } fn generate_tuple(idents: &[Ident]) -> TokenStream { - if idents.len() == 1 { - idents[0].to_token_stream() - } else { - quote!((#(#idents),*)) - } + if idents.len() == 1 { + idents[0].to_token_stream() + } else { + quote!((#(#idents),*)) + } } fn generate_keygen(idents: &[Ident]) -> TokenStream { - if idents.len() == 1 { - let key = &idents[0]; - let hasher = format_ident!("Hasher{}", key); + if idents.len() == 1 { + let key = &idents[0]; + let hasher = format_ident!("Hasher{}", key); - quote!(Key<#hasher, #key>) - } else { - let hashers = idents.iter().map(|ident| format_ident!("Hasher{}", ident)); + quote!(Key<#hasher, #key>) + } else { + let hashers = idents.iter().map(|ident| format_ident!("Hasher{}", ident)); - quote!((#(Key<#hashers, #idents>),*)) - } + quote!((#(Key<#hashers, #idents>),*)) + } } diff --git a/support/procedural-fork/src/match_and_insert.rs b/support/procedural-fork/src/match_and_insert.rs index a80b6e95f..aa9cc56d7 100644 --- a/support/procedural-fork/src/match_and_insert.rs +++ b/support/procedural-fork/src/match_and_insert.rs @@ -22,152 +22,138 @@ use std::iter::once; use syn::spanned::Spanned; mod keyword { 
- syn::custom_keyword!(target); - syn::custom_keyword!(pattern); - syn::custom_keyword!(tokens); + syn::custom_keyword!(target); + syn::custom_keyword!(pattern); + syn::custom_keyword!(tokens); } pub fn match_and_insert(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let MatchAndInsertDef { - pattern, - tokens, - target, - } = syn::parse_macro_input!(input as MatchAndInsertDef); - - match expand_in_stream(&pattern, &mut Some(tokens), target) { - Ok(stream) => stream.into(), - Err(err) => err.to_compile_error().into(), - } + let MatchAndInsertDef { pattern, tokens, target } = + syn::parse_macro_input!(input as MatchAndInsertDef); + + match expand_in_stream(&pattern, &mut Some(tokens), target) { + Ok(stream) => stream.into(), + Err(err) => err.to_compile_error().into(), + } } struct MatchAndInsertDef { - // Token stream to search and insert tokens into. - target: TokenStream, - // Pattern to match against, this is ensured to have no TokenTree::Group nor TokenTree::Literal - // (i.e. contains only Punct or Ident), and not being empty. - pattern: Vec, - // Token stream to insert after the match pattern. - tokens: TokenStream, + // Token stream to search and insert tokens into. + target: TokenStream, + // Pattern to match against, this is ensured to have no TokenTree::Group nor TokenTree::Literal + // (i.e. contains only Punct or Ident), and not being empty. + pattern: Vec, + // Token stream to insert after the match pattern. 
+ tokens: TokenStream, } impl syn::parse::Parse for MatchAndInsertDef { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut target; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(target in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(target in target); - let target = target.parse()?; - - let mut pattern; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(pattern in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(pattern in pattern); - let pattern = pattern - .parse::()? - .into_iter() - .collect::>(); - - if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Group(_))) { - return Err(syn::Error::new(t.span(), "Unexpected group token tree")); - } - if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Literal(_))) { - return Err(syn::Error::new(t.span(), "Unexpected literal token tree")); - } - - if pattern.is_empty() { - return Err(syn::Error::new( - Span::call_site(), - "empty match pattern is invalid", - )); - } - - let mut tokens; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(tokens in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(tokens in tokens); - let tokens = tokens.parse()?; - - Ok(Self { - tokens, - pattern, - target, - }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut target; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(target in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(target in target); + let target = target.parse()?; + + let mut pattern; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(pattern in input); + let 
_replace_with_brace: syn::token::Brace = syn::braced!(pattern in pattern); + let pattern = pattern.parse::()?.into_iter().collect::>(); + + if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Group(_))) { + return Err(syn::Error::new(t.span(), "Unexpected group token tree")) + } + if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Literal(_))) { + return Err(syn::Error::new(t.span(), "Unexpected literal token tree")) + } + + if pattern.is_empty() { + return Err(syn::Error::new(Span::call_site(), "empty match pattern is invalid")) + } + + let mut tokens; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(tokens in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(tokens in tokens); + let tokens = tokens.parse()?; + + Ok(Self { tokens, pattern, target }) + } } // Insert `tokens` after the first matching `pattern`. // `tokens` must be some (Option is used for internal simplification). // `pattern` must not be empty and should only contain Ident or Punct. 
fn expand_in_stream( - pattern: &[TokenTree], - tokens: &mut Option, - stream: TokenStream, + pattern: &[TokenTree], + tokens: &mut Option, + stream: TokenStream, ) -> syn::Result { - assert!( - tokens.is_some(), - "`tokens` must be some, Option is used because `tokens` is used only once" - ); - assert!( - !pattern.is_empty(), - "`pattern` must not be empty, otherwise there is nothing to match against" - ); - - let stream_span = stream.span(); - let mut stream = stream.into_iter(); - let mut extended = TokenStream::new(); - let mut match_cursor = 0; - - while let Some(token) = stream.next() { - match token { - TokenTree::Group(group) => { - match_cursor = 0; - let group_stream = group.stream(); - match expand_in_stream(pattern, tokens, group_stream) { - Ok(s) => { - extended.extend(once(TokenTree::Group(Group::new(group.delimiter(), s)))); - extended.extend(stream); - return Ok(extended); - } - Err(_) => { - extended.extend(once(TokenTree::Group(group))); - } - } - } - other => { - advance_match_cursor(&other, pattern, &mut match_cursor); - - extended.extend(once(other)); - - if match_cursor == pattern.len() { - extended.extend(once( - tokens.take().expect("tokens is used to replace only once"), - )); - extended.extend(stream); - return Ok(extended); - } - } - } - } - // if we reach this point, it means the stream is empty and we haven't found a matching pattern - let msg = format!("Cannot find pattern `{:?}` in given token stream", pattern); - Err(syn::Error::new(stream_span, msg)) + assert!( + tokens.is_some(), + "`tokens` must be some, Option is used because `tokens` is used only once" + ); + assert!( + !pattern.is_empty(), + "`pattern` must not be empty, otherwise there is nothing to match against" + ); + + let stream_span = stream.span(); + let mut stream = stream.into_iter(); + let mut extended = TokenStream::new(); + let mut match_cursor = 0; + + while let Some(token) = stream.next() { + match token { + TokenTree::Group(group) => { + match_cursor = 0; + let 
group_stream = group.stream(); + match expand_in_stream(pattern, tokens, group_stream) { + Ok(s) => { + extended.extend(once(TokenTree::Group(Group::new(group.delimiter(), s)))); + extended.extend(stream); + return Ok(extended) + }, + Err(_) => { + extended.extend(once(TokenTree::Group(group))); + }, + } + }, + other => { + advance_match_cursor(&other, pattern, &mut match_cursor); + + extended.extend(once(other)); + + if match_cursor == pattern.len() { + extended + .extend(once(tokens.take().expect("tokens is used to replace only once"))); + extended.extend(stream); + return Ok(extended) + } + }, + } + } + // if we reach this point, it means the stream is empty and we haven't found a matching pattern + let msg = format!("Cannot find pattern `{:?}` in given token stream", pattern); + Err(syn::Error::new(stream_span, msg)) } fn advance_match_cursor(other: &TokenTree, pattern: &[TokenTree], match_cursor: &mut usize) { - use TokenTree::{Ident, Punct}; - - let does_match_other_pattern = match (other, &pattern[*match_cursor]) { - (Ident(i1), Ident(i2)) => i1 == i2, - (Punct(p1), Punct(p2)) => p1.as_char() == p2.as_char(), - _ => false, - }; - - if does_match_other_pattern { - *match_cursor += 1; - } else { - *match_cursor = 0; - } + use TokenTree::{Ident, Punct}; + + let does_match_other_pattern = match (other, &pattern[*match_cursor]) { + (Ident(i1), Ident(i2)) => i1 == i2, + (Punct(p1), Punct(p2)) => p1.as_char() == p2.as_char(), + _ => false, + }; + + if does_match_other_pattern { + *match_cursor += 1; + } else { + *match_cursor = 0; + } } diff --git a/support/procedural-fork/src/no_bound/clone.rs b/support/procedural-fork/src/no_bound/clone.rs index 17039bdc8..346bf450f 100644 --- a/support/procedural-fork/src/no_bound/clone.rs +++ b/support/procedural-fork/src/no_bound/clone.rs @@ -19,93 +19,89 @@ use syn::spanned::Spanned; /// Derive Clone but do not bound any generic. 
pub fn derive_clone_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named.named.iter().map(|i| &i.ident).map(|i| { - quote::quote_spanned!(i.span() => - #i: ::core::clone::Clone::clone(&self.#i) - ) - }); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named.named.iter().map(|i| &i.ident).map(|i| { + quote::quote_spanned!(i.span() => + #i: ::core::clone::Clone::clone(&self.#i) + ) + }); - quote::quote!( Self { #( #fields, )* } ) - } - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| { - quote::quote_spanned!(i.span() => - ::core::clone::Clone::clone(&self.#i) - ) - }); + quote::quote!( Self { #( #fields, )* } ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = + unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map(|i| { + quote::quote_spanned!(i.span() => + ::core::clone::Clone::clone(&self.#i) + ) + }); - quote::quote!( Self ( #( #fields, )* ) ) - } - syn::Fields::Unit => { - quote::quote!(Self) - } - }, - syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - match &variant.fields { - syn::Fields::Named(named) => { - let captured = named.named.iter().map(|i| &i.ident); - let cloned = captured.clone().map(|i| { - ::quote::quote_spanned!(i.span() => - #i: ::core::clone::Clone::clone(#i) - ) - }); - 
quote::quote!( - Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} - ) - } - syn::Fields::Unnamed(unnamed) => { - let captured = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let cloned = captured.clone().map(|i| { - quote::quote_spanned!(i.span() => - ::core::clone::Clone::clone(#i) - ) - }); - quote::quote!( - Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) - ) - } - syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), - } - }); + quote::quote!( Self ( #( #fields, )* ) ) + }, + syn::Fields::Unit => { + quote::quote!(Self) + }, + }, + syn::Data::Enum(enum_) => { + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let cloned = captured.clone().map(|i| { + ::quote::quote_spanned!(i.span() => + #i: ::core::clone::Clone::clone(#i) + ) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} + ) + }, + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let cloned = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + ::core::clone::Clone::clone(#i) + ) + }); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) + ) + }, + syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), + } + }); - quote::quote!(match self { - #( #variants, )* - }) - } - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(CloneNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into(); - } - }; + quote::quote!(match self { + #( #variants, )* + }) + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(CloneNoBound)`"; + return 
syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::clone::Clone for #name #ty_generics #where_clause { - fn clone(&self) -> Self { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::clone::Clone for #name #ty_generics #where_clause { + fn clone(&self) -> Self { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/debug.rs b/support/procedural-fork/src/no_bound/debug.rs index 8034bb5ec..a1b3f4f0d 100644 --- a/support/procedural-fork/src/no_bound/debug.rs +++ b/support/procedural-fork/src/no_bound/debug.rs @@ -19,103 +19,103 @@ use syn::spanned::Spanned; /// Derive Debug but do not bound any generics. pub fn derive_debug_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let input_ident = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let input_ident = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = - named.named.iter().map(|i| &i.ident).map( - |i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) ), - ); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = + named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) ), + ); - quote::quote!( - fmt.debug_struct(stringify!(#input_ident)) - #( #fields )* - .finish() - ) - } - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - 
.enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => .field(&self.#i) )); + quote::quote!( + fmt.debug_struct(stringify!(#input_ident)) + #( #fields )* + .finish() + ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => .field(&self.#i) )); - quote::quote!( - fmt.debug_tuple(stringify!(#input_ident)) - #( #fields )* - .finish() - ) - } - syn::Fields::Unit => quote::quote!(fmt.write_str(stringify!(#input_ident))), - }, - syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - let full_variant_str = format!("{}::{}", input_ident, ident); - match &variant.fields { - syn::Fields::Named(named) => { - let captured = named.named.iter().map(|i| &i.ident); - let debugged = captured.clone().map(|i| { - quote::quote_spanned!(i.span() => - .field(stringify!(#i), &#i) - ) - }); - quote::quote!( - Self::#ident { #( ref #captured, )* } => { - fmt.debug_struct(#full_variant_str) - #( #debugged )* - .finish() - } - ) - } - syn::Fields::Unnamed(unnamed) => { - let captured = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let debugged = captured - .clone() - .map(|i| quote::quote_spanned!(i.span() => .field(&#i))); - quote::quote!( - Self::#ident ( #( ref #captured, )* ) => { - fmt.debug_tuple(#full_variant_str) - #( #debugged )* - .finish() - } - ) - } - syn::Fields::Unit => quote::quote!( - Self::#ident => fmt.write_str(#full_variant_str) - ), - } - }); + quote::quote!( + fmt.debug_tuple(stringify!(#input_ident)) + #( #fields )* + .finish() + ) + }, + syn::Fields::Unit => quote::quote!(fmt.write_str(stringify!(#input_ident))), + }, + syn::Data::Enum(enum_) => { + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + let full_variant_str = format!("{}::{}", 
input_ident, ident); + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let debugged = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + .field(stringify!(#i), &#i) + ) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => { + fmt.debug_struct(#full_variant_str) + #( #debugged )* + .finish() + } + ) + }, + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let debugged = captured + .clone() + .map(|i| quote::quote_spanned!(i.span() => .field(&#i))); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => { + fmt.debug_tuple(#full_variant_str) + #( #debugged )* + .finish() + } + ) + }, + syn::Fields::Unit => quote::quote!( + Self::#ident => fmt.write_str(#full_variant_str) + ), + } + }); - quote::quote!(match *self { - #( #variants, )* - }) - } - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(DebugNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into(); - } - }; + quote::quote!(match *self { + #( #variants, )* + }) + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(DebugNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::fmt::Debug for #input_ident #ty_generics #where_clause { - fn fmt(&self, fmt: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::fmt::Debug for #input_ident #ty_generics #where_clause { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/default.rs b/support/procedural-fork/src/no_bound/default.rs 
index cb054878d..0524247d2 100644 --- a/support/procedural-fork/src/no_bound/default.rs +++ b/support/procedural-fork/src/no_bound/default.rs @@ -21,150 +21,141 @@ use syn::{spanned::Spanned, Data, DeriveInput, Fields}; /// Derive Default but do not bound any generic. pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as DeriveInput); - - let name = &input.ident; - - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - - let impl_ = match input.data { - Data::Struct(struct_) => match struct_.fields { - Fields::Named(named) => { - let fields = named.named.iter().map(|field| &field.ident).map(|ident| { - quote_spanned! {ident.span() => - #ident: ::core::default::Default::default() - } - }); - - quote!(Self { #( #fields, )* }) - } - Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().map(|field| { - quote_spanned! {field.span()=> - ::core::default::Default::default() - } - }); - - quote!(Self( #( #fields, )* )) - } - Fields::Unit => { - quote!(Self) - } - }, - Data::Enum(enum_) => { - if enum_.variants.is_empty() { - return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") - .to_compile_error() - .into(); - } - - // all #[default] attrs with the variant they're on; i.e. 
a var - let default_variants = enum_ - .variants - .into_iter() - .filter(|variant| { - variant - .attrs - .iter() - .any(|attr| attr.path().is_ident("default")) - }) - .collect::>(); - - match &*default_variants { - [] => return syn::Error::new( - name.clone().span(), - "no default declared, make a variant default by placing `#[default]` above it", - ) - .into_compile_error() - .into(), - // only one variant with the #[default] attribute set - [default_variant] => { - let variant_attrs = default_variant - .attrs - .iter() - .filter(|a| a.path().is_ident("default")) - .collect::>(); - - // check that there is only one #[default] attribute on the variant - if let [first_attr, second_attr, additional_attrs @ ..] = &*variant_attrs { - let mut err = - syn::Error::new(Span::call_site(), "multiple `#[default]` attributes"); - - err.combine(syn::Error::new_spanned( - first_attr, - "`#[default]` used here", - )); - - err.extend([second_attr].into_iter().chain(additional_attrs).map( - |variant| { - syn::Error::new_spanned(variant, "`#[default]` used again here") - }, - )); - - return err.into_compile_error().into(); - } - - let variant_ident = &default_variant.ident; - - let fully_qualified_variant_path = quote!(Self::#variant_ident); - - match &default_variant.fields { - Fields::Named(named) => { - let fields = - named.named.iter().map(|field| &field.ident).map(|ident| { - quote_spanned! {ident.span()=> - #ident: ::core::default::Default::default() - } - }); - - quote!(#fully_qualified_variant_path { #( #fields, )* }) - } - Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().map(|field| { - quote_spanned! {field.span()=> - ::core::default::Default::default() - } - }); - - quote!(#fully_qualified_variant_path( #( #fields, )* )) - } - Fields::Unit => fully_qualified_variant_path, - } - } - [first, additional @ ..] 
=> { - let mut err = syn::Error::new(Span::call_site(), "multiple declared defaults"); - - err.combine(syn::Error::new_spanned(first, "first default")); - - err.extend( - additional - .iter() - .map(|variant| syn::Error::new_spanned(variant, "additional default")), - ); - - return err.into_compile_error().into(); - } - } - } - Data::Union(union_) => { - return syn::Error::new_spanned( - union_.union_token, - "Union type not supported by `derive(DefaultNoBound)`", - ) - .to_compile_error() - .into() - } - }; - - quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::default::Default for #name #ty_generics #where_clause { - fn default() -> Self { - #impl_ - } - } - }; - ) - .into() + let input = syn::parse_macro_input!(input as DeriveInput); + + let name = &input.ident; + + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = match input.data { + Data::Struct(struct_) => match struct_.fields { + Fields::Named(named) => { + let fields = named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! {ident.span() => + #ident: ::core::default::Default::default() + } + }); + + quote!(Self { #( #fields, )* }) + }, + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! {field.span()=> + ::core::default::Default::default() + } + }); + + quote!(Self( #( #fields, )* )) + }, + Fields::Unit => { + quote!(Self) + }, + }, + Data::Enum(enum_) => { + if enum_.variants.is_empty() { + return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") + .to_compile_error() + .into() + } + + // all #[default] attrs with the variant they're on; i.e. 
a var + let default_variants = enum_ + .variants + .into_iter() + .filter(|variant| variant.attrs.iter().any(|attr| attr.path().is_ident("default"))) + .collect::>(); + + match &*default_variants { + [] => return syn::Error::new( + name.clone().span(), + "no default declared, make a variant default by placing `#[default]` above it", + ) + .into_compile_error() + .into(), + // only one variant with the #[default] attribute set + [default_variant] => { + let variant_attrs = default_variant + .attrs + .iter() + .filter(|a| a.path().is_ident("default")) + .collect::>(); + + // check that there is only one #[default] attribute on the variant + if let [first_attr, second_attr, additional_attrs @ ..] = &*variant_attrs { + let mut err = + syn::Error::new(Span::call_site(), "multiple `#[default]` attributes"); + + err.combine(syn::Error::new_spanned(first_attr, "`#[default]` used here")); + + err.extend([second_attr].into_iter().chain(additional_attrs).map( + |variant| { + syn::Error::new_spanned(variant, "`#[default]` used again here") + }, + )); + + return err.into_compile_error().into() + } + + let variant_ident = &default_variant.ident; + + let fully_qualified_variant_path = quote!(Self::#variant_ident); + + match &default_variant.fields { + Fields::Named(named) => { + let fields = + named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! {ident.span()=> + #ident: ::core::default::Default::default() + } + }); + + quote!(#fully_qualified_variant_path { #( #fields, )* }) + }, + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! {field.span()=> + ::core::default::Default::default() + } + }); + + quote!(#fully_qualified_variant_path( #( #fields, )* )) + }, + Fields::Unit => fully_qualified_variant_path, + } + }, + [first, additional @ ..] 
=> { + let mut err = syn::Error::new(Span::call_site(), "multiple declared defaults"); + + err.combine(syn::Error::new_spanned(first, "first default")); + + err.extend( + additional + .into_iter() + .map(|variant| syn::Error::new_spanned(variant, "additional default")), + ); + + return err.into_compile_error().into() + }, + } + }, + Data::Union(union_) => + return syn::Error::new_spanned( + union_.union_token, + "Union type not supported by `derive(DefaultNoBound)`", + ) + .to_compile_error() + .into(), + }; + + quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::default::Default for #name #ty_generics #where_clause { + fn default() -> Self { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/ord.rs b/support/procedural-fork/src/no_bound/ord.rs index 20f30eb9d..b24d27c04 100644 --- a/support/procedural-fork/src/no_bound/ord.rs +++ b/support/procedural-fork/src/no_bound/ord.rs @@ -19,57 +19,57 @@ use syn::spanned::Spanned; /// Derive Ord but do not bound any generic. 
pub fn derive_ord_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input: syn::DeriveInput = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; + let input: syn::DeriveInput = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named - .named - .iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named + .named + .iter() + .map(|i| &i.ident) + .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); - quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) - } - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); + quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); - quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) - } - syn::Fields::Unit => { - quote::quote!(core::cmp::Ordering::Equal) - } - }, - syn::Data::Enum(_) => { - let msg = "Enum type not supported by `derive(OrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into(); - } - syn::Data::Union(_) => 
{ - let msg = "Union type not supported by `derive(OrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into(); - } - }; + quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) + }, + syn::Fields::Unit => { + quote::quote!(core::cmp::Ordering::Equal) + }, + }, + syn::Data::Enum(_) => { + let msg = "Enum type not supported by `derive(OrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(OrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; - quote::quote!( - const _: () = { - impl #impl_generics core::cmp::Ord for #name #ty_generics #where_clause { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + impl #impl_generics core::cmp::Ord for #name #ty_generics #where_clause { + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/partial_eq.rs b/support/procedural-fork/src/no_bound/partial_eq.rs index 8833f6e5f..a1be71a96 100644 --- a/support/procedural-fork/src/no_bound/partial_eq.rs +++ b/support/procedural-fork/src/no_bound/partial_eq.rs @@ -19,119 +19,119 @@ use syn::spanned::Spanned; /// Derive PartialEq but do not bound any generic. 
pub fn derive_partial_eq_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named - .named - .iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named + .named + .iter() + .map(|i| &i.ident) + .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); - quote::quote!( true #( && #fields )* ) - } - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); + quote::quote!( true #( && #fields )* ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); - quote::quote!( true #( && #fields )* ) - } - syn::Fields::Unit => { - quote::quote!(true) - } - }, - syn::Data::Enum(enum_) => { - let variants = - enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - match &variant.fields { - syn::Fields::Named(named) => { - let names = named.named.iter().map(|i| &i.ident); - let other_names = names.clone().enumerate().map(|(n, ident)| { - syn::Ident::new(&format!("_{}", n), ident.span()) - }); + quote::quote!( true #( && #fields )* ) + }, + syn::Fields::Unit => { + quote::quote!(true) + }, + 
}, + syn::Data::Enum(enum_) => { + let variants = + enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let names = named.named.iter().map(|i| &i.ident); + let other_names = names.clone().enumerate().map(|(n, ident)| { + syn::Ident::new(&format!("_{}", n), ident.span()) + }); - let capture = names.clone(); - let other_capture = names - .clone() - .zip(other_names.clone()) - .map(|(i, other_i)| quote::quote!(#i: #other_i)); - let eq = names.zip(other_names).map( - |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), - ); - quote::quote!( - ( - Self::#ident { #( #capture, )* }, - Self::#ident { #( #other_capture, )* }, - ) => true #( && #eq )* - ) - } - syn::Fields::Unnamed(unnamed) => { - let names = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let other_names = - unnamed.unnamed.iter().enumerate().map(|(i, f)| { - syn::Ident::new(&format!("_{}_other", i), f.span()) - }); - let eq = names.clone().zip(other_names.clone()).map( - |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), - ); - quote::quote!( - ( - Self::#ident ( #( #names, )* ), - Self::#ident ( #( #other_names, )* ), - ) => true #( && #eq )* - ) - } - syn::Fields::Unit => quote::quote!( (Self::#ident, Self::#ident) => true ), - } - }); + let capture = names.clone(); + let other_capture = names + .clone() + .zip(other_names.clone()) + .map(|(i, other_i)| quote::quote!(#i: #other_i)); + let eq = names.zip(other_names).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); + quote::quote!( + ( + Self::#ident { #( #capture, )* }, + Self::#ident { #( #other_capture, )* }, + ) => true #( && #eq )* + ) + }, + syn::Fields::Unnamed(unnamed) => { + let names = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let other_names = + 
unnamed.unnamed.iter().enumerate().map(|(i, f)| { + syn::Ident::new(&format!("_{}_other", i), f.span()) + }); + let eq = names.clone().zip(other_names.clone()).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); + quote::quote!( + ( + Self::#ident ( #( #names, )* ), + Self::#ident ( #( #other_names, )* ), + ) => true #( && #eq )* + ) + }, + syn::Fields::Unit => quote::quote!( (Self::#ident, Self::#ident) => true ), + } + }); - let mut different_variants = vec![]; - for (i, i_variant) in enum_.variants.iter().enumerate() { - for (j, j_variant) in enum_.variants.iter().enumerate() { - if i != j { - let i_ident = &i_variant.ident; - let j_ident = &j_variant.ident; - different_variants.push(quote::quote!( - (Self::#i_ident { .. }, Self::#j_ident { .. }) => false - )) - } - } - } + let mut different_variants = vec![]; + for (i, i_variant) in enum_.variants.iter().enumerate() { + for (j, j_variant) in enum_.variants.iter().enumerate() { + if i != j { + let i_ident = &i_variant.ident; + let j_ident = &j_variant.ident; + different_variants.push(quote::quote!( + (Self::#i_ident { .. }, Self::#j_ident { .. 
}) => false + )) + } + } + } - quote::quote!( match (self, other) { - #( #variants, )* - #( #different_variants, )* - }) - } - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(PartialEqNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into(); - } - }; + quote::quote!( match (self, other) { + #( #variants, )* + #( #different_variants, )* + }) + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(PartialEqNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause { - fn eq(&self, other: &Self) -> bool { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause { + fn eq(&self, other: &Self) -> bool { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/partial_ord.rs b/support/procedural-fork/src/no_bound/partial_ord.rs index c73199d4e..86aa42be9 100644 --- a/support/procedural-fork/src/no_bound/partial_ord.rs +++ b/support/procedural-fork/src/no_bound/partial_ord.rs @@ -19,72 +19,71 @@ use syn::spanned::Spanned; /// Derive PartialOrd but do not bound any generic. 
pub fn derive_partial_ord_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input: syn::DeriveInput = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; + let input: syn::DeriveInput = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = - match input.data { - syn::Data::Struct(struct_) => { - match struct_.fields { - syn::Fields::Named(named) => { - let fields = named.named.iter().map(|i| &i.ident).map( - |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), - ); + let impl_ = match input.data { + syn::Data::Struct(struct_) => + match struct_.fields { + syn::Fields::Named(named) => { + let fields = + named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), + ); - quote::quote!( - Some(core::cmp::Ordering::Equal) - #( - .and_then(|order| { - let next_order = #fields?; - Some(order.then(next_order)) - }) - )* - ) - } - syn::Fields::Unnamed(unnamed) => { - let fields = + quote::quote!( + Some(core::cmp::Ordering::Equal) + #( + .and_then(|order| { + let next_order = #fields?; + Some(order.then(next_order)) + }) + )* + ) + }, + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map( |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), ); - quote::quote!( - Some(core::cmp::Ordering::Equal) - #( - .and_then(|order| { - let next_order = #fields?; - Some(order.then(next_order)) - }) - )* - ) - } - syn::Fields::Unit => { - quote::quote!(Some(core::cmp::Ordering::Equal)) - } - } - } - syn::Data::Enum(_) => { - let msg = "Enum type not supported by 
`derive(PartialOrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into(); - } - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(PartialOrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into(); - } - }; + quote::quote!( + Some(core::cmp::Ordering::Equal) + #( + .and_then(|order| { + let next_order = #fields?; + Some(order.then(next_order)) + }) + )* + ) + }, + syn::Fields::Unit => { + quote::quote!(Some(core::cmp::Ordering::Equal)) + }, + }, + syn::Data::Enum(_) => { + let msg = "Enum type not supported by `derive(PartialOrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(PartialOrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into() + }, + }; - quote::quote!( - const _: () = { - impl #impl_generics core::cmp::PartialOrd for #name #ty_generics #where_clause { - fn partial_cmp(&self, other: &Self) -> Option { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + impl #impl_generics core::cmp::PartialOrd for #name #ty_generics #where_clause { + fn partial_cmp(&self, other: &Self) -> Option { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/pallet/expand/call.rs b/support/procedural-fork/src/pallet/expand/call.rs index a39e81fd1..f395872c8 100644 --- a/support/procedural-fork/src/pallet/expand/call.rs +++ b/support/procedural-fork/src/pallet/expand/call.rs @@ -16,12 +16,12 @@ // limitations under the License. 
use crate::{ - pallet::{ - expand::warnings::{weight_constant_warning, weight_witness_warning}, - parse::call::CallWeightDef, - Def, - }, - COUNTER, + pallet::{ + expand::warnings::{weight_constant_warning, weight_witness_warning}, + parse::call::CallWeightDef, + Def, + }, + COUNTER, }; use proc_macro2::TokenStream as TokenStream2; use proc_macro_warning::Warning; @@ -32,56 +32,45 @@ use syn::spanned::Spanned; /// * Generate enum call and implement various trait on it. /// * Implement Callable and call_function on `Pallet` pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { - let (span, where_clause, methods, docs) = match def.call.as_ref() { - Some(call) => { - let span = call.attr_span; - let where_clause = call.where_clause.clone(); - let methods = call.methods.clone(); - let docs = call.docs.clone(); - - (span, where_clause, methods, docs) - } - None => ( - def.item.span(), - def.config.where_clause.clone(), - Vec::new(), - Vec::new(), - ), - }; - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_generics(span); - let type_decl_bounded_gen = &def.type_decl_bounded_generics(span); - let type_use_gen = &def.type_use_generics(span); - let call_ident = syn::Ident::new("Call", span); - let pallet_ident = &def.pallet_struct.pallet; - - let fn_name = methods - .iter() - .map(|method| &method.name) - .collect::>(); - let call_index = methods - .iter() - .map(|method| method.call_index) - .collect::>(); - let new_call_variant_fn_name = fn_name - .iter() - .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) - .collect::>(); - - let new_call_variant_doc = fn_name - .iter() - .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) - .collect::>(); - - let mut call_index_warnings = Vec::new(); - // Emit a warning for each call that is missing `call_index` when not in dev-mode. 
- for method in &methods { - if method.explicit_call_index || def.dev_mode { - continue; - } - - let warning = Warning::new_deprecated("ImplicitCallIndex") + let (span, where_clause, methods, docs) = match def.call.as_ref() { + Some(call) => { + let span = call.attr_span; + let where_clause = call.where_clause.clone(); + let methods = call.methods.clone(); + let docs = call.docs.clone(); + + (span, where_clause, methods, docs) + }, + None => (def.item.span(), def.config.where_clause.clone(), Vec::new(), Vec::new()), + }; + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let type_impl_gen = &def.type_impl_generics(span); + let type_decl_bounded_gen = &def.type_decl_bounded_generics(span); + let type_use_gen = &def.type_use_generics(span); + let call_ident = syn::Ident::new("Call", span); + let pallet_ident = &def.pallet_struct.pallet; + + let fn_name = methods.iter().map(|method| &method.name).collect::>(); + let call_index = methods.iter().map(|method| method.call_index).collect::>(); + let new_call_variant_fn_name = fn_name + .iter() + .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) + .collect::>(); + + let new_call_variant_doc = fn_name + .iter() + .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) + .collect::>(); + + let mut call_index_warnings = Vec::new(); + // Emit a warning for each call that is missing `call_index` when not in dev-mode. 
+ for method in &methods { + if method.explicit_call_index || def.dev_mode { + continue + } + + let warning = Warning::new_deprecated("ImplicitCallIndex") .index(call_index_warnings.len()) .old("use implicit call indices") .new("ensure that all calls have a `pallet::call_index` attribute or put the pallet into `dev` mode") @@ -91,408 +80,373 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { ]) .span(method.name.span()) .build_or_panic(); - call_index_warnings.push(warning); - } - - let mut fn_weight = Vec::::new(); - let mut weight_warnings = Vec::new(); - for method in &methods { - match &method.weight { - CallWeightDef::DevModeDefault => fn_weight.push(syn::parse_quote!(0)), - CallWeightDef::Immediate(e) => { - weight_constant_warning(e, def.dev_mode, &mut weight_warnings); - weight_witness_warning(method, def.dev_mode, &mut weight_warnings); - - fn_weight.push(e.into_token_stream()); - } - CallWeightDef::Inherited => { - let pallet_weight = def - .call - .as_ref() - .expect("we have methods; we have calls; qed") - .inherited_call_weight - .as_ref() - .expect("the parser prevents this"); - - // Expand `<::WeightInfo>::call_name()`. 
- let t = &pallet_weight.typename; - let n = &method.name; - fn_weight.push(quote!({ < #t > :: #n () })); - } - } - } - debug_assert_eq!(fn_weight.len(), methods.len()); - - let fn_doc = methods - .iter() - .map(|method| &method.docs) - .collect::>(); - - let args_name = methods - .iter() - .map(|method| { - method - .args - .iter() - .map(|(_, name, _)| name.clone()) - .collect::>() - }) - .collect::>(); - - let args_name_stripped = methods - .iter() - .map(|method| { - method - .args - .iter() - .map(|(_, name, _)| { - syn::Ident::new(name.to_string().trim_start_matches('_'), name.span()) - }) - .collect::>() - }) - .collect::>(); - - let make_args_name_pattern = |ref_tok| { - args_name - .iter() - .zip(args_name_stripped.iter()) - .map(|(args_name, args_name_stripped)| { - args_name - .iter() - .zip(args_name_stripped) - .map(|(args_name, args_name_stripped)| { - if args_name == args_name_stripped { - quote::quote!( #ref_tok #args_name ) - } else { - quote::quote!( #args_name_stripped: #ref_tok #args_name ) - } - }) - .collect::>() - }) - .collect::>() - }; - - let args_name_pattern = make_args_name_pattern(None); - let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); - - let args_type = methods - .iter() - .map(|method| { - method - .args - .iter() - .map(|(_, _, type_)| type_.clone()) - .collect::>() - }) - .collect::>(); - - let args_compact_attr = methods.iter().map(|method| { - method - .args - .iter() - .map(|(is_compact, _, type_)| { - if *is_compact { - quote::quote_spanned!(type_.span() => #[codec(compact)] ) - } else { - quote::quote!() - } - }) - .collect::>() - }); - - let default_docs = [syn::parse_quote!( - r"Contains a variant per dispatchable extrinsic that this pallet has." - )]; - let docs = if docs.is_empty() { - &default_docs[..] - } else { - &docs[..] - }; - - let maybe_compile_error = if def.call.is_none() { - quote::quote! 
{ - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::call] defined, perhaps you should remove `Call` from \ - construct_runtime?", - )); - } - } else { - proc_macro2::TokenStream::new() - }; - - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = syn::Ident::new(&format!("__is_call_part_defined_{}", count), span); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { - "never" - } else { - "always" - }; - - // Wrap all calls inside of storage layers - if let Some(syn::Item::Impl(item_impl)) = def - .call - .as_ref() - .map(|c| &mut def.item.content.as_mut().expect("Checked by def parser").1[c.index]) - { - item_impl.items.iter_mut().for_each(|i| { - if let syn::ImplItem::Fn(method) = i { - let block = &method.block; - method.block = syn::parse_quote! {{ - // We execute all dispatchable in a new storage layer, allowing them - // to return an error at any point, and undoing any storage changes. - #frame_support::storage::with_storage_layer(|| #block) - }}; - } - }); - } - - // Extracts #[allow] attributes, necessary so that we don't run into compiler warnings - let maybe_allow_attrs = methods - .iter() - .map(|method| { - method - .attrs - .iter() - .find(|attr| attr.path().is_ident("allow")) - .map_or(proc_macro2::TokenStream::new(), |attr| { - attr.to_token_stream() - }) - }) - .collect::>(); - - let cfg_attrs = methods - .iter() - .map(|method| { - let attrs = method - .cfg_attrs - .iter() - .map(|attr| attr.to_token_stream()) - .collect::>(); - quote::quote!( #( #attrs )* ) - }) - .collect::>(); - - let feeless_check = methods - .iter() - .map(|method| &method.feeless_check) - .collect::>(); - let feeless_check_result = - feeless_check - .iter() - .zip(args_name.iter()) - .map(|(feeless_check, arg_name)| { - if let Some(feeless_check) = feeless_check { - quote::quote!(#feeless_check(origin, #( #arg_name, )*)) - } else { - quote::quote!(false) - } - }); - - quote::quote_spanned!(span 
=> - #[doc(hidden)] - mod warnings { - #( - #call_index_warnings - )* - #( - #weight_warnings - )* - } - - #[allow(unused_imports)] - #[doc(hidden)] - pub mod __substrate_call_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - }; - } - - #[doc(hidden)] - pub use #macro_ident as is_call_part_defined; - } - - #( #[doc = #docs] )* - #[derive( - #frame_support::RuntimeDebugNoBound, - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - )] - #[codec(encode_bound())] - #[codec(decode_bound())] - #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] - #[allow(non_camel_case_types)] - pub enum #call_ident<#type_decl_bounded_gen> #where_clause { - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData<(#type_use_gen,)>, - #frame_support::Never, - ), - #( - #cfg_attrs - #( #[doc = #fn_doc] )* - #[codec(index = #call_index)] - #fn_name { - #( - #[allow(missing_docs)] - #args_compact_attr #args_name_stripped: #args_type - ),* - }, - )* - } - - impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { - #( - #cfg_attrs - #[doc = #new_call_variant_doc] - pub fn #new_call_variant_fn_name( - #( #args_name_stripped: #args_type ),* - ) -> Self { - Self::#fn_name { - #( #args_name_stripped ),* - } - } - )* - } - - impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo - for #call_ident<#type_use_gen> - #where_clause - { - fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { - match *self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern_ref, )* } => { - let __pallet_base_weight = #fn_weight; - - let __pallet_weight = < - dyn #frame_support::dispatch::WeighData<( #( & #args_type, )* )> - >::weigh_data(&__pallet_base_weight, ( #( #args_name, )* )); 
- - let __pallet_class = < - dyn #frame_support::dispatch::ClassifyDispatch< - ( #( & #args_type, )* ) - > - >::classify_dispatch(&__pallet_base_weight, ( #( #args_name, )* )); - - let __pallet_pays_fee = < - dyn #frame_support::dispatch::PaysFee<( #( & #args_type, )* )> - >::pays_fee(&__pallet_base_weight, ( #( #args_name, )* )); - - #frame_support::dispatch::DispatchInfo { - weight: __pallet_weight, - class: __pallet_class, - pays_fee: __pallet_pays_fee, - } - }, - )* - Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), - } - } - } - - impl<#type_impl_gen> #frame_support::dispatch::CheckIfFeeless for #call_ident<#type_use_gen> - #where_clause - { - type Origin = #frame_system::pallet_prelude::OriginFor; - #[allow(unused_variables)] - fn is_feeless(&self, origin: &Self::Origin) -> bool { - match *self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern_ref, )* } => { - #feeless_check_result - }, - )* - Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), - } - } - } - - impl<#type_impl_gen> #frame_support::traits::GetCallName for #call_ident<#type_use_gen> - #where_clause - { - fn get_call_name(&self) -> &'static str { - match *self { - #( #cfg_attrs Self::#fn_name { .. } => stringify!(#fn_name), )* - Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), - } - } - - fn get_call_names() -> &'static [&'static str] { - &[ #( #cfg_attrs stringify!(#fn_name), )* ] - } - } - - impl<#type_impl_gen> #frame_support::traits::GetCallIndex for #call_ident<#type_use_gen> - #where_clause - { - fn get_call_index(&self) -> u8 { - match *self { - #( #cfg_attrs Self::#fn_name { .. 
} => #call_index, )* - Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), - } - } - - fn get_call_indices() -> &'static [u8] { - &[ #( #cfg_attrs #call_index, )* ] - } - } - - impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable - for #call_ident<#type_use_gen> - #where_clause - { - type RuntimeOrigin = #frame_system::pallet_prelude::OriginFor; - fn dispatch_bypass_filter( - self, - origin: Self::RuntimeOrigin - ) -> #frame_support::dispatch::DispatchResultWithPostInfo { - #frame_support::dispatch_context::run_in_context(|| { - match self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern, )* } => { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!(stringify!(#fn_name)) - ); - #maybe_allow_attrs - <#pallet_ident<#type_use_gen>>::#fn_name(origin, #( #args_name, )* ) - .map(Into::into).map_err(Into::into) - }, - )* - Self::__Ignore(_, _) => { - let _ = origin; // Use origin for empty Call enum - unreachable!("__PhantomItem cannot be used."); - }, - } - }) - } - } - - impl<#type_impl_gen> #frame_support::dispatch::Callable for #pallet_ident<#type_use_gen> - #where_clause - { - type RuntimeCall = #call_ident<#type_use_gen>; - } - - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { - #[allow(dead_code)] - #[doc(hidden)] - pub fn call_functions() -> #frame_support::__private::metadata_ir::PalletCallMetadataIR { - #frame_support::__private::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() - } - } - ) + call_index_warnings.push(warning); + } + + let mut fn_weight = Vec::::new(); + let mut weight_warnings = Vec::new(); + for method in &methods { + match &method.weight { + CallWeightDef::DevModeDefault => fn_weight.push(syn::parse_quote!(0)), + CallWeightDef::Immediate(e) => { + weight_constant_warning(e, def.dev_mode, &mut weight_warnings); + weight_witness_warning(method, def.dev_mode, &mut weight_warnings); + + 
fn_weight.push(e.into_token_stream()); + }, + CallWeightDef::Inherited => { + let pallet_weight = def + .call + .as_ref() + .expect("we have methods; we have calls; qed") + .inherited_call_weight + .as_ref() + .expect("the parser prevents this"); + + // Expand `<::WeightInfo>::call_name()`. + let t = &pallet_weight.typename; + let n = &method.name; + fn_weight.push(quote!({ < #t > :: #n () })); + }, + } + } + debug_assert_eq!(fn_weight.len(), methods.len()); + + let fn_doc = methods.iter().map(|method| &method.docs).collect::>(); + + let args_name = methods + .iter() + .map(|method| method.args.iter().map(|(_, name, _)| name.clone()).collect::>()) + .collect::>(); + + let args_name_stripped = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, name, _)| { + syn::Ident::new(name.to_string().trim_start_matches('_'), name.span()) + }) + .collect::>() + }) + .collect::>(); + + let make_args_name_pattern = |ref_tok| { + args_name + .iter() + .zip(args_name_stripped.iter()) + .map(|(args_name, args_name_stripped)| { + args_name + .iter() + .zip(args_name_stripped) + .map(|(args_name, args_name_stripped)| { + if args_name == args_name_stripped { + quote::quote!( #ref_tok #args_name ) + } else { + quote::quote!( #args_name_stripped: #ref_tok #args_name ) + } + }) + .collect::>() + }) + .collect::>() + }; + + let args_name_pattern = make_args_name_pattern(None); + let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); + + let args_type = methods + .iter() + .map(|method| method.args.iter().map(|(_, _, type_)| type_.clone()).collect::>()) + .collect::>(); + + let args_compact_attr = methods.iter().map(|method| { + method + .args + .iter() + .map(|(is_compact, _, type_)| { + if *is_compact { + quote::quote_spanned!(type_.span() => #[codec(compact)] ) + } else { + quote::quote!() + } + }) + .collect::>() + }); + + let default_docs = + [syn::parse_quote!(r"Contains a variant per dispatchable extrinsic that this pallet has.")]; + let 
docs = if docs.is_empty() { &default_docs[..] } else { &docs[..] }; + + let maybe_compile_error = if def.call.is_none() { + quote::quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::call] defined, perhaps you should remove `Call` from \ + construct_runtime?", + )); + } + } else { + proc_macro2::TokenStream::new() + }; + + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = syn::Ident::new(&format!("__is_call_part_defined_{}", count), span); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; + + // Wrap all calls inside of storage layers + if let Some(syn::Item::Impl(item_impl)) = def + .call + .as_ref() + .map(|c| &mut def.item.content.as_mut().expect("Checked by def parser").1[c.index]) + { + item_impl.items.iter_mut().for_each(|i| { + if let syn::ImplItem::Fn(method) = i { + let block = &method.block; + method.block = syn::parse_quote! {{ + // We execute all dispatchable in a new storage layer, allowing them + // to return an error at any point, and undoing any storage changes. 
+ #frame_support::storage::with_storage_layer(|| #block) + }}; + } + }); + } + + // Extracts #[allow] attributes, necessary so that we don't run into compiler warnings + let maybe_allow_attrs = methods + .iter() + .map(|method| { + method + .attrs + .iter() + .find(|attr| attr.path().is_ident("allow")) + .map_or(proc_macro2::TokenStream::new(), |attr| attr.to_token_stream()) + }) + .collect::>(); + + let cfg_attrs = methods + .iter() + .map(|method| { + let attrs = + method.cfg_attrs.iter().map(|attr| attr.to_token_stream()).collect::>(); + quote::quote!( #( #attrs )* ) + }) + .collect::>(); + + let feeless_check = methods.iter().map(|method| &method.feeless_check).collect::>(); + let feeless_check_result = + feeless_check.iter().zip(args_name.iter()).map(|(feeless_check, arg_name)| { + if let Some(feeless_check) = feeless_check { + quote::quote!(#feeless_check(origin, #( #arg_name, )*)) + } else { + quote::quote!(false) + } + }); + + quote::quote_spanned!(span => + #[doc(hidden)] + mod warnings { + #( + #call_index_warnings + )* + #( + #weight_warnings + )* + } + + #[allow(unused_imports)] + #[doc(hidden)] + pub mod __substrate_call_check { + #[macro_export] + #[doc(hidden)] + macro_rules! 
#macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + }; + } + + #[doc(hidden)] + pub use #macro_ident as is_call_part_defined; + } + + #( #[doc = #docs] )* + #[derive( + #frame_support::RuntimeDebugNoBound, + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] + #[allow(non_camel_case_types)] + pub enum #call_ident<#type_decl_bounded_gen> #where_clause { + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData<(#type_use_gen,)>, + #frame_support::Never, + ), + #( + #cfg_attrs + #( #[doc = #fn_doc] )* + #[codec(index = #call_index)] + #fn_name { + #( + #[allow(missing_docs)] + #args_compact_attr #args_name_stripped: #args_type + ),* + }, + )* + } + + impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { + #( + #cfg_attrs + #[doc = #new_call_variant_doc] + pub fn #new_call_variant_fn_name( + #( #args_name_stripped: #args_type ),* + ) -> Self { + Self::#fn_name { + #( #args_name_stripped ),* + } + } + )* + } + + impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo + for #call_ident<#type_use_gen> + #where_clause + { + fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { + match *self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern_ref, )* } => { + let __pallet_base_weight = #fn_weight; + + let __pallet_weight = < + dyn #frame_support::dispatch::WeighData<( #( & #args_type, )* )> + >::weigh_data(&__pallet_base_weight, ( #( #args_name, )* )); + + let __pallet_class = < + dyn #frame_support::dispatch::ClassifyDispatch< + ( #( & #args_type, )* ) + > + >::classify_dispatch(&__pallet_base_weight, ( #( #args_name, )* )); + + let __pallet_pays_fee = < + dyn 
#frame_support::dispatch::PaysFee<( #( & #args_type, )* )> + >::pays_fee(&__pallet_base_weight, ( #( #args_name, )* )); + + #frame_support::dispatch::DispatchInfo { + weight: __pallet_weight, + class: __pallet_class, + pays_fee: __pallet_pays_fee, + } + }, + )* + Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), + } + } + } + + impl<#type_impl_gen> #frame_support::dispatch::CheckIfFeeless for #call_ident<#type_use_gen> + #where_clause + { + type Origin = #frame_system::pallet_prelude::OriginFor; + #[allow(unused_variables)] + fn is_feeless(&self, origin: &Self::Origin) -> bool { + match *self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern_ref, )* } => { + #feeless_check_result + }, + )* + Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), + } + } + } + + impl<#type_impl_gen> #frame_support::traits::GetCallName for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_name(&self) -> &'static str { + match *self { + #( #cfg_attrs Self::#fn_name { .. } => stringify!(#fn_name), )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_names() -> &'static [&'static str] { + &[ #( #cfg_attrs stringify!(#fn_name), )* ] + } + } + + impl<#type_impl_gen> #frame_support::traits::GetCallIndex for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_index(&self) -> u8 { + match *self { + #( #cfg_attrs Self::#fn_name { .. 
} => #call_index, )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_indices() -> &'static [u8] { + &[ #( #cfg_attrs #call_index, )* ] + } + } + + impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable + for #call_ident<#type_use_gen> + #where_clause + { + type RuntimeOrigin = #frame_system::pallet_prelude::OriginFor; + fn dispatch_bypass_filter( + self, + origin: Self::RuntimeOrigin + ) -> #frame_support::dispatch::DispatchResultWithPostInfo { + #frame_support::dispatch_context::run_in_context(|| { + match self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern, )* } => { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!(stringify!(#fn_name)) + ); + #maybe_allow_attrs + <#pallet_ident<#type_use_gen>>::#fn_name(origin, #( #args_name, )* ) + .map(Into::into).map_err(Into::into) + }, + )* + Self::__Ignore(_, _) => { + let _ = origin; // Use origin for empty Call enum + unreachable!("__PhantomItem cannot be used."); + }, + } + }) + } + } + + impl<#type_impl_gen> #frame_support::dispatch::Callable for #pallet_ident<#type_use_gen> + #where_clause + { + type RuntimeCall = #call_ident<#type_use_gen>; + } + + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { + #[allow(dead_code)] + #[doc(hidden)] + pub fn call_functions() -> #frame_support::__private::metadata_ir::PalletCallMetadataIR { + #frame_support::__private::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/composite.rs b/support/procedural-fork/src/pallet/expand/composite.rs index 49c0ad675..d449afe8f 100644 --- a/support/procedural-fork/src/pallet/expand/composite.rs +++ b/support/procedural-fork/src/pallet/expand/composite.rs @@ -20,21 +20,21 @@ use proc_macro2::TokenStream; /// Expands `composite_enum` and adds the `VariantCount` implementation for it. 
pub fn expand_composites(def: &mut Def) -> TokenStream { - let mut expand = quote::quote!(); - let frame_support = &def.frame_support; + let mut expand = quote::quote!(); + let frame_support = &def.frame_support; - for composite in &def.composites { - let name = &composite.ident; - let (impl_generics, ty_generics, where_clause) = composite.generics.split_for_impl(); - let variants_count = composite.variant_count; + for composite in &def.composites { + let name = &composite.ident; + let (impl_generics, ty_generics, where_clause) = composite.generics.split_for_impl(); + let variants_count = composite.variant_count; - // add `VariantCount` implementation for `composite_enum` - expand.extend(quote::quote_spanned!(composite.attr_span => + // add `VariantCount` implementation for `composite_enum` + expand.extend(quote::quote_spanned!(composite.attr_span => impl #impl_generics #frame_support::traits::VariantCount for #name #ty_generics #where_clause { const VARIANT_COUNT: u32 = #variants_count; } )); - } + } - expand + expand } diff --git a/support/procedural-fork/src/pallet/expand/config.rs b/support/procedural-fork/src/pallet/expand/config.rs index 55ac72537..5cf4035a8 100644 --- a/support/procedural-fork/src/pallet/expand/config.rs +++ b/support/procedural-fork/src/pallet/expand/config.rs @@ -23,20 +23,20 @@ use syn::{parse_quote, Item}; /// /// * Generate default rust doc pub fn expand_config(def: &mut Def) -> TokenStream { - let config = &def.config; - let config_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[config.index]; - if let Item::Trait(item) = item { - item - } else { - unreachable!("Checked by config parser") - } - }; + let config = &def.config; + let config_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[config.index]; + if let Item::Trait(item) = item { + item + } else { + unreachable!("Checked by config parser") + } + }; - config_item.attrs.insert( - 0, - parse_quote!( - #[doc 
= r" + config_item.attrs.insert( + 0, + parse_quote!( + #[doc = r" Configuration trait of this pallet. The main purpose of this trait is to act as an interface between this pallet and the runtime in @@ -44,54 +44,54 @@ which it is embedded in. A type, function, or constant in this trait is essentia configured by the runtime that includes this pallet. Consequently, a runtime that wants to include this pallet must implement this trait." - ] - ), - ); + ] + ), + ); - // we only emit `DefaultConfig` if there are trait items, so an empty `DefaultConfig` is - // impossible consequently. - match &config.default_sub_trait { - Some(default_sub_trait) if !default_sub_trait.items.is_empty() => { - let trait_items = &default_sub_trait - .items - .iter() - .map(|item| { - if item.1 { - if let syn::TraitItem::Type(item) = item.0.clone() { - let mut item = item.clone(); - item.bounds.clear(); - syn::TraitItem::Type(item) - } else { - item.0.clone() - } - } else { - item.0.clone() - } - }) - .collect::>(); + // we only emit `DefaultConfig` if there are trait items, so an empty `DefaultConfig` is + // impossible consequently. + match &config.default_sub_trait { + Some(default_sub_trait) if default_sub_trait.items.len() > 0 => { + let trait_items = &default_sub_trait + .items + .iter() + .map(|item| { + if item.1 { + if let syn::TraitItem::Type(item) = item.0.clone() { + let mut item = item.clone(); + item.bounds.clear(); + syn::TraitItem::Type(item) + } else { + item.0.clone() + } + } else { + item.0.clone() + } + }) + .collect::>(); - let type_param_bounds = if default_sub_trait.has_system { - let system = &def.frame_system; - quote::quote!(: #system::DefaultConfig) - } else { - quote::quote!() - }; + let type_param_bounds = if default_sub_trait.has_system { + let system = &def.frame_system; + quote::quote!(: #system::DefaultConfig) + } else { + quote::quote!() + }; - quote!( - /// Based on [`Config`]. 
Auto-generated by - /// [`#[pallet::config(with_default)]`](`frame_support::pallet_macros::config`). - /// Can be used in tandem with - /// [`#[register_default_config]`](`frame_support::register_default_config`) and - /// [`#[derive_impl]`](`frame_support::derive_impl`) to derive test config traits - /// based on existing pallet config traits in a safe and developer-friendly way. - /// - /// See [here](`frame_support::pallet_macros::config`) for more information and caveats about - /// the auto-generated `DefaultConfig` trait and how it is generated. - pub trait DefaultConfig #type_param_bounds { - #(#trait_items)* - } - ) - } - _ => Default::default(), - } + quote!( + /// Based on [`Config`]. Auto-generated by + /// [`#[pallet::config(with_default)]`](`frame_support::pallet_macros::config`). + /// Can be used in tandem with + /// [`#[register_default_config]`](`frame_support::register_default_config`) and + /// [`#[derive_impl]`](`frame_support::derive_impl`) to derive test config traits + /// based on existing pallet config traits in a safe and developer-friendly way. + /// + /// See [here](`frame_support::pallet_macros::config`) for more information and caveats about + /// the auto-generated `DefaultConfig` trait and how it is generated. + pub trait DefaultConfig #type_param_bounds { + #(#trait_items)* + } + ) + }, + _ => Default::default(), + } } diff --git a/support/procedural-fork/src/pallet/expand/constants.rs b/support/procedural-fork/src/pallet/expand/constants.rs index 5153ccf49..57fa8b7f3 100644 --- a/support/procedural-fork/src/pallet/expand/constants.rs +++ b/support/procedural-fork/src/pallet/expand/constants.rs @@ -18,99 +18,91 @@ use crate::pallet::Def; struct ConstDef { - /// Name of the associated type. - pub ident: syn::Ident, - /// The type in Get, e.g. 
`u32` in `type Foo: Get;`, but `Self` is replaced by `T` - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, - /// default_byte implementation - pub default_byte_impl: proc_macro2::TokenStream, - /// Constant name for Metadata (optional) - pub metadata_name: Option, + /// Name of the associated type. + pub ident: syn::Ident, + /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, + /// default_byte implementation + pub default_byte_impl: proc_macro2::TokenStream, + /// Constant name for Metadata (optional) + pub metadata_name: Option, } /// /// * Impl fn module_constant_metadata for pallet. pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - let pallet_ident = &def.pallet_struct.pallet; - let trait_use_gen = &def.trait_use_generics(proc_macro2::Span::call_site()); - - let mut where_clauses = vec![&def.config.where_clause]; - where_clauses.extend(def.extra_constants.iter().map(|d| &d.where_clause)); - let completed_where_clause = super::merge_where_clauses(&where_clauses); - - let config_consts = def.config.consts_metadata.iter().map(|const_| { - let ident = &const_.ident; - let const_type = &const_.type_; - - ConstDef { - ident: const_.ident.clone(), - type_: const_.type_.clone(), - doc: const_.doc.clone(), - default_byte_impl: quote::quote!( - let value = <::#ident as - #frame_support::traits::Get<#const_type>>::get(); - #frame_support::__private::codec::Encode::encode(&value) - ), - metadata_name: None, - } - }); - - let extra_consts = def - .extra_constants - .iter() - .flat_map(|d| &d.extra_constants) - .map(|const_| { - let ident = &const_.ident; - - ConstDef { - ident: const_.ident.clone(), - type_: const_.type_.clone(), - doc: 
const_.doc.clone(), - default_byte_impl: quote::quote!( - let value = >::#ident(); - #frame_support::__private::codec::Encode::encode(&value) - ), - metadata_name: const_.metadata_name.clone(), - } - }); - - let consts = config_consts.chain(extra_consts).map(|const_| { - let const_type = &const_.type_; - let ident_str = format!("{}", const_.metadata_name.unwrap_or(const_.ident)); - - let no_docs = vec![]; - let doc = if cfg!(feature = "no-metadata-docs") { - &no_docs - } else { - &const_.doc - }; - - let default_byte_impl = &const_.default_byte_impl; - - quote::quote!({ - #frame_support::__private::metadata_ir::PalletConstantMetadataIR { - name: #ident_str, - ty: #frame_support::__private::scale_info::meta_type::<#const_type>(), - value: { #default_byte_impl }, - docs: #frame_support::__private::sp_std::vec![ #( #doc ),* ], - } - }) - }); - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ - - #[doc(hidden)] - pub fn pallet_constants_metadata() - -> #frame_support::__private::sp_std::vec::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> - { - #frame_support::__private::sp_std::vec![ #( #consts ),* ] - } - } - ) + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + let pallet_ident = &def.pallet_struct.pallet; + let trait_use_gen = &def.trait_use_generics(proc_macro2::Span::call_site()); + + let mut where_clauses = vec![&def.config.where_clause]; + where_clauses.extend(def.extra_constants.iter().map(|d| &d.where_clause)); + let completed_where_clause = super::merge_where_clauses(&where_clauses); + + let config_consts = def.config.consts_metadata.iter().map(|const_| { + let ident = &const_.ident; + let const_type = &const_.type_; + + ConstDef { + ident: const_.ident.clone(), + type_: const_.type_.clone(), + doc: const_.doc.clone(), + default_byte_impl: 
quote::quote!( + let value = <::#ident as + #frame_support::traits::Get<#const_type>>::get(); + #frame_support::__private::codec::Encode::encode(&value) + ), + metadata_name: None, + } + }); + + let extra_consts = def.extra_constants.iter().flat_map(|d| &d.extra_constants).map(|const_| { + let ident = &const_.ident; + + ConstDef { + ident: const_.ident.clone(), + type_: const_.type_.clone(), + doc: const_.doc.clone(), + default_byte_impl: quote::quote!( + let value = >::#ident(); + #frame_support::__private::codec::Encode::encode(&value) + ), + metadata_name: const_.metadata_name.clone(), + } + }); + + let consts = config_consts.chain(extra_consts).map(|const_| { + let const_type = &const_.type_; + let ident_str = format!("{}", const_.metadata_name.unwrap_or(const_.ident)); + + let no_docs = vec![]; + let doc = if cfg!(feature = "no-metadata-docs") { &no_docs } else { &const_.doc }; + + let default_byte_impl = &const_.default_byte_impl; + + quote::quote!({ + #frame_support::__private::metadata_ir::PalletConstantMetadataIR { + name: #ident_str, + ty: #frame_support::__private::scale_info::meta_type::<#const_type>(), + value: { #default_byte_impl }, + docs: #frame_support::__private::sp_std::vec![ #( #doc ),* ], + } + }) + }); + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ + + #[doc(hidden)] + pub fn pallet_constants_metadata() + -> #frame_support::__private::sp_std::vec::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> + { + #frame_support::__private::sp_std::vec![ #( #consts ),* ] + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/doc_only.rs b/support/procedural-fork/src/pallet/expand/doc_only.rs index 3e60e9a9b..621a051ac 100644 --- a/support/procedural-fork/src/pallet/expand/doc_only.rs +++ b/support/procedural-fork/src/pallet/expand/doc_only.rs @@ -20,84 +20,84 @@ use proc_macro2::Span; use crate::pallet::Def; pub fn expand_doc_only(def: &mut Def) -> proc_macro2::TokenStream 
{ - let dispatchables = if let Some(call_def) = &def.call { - let type_impl_generics = def.type_impl_generics(Span::call_site()); - call_def - .methods - .iter() - .map(|method| { - let name = &method.name; - let args = &method - .args - .iter() - .map(|(_, arg_name, arg_type)| quote::quote!( #arg_name: #arg_type, )) - .collect::(); - let docs = &method.docs; + let dispatchables = if let Some(call_def) = &def.call { + let type_impl_generics = def.type_impl_generics(Span::call_site()); + call_def + .methods + .iter() + .map(|method| { + let name = &method.name; + let args = &method + .args + .iter() + .map(|(_, arg_name, arg_type)| quote::quote!( #arg_name: #arg_type, )) + .collect::(); + let docs = &method.docs; - let real = format!(" [`Pallet::{}`].", name); - quote::quote!( - #( #[doc = #docs] )* - /// - /// # Warning: Doc-Only - /// - /// This function is an automatically generated, and is doc-only, uncallable - /// stub. See the real version in - #[ doc = #real ] - pub fn #name<#type_impl_generics>(#args) { unreachable!(); } - ) - }) - .collect::() - } else { - quote::quote!() - }; + let real = format!(" [`Pallet::{}`].", name); + quote::quote!( + #( #[doc = #docs] )* + /// + /// # Warning: Doc-Only + /// + /// This function is an automatically generated, and is doc-only, uncallable + /// stub. See the real version in + #[ doc = #real ] + pub fn #name<#type_impl_generics>(#args) { unreachable!(); } + ) + }) + .collect::() + } else { + quote::quote!() + }; - let storage_types = def - .storages - .iter() - .map(|storage| { - let storage_name = &storage.ident; - let storage_type_docs = &storage.docs; - let real = format!("[`pallet::{}`].", storage_name); - quote::quote!( - #( #[doc = #storage_type_docs] )* - /// - /// # Warning: Doc-Only - /// - /// This type is automatically generated, and is doc-only. 
See the real version in - #[ doc = #real ] - pub struct #storage_name(); - ) - }) - .collect::(); + let storage_types = def + .storages + .iter() + .map(|storage| { + let storage_name = &storage.ident; + let storage_type_docs = &storage.docs; + let real = format!("[`pallet::{}`].", storage_name); + quote::quote!( + #( #[doc = #storage_type_docs] )* + /// + /// # Warning: Doc-Only + /// + /// This type is automatically generated, and is doc-only. See the real version in + #[ doc = #real ] + pub struct #storage_name(); + ) + }) + .collect::(); - quote::quote!( - /// Auto-generated docs-only module listing all (public and private) defined storage types - /// for this pallet. - /// - /// # Warning: Doc-Only - /// - /// Members of this module cannot be used directly and are only provided for documentation - /// purposes. - /// - /// To see the actual storage type, find a struct with the same name at the root of the - /// pallet, in the list of [*Type Definitions*](../index.html#types). - #[cfg(doc)] - pub mod storage_types { - use super::*; - #storage_types - } + quote::quote!( + /// Auto-generated docs-only module listing all (public and private) defined storage types + /// for this pallet. + /// + /// # Warning: Doc-Only + /// + /// Members of this module cannot be used directly and are only provided for documentation + /// purposes. + /// + /// To see the actual storage type, find a struct with the same name at the root of the + /// pallet, in the list of [*Type Definitions*](../index.html#types). + #[cfg(doc)] + pub mod storage_types { + use super::*; + #storage_types + } - /// Auto-generated docs-only module listing all defined dispatchables for this pallet. - /// - /// # Warning: Doc-Only - /// - /// Members of this module cannot be used directly and are only provided for documentation - /// purposes. To see the real version of each dispatchable, look for them in [`Pallet`] or - /// [`Call`]. 
- #[cfg(doc)] - pub mod dispatchables { - use super::*; - #dispatchables - } - ) + /// Auto-generated docs-only module listing all defined dispatchables for this pallet. + /// + /// # Warning: Doc-Only + /// + /// Members of this module cannot be used directly and are only provided for documentation + /// purposes. To see the real version of each dispatchable, look for them in [`Pallet`] or + /// [`Call`]. + #[cfg(doc)] + pub mod dispatchables { + use super::*; + #dispatchables + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/documentation.rs b/support/procedural-fork/src/pallet/expand/documentation.rs index 42891dab6..ec19f889a 100644 --- a/support/procedural-fork/src/pallet/expand/documentation.rs +++ b/support/procedural-fork/src/pallet/expand/documentation.rs @@ -20,20 +20,20 @@ use proc_macro2::TokenStream; use quote::ToTokens; use syn::{spanned::Spanned, Attribute, Lit, LitStr}; -const DOC: &str = "doc"; -const PALLET_DOC: &str = "pallet_doc"; +const DOC: &'static str = "doc"; +const PALLET_DOC: &'static str = "pallet_doc"; /// Get the documentation file path from the `pallet_doc` attribute. /// /// Supported format: /// `#[pallet_doc(PATH)]`: The path of the file from which the documentation is loaded fn parse_pallet_doc_value(attr: &Attribute) -> syn::Result { - let lit: syn::LitStr = attr.parse_args().map_err(|_| { + let lit: syn::LitStr = attr.parse_args().map_err(|_| { let msg = "The `pallet_doc` received an unsupported argument. 
Supported format: `pallet_doc(\"PATH\")`"; syn::Error::new(attr.span(), msg) })?; - Ok(DocMetaValue::Path(lit)) + Ok(DocMetaValue::Path(lit)) } /// Get the value from the `doc` comment attribute: @@ -42,49 +42,46 @@ fn parse_pallet_doc_value(attr: &Attribute) -> syn::Result { /// - `#[doc = "A doc string"]`: Documentation as a string literal /// - `#[doc = include_str!(PATH)]`: Documentation obtained from a path fn parse_doc_value(attr: &Attribute) -> syn::Result> { - if !attr.path().is_ident(DOC) { - return Ok(None); - } - - let meta = attr.meta.require_name_value()?; - - match &meta.value { - syn::Expr::Lit(lit) => Ok(Some(DocMetaValue::Lit(lit.lit.clone()))), - syn::Expr::Macro(mac) if mac.mac.path.is_ident("include_str") => { - Ok(Some(DocMetaValue::Path(mac.mac.parse_body()?))) - } - _ => Err(syn::Error::new( - attr.span(), - "Expected `= \"docs\"` or `= include_str!(\"PATH\")`", - )), - } + if !attr.path().is_ident(DOC) { + return Ok(None) + } + + let meta = attr.meta.require_name_value()?; + + match &meta.value { + syn::Expr::Lit(lit) => Ok(Some(DocMetaValue::Lit(lit.lit.clone()))), + syn::Expr::Macro(mac) if mac.mac.path.is_ident("include_str") => + Ok(Some(DocMetaValue::Path(mac.mac.parse_body()?))), + _ => + Err(syn::Error::new(attr.span(), "Expected `= \"docs\"` or `= include_str!(\"PATH\")`")), + } } /// Supported documentation tokens. #[derive(Debug)] enum DocMetaValue { - /// Documentation with string literals. - /// - /// `#[doc = "Lit"]` - Lit(Lit), - /// Documentation with `include_str!` macro. - /// - /// The string literal represents the file `PATH`. - /// - /// `#[doc = include_str!(PATH)]` - Path(LitStr), + /// Documentation with string literals. + /// + /// `#[doc = "Lit"]` + Lit(Lit), + /// Documentation with `include_str!` macro. + /// + /// The string literal represents the file `PATH`. 
+ /// + /// `#[doc = include_str!(PATH)]` + Path(LitStr), } impl ToTokens for DocMetaValue { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - DocMetaValue::Lit(lit) => lit.to_tokens(tokens), - DocMetaValue::Path(path_lit) => { - let decl = quote::quote!(include_str!(#path_lit)); - tokens.extend(decl) - } - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + DocMetaValue::Lit(lit) => lit.to_tokens(tokens), + DocMetaValue::Path(path_lit) => { + let decl = quote::quote!(include_str!(#path_lit)); + tokens.extend(decl) + }, + } + } } /// Extract the documentation from the given pallet definition @@ -113,63 +110,63 @@ impl ToTokens for DocMetaValue { /// Unlike the `doc` attribute, the documentation provided to the `proc_macro` attribute is /// not added to the pallet. pub fn expand_documentation(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - let pallet_ident = &def.pallet_struct.pallet; - let where_clauses = &def.config.where_clause; - - // TODO: Use [drain_filter](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter) when it is stable. - - // The `pallet_doc` attributes are excluded from the generation of the pallet, - // but they are included in the runtime metadata. - let mut pallet_docs = Vec::with_capacity(def.item.attrs.len()); - let mut index = 0; - while index < def.item.attrs.len() { - let attr = &def.item.attrs[index]; - if attr.path().get_ident().map_or(false, |i| *i == PALLET_DOC) { - pallet_docs.push(def.item.attrs.remove(index)); - // Do not increment the index, we have just removed the - // element from the attributes. - continue; - } - - index += 1; - } - - // Capture the `#[doc = include_str!("../README.md")]` and `#[doc = "Documentation"]`. 
- let docs = match def - .item - .attrs - .iter() - .filter_map(|v| parse_doc_value(v).transpose()) - .collect::>>() - { - Ok(r) => r, - Err(err) => return err.into_compile_error(), - }; - - // Capture the `#[pallet_doc("../README.md")]`. - let pallet_docs = match pallet_docs - .into_iter() - .map(|attr| parse_pallet_doc_value(&attr)) - .collect::>>() - { - Ok(docs) => docs, - Err(err) => return err.into_compile_error(), - }; - - let docs = docs.iter().chain(pallet_docs.iter()); - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clauses{ - - #[doc(hidden)] - pub fn pallet_documentation_metadata() - -> #frame_support::__private::sp_std::vec::Vec<&'static str> - { - #frame_support::__private::sp_std::vec![ #( #docs ),* ] - } - } - ) + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + let pallet_ident = &def.pallet_struct.pallet; + let where_clauses = &def.config.where_clause; + + // TODO: Use [drain_filter](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter) when it is stable. + + // The `pallet_doc` attributes are excluded from the generation of the pallet, + // but they are included in the runtime metadata. + let mut pallet_docs = Vec::with_capacity(def.item.attrs.len()); + let mut index = 0; + while index < def.item.attrs.len() { + let attr = &def.item.attrs[index]; + if attr.path().get_ident().map_or(false, |i| *i == PALLET_DOC) { + pallet_docs.push(def.item.attrs.remove(index)); + // Do not increment the index, we have just removed the + // element from the attributes. + continue + } + + index += 1; + } + + // Capture the `#[doc = include_str!("../README.md")]` and `#[doc = "Documentation"]`. 
+ let docs = match def + .item + .attrs + .iter() + .filter_map(|v| parse_doc_value(v).transpose()) + .collect::>>() + { + Ok(r) => r, + Err(err) => return err.into_compile_error(), + }; + + // Capture the `#[pallet_doc("../README.md")]`. + let pallet_docs = match pallet_docs + .into_iter() + .map(|attr| parse_pallet_doc_value(&attr)) + .collect::>>() + { + Ok(docs) => docs, + Err(err) => return err.into_compile_error(), + }; + + let docs = docs.iter().chain(pallet_docs.iter()); + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clauses{ + + #[doc(hidden)] + pub fn pallet_documentation_metadata() + -> #frame_support::__private::sp_std::vec::Vec<&'static str> + { + #frame_support::__private::sp_std::vec![ #( #docs ),* ] + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/error.rs b/support/procedural-fork/src/pallet/expand/error.rs index e2c3f680c..72fb6e923 100644 --- a/support/procedural-fork/src/pallet/expand/error.rs +++ b/support/procedural-fork/src/pallet/expand/error.rs @@ -16,11 +16,11 @@ // limitations under the License. use crate::{ - pallet::{ - parse::error::{VariantDef, VariantField}, - Def, - }, - COUNTER, + pallet::{ + parse::error::{VariantDef, VariantField}, + Def, + }, + COUNTER, }; use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; @@ -29,49 +29,49 @@ use syn::spanned::Spanned; /// /// * impl various trait on Error pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let error_token_unique_id = - syn::Ident::new(&format!("__tt_error_token_{}", count), def.item.span()); - - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let config_where_clause = &def.config.where_clause; - - let error = if let Some(error) = &def.error { - error - } else { - return quote::quote! { - #[macro_export] - #[doc(hidden)] - macro_rules! 
#error_token_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - } - }; - } - - pub use #error_token_unique_id as tt_error_token; - }; - }; - - let error_ident = &error.error; - let type_impl_gen = &def.type_impl_generics(error.attr_span); - let type_use_gen = &def.type_use_generics(error.attr_span); - - let phantom_variant: syn::Variant = syn::parse_quote!( - #[doc(hidden)] - #[codec(skip)] - __Ignore( - #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)>, - #frame_support::Never, - ) - ); - - let as_str_matches = error.variants.iter().map( + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let error_token_unique_id = + syn::Ident::new(&format!("__tt_error_token_{}", count), def.item.span()); + + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let config_where_clause = &def.config.where_clause; + + let error = if let Some(error) = &def.error { + error + } else { + return quote::quote! { + #[macro_export] + #[doc(hidden)] + macro_rules! #error_token_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! 
{ + $caller + } + }; + } + + pub use #error_token_unique_id as tt_error_token; + } + }; + + let error_ident = &error.error; + let type_impl_gen = &def.type_impl_generics(error.attr_span); + let type_use_gen = &def.type_use_generics(error.attr_span); + + let phantom_variant: syn::Variant = syn::parse_quote!( + #[doc(hidden)] + #[codec(skip)] + __Ignore( + #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)>, + #frame_support::Never, + ) + ); + + let as_str_matches = error.variants.iter().map( |VariantDef { ident: variant, field: field_ty, docs: _, cfg_attrs }| { let variant_str = variant.to_string(); let cfg_attrs = cfg_attrs.iter().map(|attr| attr.to_token_stream()); @@ -89,107 +89,103 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { }, ); - let error_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; - if let syn::Item::Enum(item) = item { - item - } else { - unreachable!("Checked by error parser") - } - }; - - error_item.variants.insert(0, phantom_variant); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { - "never" - } else { - "always" - }; - - // derive TypeInfo for error metadata - error_item.attrs.push(syn::parse_quote! 
{ - #[derive( - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - #frame_support::PalletError, - )] - }); - error_item.attrs.push(syn::parse_quote!( - #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] - )); - - if get_doc_literals(&error_item.attrs).is_empty() { - error_item.attrs.push(syn::parse_quote!( - #[doc = "The `Error` enum of this pallet."] - )); - } - - quote::quote_spanned!(error.attr_span => - impl<#type_impl_gen> #frame_support::__private::sp_std::fmt::Debug for #error_ident<#type_use_gen> - #config_where_clause - { - fn fmt(&self, f: &mut #frame_support::__private::sp_std::fmt::Formatter<'_>) - -> #frame_support::__private::sp_std::fmt::Result - { - f.write_str(self.as_str()) - } - } - - impl<#type_impl_gen> #error_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn as_str(&self) -> &'static str { - match &self { - Self::__Ignore(_, _) => unreachable!("`__Ignore` can never be constructed"), - #( #as_str_matches )* - } - } - } - - impl<#type_impl_gen> From<#error_ident<#type_use_gen>> for &'static str - #config_where_clause - { - fn from(err: #error_ident<#type_use_gen>) -> &'static str { - err.as_str() - } - } - - impl<#type_impl_gen> From<#error_ident<#type_use_gen>> - for #frame_support::sp_runtime::DispatchError - #config_where_clause - { - fn from(err: #error_ident<#type_use_gen>) -> Self { - use #frame_support::__private::codec::Encode; - let index = < - ::PalletInfo - as #frame_support::traits::PalletInfo - >::index::>() - .expect("Every active module has an index in the runtime; qed") as u8; - let mut encoded = err.encode(); - encoded.resize(#frame_support::MAX_MODULE_ERROR_ENCODED_SIZE, 0); - - #frame_support::sp_runtime::DispatchError::Module(#frame_support::sp_runtime::ModuleError { - index, - error: TryInto::try_into(encoded).expect("encoded error is resized to be equal to the maximum encoded error size; 
qed"), - message: Some(err.as_str()), - }) - } - } - - #[macro_export] - #[doc(hidden)] - macro_rules! #error_token_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - error = [{ #error_ident }] - } - }; - } - - pub use #error_token_unique_id as tt_error_token; - ) + let error_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; + if let syn::Item::Enum(item) = item { + item + } else { + unreachable!("Checked by error parser") + } + }; + + error_item.variants.insert(0, phantom_variant); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; + + // derive TypeInfo for error metadata + error_item.attrs.push(syn::parse_quote! { + #[derive( + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + #frame_support::PalletError, + )] + }); + error_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] + )); + + if get_doc_literals(&error_item.attrs).is_empty() { + error_item.attrs.push(syn::parse_quote!( + #[doc = "The `Error` enum of this pallet."] + )); + } + + quote::quote_spanned!(error.attr_span => + impl<#type_impl_gen> #frame_support::__private::sp_std::fmt::Debug for #error_ident<#type_use_gen> + #config_where_clause + { + fn fmt(&self, f: &mut #frame_support::__private::sp_std::fmt::Formatter<'_>) + -> #frame_support::__private::sp_std::fmt::Result + { + f.write_str(self.as_str()) + } + } + + impl<#type_impl_gen> #error_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn as_str(&self) -> &'static str { + match &self { + Self::__Ignore(_, _) => unreachable!("`__Ignore` can never be constructed"), + #( #as_str_matches )* + } + } + } + + impl<#type_impl_gen> From<#error_ident<#type_use_gen>> for &'static str + #config_where_clause + { + fn from(err: 
#error_ident<#type_use_gen>) -> &'static str { + err.as_str() + } + } + + impl<#type_impl_gen> From<#error_ident<#type_use_gen>> + for #frame_support::sp_runtime::DispatchError + #config_where_clause + { + fn from(err: #error_ident<#type_use_gen>) -> Self { + use #frame_support::__private::codec::Encode; + let index = < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::index::>() + .expect("Every active module has an index in the runtime; qed") as u8; + let mut encoded = err.encode(); + encoded.resize(#frame_support::MAX_MODULE_ERROR_ENCODED_SIZE, 0); + + #frame_support::sp_runtime::DispatchError::Module(#frame_support::sp_runtime::ModuleError { + index, + error: TryInto::try_into(encoded).expect("encoded error is resized to be equal to the maximum encoded error size; qed"), + message: Some(err.as_str()), + }) + } + } + + #[macro_export] + #[doc(hidden)] + macro_rules! #error_token_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + error = [{ #error_ident }] + } + }; + } + + pub use #error_token_unique_id as tt_error_token; + ) } diff --git a/support/procedural-fork/src/pallet/expand/event.rs b/support/procedural-fork/src/pallet/expand/event.rs index 931dcd95a..655fc5507 100644 --- a/support/procedural-fork/src/pallet/expand/event.rs +++ b/support/procedural-fork/src/pallet/expand/event.rs @@ -16,8 +16,8 @@ // limitations under the License. use crate::{ - pallet::{parse::event::PalletEventDepositAttr, Def}, - COUNTER, + pallet::{parse::event::PalletEventDepositAttr, Def}, + COUNTER, }; use frame_support_procedural_tools::get_doc_literals; use syn::{spanned::Spanned, Ident}; @@ -27,159 +27,148 @@ use syn::{spanned::Spanned, Ident}; /// * Impl various trait on Event including metadata /// * if deposit_event is defined, implement deposit_event on module. 
pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - - let (event, macro_ident) = if let Some(event) = &def.event { - let ident = Ident::new( - &format!("__is_event_part_defined_{}", count), - event.attr_span, - ); - (event, ident) - } else { - let macro_ident = Ident::new( - &format!("__is_event_part_defined_{}", count), - def.item.span(), - ); - - return quote::quote! { - #[doc(hidden)] - pub mod __substrate_event_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::event] defined, perhaps you should \ - remove `Event` from construct_runtime?", - )); - } - } - - #[doc(hidden)] - pub use #macro_ident as is_event_part_defined; - } - }; - }; - - let event_where_clause = &event.where_clause; - - // NOTE: actually event where clause must be a subset of config where clause because of - // `type RuntimeEvent: From>`. But we merge either way for potential better error - // message - let completed_where_clause = - super::merge_where_clauses(&[&event.where_clause, &def.config.where_clause]); - - let event_ident = &event.event; - let frame_system = &def.frame_system; - let frame_support = &def.frame_support; - let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); - let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); - - let event_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; - if let syn::Item::Enum(item) = item { - item - } else { - unreachable!("Checked by event parser") - } - }; - - // Phantom data is added for generic event. - if event.gen_kind.is_generic() { - let variant = syn::parse_quote!( - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData<(#event_use_gen)>, - #frame_support::Never, - ) - ); - - // Push ignore variant at the end. 
- event_item.variants.push(variant); - } - - if get_doc_literals(&event_item.attrs).is_empty() { - event_item - .attrs - .push(syn::parse_quote!(#[doc = "The `Event` enum of this pallet"])); - } - - // derive some traits because system event require Clone, FullCodec, Eq, PartialEq and Debug - event_item.attrs.push(syn::parse_quote!( - #[derive( - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::RuntimeDebugNoBound, - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - )] - )); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { - "never" - } else { - "always" - }; - - // skip requirement for type params to implement `TypeInfo`, and set docs capture - event_item.attrs.push(syn::parse_quote!( - #[scale_info(skip_type_params(#event_use_gen), capture_docs = #capture_docs)] - )); - - let deposit_event = if let Some(deposit_event) = &event.deposit_event { - let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); - let trait_use_gen = &def.trait_use_generics(event.attr_span); - let type_impl_gen = &def.type_impl_generics(event.attr_span); - let type_use_gen = &def.type_use_generics(event.attr_span); - let pallet_ident = &def.pallet_struct.pallet; - - let PalletEventDepositAttr { - fn_vis, fn_span, .. - } = deposit_event; - - quote::quote_spanned!(*fn_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #fn_vis fn deposit_event(event: Event<#event_use_gen>) { - let event = < - ::RuntimeEvent as - From> - >::from(event); - - let event = < - ::RuntimeEvent as - Into<::RuntimeEvent> - >::into(event); - - <#frame_system::Pallet>::deposit_event(event) - } - } - ) - } else { - Default::default() - }; - - quote::quote_spanned!(event.attr_span => - #[doc(hidden)] - pub mod __substrate_event_check { - #[macro_export] - #[doc(hidden)] - macro_rules! 
#macro_ident { - ($pallet_name:ident) => {}; - } - - #[doc(hidden)] - pub use #macro_ident as is_event_part_defined; - } - - #deposit_event - - impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { - fn from(_: #event_ident<#event_use_gen>) {} - } - ) + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let (event, macro_ident) = if let Some(event) = &def.event { + let ident = Ident::new(&format!("__is_event_part_defined_{}", count), event.attr_span); + (event, ident) + } else { + let macro_ident = + Ident::new(&format!("__is_event_part_defined_{}", count), def.item.span()); + + return quote::quote! { + #[doc(hidden)] + pub mod __substrate_event_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::event] defined, perhaps you should \ + remove `Event` from construct_runtime?", + )); + } + } + + #[doc(hidden)] + pub use #macro_ident as is_event_part_defined; + } + } + }; + + let event_where_clause = &event.where_clause; + + // NOTE: actually event where clause must be a subset of config where clause because of + // `type RuntimeEvent: From>`. But we merge either way for potential better error + // message + let completed_where_clause = + super::merge_where_clauses(&[&event.where_clause, &def.config.where_clause]); + + let event_ident = &event.event; + let frame_system = &def.frame_system; + let frame_support = &def.frame_support; + let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); + let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); + + let event_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; + if let syn::Item::Enum(item) = item { + item + } else { + unreachable!("Checked by event parser") + } + }; + + // Phantom data is added for generic event. 
+ if event.gen_kind.is_generic() { + let variant = syn::parse_quote!( + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData<(#event_use_gen)>, + #frame_support::Never, + ) + ); + + // Push ignore variant at the end. + event_item.variants.push(variant); + } + + if get_doc_literals(&event_item.attrs).is_empty() { + event_item + .attrs + .push(syn::parse_quote!(#[doc = "The `Event` enum of this pallet"])); + } + + // derive some traits because system event require Clone, FullCodec, Eq, PartialEq and Debug + event_item.attrs.push(syn::parse_quote!( + #[derive( + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::RuntimeDebugNoBound, + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + )] + )); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; + + // skip requirement for type params to implement `TypeInfo`, and set docs capture + event_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#event_use_gen), capture_docs = #capture_docs)] + )); + + let deposit_event = if let Some(deposit_event) = &event.deposit_event { + let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); + let trait_use_gen = &def.trait_use_generics(event.attr_span); + let type_impl_gen = &def.type_impl_generics(event.attr_span); + let type_use_gen = &def.type_use_generics(event.attr_span); + let pallet_ident = &def.pallet_struct.pallet; + + let PalletEventDepositAttr { fn_vis, fn_span, .. 
} = deposit_event; + + quote::quote_spanned!(*fn_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #fn_vis fn deposit_event(event: Event<#event_use_gen>) { + let event = < + ::RuntimeEvent as + From> + >::from(event); + + let event = < + ::RuntimeEvent as + Into<::RuntimeEvent> + >::into(event); + + <#frame_system::Pallet>::deposit_event(event) + } + } + ) + } else { + Default::default() + }; + + quote::quote_spanned!(event.attr_span => + #[doc(hidden)] + pub mod __substrate_event_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => {}; + } + + #[doc(hidden)] + pub use #macro_ident as is_event_part_defined; + } + + #deposit_event + + impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { + fn from(_: #event_ident<#event_use_gen>) {} + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/genesis_build.rs b/support/procedural-fork/src/pallet/expand/genesis_build.rs index c6089550d..248e83469 100644 --- a/support/procedural-fork/src/pallet/expand/genesis_build.rs +++ b/support/procedural-fork/src/pallet/expand/genesis_build.rs @@ -20,34 +20,30 @@ use crate::pallet::Def; /// /// * implement the trait `sp_runtime::BuildStorage` pub fn expand_genesis_build(def: &mut Def) -> proc_macro2::TokenStream { - let genesis_config = if let Some(genesis_config) = &def.genesis_config { - genesis_config - } else { - return Default::default(); - }; - let genesis_build = def.genesis_build.as_ref().expect("Checked by def parser"); + let genesis_config = if let Some(genesis_config) = &def.genesis_config { + genesis_config + } else { + return Default::default() + }; + let genesis_build = def.genesis_build.as_ref().expect("Checked by def parser"); - let frame_support = &def.frame_support; - let type_impl_gen = &genesis_config - .gen_kind - .type_impl_gen(genesis_build.attr_span); - let gen_cfg_ident = &genesis_config.genesis_config; - let gen_cfg_use_gen = 
&genesis_config - .gen_kind - .type_use_gen(genesis_build.attr_span); + let frame_support = &def.frame_support; + let type_impl_gen = &genesis_config.gen_kind.type_impl_gen(genesis_build.attr_span); + let gen_cfg_ident = &genesis_config.genesis_config; + let gen_cfg_use_gen = &genesis_config.gen_kind.type_use_gen(genesis_build.attr_span); - let where_clause = &genesis_build.where_clause; + let where_clause = &genesis_build.where_clause; - quote::quote_spanned!(genesis_build.attr_span => - #[cfg(feature = "std")] - impl<#type_impl_gen> #frame_support::sp_runtime::BuildStorage for #gen_cfg_ident<#gen_cfg_use_gen> #where_clause - { - fn assimilate_storage(&self, storage: &mut #frame_support::sp_runtime::Storage) -> std::result::Result<(), std::string::String> { - #frame_support::__private::BasicExternalities::execute_with_storage(storage, || { - self.build(); - Ok(()) - }) - } - } - ) + quote::quote_spanned!(genesis_build.attr_span => + #[cfg(feature = "std")] + impl<#type_impl_gen> #frame_support::sp_runtime::BuildStorage for #gen_cfg_ident<#gen_cfg_use_gen> #where_clause + { + fn assimilate_storage(&self, storage: &mut #frame_support::sp_runtime::Storage) -> std::result::Result<(), std::string::String> { + #frame_support::__private::BasicExternalities::execute_with_storage(storage, || { + self.build(); + Ok(()) + }) + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/genesis_config.rs b/support/procedural-fork/src/pallet/expand/genesis_config.rs index e171e2468..31d519ef2 100644 --- a/support/procedural-fork/src/pallet/expand/genesis_config.rs +++ b/support/procedural-fork/src/pallet/expand/genesis_config.rs @@ -23,130 +23,125 @@ use syn::{spanned::Spanned, Ident}; /// /// * add various derive trait on GenesisConfig struct. 
pub fn expand_genesis_config(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - - let (genesis_config, def_macro_ident, std_macro_ident) = if let Some(genesis_config) = - &def.genesis_config - { - let def_macro_ident = Ident::new( - &format!("__is_genesis_config_defined_{}", count), - genesis_config.genesis_config.span(), - ); - - let std_macro_ident = Ident::new( - &format!("__is_std_macro_defined_for_genesis_{}", count), - genesis_config.genesis_config.span(), - ); - - (genesis_config, def_macro_ident, std_macro_ident) - } else { - let def_macro_ident = Ident::new( - &format!("__is_genesis_config_defined_{}", count), - def.item.span(), - ); - - let std_macro_ident = Ident::new( - &format!("__is_std_enabled_for_genesis_{}", count), - def.item.span(), - ); - - return quote::quote! { - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #def_macro_ident { - ($pallet_name:ident) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::genesis_config] defined, perhaps you should \ - remove `Config` from construct_runtime?", - )); - } - } - - #[macro_export] - #[doc(hidden)] - macro_rules! #std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => {}; - } - - #[doc(hidden)] - pub use #def_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #std_macro_ident as is_std_enabled_for_genesis; - } - }; - }; - - let frame_support = &def.frame_support; - - let genesis_config_item = - &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_config.index]; - - let serde_crate = format!("{}::__private::serde", frame_support.to_token_stream()); - - match genesis_config_item { - syn::Item::Enum(syn::ItemEnum { attrs, .. }) - | syn::Item::Struct(syn::ItemStruct { attrs, .. }) - | syn::Item::Type(syn::ItemType { attrs, .. 
}) => { - if get_doc_literals(attrs).is_empty() { - attrs.push(syn::parse_quote!( - #[doc = r" + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let (genesis_config, def_macro_ident, std_macro_ident) = + if let Some(genesis_config) = &def.genesis_config { + let def_macro_ident = Ident::new( + &format!("__is_genesis_config_defined_{}", count), + genesis_config.genesis_config.span(), + ); + + let std_macro_ident = Ident::new( + &format!("__is_std_macro_defined_for_genesis_{}", count), + genesis_config.genesis_config.span(), + ); + + (genesis_config, def_macro_ident, std_macro_ident) + } else { + let def_macro_ident = + Ident::new(&format!("__is_genesis_config_defined_{}", count), def.item.span()); + + let std_macro_ident = + Ident::new(&format!("__is_std_enabled_for_genesis_{}", count), def.item.span()); + + return quote::quote! { + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #def_macro_ident { + ($pallet_name:ident) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::genesis_config] defined, perhaps you should \ + remove `Config` from construct_runtime?", + )); + } + } + + #[macro_export] + #[doc(hidden)] + macro_rules! #std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => {}; + } + + #[doc(hidden)] + pub use #def_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #std_macro_ident as is_std_enabled_for_genesis; + } + } + }; + + let frame_support = &def.frame_support; + + let genesis_config_item = + &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_config.index]; + + let serde_crate = format!("{}::__private::serde", frame_support.to_token_stream()); + + match genesis_config_item { + syn::Item::Enum(syn::ItemEnum { attrs, .. }) | + syn::Item::Struct(syn::ItemStruct { attrs, .. }) | + syn::Item::Type(syn::ItemType { attrs, .. 
}) => { + if get_doc_literals(attrs).is_empty() { + attrs.push(syn::parse_quote!( + #[doc = r" Can be used to configure the [genesis state](https://docs.substrate.io/build/genesis-configuration/) of this pallet. "] - )); - } - attrs.push(syn::parse_quote!( - #[derive(#frame_support::Serialize, #frame_support::Deserialize)] - )); - attrs.push(syn::parse_quote!( #[serde(rename_all = "camelCase")] )); - attrs.push(syn::parse_quote!( #[serde(deny_unknown_fields)] )); - attrs.push(syn::parse_quote!( #[serde(bound(serialize = ""))] )); - attrs.push(syn::parse_quote!( #[serde(bound(deserialize = ""))] )); - attrs.push(syn::parse_quote!( #[serde(crate = #serde_crate)] )); - } - _ => unreachable!("Checked by genesis_config parser"), - } - - quote::quote! { - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #def_macro_ident { - ($pallet_name:ident) => {}; - } - - #[cfg(not(feature = "std"))] - #[macro_export] - #[doc(hidden)] - macro_rules! #std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have the std feature enabled, this will cause the `", - $pallet_path, - "::GenesisConfig` type to not implement serde traits." - )); - }; - } - - #[cfg(feature = "std")] - #[macro_export] - #[doc(hidden)] - macro_rules! 
#std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => {}; - } - - #[doc(hidden)] - pub use #def_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #std_macro_ident as is_std_enabled_for_genesis; - } - } + )); + } + attrs.push(syn::parse_quote!( + #[derive(#frame_support::Serialize, #frame_support::Deserialize)] + )); + attrs.push(syn::parse_quote!( #[serde(rename_all = "camelCase")] )); + attrs.push(syn::parse_quote!( #[serde(deny_unknown_fields)] )); + attrs.push(syn::parse_quote!( #[serde(bound(serialize = ""))] )); + attrs.push(syn::parse_quote!( #[serde(bound(deserialize = ""))] )); + attrs.push(syn::parse_quote!( #[serde(crate = #serde_crate)] )); + }, + _ => unreachable!("Checked by genesis_config parser"), + } + + quote::quote! { + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #def_macro_ident { + ($pallet_name:ident) => {}; + } + + #[cfg(not(feature = "std"))] + #[macro_export] + #[doc(hidden)] + macro_rules! #std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have the std feature enabled, this will cause the `", + $pallet_path, + "::GenesisConfig` type to not implement serde traits." + )); + }; + } + + #[cfg(feature = "std")] + #[macro_export] + #[doc(hidden)] + macro_rules! 
#std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => {}; + } + + #[doc(hidden)] + pub use #def_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #std_macro_ident as is_std_enabled_for_genesis; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/hooks.rs b/support/procedural-fork/src/pallet/expand/hooks.rs index 6967f4c08..3623b5952 100644 --- a/support/procedural-fork/src/pallet/expand/hooks.rs +++ b/support/procedural-fork/src/pallet/expand/hooks.rs @@ -19,326 +19,322 @@ use crate::pallet::Def; /// * implement the individual traits using the Hooks trait pub fn expand_hooks(def: &mut Def) -> proc_macro2::TokenStream { - let (where_clause, span, has_runtime_upgrade) = match def.hooks.as_ref() { - Some(hooks) => { - let where_clause = hooks.where_clause.clone(); - let span = hooks.attr_span; - let has_runtime_upgrade = hooks.has_runtime_upgrade; - (where_clause, span, has_runtime_upgrade) - } - None => ( - def.config.where_clause.clone(), - def.pallet_struct.attr_span, - false, - ), - }; + let (where_clause, span, has_runtime_upgrade) = match def.hooks.as_ref() { + Some(hooks) => { + let where_clause = hooks.where_clause.clone(); + let span = hooks.attr_span; + let has_runtime_upgrade = hooks.has_runtime_upgrade; + (where_clause, span, has_runtime_upgrade) + }, + None => (def.config.where_clause.clone(), def.pallet_struct.attr_span, false), + }; - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(span); - let type_use_gen = &def.type_use_generics(span); - let pallet_ident = &def.pallet_struct.pallet; - let frame_system = &def.frame_system; - let pallet_name = quote::quote! 
{ - < - ::PalletInfo - as - #frame_support::traits::PalletInfo - >::name::().unwrap_or("") - }; + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(span); + let type_use_gen = &def.type_use_generics(span); + let pallet_ident = &def.pallet_struct.pallet; + let frame_system = &def.frame_system; + let pallet_name = quote::quote! { + < + ::PalletInfo + as + #frame_support::traits::PalletInfo + >::name::().unwrap_or("") + }; - let initialize_on_chain_storage_version = if let Some(in_code_version) = - &def.pallet_struct.storage_version - { - quote::quote! { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🐥 New pallet {:?} detected in the runtime. Initializing the on-chain storage version to match the storage version defined in the pallet: {:?}", - #pallet_name, - #in_code_version - ); - #in_code_version.put::(); - } - } else { - quote::quote! { - let default_version = #frame_support::traits::StorageVersion::new(0); - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🐥 New pallet {:?} detected in the runtime. The pallet has no defined storage version, so the on-chain version is being initialized to {:?}.", - #pallet_name, - default_version - ); - default_version.put::(); - } - }; + let initialize_on_chain_storage_version = if let Some(in_code_version) = + &def.pallet_struct.storage_version + { + quote::quote! { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🐥 New pallet {:?} detected in the runtime. Initializing the on-chain storage version to match the storage version defined in the pallet: {:?}", + #pallet_name, + #in_code_version + ); + #in_code_version.put::(); + } + } else { + quote::quote! { + let default_version = #frame_support::traits::StorageVersion::new(0); + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🐥 New pallet {:?} detected in the runtime. 
The pallet has no defined storage version, so the on-chain version is being initialized to {:?}.", + #pallet_name, + default_version + ); + default_version.put::(); + } + }; - let log_runtime_upgrade = if has_runtime_upgrade { - // a migration is defined here. - quote::quote! { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "⚠️ {} declares internal migrations (which *might* execute). \ - On-chain `{:?}` vs in-code storage version `{:?}`", - #pallet_name, - ::on_chain_storage_version(), - ::in_code_storage_version(), - ); - } - } else { - // default. - quote::quote! { - #frame_support::__private::log::debug!( - target: #frame_support::LOG_TARGET, - "✅ no migration for {}", - #pallet_name, - ); - } - }; + let log_runtime_upgrade = if has_runtime_upgrade { + // a migration is defined here. + quote::quote! { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "⚠️ {} declares internal migrations (which *might* execute). \ + On-chain `{:?}` vs in-code storage version `{:?}`", + #pallet_name, + ::on_chain_storage_version(), + ::in_code_storage_version(), + ); + } + } else { + // default. + quote::quote! { + #frame_support::__private::log::debug!( + target: #frame_support::LOG_TARGET, + "✅ no migration for {}", + #pallet_name, + ); + } + }; - let hooks_impl = if def.hooks.is_none() { - let frame_system = &def.frame_system; - quote::quote! { - impl<#type_impl_gen> - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause {} - } - } else { - proc_macro2::TokenStream::new() - }; + let hooks_impl = if def.hooks.is_none() { + let frame_system = &def.frame_system; + quote::quote! 
{ + impl<#type_impl_gen> + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause {} + } + } else { + proc_macro2::TokenStream::new() + }; - // If a storage version is set, we should ensure that the storage version on chain matches the - // in-code storage version. This assumes that `Executive` is running custom migrations before - // the pallets are called. - let post_storage_version_check = if def.pallet_struct.storage_version.is_some() { - quote::quote! { - let on_chain_version = ::on_chain_storage_version(); - let in_code_version = ::in_code_storage_version(); + // If a storage version is set, we should ensure that the storage version on chain matches the + // in-code storage version. This assumes that `Executive` is running custom migrations before + // the pallets are called. + let post_storage_version_check = if def.pallet_struct.storage_version.is_some() { + quote::quote! { + let on_chain_version = ::on_chain_storage_version(); + let in_code_version = ::in_code_storage_version(); - if on_chain_version != in_code_version { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "{}: On chain storage version {:?} doesn't match in-code storage version {:?}.", - #pallet_name, - on_chain_version, - in_code_version, - ); + if on_chain_version != in_code_version { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "{}: On chain storage version {:?} doesn't match in-code storage version {:?}.", + #pallet_name, + on_chain_version, + in_code_version, + ); - return Err("On chain and in-code storage version do not match. Missing runtime upgrade?".into()); - } - } - } else { - quote::quote! { - let on_chain_version = ::on_chain_storage_version(); + return Err("On chain and in-code storage version do not match. Missing runtime upgrade?".into()); + } + } + } else { + quote::quote! 
{ + let on_chain_version = ::on_chain_storage_version(); - if on_chain_version != #frame_support::traits::StorageVersion::new(0) { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "{}: On chain storage version {:?} is set to non zero, \ - while the pallet is missing the `#[pallet::storage_version(VERSION)]` attribute.", - #pallet_name, - on_chain_version, - ); + if on_chain_version != #frame_support::traits::StorageVersion::new(0) { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "{}: On chain storage version {:?} is set to non zero, \ + while the pallet is missing the `#[pallet::storage_version(VERSION)]` attribute.", + #pallet_name, + on_chain_version, + ); - return Err("On chain storage version set, while the pallet doesn't \ - have the `#[pallet::storage_version(VERSION)]` attribute.".into()); - } - } - }; + return Err("On chain storage version set, while the pallet doesn't \ + have the `#[pallet::storage_version(VERSION)]` attribute.".into()); + } + } + }; - quote::quote_spanned!(span => - #hooks_impl + quote::quote_spanned!(span => + #hooks_impl - impl<#type_impl_gen> - #frame_support::traits::OnFinalize<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_finalize(n: #frame_system::pallet_prelude::BlockNumberFor::) { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_finalize") - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_finalize(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnFinalize<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_finalize(n: #frame_system::pallet_prelude::BlockNumberFor::) { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_finalize") + ); + < + Self as 
#frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_finalize(n) + } + } - impl<#type_impl_gen> - #frame_support::traits::OnIdle<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_idle( - n: #frame_system::pallet_prelude::BlockNumberFor::, - remaining_weight: #frame_support::weights::Weight - ) -> #frame_support::weights::Weight { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_idle(n, remaining_weight) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnIdle<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_idle( + n: #frame_system::pallet_prelude::BlockNumberFor::, + remaining_weight: #frame_support::weights::Weight + ) -> #frame_support::weights::Weight { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_idle(n, remaining_weight) + } + } - impl<#type_impl_gen> - #frame_support::traits::OnPoll<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_poll( - n: #frame_system::pallet_prelude::BlockNumberFor::, - weight: &mut #frame_support::weights::WeightMeter - ) { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_poll(n, weight); - } - } + impl<#type_impl_gen> + #frame_support::traits::OnPoll<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_poll( + n: #frame_system::pallet_prelude::BlockNumberFor::, + weight: &mut #frame_support::weights::WeightMeter + ) { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_poll(n, weight); + } + } - impl<#type_impl_gen> - #frame_support::traits::OnInitialize<#frame_system::pallet_prelude::BlockNumberFor::> - for 
#pallet_ident<#type_use_gen> #where_clause - { - fn on_initialize( - n: #frame_system::pallet_prelude::BlockNumberFor:: - ) -> #frame_support::weights::Weight { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_initialize") - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_initialize(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnInitialize<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_initialize( + n: #frame_system::pallet_prelude::BlockNumberFor:: + ) -> #frame_support::weights::Weight { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_initialize") + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_initialize(n) + } + } - impl<#type_impl_gen> - #frame_support::traits::BeforeAllRuntimeMigrations - for #pallet_ident<#type_use_gen> #where_clause - { - fn before_all_runtime_migrations() -> #frame_support::weights::Weight { - use #frame_support::traits::{Get, PalletInfoAccess}; - use #frame_support::__private::hashing::twox_128; - use #frame_support::storage::unhashed::contains_prefixed_key; - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("before_all") - ); + impl<#type_impl_gen> + #frame_support::traits::BeforeAllRuntimeMigrations + for #pallet_ident<#type_use_gen> #where_clause + { + fn before_all_runtime_migrations() -> #frame_support::weights::Weight { + use #frame_support::traits::{Get, PalletInfoAccess}; + use #frame_support::__private::hashing::twox_128; + use #frame_support::storage::unhashed::contains_prefixed_key; + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("before_all") + ); - // Check if the pallet has any keys set, 
including the storage version. If there are - // no keys set, the pallet was just added to the runtime and needs to have its - // version initialized. - let pallet_hashed_prefix = ::name_hash(); - let exists = contains_prefixed_key(&pallet_hashed_prefix); - if !exists { - #initialize_on_chain_storage_version - ::DbWeight::get().reads_writes(1, 1) - } else { - ::DbWeight::get().reads(1) - } - } - } + // Check if the pallet has any keys set, including the storage version. If there are + // no keys set, the pallet was just added to the runtime and needs to have its + // version initialized. + let pallet_hashed_prefix = ::name_hash(); + let exists = contains_prefixed_key(&pallet_hashed_prefix); + if !exists { + #initialize_on_chain_storage_version + ::DbWeight::get().reads_writes(1, 1) + } else { + ::DbWeight::get().reads(1) + } + } + } - impl<#type_impl_gen> - #frame_support::traits::OnRuntimeUpgrade - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_runtime_upgrade() -> #frame_support::weights::Weight { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_runtime_update") - ); + impl<#type_impl_gen> + #frame_support::traits::OnRuntimeUpgrade + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_runtime_upgrade() -> #frame_support::weights::Weight { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_runtime_update") + ); - // log info about the upgrade. - #log_runtime_upgrade + // log info about the upgrade. 
+ #log_runtime_upgrade - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_runtime_upgrade() - } + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_runtime_upgrade() + } - #[cfg(feature = "try-runtime")] - fn pre_upgrade() -> Result<#frame_support::__private::sp_std::vec::Vec, #frame_support::sp_runtime::TryRuntimeError> { - < - Self - as - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - >::pre_upgrade() - } + #[cfg(feature = "try-runtime")] + fn pre_upgrade() -> Result<#frame_support::__private::sp_std::vec::Vec, #frame_support::sp_runtime::TryRuntimeError> { + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::pre_upgrade() + } - #[cfg(feature = "try-runtime")] - fn post_upgrade(state: #frame_support::__private::sp_std::vec::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { - #post_storage_version_check + #[cfg(feature = "try-runtime")] + fn post_upgrade(state: #frame_support::__private::sp_std::vec::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #post_storage_version_check - < - Self - as - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - >::post_upgrade(state) - } - } + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::post_upgrade(state) + } + } - impl<#type_impl_gen> - #frame_support::traits::OffchainWorker<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn offchain_worker(n: #frame_system::pallet_prelude::BlockNumberFor::) { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::offchain_worker(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OffchainWorker<#frame_system::pallet_prelude::BlockNumberFor::> + for 
#pallet_ident<#type_use_gen> #where_clause + { + fn offchain_worker(n: #frame_system::pallet_prelude::BlockNumberFor::) { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::offchain_worker(n) + } + } - // Integrity tests are only required for when `std` is enabled. - #frame_support::std_enabled! { - impl<#type_impl_gen> - #frame_support::traits::IntegrityTest - for #pallet_ident<#type_use_gen> #where_clause - { - fn integrity_test() { - #frame_support::__private::sp_io::TestExternalities::default().execute_with(|| { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::integrity_test() - }); - } - } - } + // Integrity tests are only required for when `std` is enabled. + #frame_support::std_enabled! { + impl<#type_impl_gen> + #frame_support::traits::IntegrityTest + for #pallet_ident<#type_use_gen> #where_clause + { + fn integrity_test() { + #frame_support::__private::sp_io::TestExternalities::default().execute_with(|| { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::integrity_test() + }); + } + } + } - #[cfg(feature = "try-runtime")] - impl<#type_impl_gen> - #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn try_state( - n: #frame_system::pallet_prelude::BlockNumberFor::, - _s: #frame_support::traits::TryStateSelect - ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🩺 Running {:?} try-state checks", - #pallet_name, - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::try_state(n).map_err(|err| { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "❌ {:?} try_state checks failed: {:?}", - #pallet_name, - err - ); + #[cfg(feature = 
"try-runtime")] + impl<#type_impl_gen> + #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn try_state( + n: #frame_system::pallet_prelude::BlockNumberFor::, + _s: #frame_support::traits::TryStateSelect + ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🩺 Running {:?} try-state checks", + #pallet_name, + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::try_state(n).map_err(|err| { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "❌ {:?} try_state checks failed: {:?}", + #pallet_name, + err + ); - err - }) - } - } - ) + err + }) + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/inherent.rs b/support/procedural-fork/src/pallet/expand/inherent.rs index 0a80d672a..182d79f5b 100644 --- a/support/procedural-fork/src/pallet/expand/inherent.rs +++ b/support/procedural-fork/src/pallet/expand/inherent.rs @@ -21,38 +21,35 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_inherents(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = Ident::new( - &format!("__is_inherent_part_defined_{}", count), - def.item.span(), - ); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new(&format!("__is_inherent_part_defined_{}", count), def.item.span()); - let maybe_compile_error = if def.inherent.is_none() { - quote! { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::inherent] defined, perhaps you should \ - remove `Inherent` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.inherent.is_none() { + quote! 
{ + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::inherent] defined, perhaps you should \ + remove `Inherent` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_inherent_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_inherent_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_inherent_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_inherent_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/instances.rs b/support/procedural-fork/src/pallet/expand/instances.rs index 12423409c..b6dfa7e6d 100644 --- a/support/procedural-fork/src/pallet/expand/instances.rs +++ b/support/procedural-fork/src/pallet/expand/instances.rs @@ -22,22 +22,22 @@ use proc_macro2::Span; /// * Provide inherent instance to be used by construct_runtime /// * Provide Instance1 ..= Instance16 for instantiable pallet pub fn expand_instances(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let inherent_ident = syn::Ident::new(crate::INHERENT_INSTANCE_NAME, Span::call_site()); - let instances = if def.config.has_instance { - (1..=NUMBER_OF_INSTANCE) - .map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())) - .collect() - } else { - vec![] - }; + let frame_support = &def.frame_support; + let inherent_ident = syn::Ident::new(crate::INHERENT_INSTANCE_NAME, Span::call_site()); + let instances = if def.config.has_instance { + (1..=NUMBER_OF_INSTANCE) + .map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())) + .collect() + } else { + vec![] + }; - quote::quote!( - /// Hidden instance generated to be internally used when 
module is used without - /// instance. - #[doc(hidden)] - pub type #inherent_ident = (); + quote::quote!( + /// Hidden instance generated to be internally used when module is used without + /// instance. + #[doc(hidden)] + pub type #inherent_ident = (); - #( pub use #frame_support::instances::#instances; )* - ) + #( pub use #frame_support::instances::#instances; )* + ) } diff --git a/support/procedural-fork/src/pallet/expand/mod.rs b/support/procedural-fork/src/pallet/expand/mod.rs index ff4423f85..067839c28 100644 --- a/support/procedural-fork/src/pallet/expand/mod.rs +++ b/support/procedural-fork/src/pallet/expand/mod.rs @@ -42,12 +42,12 @@ use quote::ToTokens; /// Merge where clause together, `where` token span is taken from the first not none one. pub fn merge_where_clauses(clauses: &[&Option]) -> Option { - let mut clauses = clauses.iter().filter_map(|f| f.as_ref()); - let mut res = clauses.next()?.clone(); - for other in clauses { - res.predicates.extend(other.predicates.iter().cloned()) - } - Some(res) + let mut clauses = clauses.iter().filter_map(|f| f.as_ref()); + let mut res = clauses.next()?.clone(); + for other in clauses { + res.predicates.extend(other.predicates.iter().cloned()) + } + Some(res) } /// Expand definition, in particular: @@ -55,32 +55,32 @@ pub fn merge_where_clauses(clauses: &[&Option]) -> Option proc_macro2::TokenStream { - // Remove the `pallet_doc` attribute first. 
- let metadata_docs = documentation::expand_documentation(&mut def); - let constants = constants::expand_constants(&mut def); - let pallet_struct = pallet_struct::expand_pallet_struct(&mut def); - let config = config::expand_config(&mut def); - let call = call::expand_call(&mut def); - let tasks = tasks::expand_tasks(&mut def); - let error = error::expand_error(&mut def); - let event = event::expand_event(&mut def); - let storages = storage::expand_storages(&mut def); - let inherents = inherent::expand_inherents(&mut def); - let instances = instances::expand_instances(&mut def); - let hooks = hooks::expand_hooks(&mut def); - let genesis_build = genesis_build::expand_genesis_build(&mut def); - let genesis_config = genesis_config::expand_genesis_config(&mut def); - let type_values = type_value::expand_type_values(&mut def); - let origins = origin::expand_origins(&mut def); - let validate_unsigned = validate_unsigned::expand_validate_unsigned(&mut def); - let tt_default_parts = tt_default_parts::expand_tt_default_parts(&mut def); - let doc_only = doc_only::expand_doc_only(&mut def); - let composites = composite::expand_composites(&mut def); + // Remove the `pallet_doc` attribute first. 
+ let metadata_docs = documentation::expand_documentation(&mut def); + let constants = constants::expand_constants(&mut def); + let pallet_struct = pallet_struct::expand_pallet_struct(&mut def); + let config = config::expand_config(&mut def); + let call = call::expand_call(&mut def); + let tasks = tasks::expand_tasks(&mut def); + let error = error::expand_error(&mut def); + let event = event::expand_event(&mut def); + let storages = storage::expand_storages(&mut def); + let inherents = inherent::expand_inherents(&mut def); + let instances = instances::expand_instances(&mut def); + let hooks = hooks::expand_hooks(&mut def); + let genesis_build = genesis_build::expand_genesis_build(&mut def); + let genesis_config = genesis_config::expand_genesis_config(&mut def); + let type_values = type_value::expand_type_values(&mut def); + let origins = origin::expand_origins(&mut def); + let validate_unsigned = validate_unsigned::expand_validate_unsigned(&mut def); + let tt_default_parts = tt_default_parts::expand_tt_default_parts(&mut def); + let doc_only = doc_only::expand_doc_only(&mut def); + let composites = composite::expand_composites(&mut def); - def.item.attrs.insert( - 0, - syn::parse_quote!( - #[doc = r"The `pallet` module in each FRAME pallet hosts the most important items needed + def.item.attrs.insert( + 0, + syn::parse_quote!( + #[doc = r"The `pallet` module in each FRAME pallet hosts the most important items needed to construct this pallet. The main components of this pallet are: @@ -93,38 +93,38 @@ storage item. Otherwise, all storage items are listed among [*Type Definitions*] - [`Config`], which contains the configuration trait of this pallet. - [`Event`] and [`Error`], which are listed among the [*Enums*](#enums). 
"] - ), - ); + ), + ); - let new_items = quote::quote!( - #metadata_docs - #constants - #pallet_struct - #config - #call - #tasks - #error - #event - #storages - #inherents - #instances - #hooks - #genesis_build - #genesis_config - #type_values - #origins - #validate_unsigned - #tt_default_parts - #doc_only - #composites - ); + let new_items = quote::quote!( + #metadata_docs + #constants + #pallet_struct + #config + #call + #tasks + #error + #event + #storages + #inherents + #instances + #hooks + #genesis_build + #genesis_config + #type_values + #origins + #validate_unsigned + #tt_default_parts + #doc_only + #composites + ); - def.item - .content - .as_mut() - .expect("This is checked by parsing") - .1 - .push(syn::Item::Verbatim(new_items)); + def.item + .content + .as_mut() + .expect("This is checked by parsing") + .1 + .push(syn::Item::Verbatim(new_items)); - def.item.into_token_stream() + def.item.into_token_stream() } diff --git a/support/procedural-fork/src/pallet/expand/origin.rs b/support/procedural-fork/src/pallet/expand/origin.rs index 167445ad6..55865b424 100644 --- a/support/procedural-fork/src/pallet/expand/origin.rs +++ b/support/procedural-fork/src/pallet/expand/origin.rs @@ -21,38 +21,35 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_origins(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = Ident::new( - &format!("__is_origin_part_defined_{}", count), - def.item.span(), - ); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new(&format!("__is_origin_part_defined_{}", count), def.item.span()); - let maybe_compile_error = if def.origin.is_none() { - quote! 
{ - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::origin] defined, perhaps you should \ - remove `Origin` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.origin.is_none() { + quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::origin] defined, perhaps you should \ + remove `Origin` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_origin_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_origin_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_origin_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_origin_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/pallet_struct.rs b/support/procedural-fork/src/pallet/expand/pallet_struct.rs index c5def65ed..7cdf6bde9 100644 --- a/support/procedural-fork/src/pallet/expand/pallet_struct.rs +++ b/support/procedural-fork/src/pallet/expand/pallet_struct.rs @@ -28,275 +28,263 @@ use frame_support_procedural_tools::get_doc_literals; /// * implementation of `PalletInfoAccess` information /// * implementation of `StorageInfoTrait` on Pallet pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); - let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); - let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); - let pallet_ident = &def.pallet_struct.pallet; - let config_where_clause = &def.config.where_clause; 
- - let mut storages_where_clauses = vec![&def.config.where_clause]; - storages_where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); - let storages_where_clauses = merge_where_clauses(&storages_where_clauses); - - let pallet_item = { - let pallet_module_items = &mut def.item.content.as_mut().expect("Checked by def").1; - let item = &mut pallet_module_items[def.pallet_struct.index]; - if let syn::Item::Struct(item) = item { - item - } else { - unreachable!("Checked by pallet struct parser") - } - }; - - // If the first field type is `_` then we replace with `PhantomData` - if let Some(field) = pallet_item.fields.iter_mut().next() { - if field.ty == syn::parse_quote!(_) { - field.ty = syn::parse_quote!( - #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)> - ); - } - } - - if get_doc_literals(&pallet_item.attrs).is_empty() { - pallet_item.attrs.push(syn::parse_quote!( - #[doc = r" + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); + let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); + let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); + let pallet_ident = &def.pallet_struct.pallet; + let config_where_clause = &def.config.where_clause; + + let mut storages_where_clauses = vec![&def.config.where_clause]; + storages_where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); + let storages_where_clauses = merge_where_clauses(&storages_where_clauses); + + let pallet_item = { + let pallet_module_items = &mut def.item.content.as_mut().expect("Checked by def").1; + let item = &mut pallet_module_items[def.pallet_struct.index]; + if let syn::Item::Struct(item) = item { + item + } else { + unreachable!("Checked by pallet struct parser") + } + }; + + // If the first field type is `_` then we replace with `PhantomData` + if let Some(field) = 
pallet_item.fields.iter_mut().next() { + if field.ty == syn::parse_quote!(_) { + field.ty = syn::parse_quote!( + #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)> + ); + } + } + + if get_doc_literals(&pallet_item.attrs).is_empty() { + pallet_item.attrs.push(syn::parse_quote!( + #[doc = r" The `Pallet` struct, the main type that implements traits and standalone functions within the pallet. "] - )); - } - - pallet_item.attrs.push(syn::parse_quote!( - #[derive( - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::RuntimeDebugNoBound, - )] - )); - - let pallet_error_metadata = if let Some(error_def) = &def.error { - let error_ident = &error_def.error; - quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { - Some(#frame_support::__private::metadata_ir::PalletErrorMetadataIR { - ty: #frame_support::__private::scale_info::meta_type::<#error_ident<#type_use_gen>>() - }) - } - } - ) - } else { - quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { - None - } - } - ) - }; - - let storage_info_span = def - .pallet_struct - .without_storage_info - .unwrap_or(def.pallet_struct.attr_span); - - let storage_names = &def - .storages - .iter() - .map(|storage| &storage.ident) - .collect::>(); - let storage_cfg_attrs = &def - .storages - .iter() - .map(|storage| &storage.cfg_attrs) - .collect::>(); - - // Depending on the flag `without_storage_info` and the storage attribute `unbounded`, we use - // partial or full storage info from storage. 
- let storage_info_traits = &def - .storages - .iter() - .map(|storage| { - if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { - quote::quote_spanned!(storage_info_span => PartialStorageInfoTrait) - } else { - quote::quote_spanned!(storage_info_span => StorageInfoTrait) - } - }) - .collect::>(); - - let storage_info_methods = &def - .storages - .iter() - .map(|storage| { - if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { - quote::quote_spanned!(storage_info_span => partial_storage_info) - } else { - quote::quote_spanned!(storage_info_span => storage_info) - } - }) - .collect::>(); - - let storage_info = quote::quote_spanned!(storage_info_span => - impl<#type_impl_gen> #frame_support::traits::StorageInfoTrait - for #pallet_ident<#type_use_gen> - #storages_where_clauses - { - fn storage_info() - -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::StorageInfo> - { - #[allow(unused_mut)] - let mut res = #frame_support::__private::sp_std::vec![]; - - #( - #(#storage_cfg_attrs)* - { - let mut storage_info = < - #storage_names<#type_use_gen> - as #frame_support::traits::#storage_info_traits - >::#storage_info_methods(); - res.append(&mut storage_info); - } - )* - - res - } - } - ); - - let (storage_version, in_code_storage_version_ty) = - if let Some(v) = def.pallet_struct.storage_version.as_ref() { - ( - quote::quote! { #v }, - quote::quote! { #frame_support::traits::StorageVersion }, - ) - } else { - ( - quote::quote! { core::default::Default::default() }, - quote::quote! 
{ #frame_support::traits::NoStorageVersionSet }, - ) - }; - - let whitelisted_storage_idents: Vec = def - .storages - .iter() - .filter_map(|s| s.whitelisted.then_some(s.ident.clone())) - .collect(); - - let whitelisted_storage_keys_impl = quote::quote![ - use #frame_support::traits::{StorageInfoTrait, TrackedStorageKey, WhitelistedStorageKeys}; - impl<#type_impl_gen> WhitelistedStorageKeys for #pallet_ident<#type_use_gen> #storages_where_clauses { - fn whitelisted_storage_keys() -> #frame_support::__private::sp_std::vec::Vec { - use #frame_support::__private::sp_std::vec; - vec![#( - TrackedStorageKey::new(#whitelisted_storage_idents::<#type_use_gen>::hashed_key().to_vec()) - ),*] - } - } - ]; - - quote::quote_spanned!(def.pallet_struct.attr_span => - #pallet_error_metadata - - /// Type alias to `Pallet`, to be used by `construct_runtime`. - /// - /// Generated by `pallet` attribute macro. - #[deprecated(note = "use `Pallet` instead")] - #[allow(dead_code)] - pub type Module<#type_decl_gen> = #pallet_ident<#type_use_gen>; - - // Implement `GetStorageVersion` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::GetStorageVersion - for #pallet_ident<#type_use_gen> - #config_where_clause - { - type InCodeStorageVersion = #in_code_storage_version_ty; - - fn in_code_storage_version() -> Self::InCodeStorageVersion { - #storage_version - } - - fn on_chain_storage_version() -> #frame_support::traits::StorageVersion { - #frame_support::traits::StorageVersion::get::() - } - } - - // Implement `OnGenesis` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::OnGenesis - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn on_genesis() { - let storage_version: #frame_support::traits::StorageVersion = #storage_version; - storage_version.put::(); - } - } - - // Implement `PalletInfoAccess` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::PalletInfoAccess - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn index() -> usize { - < 
- ::PalletInfo as #frame_support::traits::PalletInfo - >::index::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn name() -> &'static str { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::name::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn name_hash() -> [u8; 16] { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::name_hash::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn module_name() -> &'static str { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::module_name::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn crate_version() -> #frame_support::traits::CrateVersion { - #frame_support::crate_to_crate_version!() - } - } - - impl<#type_impl_gen> #frame_support::traits::PalletsInfoAccess - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn count() -> usize { 1 } - fn infos() -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::PalletInfoData> { - use #frame_support::traits::PalletInfoAccess; - let item = #frame_support::traits::PalletInfoData { - index: Self::index(), - name: Self::name(), - module_name: Self::module_name(), - crate_version: Self::crate_version(), - }; - #frame_support::__private::sp_std::vec![item] - } - } - - #storage_info - #whitelisted_storage_keys_impl - ) + )); + } + + pallet_item.attrs.push(syn::parse_quote!( + #[derive( + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::RuntimeDebugNoBound, + )] + )); + + let pallet_error_metadata = if let Some(error_def) = &def.error { + let error_ident = &error_def.error; + quote::quote_spanned!(def.pallet_struct.attr_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> 
#config_where_clause { + #[doc(hidden)] + pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { + Some(#frame_support::__private::metadata_ir::PalletErrorMetadataIR { + ty: #frame_support::__private::scale_info::meta_type::<#error_ident<#type_use_gen>>() + }) + } + } + ) + } else { + quote::quote_spanned!(def.pallet_struct.attr_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { + None + } + } + ) + }; + + let storage_info_span = + def.pallet_struct.without_storage_info.unwrap_or(def.pallet_struct.attr_span); + + let storage_names = &def.storages.iter().map(|storage| &storage.ident).collect::>(); + let storage_cfg_attrs = + &def.storages.iter().map(|storage| &storage.cfg_attrs).collect::>(); + + // Depending on the flag `without_storage_info` and the storage attribute `unbounded`, we use + // partial or full storage info from storage. 
+ let storage_info_traits = &def + .storages + .iter() + .map(|storage| { + if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { + quote::quote_spanned!(storage_info_span => PartialStorageInfoTrait) + } else { + quote::quote_spanned!(storage_info_span => StorageInfoTrait) + } + }) + .collect::>(); + + let storage_info_methods = &def + .storages + .iter() + .map(|storage| { + if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { + quote::quote_spanned!(storage_info_span => partial_storage_info) + } else { + quote::quote_spanned!(storage_info_span => storage_info) + } + }) + .collect::>(); + + let storage_info = quote::quote_spanned!(storage_info_span => + impl<#type_impl_gen> #frame_support::traits::StorageInfoTrait + for #pallet_ident<#type_use_gen> + #storages_where_clauses + { + fn storage_info() + -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::StorageInfo> + { + #[allow(unused_mut)] + let mut res = #frame_support::__private::sp_std::vec![]; + + #( + #(#storage_cfg_attrs)* + { + let mut storage_info = < + #storage_names<#type_use_gen> + as #frame_support::traits::#storage_info_traits + >::#storage_info_methods(); + res.append(&mut storage_info); + } + )* + + res + } + } + ); + + let (storage_version, in_code_storage_version_ty) = + if let Some(v) = def.pallet_struct.storage_version.as_ref() { + (quote::quote! { #v }, quote::quote! { #frame_support::traits::StorageVersion }) + } else { + ( + quote::quote! { core::default::Default::default() }, + quote::quote! 
{ #frame_support::traits::NoStorageVersionSet }, + ) + }; + + let whitelisted_storage_idents: Vec = def + .storages + .iter() + .filter_map(|s| s.whitelisted.then_some(s.ident.clone())) + .collect(); + + let whitelisted_storage_keys_impl = quote::quote![ + use #frame_support::traits::{StorageInfoTrait, TrackedStorageKey, WhitelistedStorageKeys}; + impl<#type_impl_gen> WhitelistedStorageKeys for #pallet_ident<#type_use_gen> #storages_where_clauses { + fn whitelisted_storage_keys() -> #frame_support::__private::sp_std::vec::Vec { + use #frame_support::__private::sp_std::vec; + vec![#( + TrackedStorageKey::new(#whitelisted_storage_idents::<#type_use_gen>::hashed_key().to_vec()) + ),*] + } + } + ]; + + quote::quote_spanned!(def.pallet_struct.attr_span => + #pallet_error_metadata + + /// Type alias to `Pallet`, to be used by `construct_runtime`. + /// + /// Generated by `pallet` attribute macro. + #[deprecated(note = "use `Pallet` instead")] + #[allow(dead_code)] + pub type Module<#type_decl_gen> = #pallet_ident<#type_use_gen>; + + // Implement `GetStorageVersion` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::GetStorageVersion + for #pallet_ident<#type_use_gen> + #config_where_clause + { + type InCodeStorageVersion = #in_code_storage_version_ty; + + fn in_code_storage_version() -> Self::InCodeStorageVersion { + #storage_version + } + + fn on_chain_storage_version() -> #frame_support::traits::StorageVersion { + #frame_support::traits::StorageVersion::get::() + } + } + + // Implement `OnGenesis` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::OnGenesis + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn on_genesis() { + let storage_version: #frame_support::traits::StorageVersion = #storage_version; + storage_version.put::(); + } + } + + // Implement `PalletInfoAccess` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::PalletInfoAccess + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn index() -> usize { + < 
+ ::PalletInfo as #frame_support::traits::PalletInfo + >::index::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn name() -> &'static str { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::name::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn name_hash() -> [u8; 16] { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::name_hash::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn module_name() -> &'static str { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::module_name::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn crate_version() -> #frame_support::traits::CrateVersion { + #frame_support::crate_to_crate_version!() + } + } + + impl<#type_impl_gen> #frame_support::traits::PalletsInfoAccess + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn count() -> usize { 1 } + fn infos() -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::PalletInfoData> { + use #frame_support::traits::PalletInfoAccess; + let item = #frame_support::traits::PalletInfoData { + index: Self::index(), + name: Self::name(), + module_name: Self::module_name(), + crate_version: Self::crate_version(), + }; + #frame_support::__private::sp_std::vec![item] + } + } + + #storage_info + #whitelisted_storage_keys_impl + ) } diff --git a/support/procedural-fork/src/pallet/expand/storage.rs b/support/procedural-fork/src/pallet/expand/storage.rs index 32752dc52..937b068cf 100644 --- a/support/procedural-fork/src/pallet/expand/storage.rs +++ b/support/procedural-fork/src/pallet/expand/storage.rs @@ -16,14 +16,14 @@ // limitations under the License. 
use crate::{ - counter_prefix, - pallet::{ - parse::{ - helper::two128_str, - storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, - }, - Def, - }, + counter_prefix, + pallet::{ + parse::{ + helper::two128_str, + storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, + }, + Def, + }, }; use quote::ToTokens; use std::{collections::HashMap, ops::IndexMut}; @@ -32,76 +32,73 @@ use syn::spanned::Spanned; /// Generate the prefix_ident related to the storage. /// prefix_ident is used for the prefix struct to be given to storage as first generic param. fn prefix_ident(storage: &StorageDef) -> syn::Ident { - let storage_ident = &storage.ident; - syn::Ident::new( - &format!("_GeneratedPrefixForStorage{}", storage_ident), - storage_ident.span(), - ) + let storage_ident = &storage.ident; + syn::Ident::new(&format!("_GeneratedPrefixForStorage{}", storage_ident), storage_ident.span()) } /// Generate the counter_prefix_ident related to the storage. /// counter_prefix_ident is used for the prefix struct to be given to counted storage map. fn counter_prefix_ident(storage_ident: &syn::Ident) -> syn::Ident { - syn::Ident::new( - &format!("_GeneratedCounterPrefixForStorage{}", storage_ident), - storage_ident.span(), - ) + syn::Ident::new( + &format!("_GeneratedCounterPrefixForStorage{}", storage_ident), + storage_ident.span(), + ) } /// Check for duplicated storage prefixes. This step is necessary since users can specify an /// alternative storage prefix using the #[pallet::storage_prefix] syntax, and we need to ensure /// that the prefix specified by the user is not a duplicate of an existing one. 
fn check_prefix_duplicates( - storage_def: &StorageDef, - // A hashmap of all already used prefix and their associated error if duplication - used_prefixes: &mut HashMap, + storage_def: &StorageDef, + // A hashmap of all already used prefix and their associated error if duplication + used_prefixes: &mut HashMap, ) -> syn::Result<()> { - let prefix = storage_def.prefix(); - let dup_err = syn::Error::new( - storage_def.prefix_span(), - format!("Duplicate storage prefixes found for `{}`", prefix), - ); - - if let Some(other_dup_err) = used_prefixes.insert(prefix.clone(), dup_err.clone()) { - let mut err = dup_err; - err.combine(other_dup_err); - return Err(err); - } - - if let Metadata::CountedMap { .. } = storage_def.metadata { - let counter_prefix = counter_prefix(&prefix); - let counter_dup_err = syn::Error::new( - storage_def.prefix_span(), - format!( - "Duplicate storage prefixes found for `{}`, used for counter associated to \ + let prefix = storage_def.prefix(); + let dup_err = syn::Error::new( + storage_def.prefix_span(), + format!("Duplicate storage prefixes found for `{}`", prefix), + ); + + if let Some(other_dup_err) = used_prefixes.insert(prefix.clone(), dup_err.clone()) { + let mut err = dup_err; + err.combine(other_dup_err); + return Err(err) + } + + if let Metadata::CountedMap { .. 
} = storage_def.metadata { + let counter_prefix = counter_prefix(&prefix); + let counter_dup_err = syn::Error::new( + storage_def.prefix_span(), + format!( + "Duplicate storage prefixes found for `{}`, used for counter associated to \ counted storage map", - counter_prefix, - ), - ); - - if let Some(other_dup_err) = used_prefixes.insert(counter_prefix, counter_dup_err.clone()) { - let mut err = counter_dup_err; - err.combine(other_dup_err); - return Err(err); - } - } - - Ok(()) + counter_prefix, + ), + ); + + if let Some(other_dup_err) = used_prefixes.insert(counter_prefix, counter_dup_err.clone()) { + let mut err = counter_dup_err; + err.combine(other_dup_err); + return Err(err) + } + } + + Ok(()) } pub struct ResultOnEmptyStructMetadata { - /// The Rust ident that is going to be used as the name of the OnEmpty struct. - pub name: syn::Ident, - /// The path to the error type being returned by the ResultQuery. - pub error_path: syn::Path, - /// The visibility of the OnEmpty struct. - pub visibility: syn::Visibility, - /// The type of the storage item. - pub value_ty: syn::Type, - /// The name of the pallet error enum variant that is going to be returned. - pub variant_name: syn::Ident, - /// The span used to report compilation errors about the OnEmpty struct. - pub span: proc_macro2::Span, + /// The Rust ident that is going to be used as the name of the OnEmpty struct. + pub name: syn::Ident, + /// The path to the error type being returned by the ResultQuery. + pub error_path: syn::Path, + /// The visibility of the OnEmpty struct. + pub visibility: syn::Visibility, + /// The type of the storage item. + pub value_ty: syn::Type, + /// The name of the pallet error enum variant that is going to be returned. + pub variant_name: syn::Ident, + /// The span used to report compilation errors about the OnEmpty struct. 
+ pub span: proc_macro2::Span, } /// @@ -109,305 +106,277 @@ pub struct ResultOnEmptyStructMetadata { /// * if generics are named: reorder the generic, remove their name, and add the missing ones. /// * Add `#[allow(type_alias_bounds)]` pub fn process_generics(def: &mut Def) -> syn::Result> { - let frame_support = &def.frame_support; - let mut on_empty_struct_metadata = Vec::new(); - - for storage_def in def.storages.iter_mut() { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage_def.index]; - - let typ_item = match item { - syn::Item::Type(t) => t, - _ => unreachable!("Checked by def"), - }; - - typ_item - .attrs - .push(syn::parse_quote!(#[allow(type_alias_bounds)])); - - let typ_path = match &mut *typ_item.ty { - syn::Type::Path(p) => p, - _ => unreachable!("Checked by def"), - }; - - let args = match &mut typ_path.path.segments[0].arguments { - syn::PathArguments::AngleBracketed(args) => args, - _ => unreachable!("Checked by def"), - }; - - let prefix_ident = prefix_ident(storage_def); - let type_use_gen = if def.config.has_instance { - quote::quote_spanned!(storage_def.attr_span => T, I) - } else { - quote::quote_spanned!(storage_def.attr_span => T) - }; - - let default_query_kind: syn::Type = - syn::parse_quote!(#frame_support::storage::types::OptionQuery); - let mut default_on_empty = |value_ty: syn::Type| -> syn::Type { - if let Some(QueryKind::ResultQuery(error_path, variant_name)) = - storage_def.query_kind.as_ref() - { - let on_empty_ident = - quote::format_ident!("__Frame_Internal_Get{}Result", storage_def.ident); - on_empty_struct_metadata.push(ResultOnEmptyStructMetadata { - name: on_empty_ident.clone(), - visibility: storage_def.vis.clone(), - value_ty, - error_path: error_path.clone(), - variant_name: variant_name.clone(), - span: storage_def.attr_span, - }); - return syn::parse_quote!(#on_empty_ident); - } - syn::parse_quote!(#frame_support::traits::GetDefault) - }; - let default_max_values: syn::Type = 
syn::parse_quote!(#frame_support::traits::GetDefault); - - let set_result_query_type_parameter = |query_type: &mut syn::Type| -> syn::Result<()> { - if let Some(QueryKind::ResultQuery(error_path, _)) = storage_def.query_kind.as_ref() { - if let syn::Type::Path(syn::TypePath { - path: syn::Path { segments, .. }, - .. - }) = query_type - { - if let Some(seg) = segments.last_mut() { - if let syn::PathArguments::AngleBracketed( - syn::AngleBracketedGenericArguments { args, .. }, - ) = &mut seg.arguments - { - args.clear(); - args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))); - } - } - } else { - let msg = format!( - "Invalid pallet::storage, unexpected type for query, expected ResultQuery \ + let frame_support = &def.frame_support; + let mut on_empty_struct_metadata = Vec::new(); + + for storage_def in def.storages.iter_mut() { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage_def.index]; + + let typ_item = match item { + syn::Item::Type(t) => t, + _ => unreachable!("Checked by def"), + }; + + typ_item.attrs.push(syn::parse_quote!(#[allow(type_alias_bounds)])); + + let typ_path = match &mut *typ_item.ty { + syn::Type::Path(p) => p, + _ => unreachable!("Checked by def"), + }; + + let args = match &mut typ_path.path.segments[0].arguments { + syn::PathArguments::AngleBracketed(args) => args, + _ => unreachable!("Checked by def"), + }; + + let prefix_ident = prefix_ident(storage_def); + let type_use_gen = if def.config.has_instance { + quote::quote_spanned!(storage_def.attr_span => T, I) + } else { + quote::quote_spanned!(storage_def.attr_span => T) + }; + + let default_query_kind: syn::Type = + syn::parse_quote!(#frame_support::storage::types::OptionQuery); + let mut default_on_empty = |value_ty: syn::Type| -> syn::Type { + if let Some(QueryKind::ResultQuery(error_path, variant_name)) = + storage_def.query_kind.as_ref() + { + let on_empty_ident = + quote::format_ident!("__Frame_Internal_Get{}Result", storage_def.ident); + 
on_empty_struct_metadata.push(ResultOnEmptyStructMetadata { + name: on_empty_ident.clone(), + visibility: storage_def.vis.clone(), + value_ty, + error_path: error_path.clone(), + variant_name: variant_name.clone(), + span: storage_def.attr_span, + }); + return syn::parse_quote!(#on_empty_ident) + } + syn::parse_quote!(#frame_support::traits::GetDefault) + }; + let default_max_values: syn::Type = syn::parse_quote!(#frame_support::traits::GetDefault); + + let set_result_query_type_parameter = |query_type: &mut syn::Type| -> syn::Result<()> { + if let Some(QueryKind::ResultQuery(error_path, _)) = storage_def.query_kind.as_ref() { + if let syn::Type::Path(syn::TypePath { path: syn::Path { segments, .. }, .. }) = + query_type + { + if let Some(seg) = segments.last_mut() { + if let syn::PathArguments::AngleBracketed( + syn::AngleBracketedGenericArguments { args, .. }, + ) = &mut seg.arguments + { + args.clear(); + args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))); + } + } + } else { + let msg = format!( + "Invalid pallet::storage, unexpected type for query, expected ResultQuery \ with 1 type parameter, found `{}`", - query_type.to_token_stream() - ); - return Err(syn::Error::new(query_type.span(), msg)); - } - } - Ok(()) - }; - - if let Some(named_generics) = storage_def.named_generics.clone() { - args.args.clear(); - args.args - .push(syn::parse_quote!( #prefix_ident<#type_use_gen> )); - match named_generics { - StorageGenerics::Value { - value, - query_kind, - on_empty, - } => { - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - } - StorageGenerics::Map { - hasher, - key, - value, - query_kind, - on_empty, - max_values, - } - 
| StorageGenerics::CountedMap { - hasher, - key, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(hasher)); - args.args.push(syn::GenericArgument::Type(key)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - } - StorageGenerics::DoubleMap { - hasher1, - key1, - hasher2, - key2, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(hasher1)); - args.args.push(syn::GenericArgument::Type(key1)); - args.args.push(syn::GenericArgument::Type(hasher2)); - args.args.push(syn::GenericArgument::Type(key2)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - } - StorageGenerics::NMap { - keygen, - value, - query_kind, - on_empty, - max_values, - } - | StorageGenerics::CountedNMap { - keygen, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(keygen)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - 
set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - } - } - } else { - args.args[0] = syn::parse_quote!( #prefix_ident<#type_use_gen> ); - - let (value_idx, query_idx, on_empty_idx) = match storage_def.metadata { - Metadata::Value { .. } => (1, 2, 3), - Metadata::NMap { .. } | Metadata::CountedNMap { .. } => (2, 3, 4), - Metadata::Map { .. } | Metadata::CountedMap { .. } => (3, 4, 5), - Metadata::DoubleMap { .. } => (5, 6, 7), - }; - - if storage_def.use_default_hasher { - let hasher_indices: Vec = match storage_def.metadata { - Metadata::Map { .. } | Metadata::CountedMap { .. } => vec![1], - Metadata::DoubleMap { .. } => vec![1, 3], - _ => vec![], - }; - for hasher_idx in hasher_indices { - args.args[hasher_idx] = syn::GenericArgument::Type( - syn::parse_quote!(#frame_support::Blake2_128Concat), - ); - } - } - - if query_idx < args.args.len() { - if let syn::GenericArgument::Type(query_kind) = args.args.index_mut(query_idx) { - set_result_query_type_parameter(query_kind)?; - } - } else if let Some(QueryKind::ResultQuery(error_path, _)) = - storage_def.query_kind.as_ref() - { - args.args - .push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))) - } - - // Here, we only need to check if OnEmpty is *not* specified, and if so, then we have to - // generate a default OnEmpty struct for it. 
- if on_empty_idx >= args.args.len() - && matches!( - storage_def.query_kind.as_ref(), - Some(QueryKind::ResultQuery(_, _)) - ) - { - let value_ty = match args.args[value_idx].clone() { - syn::GenericArgument::Type(ty) => ty, - _ => unreachable!(), - }; - let on_empty = default_on_empty(value_ty); - args.args.push(syn::GenericArgument::Type(on_empty)); - } - } - } - - Ok(on_empty_struct_metadata) + query_type.to_token_stream().to_string() + ); + return Err(syn::Error::new(query_type.span(), msg)) + } + } + Ok(()) + }; + + if let Some(named_generics) = storage_def.named_generics.clone() { + args.args.clear(); + args.args.push(syn::parse_quote!( #prefix_ident<#type_use_gen> )); + match named_generics { + StorageGenerics::Value { value, query_kind, on_empty } => { + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + }, + StorageGenerics::Map { hasher, key, value, query_kind, on_empty, max_values } | + StorageGenerics::CountedMap { + hasher, + key, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher)); + args.args.push(syn::GenericArgument::Type(key)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + }, + StorageGenerics::DoubleMap { 
+ hasher1, + key1, + hasher2, + key2, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher1)); + args.args.push(syn::GenericArgument::Type(key1)); + args.args.push(syn::GenericArgument::Type(hasher2)); + args.args.push(syn::GenericArgument::Type(key2)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + }, + StorageGenerics::NMap { keygen, value, query_kind, on_empty, max_values } | + StorageGenerics::CountedNMap { + keygen, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(keygen)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + }, + } + } else { + args.args[0] = syn::parse_quote!( #prefix_ident<#type_use_gen> ); + + let (value_idx, query_idx, on_empty_idx) = match storage_def.metadata { + Metadata::Value { .. } => (1, 2, 3), + Metadata::NMap { .. } | Metadata::CountedNMap { .. } => (2, 3, 4), + Metadata::Map { .. } | Metadata::CountedMap { .. } => (3, 4, 5), + Metadata::DoubleMap { .. 
} => (5, 6, 7), + }; + + if storage_def.use_default_hasher { + let hasher_indices: Vec = match storage_def.metadata { + Metadata::Map { .. } | Metadata::CountedMap { .. } => vec![1], + Metadata::DoubleMap { .. } => vec![1, 3], + _ => vec![], + }; + for hasher_idx in hasher_indices { + args.args[hasher_idx] = syn::GenericArgument::Type( + syn::parse_quote!(#frame_support::Blake2_128Concat), + ); + } + } + + if query_idx < args.args.len() { + if let syn::GenericArgument::Type(query_kind) = args.args.index_mut(query_idx) { + set_result_query_type_parameter(query_kind)?; + } + } else if let Some(QueryKind::ResultQuery(error_path, _)) = + storage_def.query_kind.as_ref() + { + args.args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))) + } + + // Here, we only need to check if OnEmpty is *not* specified, and if so, then we have to + // generate a default OnEmpty struct for it. + if on_empty_idx >= args.args.len() && + matches!(storage_def.query_kind.as_ref(), Some(QueryKind::ResultQuery(_, _))) + { + let value_ty = match args.args[value_idx].clone() { + syn::GenericArgument::Type(ty) => ty, + _ => unreachable!(), + }; + let on_empty = default_on_empty(value_ty); + args.args.push(syn::GenericArgument::Type(on_empty)); + } + } + } + + Ok(on_empty_struct_metadata) } fn augment_final_docs(def: &mut Def) { - // expand the docs with a new line showing the storage type (value, map, double map, etc), and - // the key/value type(s). 
- let mut push_string_literal = |doc_line: &str, storage: &mut StorageDef| { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage.index]; - let typ_item = match item { - syn::Item::Type(t) => t, - _ => unreachable!("Checked by def"), - }; - typ_item.attrs.push(syn::parse_quote!(#[doc = ""])); - typ_item.attrs.push(syn::parse_quote!(#[doc = #doc_line])); - }; - def.storages - .iter_mut() - .for_each(|storage| match &storage.metadata { - Metadata::Value { value } => { - let doc_line = format!( - "Storage type is [`StorageValue`] with value type `{}`.", - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - } - Metadata::Map { key, value } => { - let doc_line = format!( - "Storage type is [`StorageMap`] with key type `{}` and value type `{}`.", - key.to_token_stream(), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - } - Metadata::DoubleMap { key1, key2, value } => { - let doc_line = format!( + // expand the docs with a new line showing the storage type (value, map, double map, etc), and + // the key/value type(s). 
+ let mut push_string_literal = |doc_line: &str, storage: &mut StorageDef| { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage.index]; + let typ_item = match item { + syn::Item::Type(t) => t, + _ => unreachable!("Checked by def"), + }; + typ_item.attrs.push(syn::parse_quote!(#[doc = ""])); + typ_item.attrs.push(syn::parse_quote!(#[doc = #doc_line])); + }; + def.storages.iter_mut().for_each(|storage| match &storage.metadata { + Metadata::Value { value } => { + let doc_line = format!( + "Storage type is [`StorageValue`] with value type `{}`.", + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::Map { key, value } => { + let doc_line = format!( + "Storage type is [`StorageMap`] with key type `{}` and value type `{}`.", + key.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::DoubleMap { key1, key2, value } => { + let doc_line = format!( "Storage type is [`StorageDoubleMap`] with key1 type {}, key2 type {} and value type {}.", key1.to_token_stream(), key2.to_token_stream(), value.to_token_stream() ); - push_string_literal(&doc_line, storage); - } - Metadata::NMap { keys, value, .. } => { - let doc_line = format!( - "Storage type is [`StorageNMap`] with keys type ({}) and value type {}.", - keys.iter() - .map(|k| k.to_token_stream().to_string()) - .collect::>() - .join(", "), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - } - Metadata::CountedNMap { keys, value, .. 
} => { - let doc_line = format!( - "Storage type is [`CountedStorageNMap`] with keys type ({}) and value type {}.", - keys.iter() - .map(|k| k.to_token_stream().to_string()) - .collect::>() - .join(", "), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - } - Metadata::CountedMap { key, value } => { - let doc_line = format!( - "Storage type is [`CountedStorageMap`] with key type {} and value type {}.", - key.to_token_stream(), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - } - }); + push_string_literal(&doc_line, storage); + }, + Metadata::NMap { keys, value, .. } => { + let doc_line = format!( + "Storage type is [`StorageNMap`] with keys type ({}) and value type {}.", + keys.iter() + .map(|k| k.to_token_stream().to_string()) + .collect::>() + .join(", "), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::CountedNMap { keys, value, .. } => { + let doc_line = format!( + "Storage type is [`CountedStorageNMap`] with keys type ({}) and value type {}.", + keys.iter() + .map(|k| k.to_token_stream().to_string()) + .collect::>() + .join(", "), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + Metadata::CountedMap { key, value } => { + let doc_line = format!( + "Storage type is [`CountedStorageMap`] with key type {} and value type {}.", + key.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + }, + }); } /// @@ -418,29 +387,29 @@ fn augment_final_docs(def: &mut Def) { /// * Add `#[allow(type_alias_bounds)]` on storages type alias /// * generate metadatas pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { - let on_empty_struct_metadata = match process_generics(def) { - Ok(idents) => idents, - Err(e) => return e.into_compile_error(), - }; - - augment_final_docs(def); - - // Check for duplicate prefixes - let mut prefix_set = HashMap::new(); - let mut errors = def - .storages - .iter() - 
.filter_map(|storage_def| check_prefix_duplicates(storage_def, &mut prefix_set).err()); - if let Some(mut final_error) = errors.next() { - errors.for_each(|error| final_error.combine(error)); - return final_error.into_compile_error(); - } - - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let pallet_ident = &def.pallet_struct.pallet; - - let entries_builder = def.storages.iter().map(|storage| { + let on_empty_struct_metadata = match process_generics(def) { + Ok(idents) => idents, + Err(e) => return e.into_compile_error(), + }; + + augment_final_docs(def); + + // Check for duplicate prefixes + let mut prefix_set = HashMap::new(); + let mut errors = def + .storages + .iter() + .filter_map(|storage_def| check_prefix_duplicates(storage_def, &mut prefix_set).err()); + if let Some(mut final_error) = errors.next() { + errors.for_each(|error| final_error.combine(error)); + return final_error.into_compile_error() + } + + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let pallet_ident = &def.pallet_struct.pallet; + + let entries_builder = def.storages.iter().map(|storage| { let no_docs = vec![]; let docs = if cfg!(feature = "no-metadata-docs") { &no_docs } else { &storage.docs }; @@ -463,202 +432,202 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { ) }); - let getters = def.storages.iter().map(|storage| { - if let Some(getter) = &storage.getter { - let completed_where_clause = - super::merge_where_clauses(&[&storage.where_clause, &def.config.where_clause]); - - let ident = &storage.ident; - let gen = &def.type_use_generics(storage.attr_span); - let type_impl_gen = &def.type_impl_generics(storage.attr_span); - let type_use_gen = &def.type_use_generics(storage.attr_span); - let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); - - let cfg_attrs = &storage.cfg_attrs; - - // If the storage item is public, link it and otherwise just mention it. 
- // - // We can not just copy the docs from a non-public type as it may links to internal - // types which makes the compiler very unhappy :( - let getter_doc_line = if matches!(storage.vis, syn::Visibility::Public(_)) { - format!("An auto-generated getter for [`{}`].", storage.ident) - } else { - format!("An auto-generated getter for `{}`.", storage.ident) - }; - - match &storage.metadata { - Metadata::Value { value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - } - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter() -> #query { - < - #full_ident as #frame_support::storage::StorageValue<#value> - >::get() - } - } - ) - } - Metadata::Map { key, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - } - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k: KArg) -> #query where - KArg: #frame_support::__private::codec::EncodeLike<#key>, - { - < - #full_ident as #frame_support::storage::StorageMap<#key, #value> - >::get(k) - } - } - ) - } - Metadata::CountedMap { key, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => 
quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - } - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k: KArg) -> #query where - KArg: #frame_support::__private::codec::EncodeLike<#key>, - { - // NOTE: we can't use any trait here because CountedStorageMap - // doesn't implement any. - <#full_ident>::get(k) - } - } - ) - } - Metadata::DoubleMap { key1, key2, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - } - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k1: KArg1, k2: KArg2) -> #query where - KArg1: #frame_support::__private::codec::EncodeLike<#key1>, - KArg2: #frame_support::__private::codec::EncodeLike<#key2>, - { - < - #full_ident as - #frame_support::storage::StorageDoubleMap<#key1, #key2, #value> - >::get(k1, k2) - } - } - ) - } - Metadata::NMap { keygen, value, .. 
} => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - } - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(key: KArg) -> #query - where - KArg: #frame_support::storage::types::EncodeLikeTuple< - <#keygen as #frame_support::storage::types::KeyGenerator>::KArg - > - + #frame_support::storage::types::TupleToEncodedIter, - { - < - #full_ident as - #frame_support::storage::StorageNMap<#keygen, #value> - >::get(key) - } - } - ) - } - Metadata::CountedNMap { keygen, value, .. } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - } - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(key: KArg) -> #query - where - KArg: #frame_support::storage::types::EncodeLikeTuple< - <#keygen as #frame_support::storage::types::KeyGenerator>::KArg - > - + #frame_support::storage::types::TupleToEncodedIter, - { - // NOTE: we can't use any trait here because CountedStorageNMap - // doesn't implement any. 
- <#full_ident>::get(key) - } - } - ) - } - } - } else { - Default::default() - } - }); - - let prefix_structs = def.storages.iter().map(|storage_def| { + let getters = def.storages.iter().map(|storage| { + if let Some(getter) = &storage.getter { + let completed_where_clause = + super::merge_where_clauses(&[&storage.where_clause, &def.config.where_clause]); + + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + let type_impl_gen = &def.type_impl_generics(storage.attr_span); + let type_use_gen = &def.type_use_generics(storage.attr_span); + let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); + + let cfg_attrs = &storage.cfg_attrs; + + // If the storage item is public, link it and otherwise just mention it. + // + // We can not just copy the docs from a non-public type as it may links to internal + // types which makes the compiler very unhappy :( + let getter_doc_line = if matches!(storage.vis, syn::Visibility::Public(_)) { + format!("An auto-generated getter for [`{}`].", storage.ident) + } else { + format!("An auto-generated getter for `{}`.", storage.ident) + }; + + match &storage.metadata { + Metadata::Value { value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter() -> #query { + < + #full_ident as #frame_support::storage::StorageValue<#value> + >::get() + } + } + ) + }, + Metadata::Map { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => 
quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::__private::codec::EncodeLike<#key>, + { + < + #full_ident as #frame_support::storage::StorageMap<#key, #value> + >::get(k) + } + } + ) + }, + Metadata::CountedMap { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::__private::codec::EncodeLike<#key>, + { + // NOTE: we can't use any trait here because CountedStorageMap + // doesn't implement any. 
+ <#full_ident>::get(k) + } + } + ) + }, + Metadata::DoubleMap { key1, key2, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k1: KArg1, k2: KArg2) -> #query where + KArg1: #frame_support::__private::codec::EncodeLike<#key1>, + KArg2: #frame_support::__private::codec::EncodeLike<#key2>, + { + < + #full_ident as + #frame_support::storage::StorageDoubleMap<#key1, #key2, #value> + >::get(k1, k2) + } + } + ) + }, + Metadata::NMap { keygen, value, .. } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(key: KArg) -> #query + where + KArg: #frame_support::storage::types::EncodeLikeTuple< + <#keygen as #frame_support::storage::types::KeyGenerator>::KArg + > + + #frame_support::storage::types::TupleToEncodedIter, + { + < + #full_ident as + #frame_support::storage::StorageNMap<#keygen, #value> + >::get(key) + } + } + ) + }, + Metadata::CountedNMap { keygen, value, .. 
} => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + }, + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(key: KArg) -> #query + where + KArg: #frame_support::storage::types::EncodeLikeTuple< + <#keygen as #frame_support::storage::types::KeyGenerator>::KArg + > + + #frame_support::storage::types::TupleToEncodedIter, + { + // NOTE: we can't use any trait here because CountedStorageNMap + // doesn't implement any. + <#full_ident>::get(key) + } + } + ) + }, + } + } else { + Default::default() + } + }); + + let prefix_structs = def.storages.iter().map(|storage_def| { let type_impl_gen = &def.type_impl_generics(storage_def.attr_span); let type_use_gen = &def.type_use_generics(storage_def.attr_span); let prefix_struct_ident = prefix_ident(storage_def); @@ -798,159 +767,153 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { ) }); - let on_empty_structs = on_empty_struct_metadata.into_iter().map(|metadata| { - use crate::pallet::parse::GenericKind; - use syn::{GenericArgument, Path, PathArguments, PathSegment, Type, TypePath}; - - let ResultOnEmptyStructMetadata { - name, - visibility, - value_ty, - error_path, - variant_name, - span, - } = metadata; - - let generic_kind = match error_path.segments.last() { - Some(PathSegment { - arguments: PathArguments::AngleBracketed(args), - .. - }) => { - let (has_config, has_instance) = - args.args - .iter() - .fold( - (false, false), - |(has_config, has_instance), arg| match arg { - GenericArgument::Type(Type::Path(TypePath { - path: Path { segments, .. }, - .. 
- })) => { - let maybe_config = - segments.first().map_or(false, |seg| seg.ident == "T"); - let maybe_instance = - segments.first().map_or(false, |seg| seg.ident == "I"); - - (has_config || maybe_config, has_instance || maybe_instance) - } - _ => (has_config, has_instance), - }, - ); - GenericKind::from_gens(has_config, has_instance).unwrap_or(GenericKind::None) - } - _ => GenericKind::None, - }; - let type_impl_gen = generic_kind.type_impl_gen(proc_macro2::Span::call_site()); - let config_where_clause = &def.config.where_clause; - - quote::quote_spanned!(span => - #[doc(hidden)] - #[allow(non_camel_case_types)] - #visibility struct #name; - - impl<#type_impl_gen> #frame_support::traits::Get> - for #name - #config_where_clause - { - fn get() -> Result<#value_ty, #error_path> { - Err(<#error_path>::#variant_name) - } - } - ) - }); - - // aggregated where clause of all storage types and the whole pallet. - let mut where_clauses = vec![&def.config.where_clause]; - where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); - let completed_where_clause = super::merge_where_clauses(&where_clauses); - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - - let try_decode_entire_state = { - let mut storage_names = def - .storages - .iter() - .filter_map(|storage| { - // A little hacky; don't generate for cfg gated storages to not get compile errors - // when building "frame-feature-testing" gated storages in the "frame-support-test" - // crate. 
- if storage.try_decode && storage.cfg_attrs.is_empty() { - let ident = &storage.ident; - let gen = &def.type_use_generics(storage.attr_span); - Some(quote::quote_spanned!(storage.attr_span => #ident<#gen> )) - } else { - None - } - }) - .collect::>(); - storage_names.sort_by_cached_key(|ident| ident.to_string()); - - quote::quote!( - #[cfg(feature = "try-runtime")] - impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage - for #pallet_ident<#type_use_gen> #completed_where_clause - { - fn try_decode_entire_state() -> Result> { - let pallet_name = <::PalletInfo as frame_support::traits::PalletInfo> - ::name::<#pallet_ident<#type_use_gen>>() - .expect("Every active pallet has a name in the runtime; qed"); - - #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); - - // NOTE: for now, we have to exclude storage items that are feature gated. - let mut errors = #frame_support::__private::sp_std::vec::Vec::new(); - let mut decoded = 0usize; - - #( - #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ - {pallet_name}::{}", stringify!(#storage_names)); - - match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { - Ok(count) => { - decoded += count; - }, - Err(err) => { - errors.extend(err); - }, - } - )* - - if errors.is_empty() { - Ok(decoded) - } else { - Err(errors) - } - } - } - ) - }; - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> - #completed_where_clause - { - #[doc(hidden)] - pub fn storage_metadata() -> #frame_support::__private::metadata_ir::PalletStorageMetadataIR { - #frame_support::__private::metadata_ir::PalletStorageMetadataIR { - prefix: < - ::PalletInfo as - #frame_support::traits::PalletInfo - >::name::<#pallet_ident<#type_use_gen>>() - .expect("No name found for the pallet in the runtime! 
This usually means that the pallet wasn't added to `construct_runtime!`."), - entries: { - #[allow(unused_mut)] - let mut entries = #frame_support::__private::sp_std::vec![]; - #( #entries_builder )* - entries - }, - } - } - } - - #( #getters )* - #( #prefix_structs )* - #( #on_empty_structs )* - - #try_decode_entire_state - ) + let on_empty_structs = on_empty_struct_metadata.into_iter().map(|metadata| { + use crate::pallet::parse::GenericKind; + use syn::{GenericArgument, Path, PathArguments, PathSegment, Type, TypePath}; + + let ResultOnEmptyStructMetadata { + name, + visibility, + value_ty, + error_path, + variant_name, + span, + } = metadata; + + let generic_kind = match error_path.segments.last() { + Some(PathSegment { arguments: PathArguments::AngleBracketed(args), .. }) => { + let (has_config, has_instance) = + args.args.iter().fold((false, false), |(has_config, has_instance), arg| { + match arg { + GenericArgument::Type(Type::Path(TypePath { + path: Path { segments, .. }, + .. + })) => { + let maybe_config = + segments.first().map_or(false, |seg| seg.ident == "T"); + let maybe_instance = + segments.first().map_or(false, |seg| seg.ident == "I"); + + (has_config || maybe_config, has_instance || maybe_instance) + }, + _ => (has_config, has_instance), + } + }); + GenericKind::from_gens(has_config, has_instance).unwrap_or(GenericKind::None) + }, + _ => GenericKind::None, + }; + let type_impl_gen = generic_kind.type_impl_gen(proc_macro2::Span::call_site()); + let config_where_clause = &def.config.where_clause; + + quote::quote_spanned!(span => + #[doc(hidden)] + #[allow(non_camel_case_types)] + #visibility struct #name; + + impl<#type_impl_gen> #frame_support::traits::Get> + for #name + #config_where_clause + { + fn get() -> Result<#value_ty, #error_path> { + Err(<#error_path>::#variant_name) + } + } + ) + }); + + // aggregated where clause of all storage types and the whole pallet. 
+ let mut where_clauses = vec![&def.config.where_clause]; + where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); + let completed_where_clause = super::merge_where_clauses(&where_clauses); + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + + let try_decode_entire_state = { + let mut storage_names = def + .storages + .iter() + .filter_map(|storage| { + // A little hacky; don't generate for cfg gated storages to not get compile errors + // when building "frame-feature-testing" gated storages in the "frame-support-test" + // crate. + if storage.try_decode && storage.cfg_attrs.is_empty() { + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + Some(quote::quote_spanned!(storage.attr_span => #ident<#gen> )) + } else { + None + } + }) + .collect::>(); + storage_names.sort_by_cached_key(|ident| ident.to_string()); + + quote::quote!( + #[cfg(feature = "try-runtime")] + impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage + for #pallet_ident<#type_use_gen> #completed_where_clause + { + fn try_decode_entire_state() -> Result> { + let pallet_name = <::PalletInfo as frame_support::traits::PalletInfo> + ::name::<#pallet_ident<#type_use_gen>>() + .expect("Every active pallet has a name in the runtime; qed"); + + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); + + // NOTE: for now, we have to exclude storage items that are feature gated. 
+ let mut errors = #frame_support::__private::sp_std::vec::Vec::new(); + let mut decoded = 0usize; + + #( + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ + {pallet_name}::{}", stringify!(#storage_names)); + + match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { + Ok(count) => { + decoded += count; + }, + Err(err) => { + errors.extend(err); + }, + } + )* + + if errors.is_empty() { + Ok(decoded) + } else { + Err(errors) + } + } + } + ) + }; + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> + #completed_where_clause + { + #[doc(hidden)] + pub fn storage_metadata() -> #frame_support::__private::metadata_ir::PalletStorageMetadataIR { + #frame_support::__private::metadata_ir::PalletStorageMetadataIR { + prefix: < + ::PalletInfo as + #frame_support::traits::PalletInfo + >::name::<#pallet_ident<#type_use_gen>>() + .expect("No name found for the pallet in the runtime! This usually means that the pallet wasn't added to `construct_runtime!`."), + entries: { + #[allow(unused_mut)] + let mut entries = #frame_support::__private::sp_std::vec![]; + #( #entries_builder )* + entries + }, + } + } + } + + #( #getters )* + #( #prefix_structs )* + #( #on_empty_structs )* + + #try_decode_entire_state + ) } diff --git a/support/procedural-fork/src/pallet/expand/tasks.rs b/support/procedural-fork/src/pallet/expand/tasks.rs index 2db53b7d9..6697e5c82 100644 --- a/support/procedural-fork/src/pallet/expand/tasks.rs +++ b/support/procedural-fork/src/pallet/expand/tasks.rs @@ -27,145 +27,141 @@ use quote::{format_ident, quote, ToTokens}; use syn::{parse_quote, spanned::Spanned, ItemEnum, ItemImpl}; impl TaskEnumDef { - /// Since we optionally allow users to manually specify a `#[pallet::task_enum]`, in the - /// event they _don't_ specify one (which is actually the most common behavior) we have to - /// generate one based on the existing [`TasksDef`]. 
This method performs that generation. - pub fn generate( - tasks: &TasksDef, - type_decl_bounded_generics: TokenStream2, - type_use_generics: TokenStream2, - ) -> Self { - let variants = if tasks.tasks_attr.is_some() { - tasks - .tasks - .iter() - .map(|task| { - let ident = &task.item.sig.ident; - let ident = - format_ident!("{}", ident.to_string().to_class_case(), span = ident.span()); + /// Since we optionally allow users to manually specify a `#[pallet::task_enum]`, in the + /// event they _don't_ specify one (which is actually the most common behavior) we have to + /// generate one based on the existing [`TasksDef`]. This method performs that generation. + pub fn generate( + tasks: &TasksDef, + type_decl_bounded_generics: TokenStream2, + type_use_generics: TokenStream2, + ) -> Self { + let variants = if tasks.tasks_attr.is_some() { + tasks + .tasks + .iter() + .map(|task| { + let ident = &task.item.sig.ident; + let ident = + format_ident!("{}", ident.to_string().to_class_case(), span = ident.span()); - let args = task.item.sig.inputs.iter().collect::>(); + let args = task.item.sig.inputs.iter().collect::>(); - if args.is_empty() { - quote!(#ident) - } else { - quote!(#ident { - #(#args),* - }) - } - }) - .collect::>() - } else { - Vec::new() - }; - let mut task_enum_def: TaskEnumDef = parse_quote! { - /// Auto-generated enum that encapsulates all tasks defined by this pallet. - /// - /// Conceptually similar to the [`Call`] enum, but for tasks. This is only - /// generated if there are tasks present in this pallet. - #[pallet::task_enum] - pub enum Task<#type_decl_bounded_generics> { - #( - #variants, - )* - } - }; - task_enum_def.type_use_generics = type_use_generics; - task_enum_def - } + if args.is_empty() { + quote!(#ident) + } else { + quote!(#ident { + #(#args),* + }) + } + }) + .collect::>() + } else { + Vec::new() + }; + let mut task_enum_def: TaskEnumDef = parse_quote! { + /// Auto-generated enum that encapsulates all tasks defined by this pallet. 
+ /// + /// Conceptually similar to the [`Call`] enum, but for tasks. This is only + /// generated if there are tasks present in this pallet. + #[pallet::task_enum] + pub enum Task<#type_decl_bounded_generics> { + #( + #variants, + )* + } + }; + task_enum_def.type_use_generics = type_use_generics; + task_enum_def + } } impl ToTokens for TaskEnumDef { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let item_enum = &self.item_enum; - let ident = &item_enum.ident; - let vis = &item_enum.vis; - let attrs = &item_enum.attrs; - let generics = &item_enum.generics; - let variants = &item_enum.variants; - let scrate = &self.scrate; - let type_use_generics = &self.type_use_generics; - if self.attr.is_some() { - // `item_enum` is short-hand / generated enum - tokens.extend(quote! { - #(#attrs)* - #[derive( - #scrate::CloneNoBound, - #scrate::EqNoBound, - #scrate::PartialEqNoBound, - #scrate::pallet_prelude::Encode, - #scrate::pallet_prelude::Decode, - #scrate::pallet_prelude::TypeInfo, - )] - #[codec(encode_bound())] - #[codec(decode_bound())] - #[scale_info(skip_type_params(#type_use_generics))] - #vis enum #ident #generics { - #variants - #[doc(hidden)] - #[codec(skip)] - __Ignore(core::marker::PhantomData, #scrate::Never), - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let item_enum = &self.item_enum; + let ident = &item_enum.ident; + let vis = &item_enum.vis; + let attrs = &item_enum.attrs; + let generics = &item_enum.generics; + let variants = &item_enum.variants; + let scrate = &self.scrate; + let type_use_generics = &self.type_use_generics; + if self.attr.is_some() { + // `item_enum` is short-hand / generated enum + tokens.extend(quote! 
{ + #(#attrs)* + #[derive( + #scrate::CloneNoBound, + #scrate::EqNoBound, + #scrate::PartialEqNoBound, + #scrate::pallet_prelude::Encode, + #scrate::pallet_prelude::Decode, + #scrate::pallet_prelude::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_generics))] + #vis enum #ident #generics { + #variants + #[doc(hidden)] + #[codec(skip)] + __Ignore(core::marker::PhantomData, #scrate::Never), + } - impl core::fmt::Debug for #ident<#type_use_generics> { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct(stringify!(#ident)).field("value", self).finish() - } - } - }); - } else { - // `item_enum` is a manually specified enum (no attribute) - tokens.extend(item_enum.to_token_stream()); - } - } + impl core::fmt::Debug for #ident<#type_use_generics> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct(stringify!(#ident)).field("value", self).finish() + } + } + }); + } else { + // `item_enum` is a manually specified enum (no attribute) + tokens.extend(item_enum.to_token_stream()); + } + } } /// Represents an already-expanded [`TasksDef`]. 
#[derive(Parse)] pub struct ExpandedTasksDef { - pub task_item_impl: ItemImpl, - pub task_trait_impl: ItemImpl, + pub task_item_impl: ItemImpl, + pub task_trait_impl: ItemImpl, } impl ToTokens for TasksDef { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let scrate = &self.scrate; - let enum_ident = syn::Ident::new("Task", self.enum_ident.span()); - let enum_arguments = &self.enum_arguments; - let enum_use = quote!(#enum_ident #enum_arguments); + fn to_tokens(&self, tokens: &mut TokenStream2) { + let scrate = &self.scrate; + let enum_ident = syn::Ident::new("Task", self.enum_ident.span()); + let enum_arguments = &self.enum_arguments; + let enum_use = quote!(#enum_ident #enum_arguments); - let task_fn_idents = self - .tasks - .iter() - .map(|task| { - format_ident!( - "{}", - &task.item.sig.ident.to_string().to_class_case(), - span = task.item.sig.ident.span() - ) - }) - .collect::>(); - let task_indices = self.tasks.iter().map(|task| &task.index_attr.meta.index); - let task_conditions = self.tasks.iter().map(|task| &task.condition_attr.meta.expr); - let task_weights = self.tasks.iter().map(|task| &task.weight_attr.meta.expr); - let task_iters = self.tasks.iter().map(|task| &task.list_attr.meta.expr); + let task_fn_idents = self + .tasks + .iter() + .map(|task| { + format_ident!( + "{}", + &task.item.sig.ident.to_string().to_class_case(), + span = task.item.sig.ident.span() + ) + }) + .collect::>(); + let task_indices = self.tasks.iter().map(|task| &task.index_attr.meta.index); + let task_conditions = self.tasks.iter().map(|task| &task.condition_attr.meta.expr); + let task_weights = self.tasks.iter().map(|task| &task.weight_attr.meta.expr); + let task_iters = self.tasks.iter().map(|task| &task.list_attr.meta.expr); - let task_fn_impls = self.tasks.iter().map(|task| { - let mut task_fn_impl = task.item.clone(); - task_fn_impl.attrs = vec![]; - task_fn_impl - }); + let task_fn_impls = self.tasks.iter().map(|task| { + let mut task_fn_impl = task.item.clone(); + 
task_fn_impl.attrs = vec![]; + task_fn_impl + }); - let task_fn_names = self.tasks.iter().map(|task| &task.item.sig.ident); - let task_arg_names = self - .tasks - .iter() - .map(|task| &task.arg_names) - .collect::>(); + let task_fn_names = self.tasks.iter().map(|task| &task.item.sig.ident); + let task_arg_names = self.tasks.iter().map(|task| &task.arg_names).collect::>(); - let sp_std = quote!(#scrate::__private::sp_std); - let impl_generics = &self.item_impl.generics; - tokens.extend(quote! { + let sp_std = quote!(#scrate::__private::sp_std); + let impl_generics = &self.item_impl.generics; + tokens.extend(quote! { impl #impl_generics #enum_use { #(#task_fn_impls)* @@ -216,66 +212,56 @@ impl ToTokens for TasksDef { } } }); - } + } } /// Expands the [`TasksDef`] in the enclosing [`Def`], if present, and returns its tokens. /// /// This modifies the underlying [`Def`] in addition to returning any tokens that were added. pub fn expand_tasks_impl(def: &mut Def) -> TokenStream2 { - let Some(tasks) = &mut def.tasks else { - return quote!(); - }; - let ExpandedTasksDef { - task_item_impl, - task_trait_impl, - } = parse_quote!(#tasks); - quote! { - #task_item_impl - #task_trait_impl - } + let Some(tasks) = &mut def.tasks else { return quote!() }; + let ExpandedTasksDef { task_item_impl, task_trait_impl } = parse_quote!(#tasks); + quote! { + #task_item_impl + #task_trait_impl + } } /// Represents a fully-expanded [`TaskEnumDef`]. #[derive(Parse)] pub struct ExpandedTaskEnum { - pub item_enum: ItemEnum, - pub debug_impl: ItemImpl, + pub item_enum: ItemEnum, + pub debug_impl: ItemImpl, } /// Modifies a [`Def`] to expand the underlying [`TaskEnumDef`] if present, and also returns /// its tokens. A blank [`TokenStream2`] is returned if no [`TaskEnumDef`] has been generated /// or defined. 
pub fn expand_task_enum(def: &mut Def) -> TokenStream2 { - let Some(task_enum) = &mut def.task_enum else { - return quote!(); - }; - let ExpandedTaskEnum { - item_enum, - debug_impl, - } = parse_quote!(#task_enum); - quote! { - #item_enum - #debug_impl - } + let Some(task_enum) = &mut def.task_enum else { return quote!() }; + let ExpandedTaskEnum { item_enum, debug_impl } = parse_quote!(#task_enum); + quote! { + #item_enum + #debug_impl + } } /// Modifies a [`Def`] to expand the underlying [`TasksDef`] and also generate a /// [`TaskEnumDef`] if applicable. The tokens for these items are returned if they are created. pub fn expand_tasks(def: &mut Def) -> TokenStream2 { - if let Some(tasks_def) = &def.tasks { - if def.task_enum.is_none() { - def.task_enum = Some(TaskEnumDef::generate( - tasks_def, - def.type_decl_bounded_generics(tasks_def.item_impl.span()), - def.type_use_generics(tasks_def.item_impl.span()), - )); - } - } - let tasks_extra_output = expand_tasks_impl(def); - let task_enum_extra_output = expand_task_enum(def); - quote! { - #tasks_extra_output - #task_enum_extra_output - } + if let Some(tasks_def) = &def.tasks { + if def.task_enum.is_none() { + def.task_enum = Some(TaskEnumDef::generate( + &tasks_def, + def.type_decl_bounded_generics(tasks_def.item_impl.span()), + def.type_use_generics(tasks_def.item_impl.span()), + )); + } + } + let tasks_extra_output = expand_tasks_impl(def); + let task_enum_extra_output = expand_task_enum(def); + quote! { + #tasks_extra_output + #task_enum_extra_output + } } diff --git a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs index 57b78339a..99364aaa9 100644 --- a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs +++ b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs @@ -16,211 +16,201 @@ // limitations under the License. 
use crate::{ - pallet::{CompositeKeyword, Def}, - COUNTER, + pallet::{CompositeKeyword, Def}, + COUNTER, }; use syn::spanned::Spanned; /// Generate the `tt_default_parts` macro. pub fn expand_tt_default_parts(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let default_parts_unique_id = - syn::Ident::new(&format!("__tt_default_parts_{}", count), def.item.span()); - let extra_parts_unique_id = - syn::Ident::new(&format!("__tt_extra_parts_{}", count), def.item.span()); - let default_parts_unique_id_v2 = - syn::Ident::new(&format!("__tt_default_parts_v2_{}", count), def.item.span()); - - let call_part = def.call.as_ref().map(|_| quote::quote!(Call,)); - - let task_part = def.task_enum.as_ref().map(|_| quote::quote!(Task,)); - - let storage_part = (!def.storages.is_empty()).then(|| quote::quote!(Storage,)); - - let event_part = def.event.as_ref().map(|event| { - let gen = event.gen_kind.is_generic().then(|| quote::quote!( )); - quote::quote!( Event #gen , ) - }); - - let error_part = def.error.as_ref().map(|_| quote::quote!(Error,)); - - let origin_part = def.origin.as_ref().map(|origin| { - let gen = origin.is_generic.then(|| quote::quote!( )); - quote::quote!( Origin #gen , ) - }); - - let config_part = def.genesis_config.as_ref().map(|genesis_config| { - let gen = genesis_config - .gen_kind - .is_generic() - .then(|| quote::quote!( )); - quote::quote!( Config #gen , ) - }); - - let inherent_part = def.inherent.as_ref().map(|_| quote::quote!(Inherent,)); - - let validate_unsigned_part = def - .validate_unsigned - .as_ref() - .map(|_| quote::quote!(ValidateUnsigned,)); - - let freeze_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) - .then_some(quote::quote!(FreezeReason,)); - - let hold_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) - 
.then_some(quote::quote!(HoldReason,)); - - let lock_id_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) - .then_some(quote::quote!(LockId,)); - - let slash_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) - .then_some(quote::quote!(SlashReason,)); - - let call_part_v2 = def.call.as_ref().map(|_| quote::quote!(+ Call)); - - let task_part_v2 = def.task_enum.as_ref().map(|_| quote::quote!(+ Task)); - - let storage_part_v2 = (!def.storages.is_empty()).then(|| quote::quote!(+ Storage)); - - let event_part_v2 = def.event.as_ref().map(|event| { - let gen = event.gen_kind.is_generic().then(|| quote::quote!()); - quote::quote!(+ Event #gen) - }); - - let error_part_v2 = def.error.as_ref().map(|_| quote::quote!(+ Error)); - - let origin_part_v2 = def.origin.as_ref().map(|origin| { - let gen = origin.is_generic.then(|| quote::quote!()); - quote::quote!(+ Origin #gen) - }); - - let config_part_v2 = def.genesis_config.as_ref().map(|genesis_config| { - let gen = genesis_config - .gen_kind - .is_generic() - .then(|| quote::quote!()); - quote::quote!(+ Config #gen) - }); - - let inherent_part_v2 = def.inherent.as_ref().map(|_| quote::quote!(+ Inherent)); - - let validate_unsigned_part_v2 = def - .validate_unsigned - .as_ref() - .map(|_| quote::quote!(+ ValidateUnsigned)); - - let freeze_reason_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) - .then_some(quote::quote!(+ FreezeReason)); - - let hold_reason_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) - .then_some(quote::quote!(+ HoldReason)); - - let lock_id_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) - .then_some(quote::quote!(+ LockId)); - - let slash_reason_part_v2 = def - .composites - .iter() - .any(|c| 
matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) - .then_some(quote::quote!(+ SlashReason)); - - quote::quote!( - // This macro follows the conventions as laid out by the `tt-call` crate. It does not - // accept any arguments and simply returns the pallet parts, separated by commas, then - // wrapped inside of braces and finally prepended with double colons, to the caller inside - // of a key named `tokens`. - // - // We need to accept a path argument here, because this macro gets expanded on the - // crate that called the `construct_runtime!` macro, and the actual path is unknown. - #[macro_export] - #[doc(hidden)] - macro_rules! #default_parts_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - tokens = [{ - expanded::{ - Pallet, #call_part #storage_part #event_part #error_part #origin_part #config_part - #inherent_part #validate_unsigned_part #freeze_reason_part #task_part - #hold_reason_part #lock_id_part #slash_reason_part - } - }] - } - }; - } - - pub use #default_parts_unique_id as tt_default_parts; - - - // This macro is similar to the `tt_default_parts!`. It expands the pallets that are declared - // explicitly (`System: frame_system::{Pallet, Call}`) with extra parts. - // - // For example, after expansion an explicit pallet would look like: - // `System: expanded::{Error} ::{Pallet, Call}`. - // - // The `expanded` keyword is a marker of the final state of the `construct_runtime!`. - #[macro_export] - #[doc(hidden)] - macro_rules! #extra_parts_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - tokens = [{ - expanded::{ - #error_part - } - }] - } - }; - } - - pub use #extra_parts_unique_id as tt_extra_parts; - - #[macro_export] - #[doc(hidden)] - macro_rules! #default_parts_unique_id_v2 { - { - $caller:tt - frame_support = [{ $($frame_support:ident)::* }] - } => { - $($frame_support)*::__private::tt_return! 
{ - $caller - tokens = [{ - + Pallet #call_part_v2 #storage_part_v2 #event_part_v2 #error_part_v2 #origin_part_v2 #config_part_v2 - #inherent_part_v2 #validate_unsigned_part_v2 #freeze_reason_part_v2 #task_part_v2 - #hold_reason_part_v2 #lock_id_part_v2 #slash_reason_part_v2 - }] - } - }; - } - - pub use #default_parts_unique_id_v2 as tt_default_parts_v2; - ) + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let default_parts_unique_id = + syn::Ident::new(&format!("__tt_default_parts_{}", count), def.item.span()); + let extra_parts_unique_id = + syn::Ident::new(&format!("__tt_extra_parts_{}", count), def.item.span()); + let default_parts_unique_id_v2 = + syn::Ident::new(&format!("__tt_default_parts_v2_{}", count), def.item.span()); + + let call_part = def.call.as_ref().map(|_| quote::quote!(Call,)); + + let task_part = def.task_enum.as_ref().map(|_| quote::quote!(Task,)); + + let storage_part = (!def.storages.is_empty()).then(|| quote::quote!(Storage,)); + + let event_part = def.event.as_ref().map(|event| { + let gen = event.gen_kind.is_generic().then(|| quote::quote!( )); + quote::quote!( Event #gen , ) + }); + + let error_part = def.error.as_ref().map(|_| quote::quote!(Error,)); + + let origin_part = def.origin.as_ref().map(|origin| { + let gen = origin.is_generic.then(|| quote::quote!( )); + quote::quote!( Origin #gen , ) + }); + + let config_part = def.genesis_config.as_ref().map(|genesis_config| { + let gen = genesis_config.gen_kind.is_generic().then(|| quote::quote!( )); + quote::quote!( Config #gen , ) + }); + + let inherent_part = def.inherent.as_ref().map(|_| quote::quote!(Inherent,)); + + let validate_unsigned_part = + def.validate_unsigned.as_ref().map(|_| quote::quote!(ValidateUnsigned,)); + + let freeze_reason_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) + .then_some(quote::quote!(FreezeReason,)); + + let hold_reason_part = def + .composites + .iter() + .any(|c| 
matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) + .then_some(quote::quote!(HoldReason,)); + + let lock_id_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) + .then_some(quote::quote!(LockId,)); + + let slash_reason_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) + .then_some(quote::quote!(SlashReason,)); + + let call_part_v2 = def.call.as_ref().map(|_| quote::quote!(+ Call)); + + let task_part_v2 = def.task_enum.as_ref().map(|_| quote::quote!(+ Task)); + + let storage_part_v2 = (!def.storages.is_empty()).then(|| quote::quote!(+ Storage)); + + let event_part_v2 = def.event.as_ref().map(|event| { + let gen = event.gen_kind.is_generic().then(|| quote::quote!()); + quote::quote!(+ Event #gen) + }); + + let error_part_v2 = def.error.as_ref().map(|_| quote::quote!(+ Error)); + + let origin_part_v2 = def.origin.as_ref().map(|origin| { + let gen = origin.is_generic.then(|| quote::quote!()); + quote::quote!(+ Origin #gen) + }); + + let config_part_v2 = def.genesis_config.as_ref().map(|genesis_config| { + let gen = genesis_config.gen_kind.is_generic().then(|| quote::quote!()); + quote::quote!(+ Config #gen) + }); + + let inherent_part_v2 = def.inherent.as_ref().map(|_| quote::quote!(+ Inherent)); + + let validate_unsigned_part_v2 = + def.validate_unsigned.as_ref().map(|_| quote::quote!(+ ValidateUnsigned)); + + let freeze_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) + .then_some(quote::quote!(+ FreezeReason)); + + let hold_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) + .then_some(quote::quote!(+ HoldReason)); + + let lock_id_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) + .then_some(quote::quote!(+ LockId)); + + let 
slash_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) + .then_some(quote::quote!(+ SlashReason)); + + quote::quote!( + // This macro follows the conventions as laid out by the `tt-call` crate. It does not + // accept any arguments and simply returns the pallet parts, separated by commas, then + // wrapped inside of braces and finally prepended with double colons, to the caller inside + // of a key named `tokens`. + // + // We need to accept a path argument here, because this macro gets expanded on the + // crate that called the `construct_runtime!` macro, and the actual path is unknown. + #[macro_export] + #[doc(hidden)] + macro_rules! #default_parts_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + tokens = [{ + expanded::{ + Pallet, #call_part #storage_part #event_part #error_part #origin_part #config_part + #inherent_part #validate_unsigned_part #freeze_reason_part #task_part + #hold_reason_part #lock_id_part #slash_reason_part + } + }] + } + }; + } + + pub use #default_parts_unique_id as tt_default_parts; + + + // This macro is similar to the `tt_default_parts!`. It expands the pallets that are declared + // explicitly (`System: frame_system::{Pallet, Call}`) with extra parts. + // + // For example, after expansion an explicit pallet would look like: + // `System: expanded::{Error} ::{Pallet, Call}`. + // + // The `expanded` keyword is a marker of the final state of the `construct_runtime!`. + #[macro_export] + #[doc(hidden)] + macro_rules! #extra_parts_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + tokens = [{ + expanded::{ + #error_part + } + }] + } + }; + } + + pub use #extra_parts_unique_id as tt_extra_parts; + + #[macro_export] + #[doc(hidden)] + macro_rules! 
#default_parts_unique_id_v2 { + { + $caller:tt + frame_support = [{ $($frame_support:ident)::* }] + } => { + $($frame_support)*::__private::tt_return! { + $caller + tokens = [{ + + Pallet #call_part_v2 #storage_part_v2 #event_part_v2 #error_part_v2 #origin_part_v2 #config_part_v2 + #inherent_part_v2 #validate_unsigned_part_v2 #freeze_reason_part_v2 #task_part_v2 + #hold_reason_part_v2 #lock_id_part_v2 #slash_reason_part_v2 + }] + } + }; + } + + pub use #default_parts_unique_id_v2 as tt_default_parts_v2; + ) } diff --git a/support/procedural-fork/src/pallet/expand/type_value.rs b/support/procedural-fork/src/pallet/expand/type_value.rs index 84db3e431..5dc6309c0 100644 --- a/support/procedural-fork/src/pallet/expand/type_value.rs +++ b/support/procedural-fork/src/pallet/expand/type_value.rs @@ -22,56 +22,56 @@ use crate::pallet::Def; /// * implement the `Get<..>` on it /// * Rename the name of the function to internal name pub fn expand_type_values(def: &mut Def) -> proc_macro2::TokenStream { - let mut expand = quote::quote!(); - let frame_support = &def.frame_support; + let mut expand = quote::quote!(); + let frame_support = &def.frame_support; - for type_value in &def.type_values { - let fn_name_str = &type_value.ident.to_string(); - let fn_name_snakecase = inflector::cases::snakecase::to_snake_case(fn_name_str); - let fn_ident_renamed = syn::Ident::new( - &format!("__type_value_for_{}", fn_name_snakecase), - type_value.ident.span(), - ); + for type_value in &def.type_values { + let fn_name_str = &type_value.ident.to_string(); + let fn_name_snakecase = inflector::cases::snakecase::to_snake_case(fn_name_str); + let fn_ident_renamed = syn::Ident::new( + &format!("__type_value_for_{}", fn_name_snakecase), + type_value.ident.span(), + ); - let type_value_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[type_value.index]; - if let syn::Item::Fn(item) = item { - item - } else { - unreachable!("Checked by error parser") - } - }; + let 
type_value_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[type_value.index]; + if let syn::Item::Fn(item) = item { + item + } else { + unreachable!("Checked by error parser") + } + }; - // Rename the type_value function name - type_value_item.sig.ident = fn_ident_renamed.clone(); + // Rename the type_value function name + type_value_item.sig.ident = fn_ident_renamed.clone(); - let vis = &type_value.vis; - let ident = &type_value.ident; - let type_ = &type_value.type_; - let where_clause = &type_value.where_clause; + let vis = &type_value.vis; + let ident = &type_value.ident; + let type_ = &type_value.type_; + let where_clause = &type_value.where_clause; - let (struct_impl_gen, struct_use_gen) = if type_value.is_generic { - ( - def.type_impl_generics(type_value.attr_span), - def.type_use_generics(type_value.attr_span), - ) - } else { - (Default::default(), Default::default()) - }; + let (struct_impl_gen, struct_use_gen) = if type_value.is_generic { + ( + def.type_impl_generics(type_value.attr_span), + def.type_use_generics(type_value.attr_span), + ) + } else { + (Default::default(), Default::default()) + }; - let docs = &type_value.docs; + let docs = &type_value.docs; - expand.extend(quote::quote_spanned!(type_value.attr_span => - #( #[doc = #docs] )* - #vis struct #ident<#struct_use_gen>(core::marker::PhantomData<((), #struct_use_gen)>); - impl<#struct_impl_gen> #frame_support::traits::Get<#type_> for #ident<#struct_use_gen> - #where_clause - { - fn get() -> #type_ { - #fn_ident_renamed::<#struct_use_gen>() - } - } - )); - } - expand + expand.extend(quote::quote_spanned!(type_value.attr_span => + #( #[doc = #docs] )* + #vis struct #ident<#struct_use_gen>(core::marker::PhantomData<((), #struct_use_gen)>); + impl<#struct_impl_gen> #frame_support::traits::Get<#type_> for #ident<#struct_use_gen> + #where_clause + { + fn get() -> #type_ { + #fn_ident_renamed::<#struct_use_gen>() + } + } + )); + } + expand } diff --git 
a/support/procedural-fork/src/pallet/expand/validate_unsigned.rs b/support/procedural-fork/src/pallet/expand/validate_unsigned.rs index 28c78a1c6..876995585 100644 --- a/support/procedural-fork/src/pallet/expand/validate_unsigned.rs +++ b/support/procedural-fork/src/pallet/expand/validate_unsigned.rs @@ -21,38 +21,36 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_validate_unsigned(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = Ident::new( - &format!("__is_validate_unsigned_part_defined_{}", count), - def.item.span(), - ); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = + Ident::new(&format!("__is_validate_unsigned_part_defined_{}", count), def.item.span()); - let maybe_compile_error = if def.validate_unsigned.is_none() { - quote! { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::validate_unsigned] defined, perhaps you should \ - remove `ValidateUnsigned` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.validate_unsigned.is_none() { + quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::validate_unsigned] defined, perhaps you should \ + remove `ValidateUnsigned` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_validate_unsigned_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_validate_unsigned_check { + #[macro_export] + #[doc(hidden)] + macro_rules! 
#macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_validate_unsigned_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_validate_unsigned_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/warnings.rs b/support/procedural-fork/src/pallet/expand/warnings.rs index ece03a13a..030e3ddaf 100644 --- a/support/procedural-fork/src/pallet/expand/warnings.rs +++ b/support/procedural-fork/src/pallet/expand/warnings.rs @@ -20,84 +20,79 @@ use crate::pallet::parse::call::{CallVariantDef, CallWeightDef}; use proc_macro_warning::Warning; use syn::{ - spanned::Spanned, - visit::{self, Visit}, + spanned::Spanned, + visit::{self, Visit}, }; /// Warn if any of the call arguments starts with a underscore and is used in a weight formula. pub(crate) fn weight_witness_warning( - method: &CallVariantDef, - dev_mode: bool, - warnings: &mut Vec, + method: &CallVariantDef, + dev_mode: bool, + warnings: &mut Vec, ) { - if dev_mode { - return; - } - let CallWeightDef::Immediate(w) = &method.weight else { - return; - }; + if dev_mode { + return + } + let CallWeightDef::Immediate(w) = &method.weight else { return }; - let partial_warning = Warning::new_deprecated("UncheckedWeightWitness") - .old("not check weight witness data") - .new("ensure that all witness data for weight calculation is checked before usage") - .help_link("https://github.com/paritytech/polkadot-sdk/pull/1818"); + let partial_warning = Warning::new_deprecated("UncheckedWeightWitness") + .old("not check weight witness data") + .new("ensure that all witness data for weight calculation is checked before usage") + .help_link("https://github.com/paritytech/polkadot-sdk/pull/1818"); - for (_, arg_ident, _) in method.args.iter() { - if !arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), arg_ident) { - continue; - } + for (_, arg_ident, _) in method.args.iter() { + if 
!arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), &arg_ident) { + continue + } - let warning = partial_warning - .clone() - .index(warnings.len()) - .span(arg_ident.span()) - .build_or_panic(); + let warning = partial_warning + .clone() + .index(warnings.len()) + .span(arg_ident.span()) + .build_or_panic(); - warnings.push(warning); - } + warnings.push(warning); + } } /// Warn if the weight is a constant and the pallet not in `dev_mode`. pub(crate) fn weight_constant_warning( - weight: &syn::Expr, - dev_mode: bool, - warnings: &mut Vec, + weight: &syn::Expr, + dev_mode: bool, + warnings: &mut Vec, ) { - if dev_mode { - return; - } - let syn::Expr::Lit(lit) = weight else { return }; + if dev_mode { + return + } + let syn::Expr::Lit(lit) = weight else { return }; - let warning = Warning::new_deprecated("ConstantWeight") - .index(warnings.len()) - .old("use hard-coded constant as call weight") - .new("benchmark all calls or put the pallet into `dev` mode") - .help_link("https://github.com/paritytech/substrate/pull/13798") - .span(lit.span()) - .build_or_panic(); + let warning = Warning::new_deprecated("ConstantWeight") + .index(warnings.len()) + .old("use hard-coded constant as call weight") + .new("benchmark all calls or put the pallet into `dev` mode") + .help_link("https://github.com/paritytech/substrate/pull/13798") + .span(lit.span()) + .build_or_panic(); - warnings.push(warning); + warnings.push(warning); } /// Returns whether `expr` contains `ident`. 
fn contains_ident(mut expr: syn::Expr, ident: &syn::Ident) -> bool { - struct ContainsIdent { - ident: syn::Ident, - found: bool, - } + struct ContainsIdent { + ident: syn::Ident, + found: bool, + } - impl<'a> Visit<'a> for ContainsIdent { - fn visit_ident(&mut self, i: &syn::Ident) { - if *i == self.ident { - self.found = true; - } - } - } + impl<'a> Visit<'a> for ContainsIdent { + fn visit_ident(&mut self, i: &syn::Ident) { + if *i == self.ident { + self.found = true; + } + } + } - let mut visitor = ContainsIdent { - ident: ident.clone(), - found: false, - }; - visit::visit_expr(&mut visitor, &mut expr); - visitor.found + let mut visitor = ContainsIdent { ident: ident.clone(), found: false }; + visit::visit_expr(&mut visitor, &mut expr); + visitor.found } diff --git a/support/procedural-fork/src/pallet/mod.rs b/support/procedural-fork/src/pallet/mod.rs index 5b9bc621b..42d8272fb 100644 --- a/support/procedural-fork/src/pallet/mod.rs +++ b/support/procedural-fork/src/pallet/mod.rs @@ -32,30 +32,30 @@ pub use parse::{composite::keyword::CompositeKeyword, Def}; use syn::spanned::Spanned; mod keyword { - syn::custom_keyword!(dev_mode); + syn::custom_keyword!(dev_mode); } pub fn pallet( - attr: proc_macro::TokenStream, - item: proc_macro::TokenStream, + attr: proc_macro::TokenStream, + item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { - let mut dev_mode = false; - if !attr.is_empty() { - if syn::parse::(attr.clone()).is_ok() { - dev_mode = true; - } else { - let msg = "Invalid pallet macro call: unexpected attribute. Macro call must be \ + let mut dev_mode = false; + if !attr.is_empty() { + if let Ok(_) = syn::parse::(attr.clone()) { + dev_mode = true; + } else { + let msg = "Invalid pallet macro call: unexpected attribute. 
Macro call must be \ bare, such as `#[frame_support::pallet]` or `#[pallet]`, or must specify the \ `dev_mode` attribute, such as `#[frame_support::pallet(dev_mode)]` or \ #[pallet(dev_mode)]."; - let span = proc_macro2::TokenStream::from(attr).span(); - return syn::Error::new(span, msg).to_compile_error().into(); - } - } - - let item = syn::parse_macro_input!(item as syn::ItemMod); - match parse::Def::try_from(item, dev_mode) { - Ok(def) => expand::expand(def).into(), - Err(e) => e.to_compile_error().into(), - } + let span = proc_macro2::TokenStream::from(attr).span(); + return syn::Error::new(span, msg).to_compile_error().into() + } + } + + let item = syn::parse_macro_input!(item as syn::ItemMod); + match parse::Def::try_from(item, dev_mode) { + Ok(def) => expand::expand(def).into(), + Err(e) => e.to_compile_error().into(), + } } diff --git a/support/procedural-fork/src/pallet/parse/call.rs b/support/procedural-fork/src/pallet/parse/call.rs index 0bcf38a4e..4e09b86fd 100644 --- a/support/procedural-fork/src/pallet/parse/call.rs +++ b/support/procedural-fork/src/pallet/parse/call.rs @@ -24,124 +24,124 @@ use syn::{spanned::Spanned, ExprClosure}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Call); - syn::custom_keyword!(OriginFor); - syn::custom_keyword!(RuntimeOrigin); - syn::custom_keyword!(weight); - syn::custom_keyword!(call_index); - syn::custom_keyword!(compact); - syn::custom_keyword!(T); - syn::custom_keyword!(pallet); - syn::custom_keyword!(feeless_if); + syn::custom_keyword!(Call); + syn::custom_keyword!(OriginFor); + syn::custom_keyword!(RuntimeOrigin); + syn::custom_keyword!(weight); + syn::custom_keyword!(call_index); + syn::custom_keyword!(compact); + syn::custom_keyword!(T); + syn::custom_keyword!(pallet); + syn::custom_keyword!(feeless_if); } /// Definition of dispatchables typically `impl Pallet { ... }` pub struct CallDef { - /// The where_clause used. 
- pub where_clause: Option, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The index of call item in pallet module. - pub index: usize, - /// Information on methods (used for expansion). - pub methods: Vec, - /// The span of the pallet::call attribute. - pub attr_span: proc_macro2::Span, - /// Docs, specified on the impl Block. - pub docs: Vec, - /// The optional `weight` attribute on the `pallet::call`. - pub inherited_call_weight: Option, + /// The where_clause used. + pub where_clause: Option, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The index of call item in pallet module. + pub index: usize, + /// Information on methods (used for expansion). + pub methods: Vec, + /// The span of the pallet::call attribute. + pub attr_span: proc_macro2::Span, + /// Docs, specified on the impl Block. + pub docs: Vec, + /// The optional `weight` attribute on the `pallet::call`. + pub inherited_call_weight: Option, } /// The weight of a call. #[derive(Clone)] pub enum CallWeightDef { - /// Explicitly set on the call itself with `#[pallet::weight(…)]`. This value is used. - Immediate(syn::Expr), + /// Explicitly set on the call itself with `#[pallet::weight(…)]`. This value is used. + Immediate(syn::Expr), - /// The default value that should be set for dev-mode pallets. Usually zero. - DevModeDefault, + /// The default value that should be set for dev-mode pallets. Usually zero. + DevModeDefault, - /// Inherits whatever value is configured on the pallet level. - /// - /// The concrete value is not known at this point. - Inherited, + /// Inherits whatever value is configured on the pallet level. + /// + /// The concrete value is not known at this point. + Inherited, } /// Definition of dispatchable typically: `#[weight...] fn foo(origin .., param1: ...) -> ..` #[derive(Clone)] pub struct CallVariantDef { - /// Function name. 
- pub name: syn::Ident, - /// Information on args: `(is_compact, name, type)` - pub args: Vec<(bool, syn::Ident, Box)>, - /// Weight for the call. - pub weight: CallWeightDef, - /// Call index of the dispatchable. - pub call_index: u8, - /// Whether an explicit call index was specified. - pub explicit_call_index: bool, - /// Docs, used for metadata. - pub docs: Vec, - /// Attributes annotated at the top of the dispatchable function. - pub attrs: Vec, - /// The `cfg` attributes. - pub cfg_attrs: Vec, - /// The optional `feeless_if` attribute on the `pallet::call`. - pub feeless_check: Option, + /// Function name. + pub name: syn::Ident, + /// Information on args: `(is_compact, name, type)` + pub args: Vec<(bool, syn::Ident, Box)>, + /// Weight for the call. + pub weight: CallWeightDef, + /// Call index of the dispatchable. + pub call_index: u8, + /// Whether an explicit call index was specified. + pub explicit_call_index: bool, + /// Docs, used for metadata. + pub docs: Vec, + /// Attributes annotated at the top of the dispatchable function. + pub attrs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, + /// The optional `feeless_if` attribute on the `pallet::call`. + pub feeless_check: Option, } /// Attributes for functions in call impl block. 
pub enum FunctionAttr { - /// Parse for `#[pallet::call_index(expr)]` - CallIndex(u8), - /// Parse for `#[pallet::weight(expr)]` - Weight(syn::Expr), - /// Parse for `#[pallet::feeless_if(expr)]` - FeelessIf(Span, syn::ExprClosure), + /// Parse for `#[pallet::call_index(expr)]` + CallIndex(u8), + /// Parse for `#[pallet::weight(expr)]` + Weight(syn::Expr), + /// Parse for `#[pallet::feeless_if(expr)]` + FeelessIf(Span, syn::ExprClosure), } impl syn::parse::Parse for FunctionAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::weight) { - content.parse::()?; - let weight_content; - syn::parenthesized!(weight_content in content); - Ok(FunctionAttr::Weight(weight_content.parse::()?)) - } else if lookahead.peek(keyword::call_index) { - content.parse::()?; - let call_index_content; - syn::parenthesized!(call_index_content in content); - let index = call_index_content.parse::()?; - if !index.suffix().is_empty() { - let msg = "Number literal must not have a suffix"; - return Err(syn::Error::new(index.span(), msg)); - } - Ok(FunctionAttr::CallIndex(index.base10_parse()?)) - } else if lookahead.peek(keyword::feeless_if) { - content.parse::()?; - let closure_content; - syn::parenthesized!(closure_content in content); - Ok(FunctionAttr::FeelessIf( - closure_content.span(), - closure_content.parse::().map_err(|e| { - let msg = "Invalid feeless_if attribute: expected a closure"; - let mut err = syn::Error::new(closure_content.span(), msg); - err.combine(e); - err - })?, - )) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::weight) { + 
content.parse::()?; + let weight_content; + syn::parenthesized!(weight_content in content); + Ok(FunctionAttr::Weight(weight_content.parse::()?)) + } else if lookahead.peek(keyword::call_index) { + content.parse::()?; + let call_index_content; + syn::parenthesized!(call_index_content in content); + let index = call_index_content.parse::()?; + if !index.suffix().is_empty() { + let msg = "Number literal must not have a suffix"; + return Err(syn::Error::new(index.span(), msg)) + } + Ok(FunctionAttr::CallIndex(index.base10_parse()?)) + } else if lookahead.peek(keyword::feeless_if) { + content.parse::()?; + let closure_content; + syn::parenthesized!(closure_content in content); + Ok(FunctionAttr::FeelessIf( + closure_content.span(), + closure_content.parse::().map_err(|e| { + let msg = "Invalid feeless_if attribute: expected a closure"; + let mut err = syn::Error::new(closure_content.span(), msg); + err.combine(e); + err + })?, + )) + } else { + Err(lookahead.error()) + } + } } /// Attribute for arguments in function in call impl block. @@ -149,324 +149,319 @@ impl syn::parse::Parse for FunctionAttr { pub struct ArgAttrIsCompact; impl syn::parse::Parse for ArgAttrIsCompact { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - content.parse::()?; - Ok(ArgAttrIsCompact) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + content.parse::()?; + Ok(ArgAttrIsCompact) + } } /// Check the syntax is `OriginFor`, `&OriginFor` or `T::RuntimeOrigin`. 
pub fn check_dispatchable_first_arg_type(ty: &syn::Type, is_ref: bool) -> syn::Result<()> { - pub struct CheckOriginFor(bool); - impl syn::parse::Parse for CheckOriginFor { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let is_ref = input.parse::().is_ok(); - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - - Ok(Self(is_ref)) - } - } - - pub struct CheckRuntimeOrigin; - impl syn::parse::Parse for CheckRuntimeOrigin { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self) - } - } - - let result_origin_for = syn::parse2::(ty.to_token_stream()); - let result_runtime_origin = syn::parse2::(ty.to_token_stream()); - match (result_origin_for, result_runtime_origin) { - (Ok(CheckOriginFor(has_ref)), _) if is_ref == has_ref => Ok(()), - (_, Ok(_)) => Ok(()), - (_, _) => { - let msg = if is_ref { - "Invalid type: expected `&OriginFor`" - } else { - "Invalid type: expected `OriginFor` or `T::RuntimeOrigin`" - }; - Err(syn::Error::new(ty.span(), msg)) - } - } + pub struct CheckOriginFor(bool); + impl syn::parse::Parse for CheckOriginFor { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let is_ref = input.parse::().is_ok(); + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(is_ref)) + } + } + + pub struct CheckRuntimeOrigin; + impl syn::parse::Parse for CheckRuntimeOrigin { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self) + } + } + + let result_origin_for = syn::parse2::(ty.to_token_stream()); + let result_runtime_origin = syn::parse2::(ty.to_token_stream()); + return match (result_origin_for, result_runtime_origin) { + (Ok(CheckOriginFor(has_ref)), _) if is_ref == has_ref => Ok(()), + (_, Ok(_)) => Ok(()), + (_, _) => { + let msg = if is_ref { + "Invalid type: expected `&OriginFor`" + } else { + "Invalid type: 
expected `OriginFor` or `T::RuntimeOrigin`" + }; + return Err(syn::Error::new(ty.span(), msg)) + }, + } } impl CallDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - dev_mode: bool, - inherited_call_weight: Option, - ) -> syn::Result { - let item_impl = if let syn::Item::Impl(item) = item { - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::call, expected item impl", - )); - }; - - let instances = vec![ - helper::check_impl_gen(&item_impl.generics, item_impl.impl_token.span())?, - helper::check_pallet_struct_usage(&item_impl.self_ty)?, - ]; - - if let Some((_, _, for_)) = item_impl.trait_ { - let msg = "Invalid pallet::call, expected no trait ident as in \ + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + dev_mode: bool, + inherited_call_weight: Option, + ) -> syn::Result { + let item_impl = if let syn::Item::Impl(item) = item { + item + } else { + return Err(syn::Error::new(item.span(), "Invalid pallet::call, expected item impl")) + }; + + let instances = vec![ + helper::check_impl_gen(&item_impl.generics, item_impl.impl_token.span())?, + helper::check_pallet_struct_usage(&item_impl.self_ty)?, + ]; + + if let Some((_, _, for_)) = item_impl.trait_ { + let msg = "Invalid pallet::call, expected no trait ident as in \ `impl<..> Pallet<..> { .. 
}`"; - return Err(syn::Error::new(for_.span(), msg)); - } - - let mut methods = vec![]; - let mut indices = HashMap::new(); - let mut last_index: Option = None; - for item in &mut item_impl.items { - if let syn::ImplItem::Fn(method) = item { - if !matches!(method.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::call, dispatchable function must be public: \ + return Err(syn::Error::new(for_.span(), msg)) + } + + let mut methods = vec![]; + let mut indices = HashMap::new(); + let mut last_index: Option = None; + for item in &mut item_impl.items { + if let syn::ImplItem::Fn(method) = item { + if !matches!(method.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::call, dispatchable function must be public: \ `pub fn`"; - let span = match method.vis { - syn::Visibility::Inherited => method.sig.span(), - _ => method.vis.span(), - }; - - return Err(syn::Error::new(span, msg)); - } - - match method.sig.inputs.first() { - None => { - let msg = "Invalid pallet::call, must have at least origin arg"; - return Err(syn::Error::new(method.sig.span(), msg)); - } - Some(syn::FnArg::Receiver(_)) => { - let msg = "Invalid pallet::call, first argument must be a typed argument, \ + let span = match method.vis { + syn::Visibility::Inherited => method.sig.span(), + _ => method.vis.span(), + }; + + return Err(syn::Error::new(span, msg)) + } + + match method.sig.inputs.first() { + None => { + let msg = "Invalid pallet::call, must have at least origin arg"; + return Err(syn::Error::new(method.sig.span(), msg)) + }, + Some(syn::FnArg::Receiver(_)) => { + let msg = "Invalid pallet::call, first argument must be a typed argument, \ e.g. 
`origin: OriginFor`"; - return Err(syn::Error::new(method.sig.span(), msg)); - } - Some(syn::FnArg::Typed(arg)) => { - check_dispatchable_first_arg_type(&arg.ty, false)?; - } - } - - if let syn::ReturnType::Type(_, type_) = &method.sig.output { - helper::check_pallet_call_return_type(type_)?; - } else { - let msg = "Invalid pallet::call, require return type \ + return Err(syn::Error::new(method.sig.span(), msg)) + }, + Some(syn::FnArg::Typed(arg)) => { + check_dispatchable_first_arg_type(&arg.ty, false)?; + }, + } + + if let syn::ReturnType::Type(_, type_) = &method.sig.output { + helper::check_pallet_call_return_type(type_)?; + } else { + let msg = "Invalid pallet::call, require return type \ DispatchResultWithPostInfo"; - return Err(syn::Error::new(method.sig.span(), msg)); - } - - let cfg_attrs: Vec = helper::get_item_cfg_attrs(&method.attrs); - let mut call_idx_attrs = vec![]; - let mut weight_attrs = vec![]; - let mut feeless_attrs = vec![]; - for attr in helper::take_item_pallet_attrs(&mut method.attrs)?.into_iter() { - match attr { - FunctionAttr::CallIndex(_) => { - call_idx_attrs.push(attr); - } - FunctionAttr::Weight(_) => { - weight_attrs.push(attr); - } - FunctionAttr::FeelessIf(span, _) => { - feeless_attrs.push((span, attr)); - } - } - } - - if weight_attrs.is_empty() && dev_mode { - // inject a default O(1) weight when dev mode is enabled and no weight has - // been specified on the call - let empty_weight: syn::Expr = syn::parse_quote!(0); - weight_attrs.push(FunctionAttr::Weight(empty_weight)); - } - - let weight = match weight_attrs.len() { - 0 if inherited_call_weight.is_some() => CallWeightDef::Inherited, - 0 if dev_mode => CallWeightDef::DevModeDefault, - 0 => return Err(syn::Error::new( - method.sig.span(), - "A pallet::call requires either a concrete `#[pallet::weight($expr)]` or an + return Err(syn::Error::new(method.sig.span(), msg)) + } + + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&method.attrs); + let mut call_idx_attrs = vec![]; + 
let mut weight_attrs = vec![]; + let mut feeless_attrs = vec![]; + for attr in helper::take_item_pallet_attrs(&mut method.attrs)?.into_iter() { + match attr { + FunctionAttr::CallIndex(_) => { + call_idx_attrs.push(attr); + }, + FunctionAttr::Weight(_) => { + weight_attrs.push(attr); + }, + FunctionAttr::FeelessIf(span, _) => { + feeless_attrs.push((span, attr)); + }, + } + } + + if weight_attrs.is_empty() && dev_mode { + // inject a default O(1) weight when dev mode is enabled and no weight has + // been specified on the call + let empty_weight: syn::Expr = syn::parse_quote!(0); + weight_attrs.push(FunctionAttr::Weight(empty_weight)); + } + + let weight = match weight_attrs.len() { + 0 if inherited_call_weight.is_some() => CallWeightDef::Inherited, + 0 if dev_mode => CallWeightDef::DevModeDefault, + 0 => return Err(syn::Error::new( + method.sig.span(), + "A pallet::call requires either a concrete `#[pallet::weight($expr)]` or an inherited weight from the `#[pallet:call(weight($type))]` attribute, but none were given.", - )), - 1 => match weight_attrs.pop().unwrap() { - FunctionAttr::Weight(w) => CallWeightDef::Immediate(w), - _ => unreachable!("checked during creation of the let binding"), - }, - _ => { - let msg = "Invalid pallet::call, too many weight attributes given"; - return Err(syn::Error::new(method.sig.span(), msg)); - } - }; - - if call_idx_attrs.len() > 1 { - let msg = "Invalid pallet::call, too many call_index attributes given"; - return Err(syn::Error::new(method.sig.span(), msg)); - } - let call_index = call_idx_attrs.pop().map(|attr| match attr { - FunctionAttr::CallIndex(idx) => idx, - _ => unreachable!("checked during creation of the let binding"), - }); - let explicit_call_index = call_index.is_some(); - - let final_index = match call_index { - Some(i) => i, - None => last_index - .map_or(Some(0), |idx| idx.checked_add(1)) - .ok_or_else(|| { - let msg = "Call index doesn't fit into u8, index is 256"; - syn::Error::new(method.sig.span(), msg) - 
})?, - }; - last_index = Some(final_index); - - if let Some(used_fn) = indices.insert(final_index, method.sig.ident.clone()) { - let msg = format!( - "Call indices are conflicting: Both functions {} and {} are at index {}", - used_fn, method.sig.ident, final_index, - ); - let mut err = syn::Error::new(used_fn.span(), &msg); - err.combine(syn::Error::new(method.sig.ident.span(), msg)); - return Err(err); - } - - let mut args = vec![]; - for arg in method.sig.inputs.iter_mut().skip(1) { - let arg = if let syn::FnArg::Typed(arg) = arg { - arg - } else { - unreachable!("Only first argument can be receiver"); - }; - - let arg_attrs: Vec = - helper::take_item_pallet_attrs(&mut arg.attrs)?; - - if arg_attrs.len() > 1 { - let msg = "Invalid pallet::call, argument has too many attributes"; - return Err(syn::Error::new(arg.span(), msg)); - } - - let arg_ident = if let syn::Pat::Ident(pat) = &*arg.pat { - pat.ident.clone() - } else { - let msg = "Invalid pallet::call, argument must be ident"; - return Err(syn::Error::new(arg.pat.span(), msg)); - }; - - args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); - } - - let docs = get_doc_literals(&method.attrs); - - if feeless_attrs.len() > 1 { - let msg = "Invalid pallet::call, there can only be one feeless_if attribute"; - return Err(syn::Error::new(feeless_attrs[1].0, msg)); - } - let feeless_check: Option = - feeless_attrs.pop().map(|(_, attr)| match attr { - FunctionAttr::FeelessIf(_, closure) => closure, - _ => unreachable!("checked during creation of the let binding"), - }); - - if let Some(ref feeless_check) = feeless_check { - if feeless_check.inputs.len() != args.len() + 1 { - let msg = "Invalid pallet::call, feeless_if closure must have same \ + )), + 1 => match weight_attrs.pop().unwrap() { + FunctionAttr::Weight(w) => CallWeightDef::Immediate(w), + _ => unreachable!("checked during creation of the let binding"), + }, + _ => { + let msg = "Invalid pallet::call, too many weight attributes given"; + return 
Err(syn::Error::new(method.sig.span(), msg)) + }, + }; + + if call_idx_attrs.len() > 1 { + let msg = "Invalid pallet::call, too many call_index attributes given"; + return Err(syn::Error::new(method.sig.span(), msg)) + } + let call_index = call_idx_attrs.pop().map(|attr| match attr { + FunctionAttr::CallIndex(idx) => idx, + _ => unreachable!("checked during creation of the let binding"), + }); + let explicit_call_index = call_index.is_some(); + + let final_index = match call_index { + Some(i) => i, + None => + last_index.map_or(Some(0), |idx| idx.checked_add(1)).ok_or_else(|| { + let msg = "Call index doesn't fit into u8, index is 256"; + syn::Error::new(method.sig.span(), msg) + })?, + }; + last_index = Some(final_index); + + if let Some(used_fn) = indices.insert(final_index, method.sig.ident.clone()) { + let msg = format!( + "Call indices are conflicting: Both functions {} and {} are at index {}", + used_fn, method.sig.ident, final_index, + ); + let mut err = syn::Error::new(used_fn.span(), &msg); + err.combine(syn::Error::new(method.sig.ident.span(), msg)); + return Err(err) + } + + let mut args = vec![]; + for arg in method.sig.inputs.iter_mut().skip(1) { + let arg = if let syn::FnArg::Typed(arg) = arg { + arg + } else { + unreachable!("Only first argument can be receiver"); + }; + + let arg_attrs: Vec = + helper::take_item_pallet_attrs(&mut arg.attrs)?; + + if arg_attrs.len() > 1 { + let msg = "Invalid pallet::call, argument has too many attributes"; + return Err(syn::Error::new(arg.span(), msg)) + } + + let arg_ident = if let syn::Pat::Ident(pat) = &*arg.pat { + pat.ident.clone() + } else { + let msg = "Invalid pallet::call, argument must be ident"; + return Err(syn::Error::new(arg.pat.span(), msg)) + }; + + args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); + } + + let docs = get_doc_literals(&method.attrs); + + if feeless_attrs.len() > 1 { + let msg = "Invalid pallet::call, there can only be one feeless_if attribute"; + return 
Err(syn::Error::new(feeless_attrs[1].0, msg)) + } + let feeless_check: Option = + feeless_attrs.pop().map(|(_, attr)| match attr { + FunctionAttr::FeelessIf(_, closure) => closure, + _ => unreachable!("checked during creation of the let binding"), + }); + + if let Some(ref feeless_check) = feeless_check { + if feeless_check.inputs.len() != args.len() + 1 { + let msg = "Invalid pallet::call, feeless_if closure must have same \ number of arguments as the dispatchable function"; - return Err(syn::Error::new(feeless_check.span(), msg)); - } - - match feeless_check.inputs.first() { - None => { - let msg = "Invalid pallet::call, feeless_if closure must have at least origin arg"; - return Err(syn::Error::new(feeless_check.span(), msg)); - } - Some(syn::Pat::Type(arg)) => { - check_dispatchable_first_arg_type(&arg.ty, true)?; - } - _ => { - let msg = "Invalid pallet::call, feeless_if closure first argument must be a typed argument, \ + return Err(syn::Error::new(feeless_check.span(), msg)) + } + + match feeless_check.inputs.first() { + None => { + let msg = "Invalid pallet::call, feeless_if closure must have at least origin arg"; + return Err(syn::Error::new(feeless_check.span(), msg)) + }, + Some(syn::Pat::Type(arg)) => { + check_dispatchable_first_arg_type(&arg.ty, true)?; + }, + _ => { + let msg = "Invalid pallet::call, feeless_if closure first argument must be a typed argument, \ e.g. `origin: OriginFor`"; - return Err(syn::Error::new(feeless_check.span(), msg)); - } - } - - for (feeless_arg, arg) in feeless_check.inputs.iter().skip(1).zip(args.iter()) { - let feeless_arg_type = if let syn::Pat::Type(syn::PatType { ty, .. 
}) = - feeless_arg.clone() - { - if let syn::Type::Reference(pat) = *ty { - pat.elem.clone() - } else { - let msg = "Invalid pallet::call, feeless_if closure argument must be a reference"; - return Err(syn::Error::new(ty.span(), msg)); - } - } else { - let msg = "Invalid pallet::call, feeless_if closure argument must be a type ascription pattern"; - return Err(syn::Error::new(feeless_arg.span(), msg)); - }; - - if feeless_arg_type != arg.2 { - let msg = - "Invalid pallet::call, feeless_if closure argument must have \ + return Err(syn::Error::new(feeless_check.span(), msg)) + }, + } + + for (feeless_arg, arg) in feeless_check.inputs.iter().skip(1).zip(args.iter()) { + let feeless_arg_type = + if let syn::Pat::Type(syn::PatType { ty, .. }) = feeless_arg.clone() { + if let syn::Type::Reference(pat) = *ty { + pat.elem.clone() + } else { + let msg = "Invalid pallet::call, feeless_if closure argument must be a reference"; + return Err(syn::Error::new(ty.span(), msg)) + } + } else { + let msg = "Invalid pallet::call, feeless_if closure argument must be a type ascription pattern"; + return Err(syn::Error::new(feeless_arg.span(), msg)) + }; + + if feeless_arg_type != arg.2 { + let msg = + "Invalid pallet::call, feeless_if closure argument must have \ a reference to the same type as the dispatchable function argument"; - return Err(syn::Error::new(feeless_arg.span(), msg)); - } - } - - let valid_return = match &feeless_check.output { - syn::ReturnType::Type(_, type_) => match *(type_.clone()) { - syn::Type::Path(syn::TypePath { path, .. 
}) => path.is_ident("bool"), - _ => false, - }, - _ => false, - }; - if !valid_return { - let msg = "Invalid pallet::call, feeless_if closure must return `bool`"; - return Err(syn::Error::new(feeless_check.output.span(), msg)); - } - } - - methods.push(CallVariantDef { - name: method.sig.ident.clone(), - weight, - call_index: final_index, - explicit_call_index, - args, - docs, - attrs: method.attrs.clone(), - cfg_attrs, - feeless_check, - }); - } else { - let msg = "Invalid pallet::call, only method accepted"; - return Err(syn::Error::new(item.span(), msg)); - } - } - - Ok(Self { - index, - attr_span, - instances, - methods, - where_clause: item_impl.generics.where_clause.clone(), - docs: get_doc_literals(&item_impl.attrs), - inherited_call_weight, - }) - } + return Err(syn::Error::new(feeless_arg.span(), msg)) + } + } + + let valid_return = match &feeless_check.output { + syn::ReturnType::Type(_, type_) => match *(type_.clone()) { + syn::Type::Path(syn::TypePath { path, .. }) => path.is_ident("bool"), + _ => false, + }, + _ => false, + }; + if !valid_return { + let msg = "Invalid pallet::call, feeless_if closure must return `bool`"; + return Err(syn::Error::new(feeless_check.output.span(), msg)) + } + } + + methods.push(CallVariantDef { + name: method.sig.ident.clone(), + weight, + call_index: final_index, + explicit_call_index, + args, + docs, + attrs: method.attrs.clone(), + cfg_attrs, + feeless_check, + }); + } else { + let msg = "Invalid pallet::call, only method accepted"; + return Err(syn::Error::new(item.span(), msg)) + } + } + + Ok(Self { + index, + attr_span, + instances, + methods, + where_clause: item_impl.generics.where_clause.clone(), + docs: get_doc_literals(&item_impl.attrs), + inherited_call_weight, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/composite.rs b/support/procedural-fork/src/pallet/parse/composite.rs index 38da1f205..c3ac74846 100644 --- a/support/procedural-fork/src/pallet/parse/composite.rs +++ 
b/support/procedural-fork/src/pallet/parse/composite.rs @@ -20,178 +20,172 @@ use quote::ToTokens; use syn::spanned::Spanned; pub mod keyword { - use super::*; - - syn::custom_keyword!(FreezeReason); - syn::custom_keyword!(HoldReason); - syn::custom_keyword!(LockId); - syn::custom_keyword!(SlashReason); - syn::custom_keyword!(Task); - - pub enum CompositeKeyword { - FreezeReason(FreezeReason), - HoldReason(HoldReason), - LockId(LockId), - SlashReason(SlashReason), - Task(Task), - } - - impl ToTokens for CompositeKeyword { - fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { - use CompositeKeyword::*; - match self { - FreezeReason(inner) => inner.to_tokens(tokens), - HoldReason(inner) => inner.to_tokens(tokens), - LockId(inner) => inner.to_tokens(tokens), - SlashReason(inner) => inner.to_tokens(tokens), - Task(inner) => inner.to_tokens(tokens), - } - } - } - - impl syn::parse::Parse for CompositeKeyword { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lookahead = input.lookahead1(); - if lookahead.peek(FreezeReason) { - Ok(Self::FreezeReason(input.parse()?)) - } else if lookahead.peek(HoldReason) { - Ok(Self::HoldReason(input.parse()?)) - } else if lookahead.peek(LockId) { - Ok(Self::LockId(input.parse()?)) - } else if lookahead.peek(SlashReason) { - Ok(Self::SlashReason(input.parse()?)) - } else if lookahead.peek(Task) { - Ok(Self::Task(input.parse()?)) - } else { - Err(lookahead.error()) - } - } - } - - impl std::fmt::Display for CompositeKeyword { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - use CompositeKeyword::*; - write!( - f, - "{}", - match self { - FreezeReason(_) => "FreezeReason", - HoldReason(_) => "HoldReason", - Task(_) => "Task", - LockId(_) => "LockId", - SlashReason(_) => "SlashReason", - } - ) - } - } + use super::*; + + syn::custom_keyword!(FreezeReason); + syn::custom_keyword!(HoldReason); + syn::custom_keyword!(LockId); + syn::custom_keyword!(SlashReason); + syn::custom_keyword!(Task); + 
+ pub enum CompositeKeyword { + FreezeReason(FreezeReason), + HoldReason(HoldReason), + LockId(LockId), + SlashReason(SlashReason), + Task(Task), + } + + impl ToTokens for CompositeKeyword { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + use CompositeKeyword::*; + match self { + FreezeReason(inner) => inner.to_tokens(tokens), + HoldReason(inner) => inner.to_tokens(tokens), + LockId(inner) => inner.to_tokens(tokens), + SlashReason(inner) => inner.to_tokens(tokens), + Task(inner) => inner.to_tokens(tokens), + } + } + } + + impl syn::parse::Parse for CompositeKeyword { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + if lookahead.peek(FreezeReason) { + Ok(Self::FreezeReason(input.parse()?)) + } else if lookahead.peek(HoldReason) { + Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(LockId) { + Ok(Self::LockId(input.parse()?)) + } else if lookahead.peek(SlashReason) { + Ok(Self::SlashReason(input.parse()?)) + } else if lookahead.peek(Task) { + Ok(Self::Task(input.parse()?)) + } else { + Err(lookahead.error()) + } + } + } + + impl std::fmt::Display for CompositeKeyword { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use CompositeKeyword::*; + write!( + f, + "{}", + match self { + FreezeReason(_) => "FreezeReason", + HoldReason(_) => "HoldReason", + Task(_) => "Task", + LockId(_) => "LockId", + SlashReason(_) => "SlashReason", + } + ) + } + } } pub struct CompositeDef { - /// The index of the CompositeDef item in the pallet module. - pub index: usize, - /// The composite keyword used (contains span). - pub composite_keyword: keyword::CompositeKeyword, - /// Name of the associated type. - pub ident: syn::Ident, - /// Type parameters and where clause attached to a declaration of the pallet::composite_enum. - pub generics: syn::Generics, - /// The span of the pallet::composite_enum attribute. 
- pub attr_span: proc_macro2::Span, - /// Variant count of the pallet::composite_enum. - pub variant_count: u32, + /// The index of the CompositeDef item in the pallet module. + pub index: usize, + /// The composite keyword used (contains span). + pub composite_keyword: keyword::CompositeKeyword, + /// Name of the associated type. + pub ident: syn::Ident, + /// Type parameters and where clause attached to a declaration of the pallet::composite_enum. + pub generics: syn::Generics, + /// The span of the pallet::composite_enum attribute. + pub attr_span: proc_macro2::Span, + /// Variant count of the pallet::composite_enum. + pub variant_count: u32, } impl CompositeDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - scrate: &syn::Path, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - // check variants: composite enums support only field-less enum variants. This is - // because fields can introduce too many possibilities, making it challenging to compute - // a fixed variant count. - for variant in &item.variants { - match variant.fields { - syn::Fields::Named(_) | syn::Fields::Unnamed(_) => { - return Err(syn::Error::new( - variant.ident.span(), - "The composite enum does not support variants with fields!", - )) - } - syn::Fields::Unit => (), - } - } - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::composite_enum, expected enum item", - )); - }; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = format!( - "Invalid pallet::composite_enum, `{}` must be public", - item.ident - ); - return Err(syn::Error::new(item.span(), msg)); - } - - let has_instance = if item.generics.params.first().is_some() { - helper::check_config_def_gen(&item.generics, item.ident.span())?; - true - } else { - false - }; - - let has_derive_attr = item.attrs.iter().any(|attr| { - if let syn::Meta::List(syn::MetaList { path, .. 
}) = &attr.meta { - path.get_ident() - .map(|ident| ident == "derive") - .unwrap_or(false) - } else { - false - } - }); - - if !has_derive_attr { - let derive_attr: syn::Attribute = syn::parse_quote! { - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - }; - item.attrs.push(derive_attr); - } - - if has_instance { - item.attrs.push(syn::parse_quote! { - #[scale_info(skip_type_params(I))] - }); - - item.variants.push(syn::parse_quote! { - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData, - ) - }); - } - - let composite_keyword = - syn::parse2::(item.ident.to_token_stream())?; - - Ok(CompositeDef { - index, - composite_keyword, - attr_span, - generics: item.generics.clone(), - variant_count: item.variants.len() as u32, - ident: item.ident.clone(), - }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + scrate: &syn::Path, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + // check variants: composite enums support only field-less enum variants. This is + // because fields can introduce too many possibilities, making it challenging to compute + // a fixed variant count. 
+ for variant in &item.variants { + match variant.fields { + syn::Fields::Named(_) | syn::Fields::Unnamed(_) => + return Err(syn::Error::new( + variant.ident.span(), + "The composite enum does not support variants with fields!", + )), + syn::Fields::Unit => (), + } + } + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::composite_enum, expected enum item", + )) + }; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = format!("Invalid pallet::composite_enum, `{}` must be public", item.ident); + return Err(syn::Error::new(item.span(), msg)) + } + + let has_instance = if item.generics.params.first().is_some() { + helper::check_config_def_gen(&item.generics, item.ident.span())?; + true + } else { + false + }; + + let has_derive_attr = item.attrs.iter().any(|attr| { + if let syn::Meta::List(syn::MetaList { path, .. }) = &attr.meta { + path.get_ident().map(|ident| ident == "derive").unwrap_or(false) + } else { + false + } + }); + + if !has_derive_attr { + let derive_attr: syn::Attribute = syn::parse_quote! { + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + }; + item.attrs.push(derive_attr); + } + + if has_instance { + item.attrs.push(syn::parse_quote! { + #[scale_info(skip_type_params(I))] + }); + + item.variants.push(syn::parse_quote! 
{ + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData, + ) + }); + } + + let composite_keyword = + syn::parse2::(item.ident.to_token_stream())?; + + Ok(CompositeDef { + index, + composite_keyword, + attr_span, + generics: item.generics.clone(), + variant_count: item.variants.len() as u32, + ident: item.ident.clone(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/config.rs b/support/procedural-fork/src/pallet/parse/config.rs index 9ecdbddc3..fbab92db1 100644 --- a/support/procedural-fork/src/pallet/parse/config.rs +++ b/support/procedural-fork/src/pallet/parse/config.rs @@ -22,592 +22,569 @@ use syn::{spanned::Spanned, token, Token}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Config); - syn::custom_keyword!(From); - syn::custom_keyword!(T); - syn::custom_keyword!(I); - syn::custom_keyword!(config); - syn::custom_keyword!(pallet); - syn::custom_keyword!(IsType); - syn::custom_keyword!(RuntimeEvent); - syn::custom_keyword!(Event); - syn::custom_keyword!(frame_system); - syn::custom_keyword!(disable_frame_system_supertrait_check); - syn::custom_keyword!(no_default); - syn::custom_keyword!(no_default_bounds); - syn::custom_keyword!(constant); + syn::custom_keyword!(Config); + syn::custom_keyword!(From); + syn::custom_keyword!(T); + syn::custom_keyword!(I); + syn::custom_keyword!(config); + syn::custom_keyword!(pallet); + syn::custom_keyword!(IsType); + syn::custom_keyword!(RuntimeEvent); + syn::custom_keyword!(Event); + syn::custom_keyword!(frame_system); + syn::custom_keyword!(disable_frame_system_supertrait_check); + syn::custom_keyword!(no_default); + syn::custom_keyword!(no_default_bounds); + syn::custom_keyword!(constant); } #[derive(Default)] pub struct DefaultTrait { - /// A bool for each sub-trait item indicates whether the item has - /// `#[pallet::no_default_bounds]` attached to it. If true, the item will not have any bounds - /// in the generated default sub-trait. 
- pub items: Vec<(syn::TraitItem, bool)>, - pub has_system: bool, + /// A bool for each sub-trait item indicates whether the item has + /// `#[pallet::no_default_bounds]` attached to it. If true, the item will not have any bounds + /// in the generated default sub-trait. + pub items: Vec<(syn::TraitItem, bool)>, + pub has_system: bool, } /// Input definition for the pallet config. pub struct ConfigDef { - /// The index of item in pallet module. - pub index: usize, - /// Whether the trait has instance (i.e. define with `Config`) - pub has_instance: bool, - /// Const associated type. - pub consts_metadata: Vec, - /// Whether the trait has the associated type `Event`, note that those bounds are - /// checked: - /// * `IsType::RuntimeEvent` - /// * `From` or `From>` or `From>` - pub has_event_type: bool, - /// The where clause on trait definition but modified so `Self` is `T`. - pub where_clause: Option, - /// The span of the pallet::config attribute. - pub attr_span: proc_macro2::Span, - /// Whether a default sub-trait should be generated. - /// - /// Contains default sub-trait items (instantiated by `#[pallet::config(with_default)]`). - /// Vec will be empty if `#[pallet::config(with_default)]` is not specified or if there are - /// no trait items. - pub default_sub_trait: Option, + /// The index of item in pallet module. + pub index: usize, + /// Whether the trait has instance (i.e. define with `Config`) + pub has_instance: bool, + /// Const associated type. + pub consts_metadata: Vec, + /// Whether the trait has the associated type `Event`, note that those bounds are + /// checked: + /// * `IsType::RuntimeEvent` + /// * `From` or `From>` or `From>` + pub has_event_type: bool, + /// The where clause on trait definition but modified so `Self` is `T`. + pub where_clause: Option, + /// The span of the pallet::config attribute. + pub attr_span: proc_macro2::Span, + /// Whether a default sub-trait should be generated. 
+ /// + /// Contains default sub-trait items (instantiated by `#[pallet::config(with_default)]`). + /// Vec will be empty if `#[pallet::config(with_default)]` is not specified or if there are + /// no trait items. + pub default_sub_trait: Option, } /// Input definition for a constant in pallet config. pub struct ConstMetadataDef { - /// Name of the associated type. - pub ident: syn::Ident, - /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, + /// Name of the associated type. + pub ident: syn::Ident, + /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, } impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { - type Error = syn::Error; - - fn try_from(trait_ty: &syn::TraitItemType) -> Result { - let err = |span, msg| { - syn::Error::new( - span, - format!("Invalid usage of `#[pallet::constant]`: {}", msg), - ) - }; - let doc = get_doc_literals(&trait_ty.attrs); - let ident = trait_ty.ident.clone(); - let bound = trait_ty - .bounds - .iter() - .find_map(|b| { - if let syn::TypeParamBound::Trait(tb) = b { - tb.path - .segments - .last() - .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) - } else { - None - } - }) - .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; - let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { - if ab.args.len() == 1 { - if let syn::GenericArgument::Type(ref ty) = ab.args[0] { - Ok(ty) - } else { - Err(err(ab.args[0].span(), "Expected a type argument")) - } - } else { - Err(err(bound.span(), "Expected a single type argument")) - } - } else { - Err(err(bound.span(), "Expected trait generic args")) - }?; - let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) - .expect("Internal error: replacing `Self` by `T` should result in valid type"); - - Ok(Self { ident, type_, doc 
}) - } + type Error = syn::Error; + + fn try_from(trait_ty: &syn::TraitItemType) -> Result { + let err = |span, msg| { + syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)) + }; + let doc = get_doc_literals(&trait_ty.attrs); + let ident = trait_ty.ident.clone(); + let bound = trait_ty + .bounds + .iter() + .find_map(|b| { + if let syn::TypeParamBound::Trait(tb) = b { + tb.path + .segments + .last() + .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) + } else { + None + } + }) + .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; + let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { + if ab.args.len() == 1 { + if let syn::GenericArgument::Type(ref ty) = ab.args[0] { + Ok(ty) + } else { + Err(err(ab.args[0].span(), "Expected a type argument")) + } + } else { + Err(err(bound.span(), "Expected a single type argument")) + } + } else { + Err(err(bound.span(), "Expected trait generic args")) + }?; + let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) + .expect("Internal error: replacing `Self` by `T` should result in valid type"); + + Ok(Self { ident, type_, doc }) + } } /// Parse for `#[pallet::disable_frame_system_supertrait_check]` pub struct DisableFrameSystemSupertraitCheck; impl syn::parse::Parse for DisableFrameSystemSupertraitCheck { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - content.parse::()?; - Ok(Self) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + content.parse::()?; + Ok(Self) + } } /// Parsing for the `typ` portion of `PalletAttr` #[derive(derive_syn_parse::Parse, PartialEq, Eq)] pub enum PalletAttrType { - #[peek(keyword::no_default, name = "no_default")] - 
NoDefault(keyword::no_default), - #[peek(keyword::no_default_bounds, name = "no_default_bounds")] - NoBounds(keyword::no_default_bounds), - #[peek(keyword::constant, name = "constant")] - Constant(keyword::constant), + #[peek(keyword::no_default, name = "no_default")] + NoDefault(keyword::no_default), + #[peek(keyword::no_default_bounds, name = "no_default_bounds")] + NoBounds(keyword::no_default_bounds), + #[peek(keyword::constant, name = "constant")] + Constant(keyword::constant), } /// Parsing for `#[pallet::X]` #[derive(derive_syn_parse::Parse)] pub struct PalletAttr { - _pound: Token![#], - #[bracket] - _bracket: token::Bracket, - #[inside(_bracket)] - _pallet: keyword::pallet, - #[prefix(Token![::] in _bracket)] - #[inside(_bracket)] - typ: PalletAttrType, + _pound: Token![#], + #[bracket] + _bracket: token::Bracket, + #[inside(_bracket)] + _pallet: keyword::pallet, + #[prefix(Token![::] in _bracket)] + #[inside(_bracket)] + typ: PalletAttrType, } /// Parse for `IsType<::RuntimeEvent>` and retrieve `$path` pub struct IsTypeBoundEventParse(syn::Path); impl syn::parse::Parse for IsTypeBoundEventParse { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - let config_path = input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - - Ok(Self(config_path)) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + let config_path = input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(config_path)) + } } /// Parse for `From` or `From>` or `From>` pub struct FromEventParse { - is_generic: bool, - has_instance: bool, + is_generic: bool, + has_instance: bool, } impl syn::parse::Parse for FromEventParse { - fn parse(input: syn::parse::ParseStream) -> 
syn::Result { - let mut is_generic = false; - let mut has_instance = false; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![<]) { - is_generic = true; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - input.parse::()?; - input.parse::()?; - has_instance = true; - } - input.parse::]>()?; - } - input.parse::]>()?; - - Ok(Self { - is_generic, - has_instance, - }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut is_generic = false; + let mut has_instance = false; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![<]) { + is_generic = true; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + input.parse::()?; + input.parse::()?; + has_instance = true; + } + input.parse::]>()?; + } + input.parse::]>()?; + + Ok(Self { is_generic, has_instance }) + } } /// Check if trait_item is `type RuntimeEvent`, if so checks its bounds are those expected. 
/// (Event type is reserved type) fn check_event_type( - frame_system: &syn::Path, - trait_item: &syn::TraitItem, - trait_has_instance: bool, + frame_system: &syn::Path, + trait_item: &syn::TraitItem, + trait_has_instance: bool, ) -> syn::Result { - if let syn::TraitItem::Type(type_) = trait_item { - if type_.ident == "RuntimeEvent" { - // Check event has no generics - if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ + if let syn::TraitItem::Type(type_) = trait_item { + if type_.ident == "RuntimeEvent" { + // Check event has no generics + if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ no generics nor where_clause"; - return Err(syn::Error::new(trait_item.span(), msg)); - } + return Err(syn::Error::new(trait_item.span(), msg)) + } - // Check bound contains IsType and From - let has_is_type_bound = type_.bounds.iter().any(|s| { - syn::parse2::(s.to_token_stream()) - .map_or(false, |b| has_expected_system_config(b.0, frame_system)) - }); + // Check bound contains IsType and From + let has_is_type_bound = type_.bounds.iter().any(|s| { + syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b.0, frame_system)) + }); - if !has_is_type_bound { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ + if !has_is_type_bound { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ bound: `IsType<::RuntimeEvent>`".to_string(); - return Err(syn::Error::new(type_.span(), msg)); - } - - let from_event_bound = type_ - .bounds - .iter() - .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); - - let from_event_bound = if let Some(b) = from_event_bound { - b - } else { - let msg = "Invalid `type RuntimeEvent`, 
associated type `RuntimeEvent` is reserved and must \ + return Err(syn::Error::new(type_.span(), msg)) + } + + let from_event_bound = type_ + .bounds + .iter() + .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); + + let from_event_bound = if let Some(b) = from_event_bound { + b + } else { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ bound: `From` or `From>` or `From>`"; - return Err(syn::Error::new(type_.span(), msg)); - }; + return Err(syn::Error::new(type_.span(), msg)) + }; - if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) - { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ + if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) + { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ `From`. Config and generic Event must be both with instance or \ without instance"; - return Err(syn::Error::new(type_.span(), msg)); - } - - Ok(true) - } else { - Ok(false) - } - } else { - Ok(false) - } + return Err(syn::Error::new(type_.span(), msg)) + } + + Ok(true) + } else { + Ok(false) + } + } else { + Ok(false) + } } /// Check that the path to `frame_system::Config` is valid, this is that the path is just /// `frame_system::Config` or when using the `frame` crate it is `frame::xyz::frame_system::Config`. fn has_expected_system_config(path: syn::Path, frame_system: &syn::Path) -> bool { - // Check if `frame_system` is actually 'frame_system'. - if path.segments.iter().all(|s| s.ident != "frame_system") { - return false; - } - - let mut expected_system_config = match ( - is_using_frame_crate(&path), - is_using_frame_crate(frame_system), - ) { - (true, false) => - // We can't use the path to `frame_system` from `frame` if `frame_system` is not being - // in scope through `frame`. 
- { - return false - } - (false, true) => - // We know that the only valid frame_system path is one that is `frame_system`, as - // `frame` re-exports it as such. - { - syn::parse2::(quote::quote!(frame_system)).expect("is a valid path; qed") - } - (_, _) => - // They are either both `frame_system` or both `frame::xyz::frame_system`. - { - frame_system.clone() - } - }; - - expected_system_config - .segments - .push(syn::PathSegment::from(syn::Ident::new( - "Config", - path.span(), - ))); - - // the parse path might be something like `frame_system::Config<...>`, so we - // only compare the idents along the path. - expected_system_config - .segments - .into_iter() - .map(|ps| ps.ident) - .collect::>() - == path - .segments - .into_iter() - .map(|ps| ps.ident) - .collect::>() + // Check if `frame_system` is actually 'frame_system'. + if path.segments.iter().all(|s| s.ident != "frame_system") { + return false + } + + let mut expected_system_config = + match (is_using_frame_crate(&path), is_using_frame_crate(&frame_system)) { + (true, false) => + // We can't use the path to `frame_system` from `frame` if `frame_system` is not being + // in scope through `frame`. + return false, + (false, true) => + // We know that the only valid frame_system path is one that is `frame_system`, as + // `frame` re-exports it as such. + syn::parse2::(quote::quote!(frame_system)).expect("is a valid path; qed"), + (_, _) => + // They are either both `frame_system` or both `frame::xyz::frame_system`. + frame_system.clone(), + }; + + expected_system_config + .segments + .push(syn::PathSegment::from(syn::Ident::new("Config", path.span()))); + + // the parse path might be something like `frame_system::Config<...>`, so we + // only compare the idents along the path. 
+ expected_system_config + .segments + .into_iter() + .map(|ps| ps.ident) + .collect::>() == + path.segments.into_iter().map(|ps| ps.ident).collect::>() } /// Replace ident `Self` by `T` pub fn replace_self_by_t(input: proc_macro2::TokenStream) -> proc_macro2::TokenStream { - input - .into_iter() - .map(|token_tree| match token_tree { - proc_macro2::TokenTree::Group(group) => { - proc_macro2::Group::new(group.delimiter(), replace_self_by_t(group.stream())).into() - } - proc_macro2::TokenTree::Ident(ident) if ident == "Self" => { - proc_macro2::Ident::new("T", ident.span()).into() - } - other => other, - }) - .collect() + input + .into_iter() + .map(|token_tree| match token_tree { + proc_macro2::TokenTree::Group(group) => + proc_macro2::Group::new(group.delimiter(), replace_self_by_t(group.stream())).into(), + proc_macro2::TokenTree::Ident(ident) if ident == "Self" => + proc_macro2::Ident::new("T", ident.span()).into(), + other => other, + }) + .collect() } impl ConfigDef { - pub fn try_from( - frame_system: &syn::Path, - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - enable_default: bool, - ) -> syn::Result { - let item = if let syn::Item::Trait(item) = item { - item - } else { - let msg = "Invalid pallet::config, expected trait definition"; - return Err(syn::Error::new(item.span(), msg)); - }; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::config, trait must be public"; - return Err(syn::Error::new(item.span(), msg)); - } - - syn::parse2::(item.ident.to_token_stream())?; - - let where_clause = { - let stream = replace_self_by_t(item.generics.where_clause.to_token_stream()); - syn::parse2::>(stream).expect( - "Internal error: replacing `Self` by `T` should result in valid where + pub fn try_from( + frame_system: &syn::Path, + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + enable_default: bool, + ) -> syn::Result { + let item = if let syn::Item::Trait(item) = item { + item + } 
else { + let msg = "Invalid pallet::config, expected trait definition"; + return Err(syn::Error::new(item.span(), msg)) + }; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::config, trait must be public"; + return Err(syn::Error::new(item.span(), msg)) + } + + syn::parse2::(item.ident.to_token_stream())?; + + let where_clause = { + let stream = replace_self_by_t(item.generics.where_clause.to_token_stream()); + syn::parse2::>(stream).expect( + "Internal error: replacing `Self` by `T` should result in valid where clause", - ) - }; - - if item.generics.params.len() > 1 { - let msg = "Invalid pallet::config, expected no more than one generic"; - return Err(syn::Error::new(item.generics.params[2].span(), msg)); - } - - let has_instance = if item.generics.params.first().is_some() { - helper::check_config_def_gen(&item.generics, item.ident.span())?; - true - } else { - false - }; - - let has_frame_system_supertrait = item.supertraits.iter().any(|s| { - syn::parse2::(s.to_token_stream()) - .map_or(false, |b| has_expected_system_config(b, frame_system)) - }); - - let mut has_event_type = false; - let mut consts_metadata = vec![]; - let mut default_sub_trait = if enable_default { - Some(DefaultTrait { - items: Default::default(), - has_system: has_frame_system_supertrait, - }) - } else { - None - }; - for trait_item in &mut item.items { - let is_event = check_event_type(frame_system, trait_item, has_instance)?; - has_event_type = has_event_type || is_event; - - let mut already_no_default = false; - let mut already_constant = false; - let mut already_no_default_bounds = false; - - while let Ok(Some(pallet_attr)) = - helper::take_first_item_pallet_attr::(trait_item) - { - match (pallet_attr.typ, &trait_item) { - (PalletAttrType::Constant(_), syn::TraitItem::Type(ref typ)) => { - if already_constant { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::constant] attribute not allowed.", - )); - } - 
already_constant = true; - consts_metadata.push(ConstMetadataDef::try_from(typ)?); - } - (PalletAttrType::Constant(_), _) => { - return Err(syn::Error::new( - trait_item.span(), - "Invalid #[pallet::constant] in #[pallet::config], expected type item", - )) - } - (PalletAttrType::NoDefault(_), _) => { - if !enable_default { - return Err(syn::Error::new( + ) + }; + + if item.generics.params.len() > 1 { + let msg = "Invalid pallet::config, expected no more than one generic"; + return Err(syn::Error::new(item.generics.params[2].span(), msg)) + } + + let has_instance = if item.generics.params.first().is_some() { + helper::check_config_def_gen(&item.generics, item.ident.span())?; + true + } else { + false + }; + + let has_frame_system_supertrait = item.supertraits.iter().any(|s| { + syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b, frame_system)) + }); + + let mut has_event_type = false; + let mut consts_metadata = vec![]; + let mut default_sub_trait = if enable_default { + Some(DefaultTrait { + items: Default::default(), + has_system: has_frame_system_supertrait, + }) + } else { + None + }; + for trait_item in &mut item.items { + let is_event = check_event_type(frame_system, trait_item, has_instance)?; + has_event_type = has_event_type || is_event; + + let mut already_no_default = false; + let mut already_constant = false; + let mut already_no_default_bounds = false; + + while let Ok(Some(pallet_attr)) = + helper::take_first_item_pallet_attr::(trait_item) + { + match (pallet_attr.typ, &trait_item) { + (PalletAttrType::Constant(_), syn::TraitItem::Type(ref typ)) => { + if already_constant { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::constant] attribute not allowed.", + )) + } + already_constant = true; + consts_metadata.push(ConstMetadataDef::try_from(typ)?); + }, + (PalletAttrType::Constant(_), _) => + return Err(syn::Error::new( + trait_item.span(), + "Invalid #[pallet::constant] in 
#[pallet::config], expected type item", + )), + (PalletAttrType::NoDefault(_), _) => { + if !enable_default { + return Err(syn::Error::new( pallet_attr._bracket.span.join(), "`#[pallet:no_default]` can only be used if `#[pallet::config(with_default)]` \ has been specified" - )); - } - if already_no_default { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::no_default] attribute not allowed.", - )); - } - - already_no_default = true; - } - (PalletAttrType::NoBounds(_), _) => { - if !enable_default { - return Err(syn::Error::new( + )) + } + if already_no_default { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::no_default] attribute not allowed.", + )) + } + + already_no_default = true; + }, + (PalletAttrType::NoBounds(_), _) => { + if !enable_default { + return Err(syn::Error::new( pallet_attr._bracket.span.join(), "`#[pallet:no_default_bounds]` can only be used if `#[pallet::config(with_default)]` \ has been specified" - )); - } - if already_no_default_bounds { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::no_default_bounds] attribute not allowed.", - )); - } - already_no_default_bounds = true; - } - } - } - - if !already_no_default && enable_default { - default_sub_trait - .as_mut() - .expect("is 'Some(_)' if 'enable_default'; qed") - .items - .push((trait_item.clone(), already_no_default_bounds)); - } - } - - let attr: Option = - helper::take_first_item_pallet_attr(&mut item.attrs)?; - let disable_system_supertrait_check = attr.is_some(); - - if !has_frame_system_supertrait && !disable_system_supertrait_check { - let found = if item.supertraits.is_empty() { - "none".to_string() - } else { - let mut found = item.supertraits.iter().fold(String::new(), |acc, s| { - format!("{}`{}`, ", acc, quote::quote!(#s)) - }); - found.pop(); - found.pop(); - found - }; - - let msg = format!( - "Invalid pallet::trait, expected explicit `{}::Config` as 
supertrait, \ + )) + } + if already_no_default_bounds { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::no_default_bounds] attribute not allowed.", + )) + } + already_no_default_bounds = true; + }, + } + } + + if !already_no_default && enable_default { + default_sub_trait + .as_mut() + .expect("is 'Some(_)' if 'enable_default'; qed") + .items + .push((trait_item.clone(), already_no_default_bounds)); + } + } + + let attr: Option = + helper::take_first_item_pallet_attr(&mut item.attrs)?; + let disable_system_supertrait_check = attr.is_some(); + + if !has_frame_system_supertrait && !disable_system_supertrait_check { + let found = if item.supertraits.is_empty() { + "none".to_string() + } else { + let mut found = item + .supertraits + .iter() + .fold(String::new(), |acc, s| format!("{}`{}`, ", acc, quote::quote!(#s))); + found.pop(); + found.pop(); + found + }; + + let msg = format!( + "Invalid pallet::trait, expected explicit `{}::Config` as supertrait, \ found {}. \ (try `pub trait Config: frame_system::Config {{ ...` or \ `pub trait Config: frame_system::Config {{ ...`). 
\ To disable this check, use `#[pallet::disable_frame_system_supertrait_check]`", - frame_system.to_token_stream(), - found, - ); - return Err(syn::Error::new(item.span(), msg)); - } - - Ok(Self { - index, - has_instance, - consts_metadata, - has_event_type, - where_clause, - attr_span, - default_sub_trait, - }) - } + frame_system.to_token_stream(), + found, + ); + return Err(syn::Error::new(item.span(), msg)) + } + + Ok(Self { + index, + has_instance, + consts_metadata, + has_event_type, + where_clause, + attr_span, + default_sub_trait, + }) + } } #[cfg(test)] mod tests { - use super::*; - #[test] - fn has_expected_system_config_works() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_assoc_type() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame_system::Config)) - .unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_frame() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_other_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); - let path = - 
syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_works_with_mixed_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_works_with_other_mixed_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_work_with_frame_full_path_if_not_frame_crate() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_unexpected_frame_system() { - let frame_system = - syn::parse2::(quote::quote!(framez::deps::frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_unexpected_path() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::ConfigSystem)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_not_frame_system() { - let frame_system = syn::parse2::(quote::quote!(something)).unwrap(); - let path = syn::parse2::(quote::quote!(something::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } + use 
super::*; + #[test] + fn has_expected_system_config_works() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_assoc_type() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame_system::Config)) + .unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_frame() { + let frame_system = + syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_other_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_works_with_mixed_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_works_with_other_mixed_frame_full_path() { + let frame_system = + 
syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_work_with_frame_full_path_if_not_frame_crate() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_unexpected_frame_system() { + let frame_system = + syn::parse2::(quote::quote!(framez::deps::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_unexpected_path() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::ConfigSystem)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_not_frame_system() { + let frame_system = syn::parse2::(quote::quote!(something)).unwrap(); + let path = syn::parse2::(quote::quote!(something::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } } diff --git a/support/procedural-fork/src/pallet/parse/error.rs b/support/procedural-fork/src/pallet/parse/error.rs index e93e2113f..362df8d73 100644 --- a/support/procedural-fork/src/pallet/parse/error.rs +++ b/support/procedural-fork/src/pallet/parse/error.rs @@ -22,108 +22,94 @@ use syn::{spanned::Spanned, Fields}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Error); + syn::custom_keyword!(Error); } /// Records information about the error enum variant field. pub struct VariantField { - /// Whether or not the field is named, i.e. 
whether it is a tuple variant or struct variant. - pub is_named: bool, + /// Whether or not the field is named, i.e. whether it is a tuple variant or struct variant. + pub is_named: bool, } /// Records information about the error enum variants. pub struct VariantDef { - /// The variant ident. - pub ident: syn::Ident, - /// The variant field, if any. - pub field: Option, - /// The variant doc literals. - pub docs: Vec, - /// The `cfg` attributes. - pub cfg_attrs: Vec, + /// The variant ident. + pub ident: syn::Ident, + /// The variant field, if any. + pub field: Option, + /// The variant doc literals. + pub docs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, } /// This checks error declaration as a enum declaration with only variants without fields nor /// discriminant. pub struct ErrorDef { - /// The index of error item in pallet module. - pub index: usize, - /// Variant definitions. - pub variants: Vec, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The keyword error used (contains span). - pub error: keyword::Error, - /// The span of the pallet::error attribute. - pub attr_span: proc_macro2::Span, + /// The index of error item in pallet module. + pub index: usize, + /// Variant definitions. + pub variants: Vec, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The keyword error used (contains span). + pub error: keyword::Error, + /// The span of the pallet::error attribute. 
+ pub attr_span: proc_macro2::Span, } impl ErrorDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::error, expected item enum", - )); - }; - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::error, `Error` must be public"; - return Err(syn::Error::new(item.span(), msg)); - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + item + } else { + return Err(syn::Error::new(item.span(), "Invalid pallet::error, expected item enum")) + }; + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::error, `Error` must be public"; + return Err(syn::Error::new(item.span(), msg)) + } - let instances = vec![helper::check_type_def_gen_no_bounds( - &item.generics, - item.ident.span(), - )?]; + let instances = + vec![helper::check_type_def_gen_no_bounds(&item.generics, item.ident.span())?]; - if item.generics.where_clause.is_some() { - let msg = "Invalid pallet::error, where clause is not allowed on pallet error item"; - return Err(syn::Error::new( - item.generics.where_clause.as_ref().unwrap().span(), - msg, - )); - } + if item.generics.where_clause.is_some() { + let msg = "Invalid pallet::error, where clause is not allowed on pallet error item"; + return Err(syn::Error::new(item.generics.where_clause.as_ref().unwrap().span(), msg)) + } - let error = syn::parse2::(item.ident.to_token_stream())?; + let error = syn::parse2::(item.ident.to_token_stream())?; - let variants = item - .variants - .iter() - .map(|variant| { - let field_ty = match &variant.fields { - Fields::Unit => None, - Fields::Named(_) => Some(VariantField { is_named: true }), - Fields::Unnamed(_) => Some(VariantField { is_named: false }), - }; - if 
variant.discriminant.is_some() { - let msg = "Invalid pallet::error, unexpected discriminant, discriminants \ + let variants = item + .variants + .iter() + .map(|variant| { + let field_ty = match &variant.fields { + Fields::Unit => None, + Fields::Named(_) => Some(VariantField { is_named: true }), + Fields::Unnamed(_) => Some(VariantField { is_named: false }), + }; + if variant.discriminant.is_some() { + let msg = "Invalid pallet::error, unexpected discriminant, discriminants \ are not supported"; - let span = variant.discriminant.as_ref().unwrap().0.span(); - return Err(syn::Error::new(span, msg)); - } - let cfg_attrs: Vec = helper::get_item_cfg_attrs(&variant.attrs); + let span = variant.discriminant.as_ref().unwrap().0.span(); + return Err(syn::Error::new(span, msg)) + } + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&variant.attrs); - Ok(VariantDef { - ident: variant.ident.clone(), - field: field_ty, - docs: get_doc_literals(&variant.attrs), - cfg_attrs, - }) - }) - .collect::>()?; + Ok(VariantDef { + ident: variant.ident.clone(), + field: field_ty, + docs: get_doc_literals(&variant.attrs), + cfg_attrs, + }) + }) + .collect::>()?; - Ok(ErrorDef { - attr_span, - index, - variants, - instances, - error, - }) - } + Ok(ErrorDef { attr_span, index, variants, instances, error }) + } } diff --git a/support/procedural-fork/src/pallet/parse/event.rs b/support/procedural-fork/src/pallet/parse/event.rs index 6102dd31f..0fb8ee4f5 100644 --- a/support/procedural-fork/src/pallet/parse/event.rs +++ b/support/procedural-fork/src/pallet/parse/event.rs @@ -21,28 +21,28 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(Event); - syn::custom_keyword!(pallet); - syn::custom_keyword!(generate_deposit); - syn::custom_keyword!(deposit_event); + syn::custom_keyword!(Event); + syn::custom_keyword!(pallet); + syn::custom_keyword!(generate_deposit); + syn::custom_keyword!(deposit_event); } /// Definition for pallet event enum. pub struct EventDef { - /// The index of event item in pallet module. - pub index: usize, - /// The keyword Event used (contains span). - pub event: keyword::Event, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The kind of generic the type `Event` has. - pub gen_kind: super::GenericKind, - /// Whether the function `deposit_event` must be generated. - pub deposit_event: Option, - /// Where clause used in event definition. - pub where_clause: Option, - /// The span of the pallet::event attribute. - pub attr_span: proc_macro2::Span, + /// The index of event item in pallet module. + pub index: usize, + /// The keyword Event used (contains span). + pub event: keyword::Event, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The kind of generic the type `Event` has. + pub gen_kind: super::GenericKind, + /// Whether the function `deposit_event` must be generated. + pub deposit_event: Option, + /// Where clause used in event definition. + pub where_clause: Option, + /// The span of the pallet::event attribute. + pub attr_span: proc_macro2::Span, } /// Attribute for a pallet's Event. 
@@ -50,110 +50,92 @@ pub struct EventDef { /// Syntax is: /// * `#[pallet::generate_deposit($vis fn deposit_event)]` pub struct PalletEventDepositAttr { - pub fn_vis: syn::Visibility, - // Span for the keyword deposit_event - pub fn_span: proc_macro2::Span, - // Span of the attribute - pub span: proc_macro2::Span, + pub fn_vis: syn::Visibility, + // Span for the keyword deposit_event + pub fn_span: proc_macro2::Span, + // Span of the attribute + pub span: proc_macro2::Span, } impl syn::parse::Parse for PalletEventDepositAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let span = content.parse::()?.span(); - let generate_content; - syn::parenthesized!(generate_content in content); - let fn_vis = generate_content.parse::()?; - generate_content.parse::()?; - let fn_span = generate_content.parse::()?.span(); - - Ok(PalletEventDepositAttr { - fn_vis, - span, - fn_span, - }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let span = content.parse::()?.span(); + let generate_content; + syn::parenthesized!(generate_content in content); + let fn_vis = generate_content.parse::()?; + generate_content.parse::()?; + let fn_span = generate_content.parse::()?.span(); + + Ok(PalletEventDepositAttr { fn_vis, span, fn_span }) + } } struct PalletEventAttrInfo { - deposit_event: Option, + deposit_event: Option, } impl PalletEventAttrInfo { - fn from_attrs(attrs: Vec) -> syn::Result { - let mut deposit_event = None; - for attr in attrs { - if deposit_event.is_none() { - deposit_event = Some(attr) - } else { - return Err(syn::Error::new(attr.span, "Duplicate attribute")); - } - } - - Ok(PalletEventAttrInfo { deposit_event }) - } + fn from_attrs(attrs: Vec) -> syn::Result { + let mut deposit_event = None; + for attr in 
attrs { + if deposit_event.is_none() { + deposit_event = Some(attr) + } else { + return Err(syn::Error::new(attr.span, "Duplicate attribute")) + } + } + + Ok(PalletEventAttrInfo { deposit_event }) + } } impl EventDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::event, expected enum item", - )); - }; - - let event_attrs: Vec = - helper::take_item_pallet_attrs(&mut item.attrs)?; - let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; - let deposit_event = attr_info.deposit_event; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::event, `Event` must be public"; - return Err(syn::Error::new(item.span(), msg)); - } - - let where_clause = item.generics.where_clause.clone(); - - let mut instances = vec![]; - // NOTE: Event is not allowed to be only generic on I because it is not supported - // by construct_runtime. - if let Some(u) = helper::check_type_def_optional_gen(&item.generics, item.ident.span())? { - instances.push(u); - } else { - // construct_runtime only allow non generic event for non instantiable pallet. 
- instances.push(helper::InstanceUsage { - has_instance: false, - span: item.ident.span(), - }) - } - - let has_instance = item.generics.type_params().any(|t| t.ident == "I"); - let has_config = item.generics.type_params().any(|t| t.ident == "T"); - let gen_kind = super::GenericKind::from_gens(has_config, has_instance) - .expect("Checked by `helper::check_type_def_optional_gen` above"); - - let event = syn::parse2::(item.ident.to_token_stream())?; - - Ok(EventDef { - attr_span, - index, - instances, - deposit_event, - event, - gen_kind, - where_clause, - }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + item + } else { + return Err(syn::Error::new(item.span(), "Invalid pallet::event, expected enum item")) + }; + + let event_attrs: Vec = + helper::take_item_pallet_attrs(&mut item.attrs)?; + let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; + let deposit_event = attr_info.deposit_event; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::event, `Event` must be public"; + return Err(syn::Error::new(item.span(), msg)) + } + + let where_clause = item.generics.where_clause.clone(); + + let mut instances = vec![]; + // NOTE: Event is not allowed to be only generic on I because it is not supported + // by construct_runtime. + if let Some(u) = helper::check_type_def_optional_gen(&item.generics, item.ident.span())? { + instances.push(u); + } else { + // construct_runtime only allow non generic event for non instantiable pallet. 
+ instances.push(helper::InstanceUsage { has_instance: false, span: item.ident.span() }) + } + + let has_instance = item.generics.type_params().any(|t| t.ident == "I"); + let has_config = item.generics.type_params().any(|t| t.ident == "T"); + let gen_kind = super::GenericKind::from_gens(has_config, has_instance) + .expect("Checked by `helper::check_type_def_optional_gen` above"); + + let event = syn::parse2::(item.ident.to_token_stream())?; + + Ok(EventDef { attr_span, index, instances, deposit_event, event, gen_kind, where_clause }) + } } diff --git a/support/procedural-fork/src/pallet/parse/extra_constants.rs b/support/procedural-fork/src/pallet/parse/extra_constants.rs index 38acea21a..2ba6c44b7 100644 --- a/support/procedural-fork/src/pallet/parse/extra_constants.rs +++ b/support/procedural-fork/src/pallet/parse/extra_constants.rs @@ -21,148 +21,140 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(DispatchResultWithPostInfo); - syn::custom_keyword!(Call); - syn::custom_keyword!(OriginFor); - syn::custom_keyword!(weight); - syn::custom_keyword!(compact); - syn::custom_keyword!(T); - syn::custom_keyword!(pallet); - syn::custom_keyword!(constant_name); + syn::custom_keyword!(DispatchResultWithPostInfo); + syn::custom_keyword!(Call); + syn::custom_keyword!(OriginFor); + syn::custom_keyword!(weight); + syn::custom_keyword!(compact); + syn::custom_keyword!(T); + syn::custom_keyword!(pallet); + syn::custom_keyword!(constant_name); } /// Definition of extra constants typically `impl Pallet { ... }` pub struct ExtraConstantsDef { - /// The where_clause used. - pub where_clause: Option, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The index of call item in pallet module. - pub index: usize, - /// The extra constant defined. - pub extra_constants: Vec, + /// The where_clause used. 
+ pub where_clause: Option, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The index of call item in pallet module. + pub index: usize, + /// The extra constant defined. + pub extra_constants: Vec, } /// Input definition for an constant in pallet. pub struct ExtraConstantDef { - /// Name of the function - pub ident: syn::Ident, - /// The type returned by the function - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, - /// Optional MetaData Name - pub metadata_name: Option, + /// Name of the function + pub ident: syn::Ident, + /// The type returned by the function + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, + /// Optional MetaData Name + pub metadata_name: Option, } /// Attributes for functions in extra_constants impl block. /// Parse for `#[pallet::constant_name(ConstantName)]` pub struct ExtraConstAttr { - metadata_name: syn::Ident, + metadata_name: syn::Ident, } impl syn::parse::Parse for ExtraConstAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - content.parse::()?; - - let metadata_name; - syn::parenthesized!(metadata_name in content); - Ok(ExtraConstAttr { - metadata_name: metadata_name.parse::()?, - }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + content.parse::()?; + + let metadata_name; + syn::parenthesized!(metadata_name in content); + Ok(ExtraConstAttr { metadata_name: metadata_name.parse::()? 
}) + } } impl ExtraConstantsDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::extra_constants, expected item impl", - )); - }; - - let instances = vec![ - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - helper::check_pallet_struct_usage(&item.self_ty)?, - ]; - - if let Some((_, _, for_)) = item.trait_ { - let msg = "Invalid pallet::call, expected no trait ident as in \ + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::extra_constants, expected item impl", + )) + }; + + let instances = vec![ + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + helper::check_pallet_struct_usage(&item.self_ty)?, + ]; + + if let Some((_, _, for_)) = item.trait_ { + let msg = "Invalid pallet::call, expected no trait ident as in \ `impl<..> Pallet<..> { .. 
}`"; - return Err(syn::Error::new(for_.span(), msg)); - } - - let mut extra_constants = vec![]; - for impl_item in &mut item.items { - let method = if let syn::ImplItem::Fn(method) = impl_item { - method - } else { - let msg = "Invalid pallet::call, only method accepted"; - return Err(syn::Error::new(impl_item.span(), msg)); - }; - - if !method.sig.inputs.is_empty() { - let msg = "Invalid pallet::extra_constants, method must have 0 args"; - return Err(syn::Error::new(method.sig.span(), msg)); - } - - if !method.sig.generics.params.is_empty() { - let msg = "Invalid pallet::extra_constants, method must have 0 generics"; - return Err(syn::Error::new(method.sig.generics.params[0].span(), msg)); - } - - if method.sig.generics.where_clause.is_some() { - let msg = "Invalid pallet::extra_constants, method must have no where clause"; - return Err(syn::Error::new( - method.sig.generics.where_clause.span(), - msg, - )); - } - - let type_ = match &method.sig.output { - syn::ReturnType::Default => { - let msg = "Invalid pallet::extra_constants, method must have a return type"; - return Err(syn::Error::new(method.span(), msg)); - } - syn::ReturnType::Type(_, type_) => *type_.clone(), - }; - - // parse metadata_name - let mut extra_constant_attrs: Vec = - helper::take_item_pallet_attrs(method)?; - - if extra_constant_attrs.len() > 1 { - let msg = - "Invalid attribute in pallet::constant_name, only one attribute is expected"; - return Err(syn::Error::new( - extra_constant_attrs[1].metadata_name.span(), - msg, - )); - } - - let metadata_name = extra_constant_attrs.pop().map(|attr| attr.metadata_name); - - extra_constants.push(ExtraConstantDef { - ident: method.sig.ident.clone(), - type_, - doc: get_doc_literals(&method.attrs), - metadata_name, - }); - } - - Ok(Self { - index, - instances, - where_clause: item.generics.where_clause.clone(), - extra_constants, - }) - } + return Err(syn::Error::new(for_.span(), msg)) + } + + let mut extra_constants = vec![]; + for impl_item in &mut 
item.items { + let method = if let syn::ImplItem::Fn(method) = impl_item { + method + } else { + let msg = "Invalid pallet::call, only method accepted"; + return Err(syn::Error::new(impl_item.span(), msg)) + }; + + if !method.sig.inputs.is_empty() { + let msg = "Invalid pallet::extra_constants, method must have 0 args"; + return Err(syn::Error::new(method.sig.span(), msg)) + } + + if !method.sig.generics.params.is_empty() { + let msg = "Invalid pallet::extra_constants, method must have 0 generics"; + return Err(syn::Error::new(method.sig.generics.params[0].span(), msg)) + } + + if method.sig.generics.where_clause.is_some() { + let msg = "Invalid pallet::extra_constants, method must have no where clause"; + return Err(syn::Error::new(method.sig.generics.where_clause.span(), msg)) + } + + let type_ = match &method.sig.output { + syn::ReturnType::Default => { + let msg = "Invalid pallet::extra_constants, method must have a return type"; + return Err(syn::Error::new(method.span(), msg)) + }, + syn::ReturnType::Type(_, type_) => *type_.clone(), + }; + + // parse metadata_name + let mut extra_constant_attrs: Vec = + helper::take_item_pallet_attrs(method)?; + + if extra_constant_attrs.len() > 1 { + let msg = + "Invalid attribute in pallet::constant_name, only one attribute is expected"; + return Err(syn::Error::new(extra_constant_attrs[1].metadata_name.span(), msg)) + } + + let metadata_name = extra_constant_attrs.pop().map(|attr| attr.metadata_name); + + extra_constants.push(ExtraConstantDef { + ident: method.sig.ident.clone(), + type_, + doc: get_doc_literals(&method.attrs), + metadata_name, + }); + } + + Ok(Self { + index, + instances, + where_clause: item.generics.where_clause.clone(), + extra_constants, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/genesis_build.rs b/support/procedural-fork/src/pallet/parse/genesis_build.rs index 670d4d5ef..d0e1d9ec9 100644 --- a/support/procedural-fork/src/pallet/parse/genesis_build.rs +++ 
b/support/procedural-fork/src/pallet/parse/genesis_build.rs @@ -20,47 +20,42 @@ use syn::spanned::Spanned; /// Definition for pallet genesis build implementation. pub struct GenesisBuildDef { - /// The index of item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Option>, - /// The where_clause used. - pub where_clause: Option, - /// The span of the pallet::genesis_build attribute. - pub attr_span: proc_macro2::Span, + /// The index of item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Option>, + /// The where_clause used. + pub where_clause: Option, + /// The span of the pallet::genesis_build attribute. + pub attr_span: proc_macro2::Span, } impl GenesisBuildDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::genesis_build, expected item impl"; - return Err(syn::Error::new(item.span(), msg)); - }; - - let item_trait = &item - .trait_ - .as_ref() - .ok_or_else(|| { - let msg = "Invalid pallet::genesis_build, expected impl<..> GenesisBuild<..> \ + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::genesis_build, expected item impl"; + return Err(syn::Error::new(item.span(), msg)) + }; + + let item_trait = &item + .trait_ + .as_ref() + .ok_or_else(|| { + let msg = "Invalid pallet::genesis_build, expected impl<..> GenesisBuild<..> \ for GenesisConfig<..>"; - syn::Error::new(item.span(), msg) - })? - .1; + syn::Error::new(item.span(), msg) + })? 
+ .1; - let instances = - helper::check_genesis_builder_usage(item_trait)?.map(|instances| vec![instances]); + let instances = + helper::check_genesis_builder_usage(item_trait)?.map(|instances| vec![instances]); - Ok(Self { - attr_span, - index, - instances, - where_clause: item.generics.where_clause.clone(), - }) - } + Ok(Self { attr_span, index, instances, where_clause: item.generics.where_clause.clone() }) + } } diff --git a/support/procedural-fork/src/pallet/parse/genesis_config.rs b/support/procedural-fork/src/pallet/parse/genesis_config.rs index 1c52345eb..62da6ba13 100644 --- a/support/procedural-fork/src/pallet/parse/genesis_config.rs +++ b/support/procedural-fork/src/pallet/parse/genesis_config.rs @@ -24,55 +24,50 @@ use syn::spanned::Spanned; /// * `struct GenesisConfig` /// * `enum GenesisConfig` pub struct GenesisConfigDef { - /// The index of item in pallet module. - pub index: usize, - /// The kind of generic the type `GenesisConfig` has. - pub gen_kind: super::GenericKind, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The ident of genesis_config, can be used for span. - pub genesis_config: syn::Ident, + /// The index of item in pallet module. + pub index: usize, + /// The kind of generic the type `GenesisConfig` has. + pub gen_kind: super::GenericKind, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The ident of genesis_config, can be used for span. 
+ pub genesis_config: syn::Ident, } impl GenesisConfigDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item_span = item.span(); - let (vis, ident, generics) = match &item { - syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), - _ => { - let msg = "Invalid pallet::genesis_config, expected enum or struct"; - return Err(syn::Error::new(item.span(), msg)); - } - }; + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item_span = item.span(); + let (vis, ident, generics) = match &item { + syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), + _ => { + let msg = "Invalid pallet::genesis_config, expected enum or struct"; + return Err(syn::Error::new(item.span(), msg)) + }, + }; - let mut instances = vec![]; - // NOTE: GenesisConfig is not allowed to be only generic on I because it is not supported - // by construct_runtime. - if let Some(u) = helper::check_type_def_optional_gen(generics, ident.span())? { - instances.push(u); - } + let mut instances = vec![]; + // NOTE: GenesisConfig is not allowed to be only generic on I because it is not supported + // by construct_runtime. + if let Some(u) = helper::check_type_def_optional_gen(generics, ident.span())? 
{ + instances.push(u); + } - let has_instance = generics.type_params().any(|t| t.ident == "I"); - let has_config = generics.type_params().any(|t| t.ident == "T"); - let gen_kind = super::GenericKind::from_gens(has_config, has_instance) - .expect("Checked by `helper::check_type_def_optional_gen` above"); + let has_instance = generics.type_params().any(|t| t.ident == "I"); + let has_config = generics.type_params().any(|t| t.ident == "T"); + let gen_kind = super::GenericKind::from_gens(has_config, has_instance) + .expect("Checked by `helper::check_type_def_optional_gen` above"); - if !matches!(vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::genesis_config, GenesisConfig must be public"; - return Err(syn::Error::new(item_span, msg)); - } + if !matches!(vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::genesis_config, GenesisConfig must be public"; + return Err(syn::Error::new(item_span, msg)) + } - if ident != "GenesisConfig" { - let msg = "Invalid pallet::genesis_config, ident must `GenesisConfig`"; - return Err(syn::Error::new(ident.span(), msg)); - } + if ident != "GenesisConfig" { + let msg = "Invalid pallet::genesis_config, ident must `GenesisConfig`"; + return Err(syn::Error::new(ident.span(), msg)) + } - Ok(GenesisConfigDef { - index, - genesis_config: ident.clone(), - instances, - gen_kind, - }) - } + Ok(GenesisConfigDef { index, genesis_config: ident.clone(), instances, gen_kind }) + } } diff --git a/support/procedural-fork/src/pallet/parse/helper.rs b/support/procedural-fork/src/pallet/parse/helper.rs index 3d39e0aa0..3187c9139 100644 --- a/support/procedural-fork/src/pallet/parse/helper.rs +++ b/support/procedural-fork/src/pallet/parse/helper.rs @@ -21,176 +21,164 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(I); - syn::custom_keyword!(compact); - syn::custom_keyword!(GenesisBuild); - syn::custom_keyword!(BuildGenesisConfig); - syn::custom_keyword!(Config); - syn::custom_keyword!(T); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(origin); - syn::custom_keyword!(DispatchResult); - syn::custom_keyword!(DispatchResultWithPostInfo); + syn::custom_keyword!(I); + syn::custom_keyword!(compact); + syn::custom_keyword!(GenesisBuild); + syn::custom_keyword!(BuildGenesisConfig); + syn::custom_keyword!(Config); + syn::custom_keyword!(T); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(origin); + syn::custom_keyword!(DispatchResult); + syn::custom_keyword!(DispatchResultWithPostInfo); } /// A usage of instance, either the trait `Config` has been used with instance or without instance. /// Used to check for consistency. #[derive(Clone)] pub struct InstanceUsage { - pub has_instance: bool, - pub span: proc_macro2::Span, + pub has_instance: bool, + pub span: proc_macro2::Span, } /// Trait implemented for syn items to get mutable references on their attributes. /// /// NOTE: verbatim variants are not supported. pub trait MutItemAttrs { - fn mut_item_attrs(&mut self) -> Option<&mut Vec>; + fn mut_item_attrs(&mut self) -> Option<&mut Vec>; } /// Take the first pallet attribute (e.g. 
attribute like `#[pallet..]`) and decode it to `Attr` pub(crate) fn take_first_item_pallet_attr( - item: &mut impl MutItemAttrs, + item: &mut impl MutItemAttrs, ) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let attrs = if let Some(attrs) = item.mut_item_attrs() { - attrs - } else { - return Ok(None); - }; - - if let Some(index) = attrs.iter().position(|attr| { - attr.path() - .segments - .first() - .map_or(false, |segment| segment.ident == "pallet") - }) { - let pallet_attr = attrs.remove(index); - Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) - } else { - Ok(None) - } + let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; + + if let Some(index) = attrs.iter().position(|attr| { + attr.path().segments.first().map_or(false, |segment| segment.ident == "pallet") + }) { + let pallet_attr = attrs.remove(index); + Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) + } else { + Ok(None) + } } /// Take all the pallet attributes (e.g. attribute like `#[pallet..]`) and decode them to `Attr` pub(crate) fn take_item_pallet_attrs(item: &mut impl MutItemAttrs) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let mut pallet_attrs = Vec::new(); + let mut pallet_attrs = Vec::new(); - while let Some(attr) = take_first_item_pallet_attr(item)? { - pallet_attrs.push(attr) - } + while let Some(attr) = take_first_item_pallet_attr(item)? { + pallet_attrs.push(attr) + } - Ok(pallet_attrs) + Ok(pallet_attrs) } /// Get all the cfg attributes (e.g. 
attribute like `#[cfg..]`) and decode them to `Attr` pub fn get_item_cfg_attrs(attrs: &[syn::Attribute]) -> Vec { - attrs - .iter() - .filter_map(|attr| { - if attr - .path() - .segments - .first() - .map_or(false, |segment| segment.ident == "cfg") - { - Some(attr.clone()) - } else { - None - } - }) - .collect::>() + attrs + .iter() + .filter_map(|attr| { + if attr.path().segments.first().map_or(false, |segment| segment.ident == "cfg") { + Some(attr.clone()) + } else { + None + } + }) + .collect::>() } impl MutItemAttrs for syn::Item { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - match self { - Self::Const(item) => Some(item.attrs.as_mut()), - Self::Enum(item) => Some(item.attrs.as_mut()), - Self::ExternCrate(item) => Some(item.attrs.as_mut()), - Self::Fn(item) => Some(item.attrs.as_mut()), - Self::ForeignMod(item) => Some(item.attrs.as_mut()), - Self::Impl(item) => Some(item.attrs.as_mut()), - Self::Macro(item) => Some(item.attrs.as_mut()), - Self::Mod(item) => Some(item.attrs.as_mut()), - Self::Static(item) => Some(item.attrs.as_mut()), - Self::Struct(item) => Some(item.attrs.as_mut()), - Self::Trait(item) => Some(item.attrs.as_mut()), - Self::TraitAlias(item) => Some(item.attrs.as_mut()), - Self::Type(item) => Some(item.attrs.as_mut()), - Self::Union(item) => Some(item.attrs.as_mut()), - Self::Use(item) => Some(item.attrs.as_mut()), - _ => None, - } - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + match self { + Self::Const(item) => Some(item.attrs.as_mut()), + Self::Enum(item) => Some(item.attrs.as_mut()), + Self::ExternCrate(item) => Some(item.attrs.as_mut()), + Self::Fn(item) => Some(item.attrs.as_mut()), + Self::ForeignMod(item) => Some(item.attrs.as_mut()), + Self::Impl(item) => Some(item.attrs.as_mut()), + Self::Macro(item) => Some(item.attrs.as_mut()), + Self::Mod(item) => Some(item.attrs.as_mut()), + Self::Static(item) => Some(item.attrs.as_mut()), + Self::Struct(item) => Some(item.attrs.as_mut()), + Self::Trait(item) => 
Some(item.attrs.as_mut()), + Self::TraitAlias(item) => Some(item.attrs.as_mut()), + Self::Type(item) => Some(item.attrs.as_mut()), + Self::Union(item) => Some(item.attrs.as_mut()), + Self::Use(item) => Some(item.attrs.as_mut()), + _ => None, + } + } } impl MutItemAttrs for syn::TraitItem { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - match self { - Self::Const(item) => Some(item.attrs.as_mut()), - Self::Fn(item) => Some(item.attrs.as_mut()), - Self::Type(item) => Some(item.attrs.as_mut()), - Self::Macro(item) => Some(item.attrs.as_mut()), - _ => None, - } - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + match self { + Self::Const(item) => Some(item.attrs.as_mut()), + Self::Fn(item) => Some(item.attrs.as_mut()), + Self::Type(item) => Some(item.attrs.as_mut()), + Self::Macro(item) => Some(item.attrs.as_mut()), + _ => None, + } + } } impl MutItemAttrs for Vec { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(self) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(self) + } } impl MutItemAttrs for syn::ItemMod { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } impl MutItemAttrs for syn::ImplItemFn { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } impl MutItemAttrs for syn::ItemType { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } /// Parse for `()` struct Unit; impl syn::parse::Parse for Unit { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let content; - syn::parenthesized!(content in input); - if !content.is_empty() { - let msg = "unexpected tokens, expected nothing inside parenthesis as `()`"; - return Err(syn::Error::new(content.span(), msg)); - } - Ok(Self) - } + 
fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + syn::parenthesized!(content in input); + if !content.is_empty() { + let msg = "unexpected tokens, expected nothing inside parenthesis as `()`"; + return Err(syn::Error::new(content.span(), msg)) + } + Ok(Self) + } } /// Parse for `'static` struct StaticLifetime; impl syn::parse::Parse for StaticLifetime { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lifetime = input.parse::()?; - if lifetime.ident != "static" { - let msg = "unexpected tokens, expected `static`"; - return Err(syn::Error::new(lifetime.ident.span(), msg)); - } - Ok(Self) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lifetime = input.parse::()?; + if lifetime.ident != "static" { + let msg = "unexpected tokens, expected `static`"; + return Err(syn::Error::new(lifetime.ident.span(), msg)) + } + Ok(Self) + } } /// Check the syntax: `I: 'static = ()` @@ -199,28 +187,28 @@ impl syn::parse::Parse for StaticLifetime { /// /// return the instance if found. 
pub fn check_config_def_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Result<()> { - let expected = "expected `I: 'static = ()`"; - pub struct CheckTraitDefGenerics; - impl syn::parse::Parse for CheckTraitDefGenerics { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self) - } - } - - syn::parse2::(gen.params.to_token_stream()).map_err(|e| { - let msg = format!("Invalid generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })?; - - Ok(()) + let expected = "expected `I: 'static = ()`"; + pub struct CheckTraitDefGenerics; + impl syn::parse::Parse for CheckTraitDefGenerics { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self) + } + } + + syn::parse2::(gen.params.to_token_stream()).map_err(|e| { + let msg = format!("Invalid generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })?; + + Ok(()) } /// Check the syntax: @@ -231,41 +219,38 @@ pub fn check_config_def_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn /// /// return the instance if found. 
pub fn check_type_def_gen_no_bounds( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result { - let expected = "expected `T` or `T, I = ()`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { - has_instance: false, - span: input.span(), - }; - - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - } - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `T` or `T, I = ()`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { has_instance: false, span: input.span() }; + + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + } + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -279,79 +264,76 @@ pub fn check_type_def_gen_no_bounds( /// /// return some instance usage if there is some generic, or none otherwise. 
pub fn check_type_def_optional_gen( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result> { - let expected = "expected `` or `T` or `T: Config` or `T, I = ()` or \ + let expected = "expected `` or `T` or `T: Config` or `T, I = ()` or \ `T: Config, I: 'static = ()`"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - if input.is_empty() { - return Ok(Self(None)); - } - - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; - - input.parse::()?; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))); - } - - let lookahead = input.lookahead1(); - if lookahead.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } else if lookahead.peek(syn::Token![:]) { - input.parse::()?; - input.parse::()?; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))); - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } else { - Err(lookahead.error()) - } - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0 - // Span can be call_site if generic is empty. Thus we replace it. 
- .map(|mut i| { - i.span = span; - i - }); - - Ok(i) + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(Self(None)) + } + + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + input.parse::()?; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))) + } + + let lookahead = input.lookahead1(); + if lookahead.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } else if lookahead.peek(syn::Token![:]) { + input.parse::()?; + input.parse::()?; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))) + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } else { + Err(lookahead.error()) + } + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0 + // Span can be call_site if generic is empty. Thus we replace it. + .map(|mut i| { + i.span = span; + i + }); + + Ok(i) } /// Check the syntax: @@ -360,39 +342,36 @@ pub fn check_type_def_optional_gen( /// /// return the instance if found. 
pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result { - let expected = "expected `Pallet` or `Pallet`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - } - input.parse::]>()?; - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(type_.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid pallet struct: {}", expected); - let mut err = syn::Error::new(type_.span(), msg); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `Pallet` or `Pallet`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + } + input.parse::]>()?; + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(type_.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid pallet struct: {}", expected); + let mut err = syn::Error::new(type_.span(), msg); + err.combine(e); + err + })? 
+ .0; + + Ok(i) } /// Check the generic is: @@ -403,42 +382,39 @@ pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result syn::Result { - let expected = "expected `impl` or `impl, I: 'static>`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![<]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - } - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let mut err = syn::Error::new(span, format!("Invalid generics: {}", expected)); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `impl` or `impl, I: 'static>`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![<]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + } + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let mut err = syn::Error::new(span, format!("Invalid generics: {}", expected)); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -451,73 +427,70 @@ pub fn check_impl_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Resu /// /// return the instance if found. 
pub fn check_type_def_gen( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result { - let expected = "expected `T` or `T: Config` or `T, I = ()` or \ + let expected = "expected `T` or `T: Config` or `T, I = ()` or \ `T: Config, I: 'static = ()`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; - - input.parse::()?; - - if input.is_empty() { - return Ok(Self(instance_usage)); - } - - let lookahead = input.lookahead1(); - if lookahead.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(instance_usage)) - } else if lookahead.peek(syn::Token![:]) { - input.parse::()?; - input.parse::()?; - - if input.is_empty() { - return Ok(Self(instance_usage)); - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(instance_usage)) - } else { - Err(lookahead.error()) - } - } - } - - let mut i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0; - - // Span can be call_site if generic is empty. Thus we replace it. 
- i.span = span; - - Ok(i) + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + input.parse::()?; + + if input.is_empty() { + return Ok(Self(instance_usage)) + } + + let lookahead = input.lookahead1(); + if lookahead.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(instance_usage)) + } else if lookahead.peek(syn::Token![:]) { + input.parse::()?; + input.parse::()?; + + if input.is_empty() { + return Ok(Self(instance_usage)) + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(instance_usage)) + } else { + Err(lookahead.error()) + } + } + } + + let mut i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0; + + // Span can be call_site if generic is empty. Thus we replace it. 
+ i.span = span; + + Ok(i) } /// Check the syntax: @@ -528,43 +501,40 @@ pub fn check_type_def_gen( /// return the instance if found for `GenesisBuild` /// return None for BuildGenesisConfig pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result> { - let expected = "expected `BuildGenesisConfig` (or the deprecated `GenesisBuild` or `GenesisBuild`)"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; - - if input.peek(keyword::GenesisBuild) { - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - } - input.parse::]>()?; - Ok(Self(Some(instance_usage))) - } else { - input.parse::()?; - Ok(Self(None)) - } - } - } - - let i = syn::parse2::(type_.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid genesis builder: {}", expected); - let mut err = syn::Error::new(type_.span(), msg); - err.combine(e); - err - })? 
- .0; - - Ok(i) + let expected = "expected `BuildGenesisConfig` (or the deprecated `GenesisBuild` or `GenesisBuild`)"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + if input.peek(keyword::GenesisBuild) { + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + } + input.parse::]>()?; + return Ok(Self(Some(instance_usage))) + } else { + input.parse::()?; + return Ok(Self(None)) + } + } + } + + let i = syn::parse2::(type_.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid genesis builder: {}", expected); + let mut err = syn::Error::new(type_.span(), msg); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -576,89 +546,87 @@ pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result syn::Result> { - let expected = "expected `` or `T: Config` or `T: Config, I: 'static`"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - if input.is_empty() { - return Ok(Self(None)); - } - - input.parse::()?; - input.parse::()?; - input.parse::()?; - - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))); - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? 
- .0 - // Span can be call_site if generic is empty. Thus we replace it. - .map(|mut i| { - i.span = span; - i - }); - - Ok(i) + let expected = "expected `` or `T: Config` or `T: Config, I: 'static`"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(Self(None)) + } + + input.parse::()?; + input.parse::()?; + input.parse::()?; + + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))) + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0 + // Span can be call_site if generic is empty. Thus we replace it. + .map(|mut i| { + i.span = span; + i + }); + + Ok(i) } /// Check the keyword `DispatchResultWithPostInfo` or `DispatchResult`. 
pub fn check_pallet_call_return_type(type_: &syn::Type) -> syn::Result<()> { - pub struct Checker; - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lookahead = input.lookahead1(); - if lookahead.peek(keyword::DispatchResultWithPostInfo) { - input.parse::()?; - Ok(Self) - } else if lookahead.peek(keyword::DispatchResult) { - input.parse::()?; - Ok(Self) - } else { - Err(lookahead.error()) - } - } - } - - syn::parse2::(type_.to_token_stream()).map(|_| ()) + pub struct Checker; + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keyword::DispatchResultWithPostInfo) { + input.parse::()?; + Ok(Self) + } else if lookahead.peek(keyword::DispatchResult) { + input.parse::()?; + Ok(Self) + } else { + Err(lookahead.error()) + } + } + } + + syn::parse2::(type_.to_token_stream()).map(|_| ()) } pub(crate) fn two128_str(s: &str) -> TokenStream { - bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes())) + bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes()).into_iter()) } pub(crate) fn bytes_to_array(bytes: impl IntoIterator) -> TokenStream { - let bytes = bytes.into_iter(); + let bytes = bytes.into_iter(); - quote!( - [ #( #bytes ),* ] - ) + quote!( + [ #( #bytes ),* ] + ) + .into() } diff --git a/support/procedural-fork/src/pallet/parse/hooks.rs b/support/procedural-fork/src/pallet/parse/hooks.rs index 1cf5c72cc..37d7d22f4 100644 --- a/support/procedural-fork/src/pallet/parse/hooks.rs +++ b/support/procedural-fork/src/pallet/parse/hooks.rs @@ -20,67 +20,67 @@ use syn::spanned::Spanned; /// Implementation of the pallet hooks. pub struct HooksDef { - /// The index of item in pallet. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The where_clause used. - pub where_clause: Option, - /// The span of the pallet::hooks attribute. 
- pub attr_span: proc_macro2::Span, - /// Boolean flag, set to true if the `on_runtime_upgrade` method of hooks was implemented. - pub has_runtime_upgrade: bool, + /// The index of item in pallet. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The where_clause used. + pub where_clause: Option, + /// The span of the pallet::hooks attribute. + pub attr_span: proc_macro2::Span, + /// Boolean flag, set to true if the `on_runtime_upgrade` method of hooks was implemented. + pub has_runtime_upgrade: bool, } impl HooksDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::hooks, expected item impl"; - return Err(syn::Error::new(item.span(), msg)); - }; + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::hooks, expected item impl"; + return Err(syn::Error::new(item.span(), msg)) + }; - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; - let item_trait = &item - .trait_ - .as_ref() - .ok_or_else(|| { - let msg = "Invalid pallet::hooks, expected impl<..> Hooks \ + let item_trait = &item + .trait_ + .as_ref() + .ok_or_else(|| { + let msg = "Invalid pallet::hooks, expected impl<..> Hooks \ for Pallet<..>"; - syn::Error::new(item.span(), msg) - })? - .1; + syn::Error::new(item.span(), msg) + })? 
+ .1; - if item_trait.segments.len() != 1 || item_trait.segments[0].ident != "Hooks" { - let msg = format!( - "Invalid pallet::hooks, expected trait to be `Hooks` found `{}`\ + if item_trait.segments.len() != 1 || item_trait.segments[0].ident != "Hooks" { + let msg = format!( + "Invalid pallet::hooks, expected trait to be `Hooks` found `{}`\ , you can import from `frame_support::pallet_prelude`", - quote::quote!(#item_trait) - ); + quote::quote!(#item_trait) + ); - return Err(syn::Error::new(item_trait.span(), msg)); - } + return Err(syn::Error::new(item_trait.span(), msg)) + } - let has_runtime_upgrade = item.items.iter().any(|i| match i { - syn::ImplItem::Fn(method) => method.sig.ident == "on_runtime_upgrade", - _ => false, - }); + let has_runtime_upgrade = item.items.iter().any(|i| match i { + syn::ImplItem::Fn(method) => method.sig.ident == "on_runtime_upgrade", + _ => false, + }); - Ok(Self { - attr_span, - index, - instances, - has_runtime_upgrade, - where_clause: item.generics.where_clause.clone(), - }) - } + Ok(Self { + attr_span, + index, + instances, + has_runtime_upgrade, + where_clause: item.generics.where_clause.clone(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/inherent.rs b/support/procedural-fork/src/pallet/parse/inherent.rs index 4eb04e914..d8641691a 100644 --- a/support/procedural-fork/src/pallet/parse/inherent.rs +++ b/support/procedural-fork/src/pallet/parse/inherent.rs @@ -20,41 +20,41 @@ use syn::spanned::Spanned; /// The definition of the pallet inherent implementation. pub struct InherentDef { - /// The index of inherent item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, + /// The index of inherent item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. 
+ pub instances: Vec, } impl InherentDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::inherent, expected item impl"; - return Err(syn::Error::new(item.span(), msg)); - }; - - if item.trait_.is_none() { - let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)); - } - - if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { - if last.ident != "ProvideInherent" { - let msg = "Invalid pallet::inherent, expected trait ProvideInherent"; - return Err(syn::Error::new(last.span(), msg)); - } - } else { - let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)); - } - - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; - - Ok(InherentDef { index, instances }) - } + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::inherent, expected item impl"; + return Err(syn::Error::new(item.span(), msg)) + }; + + if item.trait_.is_none() { + let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)) + } + + if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { + if last.ident != "ProvideInherent" { + let msg = "Invalid pallet::inherent, expected trait ProvideInherent"; + return Err(syn::Error::new(last.span(), msg)) + } + } else { + let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)) + } + + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, 
item.impl_token.span())?, + ]; + + Ok(InherentDef { index, instances }) + } } diff --git a/support/procedural-fork/src/pallet/parse/mod.rs b/support/procedural-fork/src/pallet/parse/mod.rs index 53e65fd12..6e1277461 100644 --- a/support/procedural-fork/src/pallet/parse/mod.rs +++ b/support/procedural-fork/src/pallet/parse/mod.rs @@ -47,68 +47,68 @@ use syn::spanned::Spanned; /// Parsed definition of a pallet. pub struct Def { - /// The module items. - /// (their order must not be modified because they are registered in individual definitions). - pub item: syn::ItemMod, - pub config: config::ConfigDef, - pub pallet_struct: pallet_struct::PalletStructDef, - pub hooks: Option, - pub call: Option, - pub tasks: Option, - pub task_enum: Option, - pub storages: Vec, - pub error: Option, - pub event: Option, - pub origin: Option, - pub inherent: Option, - pub genesis_config: Option, - pub genesis_build: Option, - pub validate_unsigned: Option, - pub extra_constants: Option, - pub composites: Vec, - pub type_values: Vec, - pub frame_system: syn::Path, - pub frame_support: syn::Path, - pub dev_mode: bool, + /// The module items. + /// (their order must not be modified because they are registered in individual definitions). 
+ pub item: syn::ItemMod, + pub config: config::ConfigDef, + pub pallet_struct: pallet_struct::PalletStructDef, + pub hooks: Option, + pub call: Option, + pub tasks: Option, + pub task_enum: Option, + pub storages: Vec, + pub error: Option, + pub event: Option, + pub origin: Option, + pub inherent: Option, + pub genesis_config: Option, + pub genesis_build: Option, + pub validate_unsigned: Option, + pub extra_constants: Option, + pub composites: Vec, + pub type_values: Vec, + pub frame_system: syn::Path, + pub frame_support: syn::Path, + pub dev_mode: bool, } impl Def { - pub fn try_from(mut item: syn::ItemMod, dev_mode: bool) -> syn::Result { - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - - let item_span = item.span(); - let items = &mut item - .content - .as_mut() - .ok_or_else(|| { - let msg = "Invalid pallet definition, expected mod to be inlined."; - syn::Error::new(item_span, msg) - })? 
- .1; - - let mut config = None; - let mut pallet_struct = None; - let mut hooks = None; - let mut call = None; - let mut tasks = None; - let mut task_enum = None; - let mut error = None; - let mut event = None; - let mut origin = None; - let mut inherent = None; - let mut genesis_config = None; - let mut genesis_build = None; - let mut validate_unsigned = None; - let mut extra_constants = None; - let mut storages = vec![]; - let mut type_values = vec![]; - let mut composites: Vec = vec![]; - - for (index, item) in items.iter_mut().enumerate() { - let pallet_attr: Option = helper::take_first_item_pallet_attr(item)?; - - match pallet_attr { + pub fn try_from(mut item: syn::ItemMod, dev_mode: bool) -> syn::Result { + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + + let item_span = item.span(); + let items = &mut item + .content + .as_mut() + .ok_or_else(|| { + let msg = "Invalid pallet definition, expected mod to be inlined."; + syn::Error::new(item_span, msg) + })? 
+ .1; + + let mut config = None; + let mut pallet_struct = None; + let mut hooks = None; + let mut call = None; + let mut tasks = None; + let mut task_enum = None; + let mut error = None; + let mut event = None; + let mut origin = None; + let mut inherent = None; + let mut genesis_config = None; + let mut genesis_build = None; + let mut validate_unsigned = None; + let mut extra_constants = None; + let mut storages = vec![]; + let mut type_values = vec![]; + let mut composites: Vec = vec![]; + + for (index, item) in items.iter_mut().enumerate() { + let pallet_attr: Option = helper::take_first_item_pallet_attr(item)?; + + match pallet_attr { Some(PalletAttr::Config(span, with_default)) if config.is_none() => config = Some(config::ConfigDef::try_from( &frame_system, @@ -212,594 +212,538 @@ impl Def { }, None => (), } - } + } - if genesis_config.is_some() != genesis_build.is_some() { - let msg = format!( - "`#[pallet::genesis_config]` and `#[pallet::genesis_build]` attributes must be \ + if genesis_config.is_some() != genesis_build.is_some() { + let msg = format!( + "`#[pallet::genesis_config]` and `#[pallet::genesis_build]` attributes must be \ either both used or both not used, instead genesis_config is {} and genesis_build \ is {}", - genesis_config.as_ref().map_or("unused", |_| "used"), - genesis_build.as_ref().map_or("unused", |_| "used"), - ); - return Err(syn::Error::new(item_span, msg)); - } - - Self::resolve_tasks(&item_span, &mut tasks, &mut task_enum, items)?; - - let def = Def { - item, - config: config - .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, - pallet_struct: pallet_struct - .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, - hooks, - call, - tasks, - task_enum, - extra_constants, - genesis_config, - genesis_build, - validate_unsigned, - error, - event, - origin, - inherent, - storages, - composites, - type_values, - frame_system, - frame_support, - dev_mode, - }; - - 
def.check_instance_usage()?; - def.check_event_usage()?; - - Ok(def) - } - - /// Performs extra logic checks necessary for the `#[pallet::tasks_experimental]` feature. - fn resolve_tasks( - item_span: &proc_macro2::Span, - tasks: &mut Option, - task_enum: &mut Option, - items: &mut Vec, - ) -> syn::Result<()> { - // fallback for manual (without macros) definition of tasks impl - Self::resolve_manual_tasks_impl(tasks, task_enum, items)?; - - // fallback for manual (without macros) definition of task enum - Self::resolve_manual_task_enum(tasks, task_enum, items)?; - - // ensure that if `task_enum` is specified, `tasks` is also specified - match (&task_enum, &tasks) { - (Some(_), None) => { - return Err(syn::Error::new( - *item_span, - "Missing `#[pallet::tasks_experimental]` impl", - )) - } - (None, Some(tasks)) => { - if tasks.tasks_attr.is_none() { - return Err(syn::Error::new( + genesis_config.as_ref().map_or("unused", |_| "used"), + genesis_build.as_ref().map_or("unused", |_| "used"), + ); + return Err(syn::Error::new(item_span, msg)) + } + + Self::resolve_tasks(&item_span, &mut tasks, &mut task_enum, items)?; + + let def = Def { + item, + config: config + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, + pallet_struct: pallet_struct + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, + hooks, + call, + tasks, + task_enum, + extra_constants, + genesis_config, + genesis_build, + validate_unsigned, + error, + event, + origin, + inherent, + storages, + composites, + type_values, + frame_system, + frame_support, + dev_mode, + }; + + def.check_instance_usage()?; + def.check_event_usage()?; + + Ok(def) + } + + /// Performs extra logic checks necessary for the `#[pallet::tasks_experimental]` feature. 
+ fn resolve_tasks( + item_span: &proc_macro2::Span, + tasks: &mut Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + // fallback for manual (without macros) definition of tasks impl + Self::resolve_manual_tasks_impl(tasks, task_enum, items)?; + + // fallback for manual (without macros) definition of task enum + Self::resolve_manual_task_enum(tasks, task_enum, items)?; + + // ensure that if `task_enum` is specified, `tasks` is also specified + match (&task_enum, &tasks) { + (Some(_), None) => + return Err(syn::Error::new( + *item_span, + "Missing `#[pallet::tasks_experimental]` impl", + )), + (None, Some(tasks)) => + if tasks.tasks_attr.is_none() { + return Err(syn::Error::new( tasks.item_impl.impl_token.span(), "A `#[pallet::tasks_experimental]` attribute must be attached to your `Task` impl if the \ task enum has been omitted", - )); - } - } - _ => (), - } - - Ok(()) - } - - /// Tries to locate task enum based on the tasks impl target if attribute is not specified - /// but impl is present. If one is found, `task_enum` is set appropriately. - fn resolve_manual_task_enum( - tasks: &Option, - task_enum: &mut Option, - items: &mut Vec, - ) -> syn::Result<()> { - let (None, Some(tasks)) = (&task_enum, &tasks) else { - return Ok(()); - }; - let syn::Type::Path(type_path) = &*tasks.item_impl.self_ty else { - return Ok(()); - }; - let type_path = type_path.path.segments.iter().collect::>(); - let (Some(seg), None) = (type_path.first(), type_path.get(1)) else { - return Ok(()); - }; - let mut result = None; - for item in items { - let syn::Item::Enum(item_enum) = item else { - continue; - }; - if item_enum.ident == seg.ident { - result = Some(syn::parse2::( - item_enum.to_token_stream(), - )?); - // replace item with a no-op because it will be handled by the expansion of - // `task_enum`. 
We use a no-op instead of simply removing it from the vec - // so that any indices collected by `Def::try_from` remain accurate - *item = syn::Item::Verbatim(quote::quote!()); - break; - } - } - *task_enum = result; - Ok(()) - } - - /// Tries to locate a manual tasks impl (an impl implementing a trait whose last path segment is - /// `Task`) in the event that one has not been found already via the attribute macro - pub fn resolve_manual_tasks_impl( - tasks: &mut Option, - task_enum: &Option, - items: &Vec, - ) -> syn::Result<()> { - let None = tasks else { return Ok(()) }; - let mut result = None; - for item in items { - let syn::Item::Impl(item_impl) = item else { - continue; - }; - let Some((_, path, _)) = &item_impl.trait_ else { - continue; - }; - let Some(trait_last_seg) = path.segments.last() else { - continue; - }; - let syn::Type::Path(target_path) = &*item_impl.self_ty else { - continue; - }; - let target_path = target_path.path.segments.iter().collect::>(); - let (Some(target_ident), None) = (target_path.first(), target_path.get(1)) else { - continue; - }; - let matches_task_enum = match task_enum { - Some(task_enum) => task_enum.item_enum.ident == target_ident.ident, - None => true, - }; - if trait_last_seg.ident == "Task" && matches_task_enum { - result = Some(syn::parse2::(item_impl.to_token_stream())?); - break; - } - } - *tasks = result; - Ok(()) - } - - /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared - /// and trait defines type RuntimeEvent, or not declared and no trait associated type. - fn check_event_usage(&self) -> syn::Result<()> { - match (self.config.has_event_type, self.event.is_some()) { - (true, false) => { - let msg = "Invalid usage of RuntimeEvent, `Config` contains associated type `RuntimeEvent`, \ + )) + } else { + }, + _ => (), + } + + Ok(()) + } + + /// Tries to locate task enum based on the tasks impl target if attribute is not specified + /// but impl is present. 
If one is found, `task_enum` is set appropriately. + fn resolve_manual_task_enum( + tasks: &Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + let (None, Some(tasks)) = (&task_enum, &tasks) else { return Ok(()) }; + let syn::Type::Path(type_path) = &*tasks.item_impl.self_ty else { return Ok(()) }; + let type_path = type_path.path.segments.iter().collect::>(); + let (Some(seg), None) = (type_path.get(0), type_path.get(1)) else { return Ok(()) }; + let mut result = None; + for item in items { + let syn::Item::Enum(item_enum) = item else { continue }; + if item_enum.ident == seg.ident { + result = Some(syn::parse2::(item_enum.to_token_stream())?); + // replace item with a no-op because it will be handled by the expansion of + // `task_enum`. We use a no-op instead of simply removing it from the vec + // so that any indices collected by `Def::try_from` remain accurate + *item = syn::Item::Verbatim(quote::quote!()); + break + } + } + *task_enum = result; + Ok(()) + } + + /// Tries to locate a manual tasks impl (an impl implementing a trait whose last path segment is + /// `Task`) in the event that one has not been found already via the attribute macro + pub fn resolve_manual_tasks_impl( + tasks: &mut Option, + task_enum: &Option, + items: &Vec, + ) -> syn::Result<()> { + let None = tasks else { return Ok(()) }; + let mut result = None; + for item in items { + let syn::Item::Impl(item_impl) = item else { continue }; + let Some((_, path, _)) = &item_impl.trait_ else { continue }; + let Some(trait_last_seg) = path.segments.last() else { continue }; + let syn::Type::Path(target_path) = &*item_impl.self_ty else { continue }; + let target_path = target_path.path.segments.iter().collect::>(); + let (Some(target_ident), None) = (target_path.get(0), target_path.get(1)) else { + continue + }; + let matches_task_enum = match task_enum { + Some(task_enum) => task_enum.item_enum.ident == target_ident.ident, + None => true, + }; + if trait_last_seg.ident 
== "Task" && matches_task_enum { + result = Some(syn::parse2::(item_impl.to_token_stream())?); + break + } + } + *tasks = result; + Ok(()) + } + + /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared + /// and trait defines type RuntimeEvent, or not declared and no trait associated type. + fn check_event_usage(&self) -> syn::Result<()> { + match (self.config.has_event_type, self.event.is_some()) { + (true, false) => { + let msg = "Invalid usage of RuntimeEvent, `Config` contains associated type `RuntimeEvent`, \ but enum `Event` is not declared (i.e. no use of `#[pallet::event]`). \ Note that type `RuntimeEvent` in trait is reserved to work alongside pallet event."; - Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) - } - (false, true) => { - let msg = "Invalid usage of RuntimeEvent, `Config` contains no associated type \ + Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) + }, + (false, true) => { + let msg = "Invalid usage of RuntimeEvent, `Config` contains no associated type \ `RuntimeEvent`, but enum `Event` is declared (in use of `#[pallet::event]`). \ An RuntimeEvent associated type must be declare on trait `Config`."; - Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) - } - _ => Ok(()), - } - } - - /// Check that usage of trait `Config` is consistent with the definition, i.e. it is used with - /// instance iff it is defined with instance. 
- fn check_instance_usage(&self) -> syn::Result<()> { - let mut instances = vec![]; - instances.extend_from_slice(&self.pallet_struct.instances[..]); - instances.extend(&mut self.storages.iter().flat_map(|s| s.instances.clone())); - if let Some(call) = &self.call { - instances.extend_from_slice(&call.instances[..]); - } - if let Some(hooks) = &self.hooks { - instances.extend_from_slice(&hooks.instances[..]); - } - if let Some(event) = &self.event { - instances.extend_from_slice(&event.instances[..]); - } - if let Some(error) = &self.error { - instances.extend_from_slice(&error.instances[..]); - } - if let Some(inherent) = &self.inherent { - instances.extend_from_slice(&inherent.instances[..]); - } - if let Some(origin) = &self.origin { - instances.extend_from_slice(&origin.instances[..]); - } - if let Some(genesis_config) = &self.genesis_config { - instances.extend_from_slice(&genesis_config.instances[..]); - } - if let Some(genesis_build) = &self.genesis_build { - if let Some(i) = genesis_build.instances.as_ref() { - instances.extend_from_slice(i) - } - } - if let Some(extra_constants) = &self.extra_constants { - instances.extend_from_slice(&extra_constants.instances[..]); - } - - let mut errors = instances.into_iter().filter_map(|instances| { - if instances.has_instance == self.config.has_instance { - return None; - } - let msg = if self.config.has_instance { - "Invalid generic declaration, trait is defined with instance but generic use none" - } else { - "Invalid generic declaration, trait is defined without instance but generic use \ + Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) + }, + _ => Ok(()), + } + } + + /// Check that usage of trait `Config` is consistent with the definition, i.e. it is used with + /// instance iff it is defined with instance. 
+ fn check_instance_usage(&self) -> syn::Result<()> { + let mut instances = vec![]; + instances.extend_from_slice(&self.pallet_struct.instances[..]); + instances.extend(&mut self.storages.iter().flat_map(|s| s.instances.clone())); + if let Some(call) = &self.call { + instances.extend_from_slice(&call.instances[..]); + } + if let Some(hooks) = &self.hooks { + instances.extend_from_slice(&hooks.instances[..]); + } + if let Some(event) = &self.event { + instances.extend_from_slice(&event.instances[..]); + } + if let Some(error) = &self.error { + instances.extend_from_slice(&error.instances[..]); + } + if let Some(inherent) = &self.inherent { + instances.extend_from_slice(&inherent.instances[..]); + } + if let Some(origin) = &self.origin { + instances.extend_from_slice(&origin.instances[..]); + } + if let Some(genesis_config) = &self.genesis_config { + instances.extend_from_slice(&genesis_config.instances[..]); + } + if let Some(genesis_build) = &self.genesis_build { + genesis_build.instances.as_ref().map(|i| instances.extend_from_slice(&i)); + } + if let Some(extra_constants) = &self.extra_constants { + instances.extend_from_slice(&extra_constants.instances[..]); + } + + let mut errors = instances.into_iter().filter_map(|instances| { + if instances.has_instance == self.config.has_instance { + return None + } + let msg = if self.config.has_instance { + "Invalid generic declaration, trait is defined with instance but generic use none" + } else { + "Invalid generic declaration, trait is defined without instance but generic use \ some" - }; - Some(syn::Error::new(instances.span, msg)) - }); - - if let Some(mut first_error) = errors.next() { - for error in errors { - first_error.combine(error) - } - Err(first_error) - } else { - Ok(()) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T: Config` - /// * or `T: Config, I: 'static` - pub fn type_impl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - 
quote::quote_spanned!(span => T: Config, I: 'static) - } else { - quote::quote_spanned!(span => T: Config) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T: Config` - /// * or `T: Config, I: 'static = ()` - pub fn type_decl_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T: Config, I: 'static = ()) - } else { - quote::quote_spanned!(span => T: Config) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T` - /// * or `T, I = ()` - pub fn type_decl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T, I = ()) - } else { - quote::quote_spanned!(span => T) - } - } - - /// Depending on if pallet is instantiable: - /// * either `` - /// * or `` - /// to be used when using pallet trait `Config` - pub fn trait_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => ) - } else { - quote::quote_spanned!(span => ) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T` - /// * or `T, I` - pub fn type_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T, I) - } else { - quote::quote_spanned!(span => T) - } - } + }; + Some(syn::Error::new(instances.span, msg)) + }); + + if let Some(mut first_error) = errors.next() { + for error in errors { + first_error.combine(error) + } + Err(first_error) + } else { + Ok(()) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T: Config` + /// * or `T: Config, I: 'static` + pub fn type_impl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T: Config, I: 'static) + } else { + quote::quote_spanned!(span => T: Config) + } + } + + /// 
Depending on if pallet is instantiable: + /// * either `T: Config` + /// * or `T: Config, I: 'static = ()` + pub fn type_decl_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T: Config, I: 'static = ()) + } else { + quote::quote_spanned!(span => T: Config) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T` + /// * or `T, I = ()` + pub fn type_decl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T, I = ()) + } else { + quote::quote_spanned!(span => T) + } + } + + /// Depending on if pallet is instantiable: + /// * either `` + /// * or `` + /// to be used when using pallet trait `Config` + pub fn trait_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => ) + } else { + quote::quote_spanned!(span => ) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T` + /// * or `T, I` + pub fn type_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T, I) + } else { + quote::quote_spanned!(span => T) + } + } } /// Some generic kind for type which can be not generic, or generic over config, /// or generic over config and instance, but not generic only over instance. pub enum GenericKind { - None, - Config, - ConfigAndInstance, + None, + Config, + ConfigAndInstance, } impl GenericKind { - /// Return Err if it is only generics over instance but not over config. 
- pub fn from_gens(has_config: bool, has_instance: bool) -> Result { - match (has_config, has_instance) { - (false, false) => Ok(GenericKind::None), - (true, false) => Ok(GenericKind::Config), - (true, true) => Ok(GenericKind::ConfigAndInstance), - (false, true) => Err(()), - } - } - - /// Return the generic to be used when using the type. - /// - /// Depending on its definition it can be: ``, `T` or `T, I` - pub fn type_use_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - match self { - GenericKind::None => quote::quote!(), - GenericKind::Config => quote::quote_spanned!(span => T), - GenericKind::ConfigAndInstance => quote::quote_spanned!(span => T, I), - } - } - - /// Return the generic to be used in `impl<..>` when implementing on the type. - pub fn type_impl_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - match self { - GenericKind::None => quote::quote!(), - GenericKind::Config => quote::quote_spanned!(span => T: Config), - GenericKind::ConfigAndInstance => { - quote::quote_spanned!(span => T: Config, I: 'static) - } - } - } - - /// Return whereas the type has some generic. - pub fn is_generic(&self) -> bool { - match self { - GenericKind::None => false, - GenericKind::Config | GenericKind::ConfigAndInstance => true, - } - } + /// Return Err if it is only generics over instance but not over config. + pub fn from_gens(has_config: bool, has_instance: bool) -> Result { + match (has_config, has_instance) { + (false, false) => Ok(GenericKind::None), + (true, false) => Ok(GenericKind::Config), + (true, true) => Ok(GenericKind::ConfigAndInstance), + (false, true) => Err(()), + } + } + + /// Return the generic to be used when using the type. 
+ /// + /// Depending on its definition it can be: ``, `T` or `T, I` + pub fn type_use_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + match self { + GenericKind::None => quote::quote!(), + GenericKind::Config => quote::quote_spanned!(span => T), + GenericKind::ConfigAndInstance => quote::quote_spanned!(span => T, I), + } + } + + /// Return the generic to be used in `impl<..>` when implementing on the type. + pub fn type_impl_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + match self { + GenericKind::None => quote::quote!(), + GenericKind::Config => quote::quote_spanned!(span => T: Config), + GenericKind::ConfigAndInstance => { + quote::quote_spanned!(span => T: Config, I: 'static) + }, + } + } + + /// Return whereas the type has some generic. + pub fn is_generic(&self) -> bool { + match self { + GenericKind::None => false, + GenericKind::Config | GenericKind::ConfigAndInstance => true, + } + } } /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(origin); - syn::custom_keyword!(call); - syn::custom_keyword!(tasks_experimental); - syn::custom_keyword!(task_enum); - syn::custom_keyword!(task_list); - syn::custom_keyword!(task_condition); - syn::custom_keyword!(task_index); - syn::custom_keyword!(weight); - syn::custom_keyword!(event); - syn::custom_keyword!(config); - syn::custom_keyword!(with_default); - syn::custom_keyword!(hooks); - syn::custom_keyword!(inherent); - syn::custom_keyword!(error); - syn::custom_keyword!(storage); - syn::custom_keyword!(genesis_build); - syn::custom_keyword!(genesis_config); - syn::custom_keyword!(validate_unsigned); - syn::custom_keyword!(type_value); - syn::custom_keyword!(pallet); - syn::custom_keyword!(extra_constants); - syn::custom_keyword!(composite_enum); + syn::custom_keyword!(origin); + syn::custom_keyword!(call); + syn::custom_keyword!(tasks_experimental); + syn::custom_keyword!(task_enum); + syn::custom_keyword!(task_list); + syn::custom_keyword!(task_condition); + syn::custom_keyword!(task_index); + syn::custom_keyword!(weight); + syn::custom_keyword!(event); + syn::custom_keyword!(config); + syn::custom_keyword!(with_default); + syn::custom_keyword!(hooks); + syn::custom_keyword!(inherent); + syn::custom_keyword!(error); + syn::custom_keyword!(storage); + syn::custom_keyword!(genesis_build); + syn::custom_keyword!(genesis_config); + syn::custom_keyword!(validate_unsigned); + syn::custom_keyword!(type_value); + syn::custom_keyword!(pallet); + syn::custom_keyword!(extra_constants); + syn::custom_keyword!(composite_enum); } /// Parse attributes for item in pallet module /// syntax must be `pallet::` (e.g. `#[pallet::config]`) enum PalletAttr { - Config(proc_macro2::Span, bool), - Pallet(proc_macro2::Span), - Hooks(proc_macro2::Span), - /// A `#[pallet::call]` with optional attributes to specialize the behaviour. 
- /// - /// # Attributes - /// - /// Each attribute `attr` can take the form of `#[pallet::call(attr = …)]` or - /// `#[pallet::call(attr(…))]`. The possible attributes are: - /// - /// ## `weight` - /// - /// Can be used to reduce the repetitive weight annotation in the trivial case. It accepts one - /// argument that is expected to be an implementation of the `WeightInfo` or something that - /// behaves syntactically equivalent. This allows to annotate a `WeightInfo` for all the calls. - /// Now each call does not need to specify its own `#[pallet::weight]` but can instead use the - /// one from the `#[pallet::call]` definition. So instead of having to write it on each call: - /// - /// ```ignore - /// #[pallet::call] - /// impl Pallet { - /// #[pallet::weight(T::WeightInfo::create())] - /// pub fn create( - /// ``` - /// you can now omit it on the call itself, if the name of the weigh function matches the call: - /// - /// ```ignore - /// #[pallet::call(weight = ::WeightInfo)] - /// impl Pallet { - /// pub fn create( - /// ``` - /// - /// It is possible to use this syntax together with instantiated pallets by using `Config` - /// instead. - /// - /// ### Dev Mode - /// - /// Normally the `dev_mode` sets all weights of calls without a `#[pallet::weight]` annotation - /// to zero. Now when there is a `weight` attribute on the `#[pallet::call]`, then that is used - /// instead of the zero weight. So to say: it works together with `dev_mode`. 
- RuntimeCall(Option, proc_macro2::Span), - Error(proc_macro2::Span), - Tasks(proc_macro2::Span), - TaskList(proc_macro2::Span), - TaskCondition(proc_macro2::Span), - TaskIndex(proc_macro2::Span), - RuntimeTask(proc_macro2::Span), - RuntimeEvent(proc_macro2::Span), - RuntimeOrigin(proc_macro2::Span), - Inherent(proc_macro2::Span), - Storage(proc_macro2::Span), - GenesisConfig(proc_macro2::Span), - GenesisBuild(proc_macro2::Span), - ValidateUnsigned(proc_macro2::Span), - TypeValue(proc_macro2::Span), - ExtraConstants(proc_macro2::Span), - Composite(proc_macro2::Span), + Config(proc_macro2::Span, bool), + Pallet(proc_macro2::Span), + Hooks(proc_macro2::Span), + /// A `#[pallet::call]` with optional attributes to specialize the behaviour. + /// + /// # Attributes + /// + /// Each attribute `attr` can take the form of `#[pallet::call(attr = …)]` or + /// `#[pallet::call(attr(…))]`. The possible attributes are: + /// + /// ## `weight` + /// + /// Can be used to reduce the repetitive weight annotation in the trivial case. It accepts one + /// argument that is expected to be an implementation of the `WeightInfo` or something that + /// behaves syntactically equivalent. This allows to annotate a `WeightInfo` for all the calls. + /// Now each call does not need to specify its own `#[pallet::weight]` but can instead use the + /// one from the `#[pallet::call]` definition. So instead of having to write it on each call: + /// + /// ```ignore + /// #[pallet::call] + /// impl Pallet { + /// #[pallet::weight(T::WeightInfo::create())] + /// pub fn create( + /// ``` + /// you can now omit it on the call itself, if the name of the weigh function matches the call: + /// + /// ```ignore + /// #[pallet::call(weight = ::WeightInfo)] + /// impl Pallet { + /// pub fn create( + /// ``` + /// + /// It is possible to use this syntax together with instantiated pallets by using `Config` + /// instead. 
+ /// + /// ### Dev Mode + /// + /// Normally the `dev_mode` sets all weights of calls without a `#[pallet::weight]` annotation + /// to zero. Now when there is a `weight` attribute on the `#[pallet::call]`, then that is used + /// instead of the zero weight. So to say: it works together with `dev_mode`. + RuntimeCall(Option, proc_macro2::Span), + Error(proc_macro2::Span), + Tasks(proc_macro2::Span), + TaskList(proc_macro2::Span), + TaskCondition(proc_macro2::Span), + TaskIndex(proc_macro2::Span), + RuntimeTask(proc_macro2::Span), + RuntimeEvent(proc_macro2::Span), + RuntimeOrigin(proc_macro2::Span), + Inherent(proc_macro2::Span), + Storage(proc_macro2::Span), + GenesisConfig(proc_macro2::Span), + GenesisBuild(proc_macro2::Span), + ValidateUnsigned(proc_macro2::Span), + TypeValue(proc_macro2::Span), + ExtraConstants(proc_macro2::Span), + Composite(proc_macro2::Span), } impl PalletAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::Config(span, _) => *span, - Self::Pallet(span) => *span, - Self::Hooks(span) => *span, - Self::Tasks(span) => *span, - Self::TaskCondition(span) => *span, - Self::TaskIndex(span) => *span, - Self::TaskList(span) => *span, - Self::Error(span) => *span, - Self::RuntimeTask(span) => *span, - Self::RuntimeCall(_, span) => *span, - Self::RuntimeEvent(span) => *span, - Self::RuntimeOrigin(span) => *span, - Self::Inherent(span) => *span, - Self::Storage(span) => *span, - Self::GenesisConfig(span) => *span, - Self::GenesisBuild(span) => *span, - Self::ValidateUnsigned(span) => *span, - Self::TypeValue(span) => *span, - Self::ExtraConstants(span) => *span, - Self::Composite(span) => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::Config(span, _) => *span, + Self::Pallet(span) => *span, + Self::Hooks(span) => *span, + Self::Tasks(span) => *span, + Self::TaskCondition(span) => *span, + Self::TaskIndex(span) => *span, + Self::TaskList(span) => *span, + Self::Error(span) => *span, + 
Self::RuntimeTask(span) => *span, + Self::RuntimeCall(_, span) => *span, + Self::RuntimeEvent(span) => *span, + Self::RuntimeOrigin(span) => *span, + Self::Inherent(span) => *span, + Self::Storage(span) => *span, + Self::GenesisConfig(span) => *span, + Self::GenesisBuild(span) => *span, + Self::ValidateUnsigned(span) => *span, + Self::TypeValue(span) => *span, + Self::ExtraConstants(span) => *span, + Self::Composite(span) => *span, + } + } } impl syn::parse::Parse for PalletAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::config) { - let span = content.parse::()?.span(); - let with_default = content.peek(syn::token::Paren); - if with_default { - let inside_config; - let _paren = syn::parenthesized!(inside_config in content); - inside_config.parse::()?; - } - Ok(PalletAttr::Config(span, with_default)) - } else if lookahead.peek(keyword::pallet) { - Ok(PalletAttr::Pallet( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::hooks) { - Ok(PalletAttr::Hooks(content.parse::()?.span())) - } else if lookahead.peek(keyword::call) { - let span = content.parse::().expect("peeked").span(); - let attr = match content.is_empty() { - true => None, - false => Some(InheritedCallWeightAttr::parse(&content)?), - }; - Ok(PalletAttr::RuntimeCall(attr, span)) - } else if lookahead.peek(keyword::tasks_experimental) { - Ok(PalletAttr::Tasks( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::task_enum) { - Ok(PalletAttr::RuntimeTask( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::task_condition) { - Ok(PalletAttr::TaskCondition( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::task_index) { - Ok(PalletAttr::TaskIndex( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::task_list) { - 
Ok(PalletAttr::TaskList( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::error) { - Ok(PalletAttr::Error(content.parse::()?.span())) - } else if lookahead.peek(keyword::event) { - Ok(PalletAttr::RuntimeEvent( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::origin) { - Ok(PalletAttr::RuntimeOrigin( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::inherent) { - Ok(PalletAttr::Inherent( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::storage) { - Ok(PalletAttr::Storage( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::genesis_config) { - Ok(PalletAttr::GenesisConfig( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::genesis_build) { - Ok(PalletAttr::GenesisBuild( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::validate_unsigned) { - Ok(PalletAttr::ValidateUnsigned( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::type_value) { - Ok(PalletAttr::TypeValue( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::extra_constants) { - Ok(PalletAttr::ExtraConstants( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::composite_enum) { - Ok(PalletAttr::Composite( - content.parse::()?.span(), - )) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::config) { + let span = content.parse::()?.span(); + let with_default = content.peek(syn::token::Paren); + if with_default { + let inside_config; + let _paren = syn::parenthesized!(inside_config in content); + inside_config.parse::()?; + } + Ok(PalletAttr::Config(span, with_default)) + } else if lookahead.peek(keyword::pallet) { + Ok(PalletAttr::Pallet(content.parse::()?.span())) + } else if 
lookahead.peek(keyword::hooks) { + Ok(PalletAttr::Hooks(content.parse::()?.span())) + } else if lookahead.peek(keyword::call) { + let span = content.parse::().expect("peeked").span(); + let attr = match content.is_empty() { + true => None, + false => Some(InheritedCallWeightAttr::parse(&content)?), + }; + Ok(PalletAttr::RuntimeCall(attr, span)) + } else if lookahead.peek(keyword::tasks_experimental) { + Ok(PalletAttr::Tasks(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_enum) { + Ok(PalletAttr::RuntimeTask(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_condition) { + Ok(PalletAttr::TaskCondition(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_index) { + Ok(PalletAttr::TaskIndex(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_list) { + Ok(PalletAttr::TaskList(content.parse::()?.span())) + } else if lookahead.peek(keyword::error) { + Ok(PalletAttr::Error(content.parse::()?.span())) + } else if lookahead.peek(keyword::event) { + Ok(PalletAttr::RuntimeEvent(content.parse::()?.span())) + } else if lookahead.peek(keyword::origin) { + Ok(PalletAttr::RuntimeOrigin(content.parse::()?.span())) + } else if lookahead.peek(keyword::inherent) { + Ok(PalletAttr::Inherent(content.parse::()?.span())) + } else if lookahead.peek(keyword::storage) { + Ok(PalletAttr::Storage(content.parse::()?.span())) + } else if lookahead.peek(keyword::genesis_config) { + Ok(PalletAttr::GenesisConfig(content.parse::()?.span())) + } else if lookahead.peek(keyword::genesis_build) { + Ok(PalletAttr::GenesisBuild(content.parse::()?.span())) + } else if lookahead.peek(keyword::validate_unsigned) { + Ok(PalletAttr::ValidateUnsigned(content.parse::()?.span())) + } else if lookahead.peek(keyword::type_value) { + Ok(PalletAttr::TypeValue(content.parse::()?.span())) + } else if lookahead.peek(keyword::extra_constants) { + Ok(PalletAttr::ExtraConstants(content.parse::()?.span())) + } else if 
lookahead.peek(keyword::composite_enum) { + Ok(PalletAttr::Composite(content.parse::()?.span())) + } else { + Err(lookahead.error()) + } + } } /// The optional weight annotation on a `#[pallet::call]` like `#[pallet::call(weight($type))]`. #[derive(Clone)] pub struct InheritedCallWeightAttr { - pub typename: syn::Type, - pub span: proc_macro2::Span, + pub typename: syn::Type, + pub span: proc_macro2::Span, } impl syn::parse::Parse for InheritedCallWeightAttr { - // Parses `(weight($type))` or `(weight = $type)`. - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let content; - syn::parenthesized!(content in input); - content.parse::()?; - let lookahead = content.lookahead1(); - - let buffer = if lookahead.peek(syn::token::Paren) { - let inner; - syn::parenthesized!(inner in content); - inner - } else if lookahead.peek(syn::Token![=]) { - content.parse::().expect("peeked"); - content - } else { - return Err(lookahead.error()); - }; - - Ok(Self { - typename: buffer.parse()?, - span: input.span(), - }) - } + // Parses `(weight($type))` or `(weight = $type)`. 
+ fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + syn::parenthesized!(content in input); + content.parse::()?; + let lookahead = content.lookahead1(); + + let buffer = if lookahead.peek(syn::token::Paren) { + let inner; + syn::parenthesized!(inner in content); + inner + } else if lookahead.peek(syn::Token![=]) { + content.parse::().expect("peeked"); + content + } else { + return Err(lookahead.error()) + }; + + Ok(Self { typename: buffer.parse()?, span: input.span() }) + } } diff --git a/support/procedural-fork/src/pallet/parse/origin.rs b/support/procedural-fork/src/pallet/parse/origin.rs index 2dd84c40d..76e2a8841 100644 --- a/support/procedural-fork/src/pallet/parse/origin.rs +++ b/support/procedural-fork/src/pallet/parse/origin.rs @@ -25,56 +25,48 @@ use syn::spanned::Spanned; /// * `struct Origin` /// * `enum Origin` pub struct OriginDef { - /// The index of item in pallet module. - pub index: usize, - pub has_instance: bool, - pub is_generic: bool, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, + /// The index of item in pallet module. + pub index: usize, + pub has_instance: bool, + pub is_generic: bool, + /// A set of usage of instance, must be check for consistency with trait. 
+ pub instances: Vec, } impl OriginDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item_span = item.span(); - let (vis, ident, generics) = match &item { - syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Type(item) => (&item.vis, &item.ident, &item.generics), - _ => { - let msg = "Invalid pallet::origin, expected enum or struct or type"; - return Err(syn::Error::new(item.span(), msg)); - } - }; + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item_span = item.span(); + let (vis, ident, generics) = match &item { + syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Type(item) => (&item.vis, &item.ident, &item.generics), + _ => { + let msg = "Invalid pallet::origin, expected enum or struct or type"; + return Err(syn::Error::new(item.span(), msg)) + }, + }; - let has_instance = generics.params.len() == 2; - let is_generic = !generics.params.is_empty(); + let has_instance = generics.params.len() == 2; + let is_generic = !generics.params.is_empty(); - let mut instances = vec![]; - if let Some(u) = helper::check_type_def_optional_gen(generics, item.span())? { - instances.push(u); - } else { - // construct_runtime only allow generic event for instantiable pallet. - instances.push(helper::InstanceUsage { - has_instance: false, - span: ident.span(), - }) - } + let mut instances = vec![]; + if let Some(u) = helper::check_type_def_optional_gen(generics, item.span())? { + instances.push(u); + } else { + // construct_runtime only allow generic event for instantiable pallet. 
+ instances.push(helper::InstanceUsage { has_instance: false, span: ident.span() }) + } - if !matches!(vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::origin, Origin must be public"; - return Err(syn::Error::new(item_span, msg)); - } + if !matches!(vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::origin, Origin must be public"; + return Err(syn::Error::new(item_span, msg)) + } - if ident != "Origin" { - let msg = "Invalid pallet::origin, ident must `Origin`"; - return Err(syn::Error::new(ident.span(), msg)); - } + if ident != "Origin" { + let msg = "Invalid pallet::origin, ident must `Origin`"; + return Err(syn::Error::new(ident.span(), msg)) + } - Ok(OriginDef { - index, - has_instance, - is_generic, - instances, - }) - } + Ok(OriginDef { index, has_instance, is_generic, instances }) + } } diff --git a/support/procedural-fork/src/pallet/parse/pallet_struct.rs b/support/procedural-fork/src/pallet/parse/pallet_struct.rs index 320cf01fa..b64576099 100644 --- a/support/procedural-fork/src/pallet/parse/pallet_struct.rs +++ b/support/procedural-fork/src/pallet/parse/pallet_struct.rs @@ -21,137 +21,129 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(pallet); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(without_storage_info); - syn::custom_keyword!(storage_version); + syn::custom_keyword!(pallet); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(without_storage_info); + syn::custom_keyword!(storage_version); } /// Definition of the pallet pallet. pub struct PalletStructDef { - /// The index of item in pallet pallet. - pub index: usize, - /// A set of usage of instance, must be check for consistency with config trait. - pub instances: Vec, - /// The keyword Pallet used (contains span). - pub pallet: keyword::Pallet, - /// The span of the pallet::pallet attribute. 
- pub attr_span: proc_macro2::Span, - /// Whether to specify the storages max encoded len when implementing `StorageInfoTrait`. - /// Contains the span of the attribute. - pub without_storage_info: Option, - /// The in-code storage version of the pallet. - pub storage_version: Option, + /// The index of item in pallet pallet. + pub index: usize, + /// A set of usage of instance, must be check for consistency with config trait. + pub instances: Vec, + /// The keyword Pallet used (contains span). + pub pallet: keyword::Pallet, + /// The span of the pallet::pallet attribute. + pub attr_span: proc_macro2::Span, + /// Whether to specify the storages max encoded len when implementing `StorageInfoTrait`. + /// Contains the span of the attribute. + pub without_storage_info: Option, + /// The in-code storage version of the pallet. + pub storage_version: Option, } /// Parse for one variant of: /// * `#[pallet::without_storage_info]` /// * `#[pallet::storage_version(STORAGE_VERSION)]` pub enum PalletStructAttr { - WithoutStorageInfoTrait(proc_macro2::Span), - StorageVersion { - storage_version: syn::Path, - span: proc_macro2::Span, - }, + WithoutStorageInfoTrait(proc_macro2::Span), + StorageVersion { storage_version: syn::Path, span: proc_macro2::Span }, } impl PalletStructAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::WithoutStorageInfoTrait(span) | Self::StorageVersion { span, .. } => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::WithoutStorageInfoTrait(span) | Self::StorageVersion { span, .. 
} => *span, + } + } } impl syn::parse::Parse for PalletStructAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::without_storage_info) { - let span = content.parse::()?.span(); - Ok(Self::WithoutStorageInfoTrait(span)) - } else if lookahead.peek(keyword::storage_version) { - let span = content.parse::()?.span(); - - let version_content; - syn::parenthesized!(version_content in content); - let storage_version = version_content.parse::()?; - - Ok(Self::StorageVersion { - storage_version, - span, - }) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::without_storage_info) { + let span = content.parse::()?.span(); + Ok(Self::WithoutStorageInfoTrait(span)) + } else if lookahead.peek(keyword::storage_version) { + let span = content.parse::()?.span(); + + let version_content; + syn::parenthesized!(version_content in content); + let storage_version = version_content.parse::()?; + + Ok(Self::StorageVersion { storage_version, span }) + } else { + Err(lookahead.error()) + } + } } impl PalletStructDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Struct(item) = item { - item - } else { - let msg = "Invalid pallet::pallet, expected struct definition"; - return Err(syn::Error::new(item.span(), msg)); - }; - - let mut without_storage_info = None; - let mut storage_version_found = None; - - let struct_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; - for attr in struct_attrs { - match attr { - 
PalletStructAttr::WithoutStorageInfoTrait(span) - if without_storage_info.is_none() => - { - without_storage_info = Some(span); - } - PalletStructAttr::StorageVersion { - storage_version, .. - } if storage_version_found.is_none() => { - storage_version_found = Some(storage_version); - } - attr => { - let msg = "Unexpected duplicated attribute"; - return Err(syn::Error::new(attr.span(), msg)); - } - } - } - - let pallet = syn::parse2::(item.ident.to_token_stream())?; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::pallet, Pallet must be public"; - return Err(syn::Error::new(item.span(), msg)); - } - - if item.generics.where_clause.is_some() { - let msg = "Invalid pallet::pallet, where clause not supported on Pallet declaration"; - return Err(syn::Error::new(item.generics.where_clause.span(), msg)); - } - - let instances = vec![helper::check_type_def_gen_no_bounds( - &item.generics, - item.ident.span(), - )?]; - - Ok(Self { - index, - instances, - pallet, - attr_span, - without_storage_info, - storage_version: storage_version_found, - }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Struct(item) = item { + item + } else { + let msg = "Invalid pallet::pallet, expected struct definition"; + return Err(syn::Error::new(item.span(), msg)) + }; + + let mut without_storage_info = None; + let mut storage_version_found = None; + + let struct_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + for attr in struct_attrs { + match attr { + PalletStructAttr::WithoutStorageInfoTrait(span) + if without_storage_info.is_none() => + { + without_storage_info = Some(span); + }, + PalletStructAttr::StorageVersion { storage_version, .. 
} + if storage_version_found.is_none() => + { + storage_version_found = Some(storage_version); + }, + attr => { + let msg = "Unexpected duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)) + }, + } + } + + let pallet = syn::parse2::(item.ident.to_token_stream())?; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::pallet, Pallet must be public"; + return Err(syn::Error::new(item.span(), msg)) + } + + if item.generics.where_clause.is_some() { + let msg = "Invalid pallet::pallet, where clause not supported on Pallet declaration"; + return Err(syn::Error::new(item.generics.where_clause.span(), msg)) + } + + let instances = + vec![helper::check_type_def_gen_no_bounds(&item.generics, item.ident.span())?]; + + Ok(Self { + index, + instances, + pallet, + attr_span, + without_storage_info, + storage_version: storage_version_found, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/storage.rs b/support/procedural-fork/src/pallet/parse/storage.rs index dac0782bd..9d96a18b5 100644 --- a/support/procedural-fork/src/pallet/parse/storage.rs +++ b/support/procedural-fork/src/pallet/parse/storage.rs @@ -23,16 +23,16 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(Error); - syn::custom_keyword!(pallet); - syn::custom_keyword!(getter); - syn::custom_keyword!(storage_prefix); - syn::custom_keyword!(unbounded); - syn::custom_keyword!(whitelist_storage); - syn::custom_keyword!(disable_try_decode_storage); - syn::custom_keyword!(OptionQuery); - syn::custom_keyword!(ResultQuery); - syn::custom_keyword!(ValueQuery); + syn::custom_keyword!(Error); + syn::custom_keyword!(pallet); + syn::custom_keyword!(getter); + syn::custom_keyword!(storage_prefix); + syn::custom_keyword!(unbounded); + syn::custom_keyword!(whitelist_storage); + syn::custom_keyword!(disable_try_decode_storage); + syn::custom_keyword!(OptionQuery); + syn::custom_keyword!(ResultQuery); + syn::custom_keyword!(ValueQuery); } /// Parse for one of the following: @@ -42,1003 +42,906 @@ mod keyword { /// * `#[pallet::whitelist_storage] /// * `#[pallet::disable_try_decode_storage]` pub enum PalletStorageAttr { - Getter(syn::Ident, proc_macro2::Span), - StorageName(syn::LitStr, proc_macro2::Span), - Unbounded(proc_macro2::Span), - WhitelistStorage(proc_macro2::Span), - DisableTryDecodeStorage(proc_macro2::Span), + Getter(syn::Ident, proc_macro2::Span), + StorageName(syn::LitStr, proc_macro2::Span), + Unbounded(proc_macro2::Span), + WhitelistStorage(proc_macro2::Span), + DisableTryDecodeStorage(proc_macro2::Span), } impl PalletStorageAttr { - fn attr_span(&self) -> proc_macro2::Span { - match self { - Self::Getter(_, span) - | Self::StorageName(_, span) - | Self::Unbounded(span) - | Self::WhitelistStorage(span) => *span, - Self::DisableTryDecodeStorage(span) => *span, - } - } + fn attr_span(&self) -> proc_macro2::Span { + match self { + Self::Getter(_, span) | + Self::StorageName(_, span) | + Self::Unbounded(span) | + Self::WhitelistStorage(span) => *span, + Self::DisableTryDecodeStorage(span) => *span, + } + } } impl syn::parse::Parse for PalletStorageAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - 
let attr_span = input.span(); - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::getter) { - content.parse::()?; - - let generate_content; - syn::parenthesized!(generate_content in content); - generate_content.parse::()?; - Ok(Self::Getter( - generate_content.parse::()?, - attr_span, - )) - } else if lookahead.peek(keyword::storage_prefix) { - content.parse::()?; - content.parse::()?; - - let renamed_prefix = content.parse::()?; - // Ensure the renamed prefix is a proper Rust identifier - syn::parse_str::(&renamed_prefix.value()).map_err(|_| { - let msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); - syn::Error::new(renamed_prefix.span(), msg) - })?; - - Ok(Self::StorageName(renamed_prefix, attr_span)) - } else if lookahead.peek(keyword::unbounded) { - content.parse::()?; - - Ok(Self::Unbounded(attr_span)) - } else if lookahead.peek(keyword::whitelist_storage) { - content.parse::()?; - Ok(Self::WhitelistStorage(attr_span)) - } else if lookahead.peek(keyword::disable_try_decode_storage) { - content.parse::()?; - Ok(Self::DisableTryDecodeStorage(attr_span)) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let attr_span = input.span(); + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::getter) { + content.parse::()?; + + let generate_content; + syn::parenthesized!(generate_content in content); + generate_content.parse::()?; + Ok(Self::Getter(generate_content.parse::()?, attr_span)) + } else if lookahead.peek(keyword::storage_prefix) { + content.parse::()?; + content.parse::()?; + + let renamed_prefix = content.parse::()?; + // Ensure the renamed prefix is a proper Rust identifier + syn::parse_str::(&renamed_prefix.value()).map_err(|_| { + let 
msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); + syn::Error::new(renamed_prefix.span(), msg) + })?; + + Ok(Self::StorageName(renamed_prefix, attr_span)) + } else if lookahead.peek(keyword::unbounded) { + content.parse::()?; + + Ok(Self::Unbounded(attr_span)) + } else if lookahead.peek(keyword::whitelist_storage) { + content.parse::()?; + Ok(Self::WhitelistStorage(attr_span)) + } else if lookahead.peek(keyword::disable_try_decode_storage) { + content.parse::()?; + Ok(Self::DisableTryDecodeStorage(attr_span)) + } else { + Err(lookahead.error()) + } + } } struct PalletStorageAttrInfo { - getter: Option, - rename_as: Option, - unbounded: bool, - whitelisted: bool, - try_decode: bool, + getter: Option, + rename_as: Option, + unbounded: bool, + whitelisted: bool, + try_decode: bool, } impl PalletStorageAttrInfo { - fn from_attrs(attrs: Vec) -> syn::Result { - let mut getter = None; - let mut rename_as = None; - let mut unbounded = false; - let mut whitelisted = false; - let mut disable_try_decode_storage = false; - for attr in attrs { - match attr { - PalletStorageAttr::Getter(ident, ..) if getter.is_none() => getter = Some(ident), - PalletStorageAttr::StorageName(name, ..) if rename_as.is_none() => { - rename_as = Some(name) - } - PalletStorageAttr::Unbounded(..) if !unbounded => unbounded = true, - PalletStorageAttr::WhitelistStorage(..) if !whitelisted => whitelisted = true, - PalletStorageAttr::DisableTryDecodeStorage(..) 
if !disable_try_decode_storage => { - disable_try_decode_storage = true - } - attr => { - return Err(syn::Error::new( - attr.attr_span(), - "Invalid attribute: Duplicate attribute", - )) - } - } - } - - Ok(PalletStorageAttrInfo { - getter, - rename_as, - unbounded, - whitelisted, - try_decode: !disable_try_decode_storage, - }) - } + fn from_attrs(attrs: Vec) -> syn::Result { + let mut getter = None; + let mut rename_as = None; + let mut unbounded = false; + let mut whitelisted = false; + let mut disable_try_decode_storage = false; + for attr in attrs { + match attr { + PalletStorageAttr::Getter(ident, ..) if getter.is_none() => getter = Some(ident), + PalletStorageAttr::StorageName(name, ..) if rename_as.is_none() => + rename_as = Some(name), + PalletStorageAttr::Unbounded(..) if !unbounded => unbounded = true, + PalletStorageAttr::WhitelistStorage(..) if !whitelisted => whitelisted = true, + PalletStorageAttr::DisableTryDecodeStorage(..) if !disable_try_decode_storage => + disable_try_decode_storage = true, + attr => + return Err(syn::Error::new( + attr.attr_span(), + "Invalid attribute: Duplicate attribute", + )), + } + } + + Ok(PalletStorageAttrInfo { + getter, + rename_as, + unbounded, + whitelisted, + try_decode: !disable_try_decode_storage, + }) + } } /// The value and key types used by storages. Needed to expand metadata. 
pub enum Metadata { - Value { - value: syn::Type, - }, - Map { - value: syn::Type, - key: syn::Type, - }, - CountedMap { - value: syn::Type, - key: syn::Type, - }, - DoubleMap { - value: syn::Type, - key1: syn::Type, - key2: syn::Type, - }, - NMap { - keys: Vec, - keygen: syn::Type, - value: syn::Type, - }, - CountedNMap { - keys: Vec, - keygen: syn::Type, - value: syn::Type, - }, + Value { value: syn::Type }, + Map { value: syn::Type, key: syn::Type }, + CountedMap { value: syn::Type, key: syn::Type }, + DoubleMap { value: syn::Type, key1: syn::Type, key2: syn::Type }, + NMap { keys: Vec, keygen: syn::Type, value: syn::Type }, + CountedNMap { keys: Vec, keygen: syn::Type, value: syn::Type }, } pub enum QueryKind { - OptionQuery, - ResultQuery(syn::Path, syn::Ident), - ValueQuery, + OptionQuery, + ResultQuery(syn::Path, syn::Ident), + ValueQuery, } /// Definition of a storage, storage is a storage type like /// `type MyStorage = StorageValue` /// The keys and values types are parsed in order to get metadata pub struct StorageDef { - /// The index of storage item in pallet module. - pub index: usize, - /// Visibility of the storage type. - pub vis: syn::Visibility, - /// The type ident, to generate the StoragePrefix for. - pub ident: syn::Ident, - /// The keys and value metadata of the storage. - pub metadata: Metadata, - /// The doc associated to the storage. - pub docs: Vec, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, - /// Optional getter to generate. If some then query_kind is ensured to be some as well. - pub getter: Option, - /// Optional expression that evaluates to a type that can be used as StoragePrefix instead of - /// ident. - pub rename_as: Option, - /// Whereas the querytype of the storage is OptionQuery, ResultQuery or ValueQuery. - /// Note that this is best effort as it can't be determined when QueryKind is generic, and - /// result can be false if user do some unexpected type alias. 
- pub query_kind: Option, - /// Where clause of type definition. - pub where_clause: Option, - /// The span of the pallet::storage attribute. - pub attr_span: proc_macro2::Span, - /// The `cfg` attributes. - pub cfg_attrs: Vec, - /// If generics are named (e.g. `StorageValue`) then this contains all the - /// generics of the storage. - /// If generics are not named, this is none. - pub named_generics: Option, - /// If the value stored in this storage is unbounded. - pub unbounded: bool, - /// Whether or not reads to this storage key will be ignored by benchmarking - pub whitelisted: bool, - /// Whether or not to try to decode the storage key when running try-runtime checks. - pub try_decode: bool, - /// Whether or not a default hasher is allowed to replace `_` - pub use_default_hasher: bool, + /// The index of storage item in pallet module. + pub index: usize, + /// Visibility of the storage type. + pub vis: syn::Visibility, + /// The type ident, to generate the StoragePrefix for. + pub ident: syn::Ident, + /// The keys and value metadata of the storage. + pub metadata: Metadata, + /// The doc associated to the storage. + pub docs: Vec, + /// A set of usage of instance, must be check for consistency with config. + pub instances: Vec, + /// Optional getter to generate. If some then query_kind is ensured to be some as well. + pub getter: Option, + /// Optional expression that evaluates to a type that can be used as StoragePrefix instead of + /// ident. + pub rename_as: Option, + /// Whereas the querytype of the storage is OptionQuery, ResultQuery or ValueQuery. + /// Note that this is best effort as it can't be determined when QueryKind is generic, and + /// result can be false if user do some unexpected type alias. + pub query_kind: Option, + /// Where clause of type definition. + pub where_clause: Option, + /// The span of the pallet::storage attribute. + pub attr_span: proc_macro2::Span, + /// The `cfg` attributes. 
+ pub cfg_attrs: Vec, + /// If generics are named (e.g. `StorageValue`) then this contains all the + /// generics of the storage. + /// If generics are not named, this is none. + pub named_generics: Option, + /// If the value stored in this storage is unbounded. + pub unbounded: bool, + /// Whether or not reads to this storage key will be ignored by benchmarking + pub whitelisted: bool, + /// Whether or not to try to decode the storage key when running try-runtime checks. + pub try_decode: bool, + /// Whether or not a default hasher is allowed to replace `_` + pub use_default_hasher: bool, } /// The parsed generic from the #[derive(Clone)] pub enum StorageGenerics { - DoubleMap { - hasher1: syn::Type, - key1: syn::Type, - hasher2: syn::Type, - key2: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - Map { - hasher: syn::Type, - key: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - CountedMap { - hasher: syn::Type, - key: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - Value { - value: syn::Type, - query_kind: Option, - on_empty: Option, - }, - NMap { - keygen: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - CountedNMap { - keygen: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, + DoubleMap { + hasher1: syn::Type, + key1: syn::Type, + hasher2: syn::Type, + key2: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + Map { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + CountedMap { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + Value { + value: syn::Type, + query_kind: Option, + on_empty: Option, + 
}, + NMap { + keygen: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + CountedNMap { + keygen: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, } impl StorageGenerics { - /// Return the metadata from the defined generics - fn metadata(&self) -> syn::Result { - let res = match self.clone() { - Self::DoubleMap { - value, key1, key2, .. - } => Metadata::DoubleMap { value, key1, key2 }, - Self::Map { value, key, .. } => Metadata::Map { value, key }, - Self::CountedMap { value, key, .. } => Metadata::CountedMap { value, key }, - Self::Value { value, .. } => Metadata::Value { value }, - Self::NMap { keygen, value, .. } => Metadata::NMap { - keys: collect_keys(&keygen)?, - keygen, - value, - }, - Self::CountedNMap { keygen, value, .. } => Metadata::CountedNMap { - keys: collect_keys(&keygen)?, - keygen, - value, - }, - }; - - Ok(res) - } - - /// Return the query kind from the defined generics - fn query_kind(&self) -> Option { - match &self { - Self::DoubleMap { query_kind, .. } - | Self::Map { query_kind, .. } - | Self::CountedMap { query_kind, .. } - | Self::Value { query_kind, .. } - | Self::NMap { query_kind, .. } - | Self::CountedNMap { query_kind, .. } => query_kind.clone(), - } - } + /// Return the metadata from the defined generics + fn metadata(&self) -> syn::Result { + let res = match self.clone() { + Self::DoubleMap { value, key1, key2, .. } => Metadata::DoubleMap { value, key1, key2 }, + Self::Map { value, key, .. } => Metadata::Map { value, key }, + Self::CountedMap { value, key, .. } => Metadata::CountedMap { value, key }, + Self::Value { value, .. } => Metadata::Value { value }, + Self::NMap { keygen, value, .. } => + Metadata::NMap { keys: collect_keys(&keygen)?, keygen, value }, + Self::CountedNMap { keygen, value, .. 
} => + Metadata::CountedNMap { keys: collect_keys(&keygen)?, keygen, value }, + }; + + Ok(res) + } + + /// Return the query kind from the defined generics + fn query_kind(&self) -> Option { + match &self { + Self::DoubleMap { query_kind, .. } | + Self::Map { query_kind, .. } | + Self::CountedMap { query_kind, .. } | + Self::Value { query_kind, .. } | + Self::NMap { query_kind, .. } | + Self::CountedNMap { query_kind, .. } => query_kind.clone(), + } + } } enum StorageKind { - Value, - Map, - CountedMap, - DoubleMap, - NMap, - CountedNMap, + Value, + Map, + CountedMap, + DoubleMap, + NMap, + CountedNMap, } /// Check the generics in the `map` contains the generics in `gen` may contains generics in /// `optional_gen`, and doesn't contains any other. fn check_generics( - map: &HashMap, - mandatory_generics: &[&str], - optional_generics: &[&str], - storage_type_name: &str, - args_span: proc_macro2::Span, + map: &HashMap, + mandatory_generics: &[&str], + optional_generics: &[&str], + storage_type_name: &str, + args_span: proc_macro2::Span, ) -> syn::Result<()> { - let mut errors = vec![]; - - let expectation = { - let mut e = format!( - "`{}` expect generics {}and optional generics {}", - storage_type_name, - mandatory_generics - .iter() - .map(|name| format!("`{}`, ", name)) - .collect::(), - &optional_generics - .iter() - .map(|name| format!("`{}`, ", name)) - .collect::(), - ); - e.pop(); - e.pop(); - e.push('.'); - e - }; - - for (gen_name, gen_binding) in map { - if !mandatory_generics.contains(&gen_name.as_str()) - && !optional_generics.contains(&gen_name.as_str()) - { - let msg = format!( - "Invalid pallet::storage, Unexpected generic `{}` for `{}`. 
{}", - gen_name, storage_type_name, expectation, - ); - errors.push(syn::Error::new(gen_binding.span(), msg)); - } - } - - for mandatory_generic in mandatory_generics { - if !map.contains_key(&mandatory_generic.to_string()) { - let msg = format!( - "Invalid pallet::storage, cannot find `{}` generic, required for `{}`.", - mandatory_generic, storage_type_name - ); - errors.push(syn::Error::new(args_span, msg)); - } - } - - let mut errors = errors.drain(..); - if let Some(mut error) = errors.next() { - for other_error in errors { - error.combine(other_error); - } - Err(error) - } else { - Ok(()) - } + let mut errors = vec![]; + + let expectation = { + let mut e = format!( + "`{}` expect generics {}and optional generics {}", + storage_type_name, + mandatory_generics + .iter() + .map(|name| format!("`{}`, ", name)) + .collect::(), + &optional_generics.iter().map(|name| format!("`{}`, ", name)).collect::(), + ); + e.pop(); + e.pop(); + e.push('.'); + e + }; + + for (gen_name, gen_binding) in map { + if !mandatory_generics.contains(&gen_name.as_str()) && + !optional_generics.contains(&gen_name.as_str()) + { + let msg = format!( + "Invalid pallet::storage, Unexpected generic `{}` for `{}`. 
{}", + gen_name, storage_type_name, expectation, + ); + errors.push(syn::Error::new(gen_binding.span(), msg)); + } + } + + for mandatory_generic in mandatory_generics { + if !map.contains_key(&mandatory_generic.to_string()) { + let msg = format!( + "Invalid pallet::storage, cannot find `{}` generic, required for `{}`.", + mandatory_generic, storage_type_name + ); + errors.push(syn::Error::new(args_span, msg)); + } + } + + let mut errors = errors.drain(..); + if let Some(mut error) = errors.next() { + for other_error in errors { + error.combine(other_error); + } + Err(error) + } else { + Ok(()) + } } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_named_generics( - storage: &StorageKind, - args_span: proc_macro2::Span, - args: &[syn::AssocType], - dev_mode: bool, + storage: &StorageKind, + args_span: proc_macro2::Span, + args: &[syn::AssocType], + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let mut parsed = HashMap::::new(); - - // Ensure no duplicate. 
- for arg in args { - if let Some(other) = parsed.get(&arg.ident.to_string()) { - let msg = "Invalid pallet::storage, Duplicated named generic"; - let mut err = syn::Error::new(arg.ident.span(), msg); - err.combine(syn::Error::new(other.ident.span(), msg)); - return Err(err); - } - parsed.insert(arg.ident.to_string(), arg.clone()); - } - - let mut map_mandatory_generics = vec!["Key", "Value"]; - let mut map_optional_generics = vec!["QueryKind", "OnEmpty", "MaxValues"]; - if dev_mode { - map_optional_generics.push("Hasher"); - } else { - map_mandatory_generics.push("Hasher"); - } - - let generics = match storage { - StorageKind::Value => { - check_generics( - &parsed, - &["Value"], - &["QueryKind", "OnEmpty"], - "StorageValue", - args_span, - )?; - - StorageGenerics::Value { - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - } - } - StorageKind::Map => { - check_generics( - &parsed, - &map_mandatory_generics, - &map_optional_generics, - "StorageMap", - args_span, - )?; - - StorageGenerics::Map { - hasher: parsed - .remove("Hasher") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - } - StorageKind::CountedMap => { - check_generics( - &parsed, - &map_mandatory_generics, - &map_optional_generics, - "CountedStorageMap", - args_span, - )?; - - StorageGenerics::CountedMap { - hasher: parsed - .remove("Hasher") - .map(|binding| 
binding.ty) - .unwrap_or(syn::Type::Verbatim(quote::quote! { Blake2_128Concat })), - key: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - } - StorageKind::DoubleMap => { - let mut double_map_mandatory_generics = vec!["Key1", "Key2", "Value"]; - if dev_mode { - map_optional_generics.extend(["Hasher1", "Hasher2"]); - } else { - double_map_mandatory_generics.extend(["Hasher1", "Hasher2"]); - } - - check_generics( - &parsed, - &double_map_mandatory_generics, - &map_optional_generics, - "StorageDoubleMap", - args_span, - )?; - - StorageGenerics::DoubleMap { - hasher1: parsed - .remove("Hasher1") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key1: parsed - .remove("Key1") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - hasher2: parsed - .remove("Hasher2") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key2: parsed - .remove("Key2") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - } - StorageKind::NMap => { - check_generics( - &parsed, - &["Key", "Value"], - &["QueryKind", "OnEmpty", "MaxValues"], - "StorageNMap", - args_span, - )?; - - StorageGenerics::NMap { - keygen: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory 
generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - } - StorageKind::CountedNMap => { - check_generics( - &parsed, - &["Key", "Value"], - &["QueryKind", "OnEmpty", "MaxValues"], - "CountedStorageNMap", - args_span, - )?; - - StorageGenerics::CountedNMap { - keygen: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - } - }; - - let metadata = generics.metadata()?; - let query_kind = generics.query_kind(); - - Ok((Some(generics), metadata, query_kind, false)) + let mut parsed = HashMap::::new(); + + // Ensure no duplicate. 
+ for arg in args { + if let Some(other) = parsed.get(&arg.ident.to_string()) { + let msg = "Invalid pallet::storage, Duplicated named generic"; + let mut err = syn::Error::new(arg.ident.span(), msg); + err.combine(syn::Error::new(other.ident.span(), msg)); + return Err(err) + } + parsed.insert(arg.ident.to_string(), arg.clone()); + } + + let mut map_mandatory_generics = vec!["Key", "Value"]; + let mut map_optional_generics = vec!["QueryKind", "OnEmpty", "MaxValues"]; + if dev_mode { + map_optional_generics.push("Hasher"); + } else { + map_mandatory_generics.push("Hasher"); + } + + let generics = match storage { + StorageKind::Value => { + check_generics( + &parsed, + &["Value"], + &["QueryKind", "OnEmpty"], + "StorageValue", + args_span, + )?; + + StorageGenerics::Value { + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + } + }, + StorageKind::Map => { + check_generics( + &parsed, + &map_mandatory_generics, + &map_optional_generics, + "StorageMap", + args_span, + )?; + + StorageGenerics::Map { + hasher: parsed + .remove("Hasher") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + StorageKind::CountedMap => { + check_generics( + &parsed, + &map_mandatory_generics, + &map_optional_generics, + "CountedStorageMap", + args_span, + )?; + + StorageGenerics::CountedMap { + hasher: parsed + .remove("Hasher") + .map(|binding| 
binding.ty) + .unwrap_or(syn::Type::Verbatim(quote::quote! { Blake2_128Concat })), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + StorageKind::DoubleMap => { + let mut double_map_mandatory_generics = vec!["Key1", "Key2", "Value"]; + if dev_mode { + map_optional_generics.extend(["Hasher1", "Hasher2"]); + } else { + double_map_mandatory_generics.extend(["Hasher1", "Hasher2"]); + } + + check_generics( + &parsed, + &double_map_mandatory_generics, + &map_optional_generics, + "StorageDoubleMap", + args_span, + )?; + + StorageGenerics::DoubleMap { + hasher1: parsed + .remove("Hasher1") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key1: parsed + .remove("Key1") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + hasher2: parsed + .remove("Hasher2") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key2: parsed + .remove("Key2") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + StorageKind::NMap => { + check_generics( + &parsed, + &["Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "StorageNMap", + args_span, + )?; + + StorageGenerics::NMap { + keygen: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory 
generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + StorageKind::CountedNMap => { + check_generics( + &parsed, + &["Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "CountedStorageNMap", + args_span, + )?; + + StorageGenerics::CountedNMap { + keygen: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, + }; + + let metadata = generics.metadata()?; + let query_kind = generics.query_kind(); + + Ok((Some(generics), metadata, query_kind, false)) } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_unnamed_generics( - storage: &StorageKind, - args_span: proc_macro2::Span, - args: &[syn::Type], - dev_mode: bool, + storage: &StorageKind, + args_span: proc_macro2::Span, + args: &[syn::Type], + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let retrieve_arg = |arg_pos| { - args.get(arg_pos).cloned().ok_or_else(|| { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic argument, \ + let retrieve_arg = |arg_pos| { + args.get(arg_pos).cloned().ok_or_else(|| { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic argument, \ expect at least {} args, found {}.", - arg_pos + 1, - args.len(), - ); - syn::Error::new(args_span, msg) - }) - }; - - let prefix_arg = retrieve_arg(0)?; - 
syn::parse2::(prefix_arg.to_token_stream()).map_err(|e| { - let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ + arg_pos + 1, + args.len(), + ); + syn::Error::new(args_span, msg) + }) + }; + + let prefix_arg = retrieve_arg(0)?; + syn::parse2::(prefix_arg.to_token_stream()).map_err(|e| { + let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ first generic argument must be `_`, the argument is then replaced by macro."; - let mut err = syn::Error::new(prefix_arg.span(), msg); - err.combine(e); - err - })?; - - let use_default_hasher = |arg_pos| { - let arg = retrieve_arg(arg_pos)?; - if syn::parse2::(arg.to_token_stream()).is_ok() { - if dev_mode { - Ok(true) - } else { - let msg = "`_` can only be used in dev_mode. Please specify an appropriate hasher."; - Err(syn::Error::new(arg.span(), msg)) - } - } else { - Ok(false) - } - }; - - let res = match storage { - StorageKind::Value => ( - None, - Metadata::Value { - value: retrieve_arg(1)?, - }, - retrieve_arg(2).ok(), - false, - ), - StorageKind::Map => ( - None, - Metadata::Map { - key: retrieve_arg(2)?, - value: retrieve_arg(3)?, - }, - retrieve_arg(4).ok(), - use_default_hasher(1)?, - ), - StorageKind::CountedMap => ( - None, - Metadata::CountedMap { - key: retrieve_arg(2)?, - value: retrieve_arg(3)?, - }, - retrieve_arg(4).ok(), - use_default_hasher(1)?, - ), - StorageKind::DoubleMap => ( - None, - Metadata::DoubleMap { - key1: retrieve_arg(2)?, - key2: retrieve_arg(4)?, - value: retrieve_arg(5)?, - }, - retrieve_arg(6).ok(), - use_default_hasher(1)? 
&& use_default_hasher(3)?, - ), - StorageKind::NMap => { - let keygen = retrieve_arg(1)?; - let keys = collect_keys(&keygen)?; - ( - None, - Metadata::NMap { - keys, - keygen, - value: retrieve_arg(2)?, - }, - retrieve_arg(3).ok(), - false, - ) - } - StorageKind::CountedNMap => { - let keygen = retrieve_arg(1)?; - let keys = collect_keys(&keygen)?; - ( - None, - Metadata::CountedNMap { - keys, - keygen, - value: retrieve_arg(2)?, - }, - retrieve_arg(3).ok(), - false, - ) - } - }; - - Ok(res) + let mut err = syn::Error::new(prefix_arg.span(), msg); + err.combine(e); + err + })?; + + let use_default_hasher = |arg_pos| { + let arg = retrieve_arg(arg_pos)?; + if syn::parse2::(arg.to_token_stream()).is_ok() { + if dev_mode { + Ok(true) + } else { + let msg = "`_` can only be used in dev_mode. Please specify an appropriate hasher."; + Err(syn::Error::new(arg.span(), msg)) + } + } else { + Ok(false) + } + }; + + let res = match storage { + StorageKind::Value => + (None, Metadata::Value { value: retrieve_arg(1)? }, retrieve_arg(2).ok(), false), + StorageKind::Map => ( + None, + Metadata::Map { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, + retrieve_arg(4).ok(), + use_default_hasher(1)?, + ), + StorageKind::CountedMap => ( + None, + Metadata::CountedMap { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, + retrieve_arg(4).ok(), + use_default_hasher(1)?, + ), + StorageKind::DoubleMap => ( + None, + Metadata::DoubleMap { + key1: retrieve_arg(2)?, + key2: retrieve_arg(4)?, + value: retrieve_arg(5)?, + }, + retrieve_arg(6).ok(), + use_default_hasher(1)? && use_default_hasher(3)?, + ), + StorageKind::NMap => { + let keygen = retrieve_arg(1)?; + let keys = collect_keys(&keygen)?; + ( + None, + Metadata::NMap { keys, keygen, value: retrieve_arg(2)? }, + retrieve_arg(3).ok(), + false, + ) + }, + StorageKind::CountedNMap => { + let keygen = retrieve_arg(1)?; + let keys = collect_keys(&keygen)?; + ( + None, + Metadata::CountedNMap { keys, keygen, value: retrieve_arg(2)? 
}, + retrieve_arg(3).ok(), + false, + ) + }, + }; + + Ok(res) } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_generics( - segment: &syn::PathSegment, - dev_mode: bool, + segment: &syn::PathSegment, + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let storage_kind = match &*segment.ident.to_string() { - "StorageValue" => StorageKind::Value, - "StorageMap" => StorageKind::Map, - "CountedStorageMap" => StorageKind::CountedMap, - "StorageDoubleMap" => StorageKind::DoubleMap, - "StorageNMap" => StorageKind::NMap, - "CountedStorageNMap" => StorageKind::CountedNMap, - found => { - let msg = format!( + let storage_kind = match &*segment.ident.to_string() { + "StorageValue" => StorageKind::Value, + "StorageMap" => StorageKind::Map, + "CountedStorageMap" => StorageKind::CountedMap, + "StorageDoubleMap" => StorageKind::DoubleMap, + "StorageNMap" => StorageKind::NMap, + "CountedStorageNMap" => StorageKind::CountedNMap, + found => { + let msg = format!( "Invalid pallet::storage, expected ident: `StorageValue` or \ `StorageMap` or `CountedStorageMap` or `StorageDoubleMap` or `StorageNMap` or `CountedStorageNMap` \ in order to expand metadata, found `{}`.", found, ); - return Err(syn::Error::new(segment.ident.span(), msg)); - } - }; + return Err(syn::Error::new(segment.ident.span(), msg)) + }, + }; - let args_span = segment.arguments.span(); + let args_span = segment.arguments.span(); - let args = match &segment.arguments { - syn::PathArguments::AngleBracketed(args) if !args.args.is_empty() => args, - _ => { - let msg = "Invalid pallet::storage, invalid number of generic generic arguments, \ + let args = match &segment.arguments { + syn::PathArguments::AngleBracketed(args) if !args.args.is_empty() => args, + _ => { + let msg = "Invalid pallet::storage, invalid number of generic generic arguments, \ expect more that 0 generic arguments."; - return Err(syn::Error::new(segment.span(), msg)); - } - }; - - if args - .args - 
.iter() - .all(|gen| matches!(gen, syn::GenericArgument::Type(_))) - { - let args = args - .args - .iter() - .map(|gen| match gen { - syn::GenericArgument::Type(gen) => gen.clone(), - _ => unreachable!("It is asserted above that all generics are types"), - }) - .collect::>(); - process_unnamed_generics(&storage_kind, args_span, &args, dev_mode) - } else if args - .args - .iter() - .all(|gen| matches!(gen, syn::GenericArgument::AssocType(_))) - { - let args = args - .args - .iter() - .map(|gen| match gen { - syn::GenericArgument::AssocType(gen) => gen.clone(), - _ => unreachable!("It is asserted above that all generics are bindings"), - }) - .collect::>(); - process_named_generics(&storage_kind, args_span, &args, dev_mode) - } else { - let msg = "Invalid pallet::storage, invalid generic declaration for storage. Expect only \ + return Err(syn::Error::new(segment.span(), msg)) + }, + }; + + if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::Type(_))) { + let args = args + .args + .iter() + .map(|gen| match gen { + syn::GenericArgument::Type(gen) => gen.clone(), + _ => unreachable!("It is asserted above that all generics are types"), + }) + .collect::>(); + process_unnamed_generics(&storage_kind, args_span, &args, dev_mode) + } else if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::AssocType(_))) { + let args = args + .args + .iter() + .map(|gen| match gen { + syn::GenericArgument::AssocType(gen) => gen.clone(), + _ => unreachable!("It is asserted above that all generics are bindings"), + }) + .collect::>(); + process_named_generics(&storage_kind, args_span, &args, dev_mode) + } else { + let msg = "Invalid pallet::storage, invalid generic declaration for storage. Expect only \ type generics or binding generics, e.g. `` or \ ``."; - Err(syn::Error::new(segment.span(), msg)) - } + Err(syn::Error::new(segment.span(), msg)) + } } /// Parse the 2nd type argument to `StorageNMap` and return its keys. 
fn collect_keys(keygen: &syn::Type) -> syn::Result> { - if let syn::Type::Tuple(tup) = keygen { - tup.elems - .iter() - .map(extract_key) - .collect::>>() - } else { - Ok(vec![extract_key(keygen)?]) - } + if let syn::Type::Tuple(tup) = keygen { + tup.elems.iter().map(extract_key).collect::>>() + } else { + Ok(vec![extract_key(keygen)?]) + } } /// In `Key`, extract K and return it. fn extract_key(ty: &syn::Type) -> syn::Result { - let typ = if let syn::Type::Path(typ) = ty { - typ - } else { - let msg = "Invalid pallet::storage, expected type path"; - return Err(syn::Error::new(ty.span(), msg)); - }; - - let key_struct = typ.path.segments.last().ok_or_else(|| { - let msg = "Invalid pallet::storage, expected type path with at least one segment"; - syn::Error::new(typ.path.span(), msg) - })?; - if key_struct.ident != "Key" && key_struct.ident != "NMapKey" { - let msg = "Invalid pallet::storage, expected Key or NMapKey struct"; - return Err(syn::Error::new(key_struct.ident.span(), msg)); - } - - let ty_params = if let syn::PathArguments::AngleBracketed(args) = &key_struct.arguments { - args - } else { - let msg = "Invalid pallet::storage, expected angle bracketed arguments"; - return Err(syn::Error::new(key_struct.arguments.span(), msg)); - }; - - if ty_params.args.len() != 2 { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic arguments \ + let typ = if let syn::Type::Path(typ) = ty { + typ + } else { + let msg = "Invalid pallet::storage, expected type path"; + return Err(syn::Error::new(ty.span(), msg)) + }; + + let key_struct = typ.path.segments.last().ok_or_else(|| { + let msg = "Invalid pallet::storage, expected type path with at least one segment"; + syn::Error::new(typ.path.span(), msg) + })?; + if key_struct.ident != "Key" && key_struct.ident != "NMapKey" { + let msg = "Invalid pallet::storage, expected Key or NMapKey struct"; + return Err(syn::Error::new(key_struct.ident.span(), msg)) + } + + let ty_params = if let 
syn::PathArguments::AngleBracketed(args) = &key_struct.arguments { + args + } else { + let msg = "Invalid pallet::storage, expected angle bracketed arguments"; + return Err(syn::Error::new(key_struct.arguments.span(), msg)) + }; + + if ty_params.args.len() != 2 { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ for Key struct, expected 2 args, found {}", - ty_params.args.len() - ); - return Err(syn::Error::new(ty_params.span(), msg)); - } - - let key = match &ty_params.args[1] { - syn::GenericArgument::Type(key_ty) => key_ty.clone(), - _ => { - let msg = "Invalid pallet::storage, expected type"; - return Err(syn::Error::new(ty_params.args[1].span(), msg)); - } - }; - - Ok(key) + ty_params.args.len() + ); + return Err(syn::Error::new(ty_params.span(), msg)) + } + + let key = match &ty_params.args[1] { + syn::GenericArgument::Type(key_ty) => key_ty.clone(), + _ => { + let msg = "Invalid pallet::storage, expected type"; + return Err(syn::Error::new(ty_params.args[1].span(), msg)) + }, + }; + + Ok(key) } impl StorageDef { - /// Return the storage prefix for this storage item - pub fn prefix(&self) -> String { - self.rename_as - .as_ref() - .map(syn::LitStr::value) - .unwrap_or_else(|| self.ident.to_string()) - } - - /// Return either the span of the ident or the span of the literal in the - /// #[storage_prefix] attribute - pub fn prefix_span(&self) -> proc_macro2::Span { - self.rename_as - .as_ref() - .map(syn::LitStr::span) - .unwrap_or_else(|| self.ident.span()) - } - - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - dev_mode: bool, - ) -> syn::Result { - let item = if let syn::Item::Type(item) = item { - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::storage, expect item type.", - )); - }; - - let attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; - let PalletStorageAttrInfo { - getter, - rename_as, - mut unbounded, - whitelisted, - 
try_decode, - } = PalletStorageAttrInfo::from_attrs(attrs)?; - - // set all storages to be unbounded if dev_mode is enabled - unbounded |= dev_mode; - let cfg_attrs = helper::get_item_cfg_attrs(&item.attrs); - - let instances = vec![helper::check_type_def_gen( - &item.generics, - item.ident.span(), - )?]; - - let where_clause = item.generics.where_clause.clone(); - let docs = get_doc_literals(&item.attrs); - - let typ = if let syn::Type::Path(typ) = &*item.ty { - typ - } else { - let msg = "Invalid pallet::storage, expected type path"; - return Err(syn::Error::new(item.ty.span(), msg)); - }; - - if typ.path.segments.len() != 1 { - let msg = "Invalid pallet::storage, expected type path with one segment"; - return Err(syn::Error::new(item.ty.span(), msg)); - } - - let (named_generics, metadata, query_kind, use_default_hasher) = - process_generics(&typ.path.segments[0], dev_mode)?; - - let query_kind = query_kind - .map(|query_kind| { - use syn::{ - AngleBracketedGenericArguments, GenericArgument, Path, PathArguments, Type, - TypePath, - }; - - let result_query = match query_kind { - Type::Path(path) - if path - .path - .segments - .last() - .map_or(false, |s| s.ident == "OptionQuery") => - { - return Ok(Some(QueryKind::OptionQuery)) - } - Type::Path(TypePath { - path: Path { segments, .. }, - .. - }) if segments.last().map_or(false, |s| s.ident == "ResultQuery") => segments - .last() - .expect("segments is checked to have the last value; qed") - .clone(), - Type::Path(path) - if path - .path - .segments - .last() - .map_or(false, |s| s.ident == "ValueQuery") => - { - return Ok(Some(QueryKind::ValueQuery)) - } - _ => return Ok(None), - }; - - let error_type = match result_query.arguments { - PathArguments::AngleBracketed(AngleBracketedGenericArguments { - args, .. 
- }) => { - if args.len() != 1 { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic arguments \ + /// Return the storage prefix for this storage item + pub fn prefix(&self) -> String { + self.rename_as + .as_ref() + .map(syn::LitStr::value) + .unwrap_or_else(|| self.ident.to_string()) + } + + /// Return either the span of the ident or the span of the literal in the + /// #[storage_prefix] attribute + pub fn prefix_span(&self) -> proc_macro2::Span { + self.rename_as + .as_ref() + .map(syn::LitStr::span) + .unwrap_or_else(|| self.ident.span()) + } + + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + dev_mode: bool, + ) -> syn::Result { + let item = if let syn::Item::Type(item) = item { + item + } else { + return Err(syn::Error::new(item.span(), "Invalid pallet::storage, expect item type.")) + }; + + let attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + let PalletStorageAttrInfo { getter, rename_as, mut unbounded, whitelisted, try_decode } = + PalletStorageAttrInfo::from_attrs(attrs)?; + + // set all storages to be unbounded if dev_mode is enabled + unbounded |= dev_mode; + let cfg_attrs = helper::get_item_cfg_attrs(&item.attrs); + + let instances = vec![helper::check_type_def_gen(&item.generics, item.ident.span())?]; + + let where_clause = item.generics.where_clause.clone(); + let docs = get_doc_literals(&item.attrs); + + let typ = if let syn::Type::Path(typ) = &*item.ty { + typ + } else { + let msg = "Invalid pallet::storage, expected type path"; + return Err(syn::Error::new(item.ty.span(), msg)) + }; + + if typ.path.segments.len() != 1 { + let msg = "Invalid pallet::storage, expected type path with one segment"; + return Err(syn::Error::new(item.ty.span(), msg)) + } + + let (named_generics, metadata, query_kind, use_default_hasher) = + process_generics(&typ.path.segments[0], dev_mode)?; + + let query_kind = query_kind + .map(|query_kind| { + use syn::{ + 
AngleBracketedGenericArguments, GenericArgument, Path, PathArguments, Type, + TypePath, + }; + + let result_query = match query_kind { + Type::Path(path) + if path + .path + .segments + .last() + .map_or(false, |s| s.ident == "OptionQuery") => + return Ok(Some(QueryKind::OptionQuery)), + Type::Path(TypePath { path: Path { segments, .. }, .. }) + if segments.last().map_or(false, |s| s.ident == "ResultQuery") => + segments + .last() + .expect("segments is checked to have the last value; qed") + .clone(), + Type::Path(path) + if path.path.segments.last().map_or(false, |s| s.ident == "ValueQuery") => + return Ok(Some(QueryKind::ValueQuery)), + _ => return Ok(None), + }; + + let error_type = match result_query.arguments { + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + args, .. + }) => { + if args.len() != 1 { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ for ResultQuery, expected 1 type argument, found {}", - args.len(), - ); - return Err(syn::Error::new(args.span(), msg)); - } - - args[0].clone() - } - args => { - let msg = format!( - "Invalid pallet::storage, unexpected generic args for ResultQuery, \ + args.len(), + ); + return Err(syn::Error::new(args.span(), msg)) + } + + args[0].clone() + }, + args => { + let msg = format!( + "Invalid pallet::storage, unexpected generic args for ResultQuery, \ expected angle-bracketed arguments, found `{}`", - args.to_token_stream() - ); - return Err(syn::Error::new(args.span(), msg)); - } - }; - - match error_type { - GenericArgument::Type(Type::Path(TypePath { - path: - Path { - segments: err_variant, - leading_colon, - }, - .. 
- })) => { - if err_variant.len() < 2 { - let msg = format!( - "Invalid pallet::storage, unexpected number of path segments for \ + args.to_token_stream().to_string() + ); + return Err(syn::Error::new(args.span(), msg)) + }, + }; + + match error_type { + GenericArgument::Type(Type::Path(TypePath { + path: Path { segments: err_variant, leading_colon }, + .. + })) => { + if err_variant.len() < 2 { + let msg = format!( + "Invalid pallet::storage, unexpected number of path segments for \ the generics in ResultQuery, expected a path with at least 2 \ segments, found {}", - err_variant.len(), - ); - return Err(syn::Error::new(err_variant.span(), msg)); - } - let mut error = err_variant.clone(); - let err_variant = error - .pop() - .expect("Checked to have at least 2; qed") - .into_value() - .ident; - - // Necessary here to eliminate the last double colon - let last = error - .pop() - .expect("Checked to have at least 2; qed") - .into_value(); - error.push_value(last); - - Ok(Some(QueryKind::ResultQuery( - syn::Path { - leading_colon, - segments: error, - }, - err_variant, - ))) - } - gen_arg => { - let msg = format!( + err_variant.len(), + ); + return Err(syn::Error::new(err_variant.span(), msg)) + } + let mut error = err_variant.clone(); + let err_variant = error + .pop() + .expect("Checked to have at least 2; qed") + .into_value() + .ident; + + // Necessary here to eliminate the last double colon + let last = + error.pop().expect("Checked to have at least 2; qed").into_value(); + error.push_value(last); + + Ok(Some(QueryKind::ResultQuery( + syn::Path { leading_colon, segments: error }, + err_variant, + ))) + }, + gen_arg => { + let msg = format!( "Invalid pallet::storage, unexpected generic argument kind, expected a \ type path to a `PalletError` enum variant, found `{}`", - gen_arg.to_token_stream(), + gen_arg.to_token_stream().to_string(), ); - Err(syn::Error::new(gen_arg.span(), msg)) - } - } - }) - .transpose()? 
- .unwrap_or(Some(QueryKind::OptionQuery)); - - if let (None, Some(getter)) = (query_kind.as_ref(), getter.as_ref()) { - let msg = "Invalid pallet::storage, cannot generate getter because QueryKind is not \ + Err(syn::Error::new(gen_arg.span(), msg)) + }, + } + }) + .transpose()? + .unwrap_or(Some(QueryKind::OptionQuery)); + + if let (None, Some(getter)) = (query_kind.as_ref(), getter.as_ref()) { + let msg = "Invalid pallet::storage, cannot generate getter because QueryKind is not \ identifiable. QueryKind must be `OptionQuery`, `ResultQuery`, `ValueQuery`, or default \ one to be identifiable."; - return Err(syn::Error::new(getter.span(), msg)); - } - - Ok(StorageDef { - attr_span, - index, - vis: item.vis.clone(), - ident: item.ident.clone(), - instances, - metadata, - docs, - getter, - rename_as, - query_kind, - where_clause, - cfg_attrs, - named_generics, - unbounded, - whitelisted, - try_decode, - use_default_hasher, - }) - } + return Err(syn::Error::new(getter.span(), msg)) + } + + Ok(StorageDef { + attr_span, + index, + vis: item.vis.clone(), + ident: item.ident.clone(), + instances, + metadata, + docs, + getter, + rename_as, + query_kind, + where_clause, + cfg_attrs, + named_generics, + unbounded, + whitelisted, + try_decode, + use_default_hasher, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/tasks.rs b/support/procedural-fork/src/pallet/parse/tasks.rs index 66ee1a7ef..6405bb415 100644 --- a/support/procedural-fork/src/pallet/parse/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tasks.rs @@ -30,103 +30,96 @@ use frame_support_procedural_tools::generate_access_from_frame_or_crate; use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens}; use syn::{ - parse::ParseStream, - parse2, - spanned::Spanned, - token::{Bracket, Paren, PathSep, Pound}, - Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, - PathArguments, Result, TypePath, + parse::ParseStream, + parse2, + spanned::Spanned, 
+ token::{Bracket, Paren, PathSep, Pound}, + Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, + PathArguments, Result, TypePath, }; pub mod keywords { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(tasks_experimental); - custom_keyword!(task_enum); - custom_keyword!(task_list); - custom_keyword!(task_condition); - custom_keyword!(task_index); - custom_keyword!(task_weight); - custom_keyword!(pallet); + custom_keyword!(tasks_experimental); + custom_keyword!(task_enum); + custom_keyword!(task_list); + custom_keyword!(task_condition); + custom_keyword!(task_index); + custom_keyword!(task_weight); + custom_keyword!(pallet); } /// Represents the `#[pallet::tasks_experimental]` attribute and its attached item. Also includes /// metadata about the linked [`TaskEnumDef`] if applicable. #[derive(Clone, Debug)] pub struct TasksDef { - pub tasks_attr: Option, - pub tasks: Vec, - pub item_impl: ItemImpl, - /// Path to `frame_support` - pub scrate: Path, - pub enum_ident: Ident, - pub enum_arguments: PathArguments, + pub tasks_attr: Option, + pub tasks: Vec, + pub item_impl: ItemImpl, + /// Path to `frame_support` + pub scrate: Path, + pub enum_ident: Ident, + pub enum_arguments: PathArguments, } impl syn::parse::Parse for TasksDef { - fn parse(input: ParseStream) -> Result { - let item_impl: ItemImpl = input.parse()?; - let (tasks_attrs, normal_attrs) = partition_tasks_attrs(&item_impl); - let tasks_attr = match tasks_attrs.first() { - Some(attr) => Some(parse2::(attr.to_token_stream())?), - None => None, - }; - if let Some(extra_tasks_attr) = tasks_attrs.get(1) { - return Err(Error::new( - extra_tasks_attr.span(), - "unexpected extra `#[pallet::tasks_experimental]` attribute", - )); - } - let tasks: Vec = if tasks_attr.is_some() { - item_impl - .items - .clone() - .into_iter() - .filter(|impl_item| matches!(impl_item, ImplItem::Fn(_))) - .map(|item| parse2::(item.to_token_stream())) - .collect::>()? 
- } else { - Vec::new() - }; - let mut task_indices = HashSet::::new(); - for task in tasks.iter() { - let task_index = &task.index_attr.meta.index; - if !task_indices.insert(task_index.clone()) { - return Err(Error::new( - task_index.span(), - format!("duplicate task index `{}`", task_index), - )); - } - } - let mut item_impl = item_impl; - item_impl.attrs = normal_attrs; - - // we require the path on the impl to be a TypePath - let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; - let segments = enum_path.path.segments.iter().collect::>(); - let (Some(last_seg), None) = (segments.first(), segments.get(1)) else { - return Err(Error::new( - enum_path.span(), - "if specified manually, the task enum must be defined locally in this \ + fn parse(input: ParseStream) -> Result { + let item_impl: ItemImpl = input.parse()?; + let (tasks_attrs, normal_attrs) = partition_tasks_attrs(&item_impl); + let tasks_attr = match tasks_attrs.first() { + Some(attr) => Some(parse2::(attr.to_token_stream())?), + None => None, + }; + if let Some(extra_tasks_attr) = tasks_attrs.get(1) { + return Err(Error::new( + extra_tasks_attr.span(), + "unexpected extra `#[pallet::tasks_experimental]` attribute", + )) + } + let tasks: Vec = if tasks_attr.is_some() { + item_impl + .items + .clone() + .into_iter() + .filter(|impl_item| matches!(impl_item, ImplItem::Fn(_))) + .map(|item| parse2::(item.to_token_stream())) + .collect::>()? 
+ } else { + Vec::new() + }; + let mut task_indices = HashSet::::new(); + for task in tasks.iter() { + let task_index = &task.index_attr.meta.index; + if !task_indices.insert(task_index.clone()) { + return Err(Error::new( + task_index.span(), + format!("duplicate task index `{}`", task_index), + )) + } + } + let mut item_impl = item_impl; + item_impl.attrs = normal_attrs; + + // we require the path on the impl to be a TypePath + let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; + let segments = enum_path.path.segments.iter().collect::>(); + let (Some(last_seg), None) = (segments.get(0), segments.get(1)) else { + return Err(Error::new( + enum_path.span(), + "if specified manually, the task enum must be defined locally in this \ pallet and cannot be a re-export", - )); - }; - let enum_ident = last_seg.ident.clone(); - let enum_arguments = last_seg.arguments.clone(); - - // We do this here because it would be improper to do something fallible like this at - // the expansion phase. Fallible stuff should happen during parsing. - let scrate = generate_access_from_frame_or_crate("frame-support")?; - - Ok(TasksDef { - tasks_attr, - item_impl, - tasks, - scrate, - enum_ident, - enum_arguments, - }) - } + )) + }; + let enum_ident = last_seg.ident.clone(); + let enum_arguments = last_seg.arguments.clone(); + + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; + + Ok(TasksDef { tasks_attr, item_impl, tasks, scrate, enum_ident, enum_arguments }) + } } /// Parsing for a `#[pallet::tasks_experimental]` attr. @@ -155,843 +148,821 @@ pub type PalletTaskEnumAttr = PalletTaskAttr; /// attached `#[pallet::task_enum]` attribute. 
#[derive(Clone, Debug)] pub struct TaskEnumDef { - pub attr: Option, - pub item_enum: ItemEnum, - pub scrate: Path, - pub type_use_generics: TokenStream2, + pub attr: Option, + pub item_enum: ItemEnum, + pub scrate: Path, + pub type_use_generics: TokenStream2, } impl syn::parse::Parse for TaskEnumDef { - fn parse(input: ParseStream) -> Result { - let mut item_enum = input.parse::()?; - let attr = extract_pallet_attr(&mut item_enum)?; - let attr = match attr { - Some(attr) => Some(parse2(attr)?), - None => None, - }; + fn parse(input: ParseStream) -> Result { + let mut item_enum = input.parse::()?; + let attr = extract_pallet_attr(&mut item_enum)?; + let attr = match attr { + Some(attr) => Some(parse2(attr)?), + None => None, + }; - // We do this here because it would be improper to do something fallible like this at - // the expansion phase. Fallible stuff should happen during parsing. - let scrate = generate_access_from_frame_or_crate("frame-support")?; + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; - let type_use_generics = quote!(T); + let type_use_generics = quote!(T); - Ok(TaskEnumDef { - attr, - item_enum, - scrate, - type_use_generics, - }) - } + Ok(TaskEnumDef { attr, item_enum, scrate, type_use_generics }) + } } /// Represents an individual tasks within a [`TasksDef`]. 
#[derive(Debug, Clone)] pub struct TaskDef { - pub index_attr: TaskIndexAttr, - pub condition_attr: TaskConditionAttr, - pub list_attr: TaskListAttr, - pub weight_attr: TaskWeightAttr, - pub normal_attrs: Vec, - pub item: ImplItemFn, - pub arg_names: Vec, + pub index_attr: TaskIndexAttr, + pub condition_attr: TaskConditionAttr, + pub list_attr: TaskListAttr, + pub weight_attr: TaskWeightAttr, + pub normal_attrs: Vec, + pub item: ImplItemFn, + pub arg_names: Vec, } impl syn::parse::Parse for TaskDef { - fn parse(input: ParseStream) -> Result { - let item = input.parse::()?; - // we only want to activate TaskAttrType parsing errors for tasks-related attributes, - // so we filter them here - let (task_attrs, normal_attrs) = partition_task_attrs(&item); - - let task_attrs: Vec = task_attrs - .into_iter() - .map(|attr| parse2(attr.to_token_stream())) - .collect::>()?; - - let Some(index_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_index(..)]` attribute", - )); - }; - - let Some(condition_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_condition(..)]` attribute", - )); - }; - - let Some(list_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_list(..)]` attribute", - )); - }; - - let Some(weight_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskWeight(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_weight(..)]` attribute", - )); - }; - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) - 
.collect::>() - .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_condition(..)]` attribute", - )); - } - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) - .collect::>() - .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_list(..)]` attribute", - )); - } - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) - .collect::>() - .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_index(..)]` attribute", - )); - } - - let mut arg_names = vec![]; - for input in item.sig.inputs.iter() { - match input { - syn::FnArg::Typed(pat_type) => match &*pat_type.pat { - syn::Pat::Ident(ident) => arg_names.push(ident.ident.clone()), - _ => return Err(Error::new(input.span(), "unexpected pattern type")), - }, - _ => { - return Err(Error::new( - input.span(), - "unexpected function argument type", - )) - } - } - } - - let index_attr = index_attr.try_into().expect("we check the type above; QED"); - let condition_attr = condition_attr - .try_into() - .expect("we check the type above; QED"); - let list_attr = list_attr.try_into().expect("we check the type above; QED"); - let weight_attr = weight_attr - .try_into() - .expect("we check the type above; QED"); - - Ok(TaskDef { - index_attr, - condition_attr, - list_attr, - weight_attr, - normal_attrs, - item, - arg_names, - }) - } + fn parse(input: ParseStream) -> Result { + let item = input.parse::()?; + // we only want to activate TaskAttrType parsing errors for tasks-related attributes, + // so we filter them here + let (task_attrs, normal_attrs) = partition_task_attrs(&item); + + let task_attrs: Vec = task_attrs + .into_iter() + .map(|attr| parse2(attr.to_token_stream())) + .collect::>()?; + + let Some(index_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, 
TaskAttrMeta::TaskIndex(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_index(..)]` attribute", + )) + }; + + let Some(condition_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_condition(..)]` attribute", + )) + }; + + let Some(list_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_list(..)]` attribute", + )) + }; + + let Some(weight_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskWeight(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_weight(..)]` attribute", + )) + }; + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_condition(..)]` attribute", + )) + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_list(..)]` attribute", + )) + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_index(..)]` attribute", + )) + } + + let mut arg_names = vec![]; + for input in item.sig.inputs.iter() { + match input { + syn::FnArg::Typed(pat_type) => match &*pat_type.pat { + syn::Pat::Ident(ident) => arg_names.push(ident.ident.clone()), + _ => return Err(Error::new(input.span(), "unexpected pattern type")), + }, + _ => return 
Err(Error::new(input.span(), "unexpected function argument type")), + } + } + + let index_attr = index_attr.try_into().expect("we check the type above; QED"); + let condition_attr = condition_attr.try_into().expect("we check the type above; QED"); + let list_attr = list_attr.try_into().expect("we check the type above; QED"); + let weight_attr = weight_attr.try_into().expect("we check the type above; QED"); + + Ok(TaskDef { + index_attr, + condition_attr, + list_attr, + weight_attr, + normal_attrs, + item, + arg_names, + }) + } } /// The contents of a [`TasksDef`]-related attribute. #[derive(Parse, Debug, Clone)] pub enum TaskAttrMeta { - #[peek(keywords::task_list, name = "#[pallet::task_list(..)]")] - TaskList(TaskListAttrMeta), - #[peek(keywords::task_index, name = "#[pallet::task_index(..)")] - TaskIndex(TaskIndexAttrMeta), - #[peek(keywords::task_condition, name = "#[pallet::task_condition(..)")] - TaskCondition(TaskConditionAttrMeta), - #[peek(keywords::task_weight, name = "#[pallet::task_weight(..)")] - TaskWeight(TaskWeightAttrMeta), + #[peek(keywords::task_list, name = "#[pallet::task_list(..)]")] + TaskList(TaskListAttrMeta), + #[peek(keywords::task_index, name = "#[pallet::task_index(..)")] + TaskIndex(TaskIndexAttrMeta), + #[peek(keywords::task_condition, name = "#[pallet::task_condition(..)")] + TaskCondition(TaskConditionAttrMeta), + #[peek(keywords::task_weight, name = "#[pallet::task_weight(..)")] + TaskWeight(TaskWeightAttrMeta), } /// The contents of a `#[pallet::task_list]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskListAttrMeta { - pub task_list: keywords::task_list, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_list: keywords::task_list, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task_index]` attribute. 
#[derive(Parse, Debug, Clone)] pub struct TaskIndexAttrMeta { - pub task_index: keywords::task_index, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub index: LitInt, + pub task_index: keywords::task_index, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub index: LitInt, } /// The contents of a `#[pallet::task_condition]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskConditionAttrMeta { - pub task_condition: keywords::task_condition, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_condition: keywords::task_condition, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task_weight]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskWeightAttrMeta { - pub task_weight: keywords::task_weight, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_weight: keywords::task_weight, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task]` attribute. 
#[derive(Parse, Debug, Clone)] pub struct PalletTaskAttr { - pub pound: Pound, - #[bracket] - _bracket: Bracket, - #[inside(_bracket)] - pub pallet: keywords::pallet, - #[inside(_bracket)] - pub colons: PathSep, - #[inside(_bracket)] - pub meta: T, + pub pound: Pound, + #[bracket] + _bracket: Bracket, + #[inside(_bracket)] + pub pallet: keywords::pallet, + #[inside(_bracket)] + pub colons: PathSep, + #[inside(_bracket)] + pub meta: T, } impl ToTokens for TaskListAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_list = self.task_list; - let expr = &self.expr; - tokens.extend(quote!(#task_list(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_list = self.task_list; + let expr = &self.expr; + tokens.extend(quote!(#task_list(#expr))); + } } impl ToTokens for TaskConditionAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_condition = self.task_condition; - let expr = &self.expr; - tokens.extend(quote!(#task_condition(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_condition = self.task_condition; + let expr = &self.expr; + tokens.extend(quote!(#task_condition(#expr))); + } } impl ToTokens for TaskWeightAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_weight = self.task_weight; - let expr = &self.expr; - tokens.extend(quote!(#task_weight(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_weight = self.task_weight; + let expr = &self.expr; + tokens.extend(quote!(#task_weight(#expr))); + } } impl ToTokens for TaskIndexAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_index = self.task_index; - let index = &self.index; - tokens.extend(quote!(#task_index(#index))) - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_index = self.task_index; + let index = &self.index; + tokens.extend(quote!(#task_index(#index))) + } } impl ToTokens for TaskAttrMeta { - fn to_tokens(&self, tokens: &mut 
TokenStream2) { - match self { - TaskAttrMeta::TaskList(list) => tokens.extend(list.to_token_stream()), - TaskAttrMeta::TaskIndex(index) => tokens.extend(index.to_token_stream()), - TaskAttrMeta::TaskCondition(condition) => tokens.extend(condition.to_token_stream()), - TaskAttrMeta::TaskWeight(weight) => tokens.extend(weight.to_token_stream()), - } - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + TaskAttrMeta::TaskList(list) => tokens.extend(list.to_token_stream()), + TaskAttrMeta::TaskIndex(index) => tokens.extend(index.to_token_stream()), + TaskAttrMeta::TaskCondition(condition) => tokens.extend(condition.to_token_stream()), + TaskAttrMeta::TaskWeight(weight) => tokens.extend(weight.to_token_stream()), + } + } } impl ToTokens for PalletTaskAttr { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let pound = self.pound; - let pallet = self.pallet; - let colons = self.colons; - let meta = &self.meta; - tokens.extend(quote!(#pound[#pallet #colons #meta])); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let pound = self.pound; + let pallet = self.pallet; + let colons = self.colons; + let meta = &self.meta; + tokens.extend(quote!(#pound[#pallet #colons #meta])); + } } impl TryFrom> for TaskIndexAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => Err(Error::new( - value.span(), - format!( - "`{:?}` cannot be converted to a `TaskIndexAttr`", - value.meta - ), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be 
converted to a `TaskIndexAttr`", value.meta), + )), + } + } } impl TryFrom> for TaskConditionAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => Err(Error::new( - value.span(), - format!( - "`{:?}` cannot be converted to a `TaskConditionAttr`", - value.meta - ), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskConditionAttr`", value.meta), + )), + } + } } impl TryFrom> for TaskWeightAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => Err(Error::new( - value.span(), - format!( - "`{:?}` cannot be converted to a `TaskWeightAttr`", - value.meta - ), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskWeightAttr`", value.meta), + )), + } + } } impl TryFrom> for TaskListAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - 
TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), + )), + } + } } fn extract_pallet_attr(item_enum: &mut ItemEnum) -> Result> { - let mut duplicate = None; - let mut attr = None; - item_enum.attrs = item_enum - .attrs - .iter() - .filter(|found_attr| { - let segs = found_attr - .path() - .segments - .iter() - .map(|seg| seg.ident.clone()) - .collect::>(); - let (Some(seg1), Some(_), None) = (segs.first(), segs.get(1), segs.get(2)) else { - return true; - }; - if seg1 != "pallet" { - return true; - } - if attr.is_some() { - duplicate = Some(found_attr.span()); - } - attr = Some(found_attr.to_token_stream()); - false - }) - .cloned() - .collect(); - if let Some(span) = duplicate { - return Err(Error::new( - span, - "only one `#[pallet::_]` attribute is supported on this item", - )); - } - Ok(attr) + let mut duplicate = None; + let mut attr = None; + item_enum.attrs = item_enum + .attrs + .iter() + .filter(|found_attr| { + let segs = found_attr + .path() + .segments + .iter() + .map(|seg| seg.ident.clone()) + .collect::>(); + let (Some(seg1), Some(_), None) = (segs.get(0), segs.get(1), segs.get(2)) else { + return true + }; + if seg1 != "pallet" { + return true + } + if attr.is_some() { + duplicate = Some(found_attr.span()); + } + attr = Some(found_attr.to_token_stream()); + false + }) + .cloned() + .collect(); + if let Some(span) = duplicate { + return Err(Error::new(span, "only one `#[pallet::_]` attribute is supported on this item")) 
+ } + Ok(attr) } fn partition_tasks_attrs(item_impl: &ItemImpl) -> (Vec, Vec) { - item_impl.attrs.clone().into_iter().partition(|attr| { - let mut path_segs = attr.path().segments.iter(); - let (Some(prefix), Some(suffix), None) = - (path_segs.next(), path_segs.next(), path_segs.next()) - else { - return false; - }; - prefix.ident == "pallet" && suffix.ident == "tasks_experimental" - }) + item_impl.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix), None) = + (path_segs.next(), path_segs.next(), path_segs.next()) + else { + return false + }; + prefix.ident == "pallet" && suffix.ident == "tasks_experimental" + }) } fn partition_task_attrs(item: &ImplItemFn) -> (Vec, Vec) { - item.attrs.clone().into_iter().partition(|attr| { - let mut path_segs = attr.path().segments.iter(); - let (Some(prefix), Some(suffix)) = (path_segs.next(), path_segs.next()) else { - return false; - }; - // N.B: the `PartialEq` impl between `Ident` and `&str` is more efficient than - // parsing and makes no stack or heap allocations - prefix.ident == "pallet" - && (suffix.ident == "tasks_experimental" - || suffix.ident == "task_list" - || suffix.ident == "task_condition" - || suffix.ident == "task_weight" - || suffix.ident == "task_index") - }) + item.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix)) = (path_segs.next(), path_segs.next()) else { + return false + }; + // N.B: the `PartialEq` impl between `Ident` and `&str` is more efficient than + // parsing and makes no stack or heap allocations + prefix.ident == "pallet" && + (suffix.ident == "tasks_experimental" || + suffix.ident == "task_list" || + suffix.ident == "task_condition" || + suffix.ident == "task_weight" || + suffix.ident == "task_index") + }) } #[test] fn test_parse_task_list_() { - parse2::(quote!(#[pallet::task_list(Something::iter())])).unwrap(); - 
parse2::(quote!(#[pallet::task_list(Numbers::::iter_keys())])).unwrap(); - parse2::(quote!(#[pallet::task_list(iter())])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_list()])), - "expected an expression" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_list])), - "expected parentheses" - ); + parse2::(quote!(#[pallet::task_list(Something::iter())])).unwrap(); + parse2::(quote!(#[pallet::task_list(Numbers::::iter_keys())])).unwrap(); + parse2::(quote!(#[pallet::task_list(iter())])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list()])), + "expected an expression" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list])), + "expected parentheses" + ); } #[test] fn test_parse_task_index() { - parse2::(quote!(#[pallet::task_index(3)])).unwrap(); - parse2::(quote!(#[pallet::task_index(0)])).unwrap(); - parse2::(quote!(#[pallet::task_index(17)])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index])), - "expected parentheses" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index("hey")])), - "expected integer literal" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index(0.3)])), - "expected integer literal" - ); + parse2::(quote!(#[pallet::task_index(3)])).unwrap(); + parse2::(quote!(#[pallet::task_index(0)])).unwrap(); + parse2::(quote!(#[pallet::task_index(17)])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index])), + "expected parentheses" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index("hey")])), + "expected integer literal" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index(0.3)])), + "expected integer literal" + ); } #[test] fn test_parse_task_condition() { - parse2::(quote!(#[pallet::task_condition(|x| x.is_some())])).unwrap(); - parse2::(quote!(#[pallet::task_condition(|_x| some_expr())])).unwrap(); - 
parse2::(quote!(#[pallet::task_condition(|| some_expr())])).unwrap(); - parse2::(quote!(#[pallet::task_condition(some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|x| x.is_some())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|_x| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(some_expr())])).unwrap(); } #[test] fn test_parse_tasks_attr() { - parse2::(quote!(#[pallet::tasks_experimental])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::taskss])), - "expected `tasks_experimental`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::tasks_])), - "expected `tasks_experimental`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pal::tasks])), - "expected `pallet`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::tasks_experimental()])), - "unexpected token" - ); + parse2::(quote!(#[pallet::tasks_experimental])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::taskss])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pal::tasks])), + "expected `pallet`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_experimental()])), + "unexpected token" + ); } #[test] fn test_parse_tasks_def_basic() { - simulate_manifest_dir("../../pallets/subtensor", || { - let parsed = parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - /// Add a pair of numbers into the totals and remove them. 
- #[pallet::task_list(Numbers::::iter_keys())] - #[pallet::task_condition(|i| Numbers::::contains_key(i))] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn add_number_into_total(i: u32) -> DispatchResult { - let v = Numbers::::take(i).ok_or(Error::::NotFound)?; - Total::::mutate(|(total_keys, total_values)| { - *total_keys += i; - *total_values += v; - }); - Ok(()) - } - } - }) - .unwrap(); - assert_eq!(parsed.tasks.len(), 1); - }); + simulate_manifest_dir("../../examples/basic", || { + let parsed = parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Add a pair of numbers into the totals and remove them. + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn add_number_into_total(i: u32) -> DispatchResult { + let v = Numbers::::take(i).ok_or(Error::::NotFound)?; + Total::::mutate(|(total_keys, total_values)| { + *total_keys += i; + *total_values += v; + }); + Ok(()) + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 1); + }); } #[test] fn test_parse_tasks_def_basic_increment_decrement() { - simulate_manifest_dir("../../pallets/subtensor", || { - let parsed = parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - /// Get the value and check if it can be incremented - #[pallet::task_index(0)] - #[pallet::task_condition(|| { - let value = Value::::get().unwrap(); - value < 255 - })] - #[pallet::task_list(Vec::>::new())] - #[pallet::task_weight(0)] - fn increment() -> DispatchResult { - let value = Value::::get().unwrap_or_default(); - if value >= 255 { - Err(Error::::ValueOverflow.into()) - } else { - let new_val = value.checked_add(1).ok_or(Error::::ValueOverflow)?; - Value::::put(new_val); - Pallet::::deposit_event(Event::Incremented { new_val }); - Ok(()) - } - } - - // Get the value and check if it can be decremented - #[pallet::task_index(1)] - #[pallet::task_condition(|| { - let value = Value::::get().unwrap(); - value > 0 - })] - #[pallet::task_list(Vec::>::new())] - #[pallet::task_weight(0)] - fn decrement() -> DispatchResult { - let value = Value::::get().unwrap_or_default(); - if value == 0 { - Err(Error::::ValueUnderflow.into()) - } else { - let new_val = value.checked_sub(1).ok_or(Error::::ValueUnderflow)?; - Value::::put(new_val); - Pallet::::deposit_event(Event::Decremented { new_val }); - Ok(()) - } - } - } - }) - .unwrap(); - assert_eq!(parsed.tasks.len(), 2); - }); + simulate_manifest_dir("../../examples/basic", || { + let parsed = parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Get the value and check if it can be incremented + #[pallet::task_index(0)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value < 255 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn increment() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value >= 255 { + Err(Error::::ValueOverflow.into()) + } else { + let new_val = value.checked_add(1).ok_or(Error::::ValueOverflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Incremented { new_val }); + Ok(()) + } + } + + // Get the value and check if it can be decremented + #[pallet::task_index(1)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value > 0 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn decrement() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value == 0 { + Err(Error::::ValueUnderflow.into()) + } else { + let new_val = value.checked_sub(1).ok_or(Error::::ValueUnderflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Decremented { new_val }); + Ok(()) + } + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 2); + }); } #[test] fn test_parse_tasks_def_duplicate_index() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_list(Something::iter())] - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - - #[pallet::task_list(Numbers::::iter_keys())] - #[pallet::task_condition(|i| Numbers::::contains_key(i))] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn bar(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - "duplicate task index `0`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn bar(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + "duplicate task index `0`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_list() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_list\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_list\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_condition() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_list(Something::iter())] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_condition\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_index() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_list(Something::iter())] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_index\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_index\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_weight() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_weight\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_weight\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_list_attr() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_list(SomethingElse::iter())] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_list\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_list(SomethingElse::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_list\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_condition_attr() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_condition(|i| i % 4 == 0)] - #[pallet::task_index(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_condition\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_condition(|i| i % 4 == 0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_index_attr() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_index(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_index\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_index\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_extra_tasks_attribute() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - #[pallet::tasks_experimental] - impl, I: 'static> Pallet {} - }), - r"unexpected extra `#\[pallet::tasks_experimental\]` attribute" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + #[pallet::tasks_experimental] + impl, I: 'static> Pallet {} + }), + r"unexpected extra `#\[pallet::tasks_experimental\]` attribute" + ); + }); } #[test] fn test_parse_task_enum_def_basic() { - simulate_manifest_dir("../../pallets/subtensor", || { - parse2::(quote! { - #[pallet::task_enum] - pub enum Task { - Increment, - Decrement, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_non_task_name() { - simulate_manifest_dir("../../pallets/subtensor", || { - parse2::(quote! 
{ - #[pallet::task_enum] - pub enum Something { - Foo - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Something { + Foo + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_missing_attr_allowed() { - simulate_manifest_dir("../../pallets/subtensor", || { - parse2::(quote! { - pub enum Task { - Increment, - Decrement, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_missing_attr_alternate_name_allowed() { - simulate_manifest_dir("../../pallets/subtensor", || { - parse2::(quote! { - pub enum Foo { - Red, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + pub enum Foo { + Red, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_wrong_attr() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::something] - pub enum Task { - Increment, - Decrement, - } - }), - "expected `task_enum`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::something] + pub enum Task { + Increment, + Decrement, + } + }), + "expected `task_enum`" + ); + }); } #[test] fn test_parse_task_enum_def_wrong_item() { - simulate_manifest_dir("../../pallets/subtensor", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::task_enum] - pub struct Something; - }), - "expected `enum`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::task_enum] + pub struct Something; + }), + "expected `enum`" + ); + }); } diff --git a/support/procedural-fork/src/pallet/parse/tests/mod.rs b/support/procedural-fork/src/pallet/parse/tests/mod.rs index 7206a20bf..a3661f307 100644 --- a/support/procedural-fork/src/pallet/parse/tests/mod.rs +++ b/support/procedural-fork/src/pallet/parse/tests/mod.rs @@ -20,7 +20,7 @@ use syn::parse_quote; #[doc(hidden)] pub mod __private { - pub use regex; + pub use regex; } /// Allows you to assert that the input expression resolves to an error whose string @@ -63,22 +63,22 @@ pub mod __private { /// enough that it will work with any error with a reasonable [`core::fmt::Display`] impl. #[macro_export] macro_rules! assert_parse_error_matches { - ($expr:expr, $reg:literal) => { - match $expr { - Ok(_) => panic!("Expected an `Error(..)`, but got Ok(..)"), - Err(e) => { - let error_message = e.to_string(); - let re = $crate::pallet::parse::tests::__private::regex::Regex::new($reg) - .expect("Invalid regex pattern"); - assert!( - re.is_match(&error_message), - "Error message \"{}\" does not match the pattern \"{}\"", - error_message, - $reg - ); - } - } - }; + ($expr:expr, $reg:literal) => { + match $expr { + Ok(_) => panic!("Expected an `Error(..)`, but got Ok(..)"), + Err(e) => { + let error_message = e.to_string(); + let re = $crate::pallet::parse::tests::__private::regex::Regex::new($reg) + .expect("Invalid regex pattern"); + assert!( + re.is_match(&error_message), + "Error message \"{}\" does not match the pattern \"{}\"", + error_message, + $reg + ); + }, + } + }; } /// Allows you to assert that an entire pallet parses successfully. A custom syntax is used for @@ -88,7 +88,7 @@ macro_rules! assert_parse_error_matches { /// /// ```ignore /// assert_pallet_parses! { -/// #[manifest_dir("../../pallets/subtensor")] +/// #[manifest_dir("../../examples/basic")] /// #[frame_support::pallet] /// pub mod pallet { /// #[pallet::config] @@ -142,7 +142,7 @@ macro_rules! 
assert_pallet_parses { /// /// ``` /// assert_pallet_parse_error! { -/// #[manifest_dir("../../pallets/subtensor")] +/// #[manifest_dir("../../examples/basic")] /// #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] /// #[frame_support::pallet] /// pub mod pallet { @@ -183,82 +183,82 @@ macro_rules! assert_pallet_parse_error { /// This function uses a [`Mutex`] to avoid a race condition created when multiple tests try to /// modify and then restore the `CARGO_MANIFEST_DIR` ENV var in an overlapping way. pub fn simulate_manifest_dir, F: FnOnce() + std::panic::UnwindSafe>( - path: P, - closure: F, + path: P, + closure: F, ) { - use std::{env::*, path::*}; + use std::{env::*, path::*}; - /// Ensures that only one thread can modify/restore the `CARGO_MANIFEST_DIR` ENV var at a time, - /// avoiding a race condition because `cargo test` runs tests in parallel. - /// - /// Although this forces all tests that use [`simulate_manifest_dir`] to run sequentially with - /// respect to each other, this is still several orders of magnitude faster than using UI - /// tests, even if they are run in parallel. - static MANIFEST_DIR_LOCK: Mutex<()> = Mutex::new(()); + /// Ensures that only one thread can modify/restore the `CARGO_MANIFEST_DIR` ENV var at a time, + /// avoiding a race condition because `cargo test` runs tests in parallel. + /// + /// Although this forces all tests that use [`simulate_manifest_dir`] to run sequentially with + /// respect to each other, this is still several orders of magnitude faster than using UI + /// tests, even if they are run in parallel. 
+ static MANIFEST_DIR_LOCK: Mutex<()> = Mutex::new(()); - // avoid race condition when swapping out `CARGO_MANIFEST_DIR` - let guard = MANIFEST_DIR_LOCK.lock().unwrap(); + // avoid race condition when swapping out `CARGO_MANIFEST_DIR` + let guard = MANIFEST_DIR_LOCK.lock().unwrap(); - // obtain the current/original `CARGO_MANIFEST_DIR` - let orig = PathBuf::from( - var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`"), - ); + // obtain the current/original `CARGO_MANIFEST_DIR` + let orig = PathBuf::from( + var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`"), + ); - // set `CARGO_MANIFEST_DIR` to the provided path, relative to current working dir - set_var("CARGO_MANIFEST_DIR", orig.join(path.as_ref())); + // set `CARGO_MANIFEST_DIR` to the provided path, relative to current working dir + set_var("CARGO_MANIFEST_DIR", orig.join(path.as_ref())); - // safely run closure catching any panics - let result = panic::catch_unwind(closure); + // safely run closure catching any panics + let result = panic::catch_unwind(closure); - // restore original `CARGO_MANIFEST_DIR` before unwinding - set_var("CARGO_MANIFEST_DIR", &orig); + // restore original `CARGO_MANIFEST_DIR` before unwinding + set_var("CARGO_MANIFEST_DIR", &orig); - // unlock the mutex so we don't poison it if there is a panic - drop(guard); + // unlock the mutex so we don't poison it if there is a panic + drop(guard); - // unwind any panics originally encountered when running closure - result.unwrap(); + // unwind any panics originally encountered when running closure + result.unwrap(); } mod tasks; #[test] fn test_parse_minimal_pallet() { - assert_pallet_parses! { - #[manifest_dir("../../pallets/subtensor")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::config] - pub trait Config: frame_system::Config {} + assert_pallet_parses! 
{ + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} - #[pallet::pallet] - pub struct Pallet(_); - } - }; + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_missing_pallet() { - assert_pallet_parse_error! { - #[manifest_dir("../../pallets/subtensor")] - #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::config] - pub trait Config: frame_system::Config {} - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} + } + } } #[test] fn test_parse_pallet_missing_config() { - assert_pallet_parse_error! { - #[manifest_dir("../../pallets/subtensor")] - #[error_regex("Missing `\\#\\[pallet::config\\]`")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::config\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::pallet] + pub struct Pallet(_); + } + } } diff --git a/support/procedural-fork/src/pallet/parse/tests/tasks.rs b/support/procedural-fork/src/pallet/parse/tests/tasks.rs index 22a757c8f..9f1436284 100644 --- a/support/procedural-fork/src/pallet/parse/tests/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tests/tasks.rs @@ -19,222 +19,222 @@ use syn::parse_quote; #[test] fn test_parse_pallet_with_task_enum_missing_impl() { - assert_pallet_parse_error! 
{ - #[manifest_dir("../../pallets/subtensor")] - #[error_regex("Missing `\\#\\[pallet::tasks_experimental\\]` impl")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::task_enum] - pub enum Task { - Something, - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::tasks_experimental\\]` impl")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum Task { + Something, + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_with_task_enum_wrong_attribute() { - assert_pallet_parse_error! { - #[manifest_dir("../../pallets/subtensor")] - #[error_regex("expected one of")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::wrong_attribute] - pub enum Task { - Something, - } - - #[pallet::task_list] - impl frame_support::traits::Task for Task - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("expected one of")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::wrong_attribute] + pub enum Task { + Something, + } + + #[pallet::task_list] + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_missing_task_enum() { - assert_pallet_parses! 
{ - #[manifest_dir("../../pallets/subtensor")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::tasks_experimental] - #[cfg(test)] // aha, this means it's being eaten - impl frame_support::traits::Task for Task - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::tasks_experimental] + #[cfg(test)] // aha, this means it's being eaten + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_task_list_in_wrong_place() { - assert_pallet_parse_error! { - #[manifest_dir("../../pallets/subtensor")] - #[error_regex("can only be used on items within an `impl` statement.")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - #[pallet::task_list] - pub fn something() { - println!("hey"); - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("can only be used on items within an `impl` statement.")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::task_list] + pub fn something() { + println!("hey"); + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_manual_tasks_impl_without_manual_tasks_enum() { - assert_pallet_parse_error! 
{ - #[manifest_dir("../../pallets/subtensor")] - #[error_regex(".*attribute must be attached to your.*")] - #[frame_support::pallet] - pub mod pallet { - - impl frame_support::traits::Task for Task - where - T: TypeInfo, - { - type Enumeration = sp_std::vec::IntoIter>; - - fn iter() -> Self::Enumeration { - sp_std::vec![Task::increment, Task::decrement].into_iter() - } - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex(".*attribute must be attached to your.*")] + #[frame_support::pallet] + pub mod pallet { + + impl frame_support::traits::Task for Task + where + T: TypeInfo, + { + type Enumeration = sp_std::vec::IntoIter>; + + fn iter() -> Self::Enumeration { + sp_std::vec![Task::increment, Task::decrement].into_iter() + } + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_manual_task_enum_non_manual_impl() { - assert_pallet_parses! { - #[manifest_dir("../../pallets/subtensor")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - #[pallet::tasks_experimental] - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_non_manual_task_enum_manual_impl() { - assert_pallet_parses! 
{ - #[manifest_dir("../../pallets/subtensor")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::task_enum] - pub enum MyCustomTaskEnum { - Something, - } - - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_manual_task_enum_manual_impl() { - assert_pallet_parses! { - #[manifest_dir("../../pallets/subtensor")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_manual_task_enum_mismatch_ident() { - assert_pallet_parses! 
{ - #[manifest_dir("../../pallets/subtensor")] - #[frame_support::pallet] - pub mod pallet { - pub enum WrongIdent { - Something, - } - - #[pallet::tasks_experimental] - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum WrongIdent { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } diff --git a/support/procedural-fork/src/pallet/parse/type_value.rs b/support/procedural-fork/src/pallet/parse/type_value.rs index d5c85248f..4d9db30b3 100644 --- a/support/procedural-fork/src/pallet/parse/type_value.rs +++ b/support/procedural-fork/src/pallet/parse/type_value.rs @@ -20,104 +20,104 @@ use syn::spanned::Spanned; /// Definition of type value. Just a function which is expanded to a struct implementing `Get`. pub struct TypeValueDef { - /// The index of error item in pallet module. - pub index: usize, - /// Visibility of the struct to generate. - pub vis: syn::Visibility, - /// Ident of the struct to generate. - pub ident: syn::Ident, - /// The type return by Get. - pub type_: Box, - /// The block returning the value to get - pub block: Box, - /// If type value is generic over `T` (or `T` and `I` for instantiable pallet) - pub is_generic: bool, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, - /// The where clause of the function. - pub where_clause: Option, - /// The span of the pallet::type_value attribute. - pub attr_span: proc_macro2::Span, - /// Docs on the item. - pub docs: Vec, + /// The index of error item in pallet module. 
+ pub index: usize, + /// Visibility of the struct to generate. + pub vis: syn::Visibility, + /// Ident of the struct to generate. + pub ident: syn::Ident, + /// The type return by Get. + pub type_: Box, + /// The block returning the value to get + pub block: Box, + /// If type value is generic over `T` (or `T` and `I` for instantiable pallet) + pub is_generic: bool, + /// A set of usage of instance, must be check for consistency with config. + pub instances: Vec, + /// The where clause of the function. + pub where_clause: Option, + /// The span of the pallet::type_value attribute. + pub attr_span: proc_macro2::Span, + /// Docs on the item. + pub docs: Vec, } impl TypeValueDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Fn(item) = item { - item - } else { - let msg = "Invalid pallet::type_value, expected item fn"; - return Err(syn::Error::new(item.span(), msg)); - }; + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Fn(item) = item { + item + } else { + let msg = "Invalid pallet::type_value, expected item fn"; + return Err(syn::Error::new(item.span(), msg)) + }; - let mut docs = vec![]; - for attr in &item.attrs { - if let syn::Meta::NameValue(meta) = &attr.meta { - if meta.path.get_ident().map_or(false, |ident| ident == "doc") { - docs.push(meta.value.clone()); - continue; - } - } + let mut docs = vec![]; + for attr in &item.attrs { + if let syn::Meta::NameValue(meta) = &attr.meta { + if meta.path.get_ident().map_or(false, |ident| ident == "doc") { + docs.push(meta.value.clone()); + continue + } + } - let msg = "Invalid pallet::type_value, unexpected attribute, only doc attribute are \ + let msg = "Invalid pallet::type_value, unexpected attribute, only doc attribute are \ allowed"; - return Err(syn::Error::new(attr.span(), msg)); - } + return 
Err(syn::Error::new(attr.span(), msg)) + } - if let Some(span) = item - .sig - .constness - .as_ref() - .map(|t| t.span()) - .or_else(|| item.sig.asyncness.as_ref().map(|t| t.span())) - .or_else(|| item.sig.unsafety.as_ref().map(|t| t.span())) - .or_else(|| item.sig.abi.as_ref().map(|t| t.span())) - .or_else(|| item.sig.variadic.as_ref().map(|t| t.span())) - { - let msg = "Invalid pallet::type_value, unexpected token"; - return Err(syn::Error::new(span, msg)); - } + if let Some(span) = item + .sig + .constness + .as_ref() + .map(|t| t.span()) + .or_else(|| item.sig.asyncness.as_ref().map(|t| t.span())) + .or_else(|| item.sig.unsafety.as_ref().map(|t| t.span())) + .or_else(|| item.sig.abi.as_ref().map(|t| t.span())) + .or_else(|| item.sig.variadic.as_ref().map(|t| t.span())) + { + let msg = "Invalid pallet::type_value, unexpected token"; + return Err(syn::Error::new(span, msg)) + } - if !item.sig.inputs.is_empty() { - let msg = "Invalid pallet::type_value, unexpected argument"; - return Err(syn::Error::new(item.sig.inputs[0].span(), msg)); - } + if !item.sig.inputs.is_empty() { + let msg = "Invalid pallet::type_value, unexpected argument"; + return Err(syn::Error::new(item.sig.inputs[0].span(), msg)) + } - let vis = item.vis.clone(); - let ident = item.sig.ident.clone(); - let block = item.block.clone(); - let type_ = match item.sig.output.clone() { - syn::ReturnType::Type(_, type_) => type_, - syn::ReturnType::Default => { - let msg = "Invalid pallet::type_value, expected return type"; - return Err(syn::Error::new(item.sig.span(), msg)); - } - }; + let vis = item.vis.clone(); + let ident = item.sig.ident.clone(); + let block = item.block.clone(); + let type_ = match item.sig.output.clone() { + syn::ReturnType::Type(_, type_) => type_, + syn::ReturnType::Default => { + let msg = "Invalid pallet::type_value, expected return type"; + return Err(syn::Error::new(item.sig.span(), msg)) + }, + }; - let mut instances = vec![]; - if let Some(usage) = 
helper::check_type_value_gen(&item.sig.generics, item.sig.span())? { - instances.push(usage); - } + let mut instances = vec![]; + if let Some(usage) = helper::check_type_value_gen(&item.sig.generics, item.sig.span())? { + instances.push(usage); + } - let is_generic = item.sig.generics.type_params().count() > 0; - let where_clause = item.sig.generics.where_clause.clone(); + let is_generic = item.sig.generics.type_params().count() > 0; + let where_clause = item.sig.generics.where_clause.clone(); - Ok(TypeValueDef { - attr_span, - index, - is_generic, - vis, - ident, - block, - type_, - instances, - where_clause, - docs, - }) - } + Ok(TypeValueDef { + attr_span, + index, + is_generic, + vis, + ident, + block, + type_, + instances, + where_clause, + docs, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs index 6e5109a74..2bf0a1b6c 100644 --- a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs +++ b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs @@ -20,43 +20,43 @@ use syn::spanned::Spanned; /// The definition of the pallet validate unsigned implementation. pub struct ValidateUnsignedDef { - /// The index of validate unsigned item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, + /// The index of validate unsigned item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with config. 
+ pub instances: Vec, } impl ValidateUnsignedDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::validate_unsigned, expected item impl"; - return Err(syn::Error::new(item.span(), msg)); - }; - - if item.trait_.is_none() { - let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::validate_unsigned, expected item impl"; + return Err(syn::Error::new(item.span(), msg)) + }; + + if item.trait_.is_none() { + let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)); - } - - if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { - if last.ident != "ValidateUnsigned" { - let msg = "Invalid pallet::validate_unsigned, expected trait ValidateUnsigned"; - return Err(syn::Error::new(last.span(), msg)); - } - } else { - let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ + return Err(syn::Error::new(item.span(), msg)) + } + + if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { + if last.ident != "ValidateUnsigned" { + let msg = "Invalid pallet::validate_unsigned, expected trait ValidateUnsigned"; + return Err(syn::Error::new(last.span(), msg)) + } + } else { + let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)); - } + return Err(syn::Error::new(item.span(), msg)) + } - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + 
helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; - Ok(ValidateUnsignedDef { index, instances }) - } + Ok(ValidateUnsignedDef { index, instances }) + } } diff --git a/support/procedural-fork/src/pallet_error.rs b/support/procedural-fork/src/pallet_error.rs index e78844c63..693a1e982 100644 --- a/support/procedural-fork/src/pallet_error.rs +++ b/support/procedural-fork/src/pallet_error.rs @@ -20,172 +20,159 @@ use quote::ToTokens; // Derive `PalletError` pub fn derive_pallet_error(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let syn::DeriveInput { - ident: name, - generics, - data, - .. - } = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; - - let frame_support = match generate_access_from_frame_or_crate("frame-support") { - Ok(c) => c, - Err(e) => return e.into_compile_error().into(), - }; - let frame_support = &frame_support; - let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - - let max_encoded_size = match data { - syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { - syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) - | syn::Fields::Unnamed(syn::FieldsUnnamed { - unnamed: fields, .. - }) => { - let maybe_field_tys = fields - .iter() - .map(|f| generate_field_types(f, frame_support)) - .collect::>>(); - let field_tys = match maybe_field_tys { - Ok(tys) => tys.into_iter().flatten(), - Err(e) => return e.into_compile_error().into(), - }; - quote::quote! { - 0_usize - #( - .saturating_add(< - #field_tys as #frame_support::traits::PalletError - >::MAX_ENCODED_SIZE) - )* - } - } - syn::Fields::Unit => quote::quote!(0), - }, - syn::Data::Enum(syn::DataEnum { variants, .. 
}) => { - let field_tys = variants - .iter() - .map(|variant| generate_variant_field_types(variant, frame_support)) - .collect::>>, syn::Error>>(); - - let field_tys = match field_tys { - Ok(tys) => tys.into_iter().flatten().collect::>(), - Err(e) => return e.to_compile_error().into(), - }; - - // We start with `1`, because the discriminant of an enum is stored as u8 - if field_tys.is_empty() { - quote::quote!(1) - } else { - let variant_sizes = field_tys.into_iter().map(|variant_field_tys| { - quote::quote! { - 1_usize - #(.saturating_add(< - #variant_field_tys as #frame_support::traits::PalletError - >::MAX_ENCODED_SIZE))* - } - }); - - quote::quote! {{ - let mut size = 1_usize; - let mut tmp = 0_usize; - #( - tmp = #variant_sizes; - size = if tmp > size { tmp } else { size }; - tmp = 0_usize; - )* - size - }} - } - } - syn::Data::Union(syn::DataUnion { union_token, .. }) => { - let msg = "Cannot derive `PalletError` for union; please implement it directly"; - return syn::Error::new(union_token.span, msg) - .into_compile_error() - .into(); - } - }; - - quote::quote!( - const _: () = { - impl #impl_generics #frame_support::traits::PalletError - for #name #ty_generics #where_clause - { - const MAX_ENCODED_SIZE: usize = #max_encoded_size; - } - }; - ) - .into() + let syn::DeriveInput { ident: name, generics, data, .. } = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; + + let frame_support = match generate_access_from_frame_or_crate("frame-support") { + Ok(c) => c, + Err(e) => return e.into_compile_error().into(), + }; + let frame_support = &frame_support; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + let max_encoded_size = match data { + syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { + syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) | + syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. 
}) => { + let maybe_field_tys = fields + .iter() + .map(|f| generate_field_types(f, &frame_support)) + .collect::>>(); + let field_tys = match maybe_field_tys { + Ok(tys) => tys.into_iter().flatten(), + Err(e) => return e.into_compile_error().into(), + }; + quote::quote! { + 0_usize + #( + .saturating_add(< + #field_tys as #frame_support::traits::PalletError + >::MAX_ENCODED_SIZE) + )* + } + }, + syn::Fields::Unit => quote::quote!(0), + }, + syn::Data::Enum(syn::DataEnum { variants, .. }) => { + let field_tys = variants + .iter() + .map(|variant| generate_variant_field_types(variant, &frame_support)) + .collect::>>, syn::Error>>(); + + let field_tys = match field_tys { + Ok(tys) => tys.into_iter().flatten().collect::>(), + Err(e) => return e.to_compile_error().into(), + }; + + // We start with `1`, because the discriminant of an enum is stored as u8 + if field_tys.is_empty() { + quote::quote!(1) + } else { + let variant_sizes = field_tys.into_iter().map(|variant_field_tys| { + quote::quote! { + 1_usize + #(.saturating_add(< + #variant_field_tys as #frame_support::traits::PalletError + >::MAX_ENCODED_SIZE))* + } + }); + + quote::quote! {{ + let mut size = 1_usize; + let mut tmp = 0_usize; + #( + tmp = #variant_sizes; + size = if tmp > size { tmp } else { size }; + tmp = 0_usize; + )* + size + }} + } + }, + syn::Data::Union(syn::DataUnion { union_token, .. 
}) => { + let msg = "Cannot derive `PalletError` for union; please implement it directly"; + return syn::Error::new(union_token.span, msg).into_compile_error().into() + }, + }; + + quote::quote!( + const _: () = { + impl #impl_generics #frame_support::traits::PalletError + for #name #ty_generics #where_clause + { + const MAX_ENCODED_SIZE: usize = #max_encoded_size; + } + }; + ) + .into() } fn generate_field_types( - field: &syn::Field, - scrate: &syn::Path, + field: &syn::Field, + scrate: &syn::Path, ) -> syn::Result> { - let attrs = &field.attrs; - - for attr in attrs { - if attr.path().is_ident("codec") { - let mut res = None; - - attr.parse_nested_meta(|meta| { - if meta.path.is_ident("skip") { - res = Some(None); - } else if meta.path.is_ident("compact") { - let field_ty = &field.ty; - res = Some(Some( - quote::quote!(#scrate::__private::codec::Compact<#field_ty>), - )); - } else if meta.path.is_ident("compact") { - res = Some(Some(meta.value()?.parse()?)); - } - - Ok(()) - })?; - - if let Some(v) = res { - return Ok(v); - } - } - } - - Ok(Some(field.ty.to_token_stream())) + let attrs = &field.attrs; + + for attr in attrs { + if attr.path().is_ident("codec") { + let mut res = None; + + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("skip") { + res = Some(None); + } else if meta.path.is_ident("compact") { + let field_ty = &field.ty; + res = Some(Some(quote::quote!(#scrate::__private::codec::Compact<#field_ty>))); + } else if meta.path.is_ident("compact") { + res = Some(Some(meta.value()?.parse()?)); + } + + Ok(()) + })?; + + if let Some(v) = res { + return Ok(v) + } + } + } + + Ok(Some(field.ty.to_token_stream())) } fn generate_variant_field_types( - variant: &syn::Variant, - scrate: &syn::Path, + variant: &syn::Variant, + scrate: &syn::Path, ) -> syn::Result>> { - let attrs = &variant.attrs; - - for attr in attrs { - if attr.path().is_ident("codec") { - let mut skip = false; - - // We ignore the error intentionally as this isn't `codec(skip)` when - // 
`parse_nested_meta` fails. - let _ = attr.parse_nested_meta(|meta| { - skip = meta.path.is_ident("skip"); - Ok(()) - }); - - if skip { - return Ok(None); - } - } - } - - match &variant.fields { - syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) - | syn::Fields::Unnamed(syn::FieldsUnnamed { - unnamed: fields, .. - }) => { - let field_tys = fields - .iter() - .map(|field| generate_field_types(field, scrate)) - .collect::>>()?; - Ok(Some(field_tys.into_iter().flatten().collect())) - } - syn::Fields::Unit => Ok(None), - } + let attrs = &variant.attrs; + + for attr in attrs { + if attr.path().is_ident("codec") { + let mut skip = false; + + // We ignore the error intentionally as this isn't `codec(skip)` when + // `parse_nested_meta` fails. + let _ = attr.parse_nested_meta(|meta| { + skip = meta.path.is_ident("skip"); + Ok(()) + }); + + if skip { + return Ok(None) + } + } + } + + match &variant.fields { + syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) | + syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. 
}) => { + let field_tys = fields + .iter() + .map(|field| generate_field_types(field, scrate)) + .collect::>>()?; + Ok(Some(field_tys.into_iter().flatten().collect())) + }, + syn::Fields::Unit => Ok(None), + } } diff --git a/support/procedural-fork/src/runtime/expand/mod.rs b/support/procedural-fork/src/runtime/expand/mod.rs index 38d40964b..93c88fce9 100644 --- a/support/procedural-fork/src/runtime/expand/mod.rs +++ b/support/procedural-fork/src/runtime/expand/mod.rs @@ -17,20 +17,20 @@ use super::parse::runtime_types::RuntimeType; use crate::{ - construct_runtime::{ - check_pallet_number, decl_all_pallets, decl_integrity_test, decl_pallet_runtime_setup, - decl_static_assertions, expand, - }, - runtime::{ - parse::{ - AllPalletsDeclaration, ExplicitAllPalletsDeclaration, ImplicitAllPalletsDeclaration, - }, - Def, - }, + construct_runtime::{ + check_pallet_number, decl_all_pallets, decl_integrity_test, decl_pallet_runtime_setup, + decl_static_assertions, expand, + }, + runtime::{ + parse::{ + AllPalletsDeclaration, ExplicitAllPalletsDeclaration, ImplicitAllPalletsDeclaration, + }, + Def, + }, }; use cfg_expr::Predicate; use frame_support_procedural_tools::{ - generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, + generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, }; use proc_macro2::TokenStream as TokenStream2; use quote::quote; @@ -41,296 +41,280 @@ use syn::{Ident, Result}; const SYSTEM_PALLET_NAME: &str = "System"; pub fn expand(def: Def, legacy_ordering: bool) -> TokenStream2 { - let input = def.input; - - let (check_pallet_number_res, res) = match def.pallets { - AllPalletsDeclaration::Implicit(ref decl) => ( - check_pallet_number(input.clone(), decl.pallet_count), - construct_runtime_implicit_to_explicit(input, decl.clone(), legacy_ordering), - ), - AllPalletsDeclaration::Explicit(ref decl) => ( - check_pallet_number(input, decl.pallets.len()), - construct_runtime_final_expansion( - 
def.runtime_struct.ident.clone(), - decl.clone(), - def.runtime_types.clone(), - legacy_ordering, - ), - ), - }; - - let res = res.unwrap_or_else(|e| e.to_compile_error()); - - // We want to provide better error messages to the user and thus, handle the error here - // separately. If there is an error, we print the error and still generate all of the code to - // get in overall less errors for the user. - let res = if let Err(error) = check_pallet_number_res { - let error = error.to_compile_error(); - - quote! { - #error - - #res - } - } else { - res - }; - - expander::Expander::new("construct_runtime") - .dry(std::env::var("FRAME_EXPAND").is_err()) - .verbose(true) - .write_to_out_dir(res) - .expect("Does not fail because of IO in OUT_DIR; qed") + let input = def.input; + + let (check_pallet_number_res, res) = match def.pallets { + AllPalletsDeclaration::Implicit(ref decl) => ( + check_pallet_number(input.clone(), decl.pallet_count), + construct_runtime_implicit_to_explicit(input.into(), decl.clone(), legacy_ordering), + ), + AllPalletsDeclaration::Explicit(ref decl) => ( + check_pallet_number(input, decl.pallets.len()), + construct_runtime_final_expansion( + def.runtime_struct.ident.clone(), + decl.clone(), + def.runtime_types.clone(), + legacy_ordering, + ), + ), + }; + + let res = res.unwrap_or_else(|e| e.to_compile_error()); + + // We want to provide better error messages to the user and thus, handle the error here + // separately. If there is an error, we print the error and still generate all of the code to + // get in overall less errors for the user. + let res = if let Err(error) = check_pallet_number_res { + let error = error.to_compile_error(); + + quote! 
{ + #error + + #res + } + } else { + res + }; + + let res = expander::Expander::new("construct_runtime") + .dry(std::env::var("FRAME_EXPAND").is_err()) + .verbose(true) + .write_to_out_dir(res) + .expect("Does not fail because of IO in OUT_DIR; qed"); + + res.into() } fn construct_runtime_implicit_to_explicit( - input: TokenStream2, - definition: ImplicitAllPalletsDeclaration, - legacy_ordering: bool, + input: TokenStream2, + definition: ImplicitAllPalletsDeclaration, + legacy_ordering: bool, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let attr = if legacy_ordering { - quote!((legacy_ordering)) - } else { - quote!() - }; - let mut expansion = quote::quote!( - #[frame_support::runtime #attr] - #input - ); - for pallet in definition.pallet_decls.iter() { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet - .instance - .as_ref() - .map(|instance| quote::quote!(<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_default_parts_v2 }] - frame_support = [{ #frame_support }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name = #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let attr = if legacy_ordering { quote!((legacy_ordering)) } else { quote!() }; + let mut expansion = quote::quote!( + #[frame_support::runtime #attr] + #input + ); + for pallet in definition.pallet_decls.iter() { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_default_parts_v2 }] + frame_support = [{ #frame_support }] + ~~> #frame_support::match_and_insert! 
{ + target = [{ #expansion }] + pattern = [{ #pallet_name = #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } fn construct_runtime_final_expansion( - name: Ident, - definition: ExplicitAllPalletsDeclaration, - runtime_types: Vec, - legacy_ordering: bool, + name: Ident, + definition: ExplicitAllPalletsDeclaration, + runtime_types: Vec, + legacy_ordering: bool, ) -> Result { - let ExplicitAllPalletsDeclaration { - mut pallets, - name: pallets_name, - } = definition; - - if !legacy_ordering { - // Ensure that order of hooks is based on the pallet index - pallets.sort_by_key(|p| p.index); - } - - let system_pallet = pallets - .iter() - .find(|decl| decl.name == SYSTEM_PALLET_NAME) - .ok_or_else(|| { - syn::Error::new( - pallets_name.span(), - "`System` pallet declaration is missing. \ + let ExplicitAllPalletsDeclaration { mut pallets, name: pallets_name } = definition; + + if !legacy_ordering { + // Ensure that order of hooks is based on the pallet index + pallets.sort_by_key(|p| p.index); + } + + let system_pallet = + pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { + syn::Error::new( + pallets_name.span(), + "`System` pallet declaration is missing. 
\ Please add this line: `pub type System = frame_system;`", - ) - })?; - if !system_pallet.cfg_pattern.is_empty() { - return Err(syn::Error::new( - system_pallet.name.span(), - "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", - )); - } - - let features = pallets - .iter() - .filter(|&decl| (!decl.cfg_pattern.is_empty())) - .flat_map(|decl| { - decl.cfg_pattern.iter().flat_map(|attr| { - attr.predicates().filter_map(|pred| match pred { - Predicate::Feature(feat) => Some(feat), - Predicate::Test => Some("test"), - _ => None, - }) - }) - }) - .collect::>(); - - let hidden_crate_name = "construct_runtime"; - let scrate = generate_crate_access(hidden_crate_name, "frame-support"); - let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let block = quote!(<#name as #frame_system::Config>::Block); - let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); - - let mut dispatch = None; - let mut outer_event = None; - let mut outer_error = None; - let mut outer_origin = None; - let mut freeze_reason = None; - let mut hold_reason = None; - let mut slash_reason = None; - let mut lock_id = None; - let mut task = None; - - for runtime_type in runtime_types.iter() { - match runtime_type { - RuntimeType::RuntimeCall(_) => { - dispatch = Some(expand::expand_outer_dispatch( - &name, - system_pallet, - &pallets, - &scrate, - )); - } - RuntimeType::RuntimeEvent(_) => { - outer_event = Some(expand::expand_outer_enum( - &name, - &pallets, - &scrate, - expand::OuterEnumType::Event, - )?); - } - RuntimeType::RuntimeError(_) => { - outer_error = Some(expand::expand_outer_enum( - &name, - &pallets, - &scrate, - expand::OuterEnumType::Error, - )?); - } - RuntimeType::RuntimeOrigin(_) => { - outer_origin = Some(expand::expand_outer_origin( - &name, - system_pallet, - &pallets, - &scrate, - )?); - } - 
RuntimeType::RuntimeFreezeReason(_) => { - freeze_reason = Some(expand::expand_outer_freeze_reason(&pallets, &scrate)); - } - RuntimeType::RuntimeHoldReason(_) => { - hold_reason = Some(expand::expand_outer_hold_reason(&pallets, &scrate)); - } - RuntimeType::RuntimeSlashReason(_) => { - slash_reason = Some(expand::expand_outer_slash_reason(&pallets, &scrate)); - } - RuntimeType::RuntimeLockId(_) => { - lock_id = Some(expand::expand_outer_lock_id(&pallets, &scrate)); - } - RuntimeType::RuntimeTask(_) => { - task = Some(expand::expand_outer_task(&name, &pallets, &scrate)); - } - } - } - - let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); - let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); - - let metadata = expand::expand_runtime_metadata( - &name, - &pallets, - &scrate, - &unchecked_extrinsic, - &system_pallet.path, - ); - let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); - let inherent = - expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); - let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); - let integrity_test = decl_integrity_test(&scrate); - let static_assertions = decl_static_assertions(&name, &pallets, &scrate); - - let res = quote!( - #scrate_decl - - // Prevent UncheckedExtrinsic to print unused warning. - const _: () = { - #[allow(unused)] - type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; - }; - - #[derive( - Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, - #scrate::__private::scale_info::TypeInfo - )] - pub struct #name; - impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { - type RuntimeBlock = #block; - } - - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `runtime` may be used without calling `impl_runtime_apis!`. 
- // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro runtime). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. - // `InternalConstructRuntime` is implemented by the `runtime` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `runtime` - // is used. - - #[doc(hidden)] - trait InternalConstructRuntime { - #[inline(always)] - fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { - Default::default() - } - } - #[doc(hidden)] - impl InternalConstructRuntime for &#name {} - - #outer_event - - #outer_error - - #outer_origin - - #all_pallets - - #pallet_to_index - - #dispatch - - #task - - #metadata - - #outer_config - - #inherent - - #validate_unsigned - - #freeze_reason - - #hold_reason - - #lock_id - - #slash_reason - - #integrity_test - - #static_assertions - ); + ) + })?; + if !system_pallet.cfg_pattern.is_empty() { + return Err(syn::Error::new( + system_pallet.name.span(), + "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", + )) + } + + let features = pallets + .iter() + .filter_map(|decl| { + (!decl.cfg_pattern.is_empty()).then(|| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) + }) + .flatten() + .collect::>(); + + let hidden_crate_name = "construct_runtime"; + let scrate = generate_crate_access(hidden_crate_name, "frame-support"); + let scrate_decl = 
generate_hidden_includes(hidden_crate_name, "frame-support"); + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let block = quote!(<#name as #frame_system::Config>::Block); + let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); + + let mut dispatch = None; + let mut outer_event = None; + let mut outer_error = None; + let mut outer_origin = None; + let mut freeze_reason = None; + let mut hold_reason = None; + let mut slash_reason = None; + let mut lock_id = None; + let mut task = None; + + for runtime_type in runtime_types.iter() { + match runtime_type { + RuntimeType::RuntimeCall(_) => { + dispatch = + Some(expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate)); + }, + RuntimeType::RuntimeEvent(_) => { + outer_event = Some(expand::expand_outer_enum( + &name, + &pallets, + &scrate, + expand::OuterEnumType::Event, + )?); + }, + RuntimeType::RuntimeError(_) => { + outer_error = Some(expand::expand_outer_enum( + &name, + &pallets, + &scrate, + expand::OuterEnumType::Error, + )?); + }, + RuntimeType::RuntimeOrigin(_) => { + outer_origin = + Some(expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?); + }, + RuntimeType::RuntimeFreezeReason(_) => { + freeze_reason = Some(expand::expand_outer_freeze_reason(&pallets, &scrate)); + }, + RuntimeType::RuntimeHoldReason(_) => { + hold_reason = Some(expand::expand_outer_hold_reason(&pallets, &scrate)); + }, + RuntimeType::RuntimeSlashReason(_) => { + slash_reason = Some(expand::expand_outer_slash_reason(&pallets, &scrate)); + }, + RuntimeType::RuntimeLockId(_) => { + lock_id = Some(expand::expand_outer_lock_id(&pallets, &scrate)); + }, + RuntimeType::RuntimeTask(_) => { + task = Some(expand::expand_outer_task(&name, &pallets, &scrate)); + }, + } + } + + let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); + let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); + + let metadata = 
expand::expand_runtime_metadata( + &name, + &pallets, + &scrate, + &unchecked_extrinsic, + &system_pallet.path, + ); + let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); + let inherent = + expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); + let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); + let integrity_test = decl_integrity_test(&scrate); + let static_assertions = decl_static_assertions(&name, &pallets, &scrate); + + let res = quote!( + #scrate_decl + + // Prevent UncheckedExtrinsic to print unused warning. + const _: () = { + #[allow(unused)] + type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + }; + + #[derive( + Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + pub struct #name; + impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { + type RuntimeBlock = #block; + } + + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. + // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `runtime` may be used without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro runtime). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. + // `InternalConstructRuntime` is implemented by the `runtime` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `runtime` + // is used. 
+ + #[doc(hidden)] + trait InternalConstructRuntime { + #[inline(always)] + fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + Default::default() + } + } + #[doc(hidden)] + impl InternalConstructRuntime for &#name {} + + #outer_event + + #outer_error + + #outer_origin + + #all_pallets + + #pallet_to_index + + #dispatch + + #task + + #metadata + + #outer_config + + #inherent + + #validate_unsigned + + #freeze_reason + + #hold_reason + + #lock_id + + #slash_reason + + #integrity_test + + #static_assertions + ); - Ok(res) + Ok(res) } diff --git a/support/procedural-fork/src/runtime/mod.rs b/support/procedural-fork/src/runtime/mod.rs index 888a15e11..aaae579eb 100644 --- a/support/procedural-fork/src/runtime/mod.rs +++ b/support/procedural-fork/src/runtime/mod.rs @@ -210,27 +210,27 @@ mod expand; mod parse; mod keyword { - syn::custom_keyword!(legacy_ordering); + syn::custom_keyword!(legacy_ordering); } pub fn runtime(attr: TokenStream, tokens: TokenStream) -> TokenStream { - let mut legacy_ordering = false; - if !attr.is_empty() { - if syn::parse::(attr.clone()).is_ok() { - legacy_ordering = true; - } else { - let msg = "Invalid runtime macro call: unexpected attribute. Macro call must be \ + let mut legacy_ordering = false; + if !attr.is_empty() { + if let Ok(_) = syn::parse::(attr.clone()) { + legacy_ordering = true; + } else { + let msg = "Invalid runtime macro call: unexpected attribute. 
Macro call must be \ bare, such as `#[frame_support::runtime]` or `#[runtime]`, or must specify the \ `legacy_ordering` attribute, such as `#[frame_support::runtime(legacy_ordering)]` or \ #[runtime(legacy_ordering)]."; - let span = proc_macro2::TokenStream::from(attr).span(); - return syn::Error::new(span, msg).to_compile_error().into(); - } - } + let span = proc_macro2::TokenStream::from(attr).span(); + return syn::Error::new(span, msg).to_compile_error().into() + } + } - let item = syn::parse_macro_input!(tokens as syn::ItemMod); - match parse::Def::try_from(item) { - Ok(def) => expand::expand(def, legacy_ordering).into(), - Err(e) => e.to_compile_error().into(), - } + let item = syn::parse_macro_input!(tokens as syn::ItemMod); + match parse::Def::try_from(item) { + Ok(def) => expand::expand(def, legacy_ordering).into(), + Err(e) => e.to_compile_error().into(), + } } diff --git a/support/procedural-fork/src/runtime/parse/helper.rs b/support/procedural-fork/src/runtime/parse/helper.rs index 17e362410..f05395f9b 100644 --- a/support/procedural-fork/src/runtime/parse/helper.rs +++ b/support/procedural-fork/src/runtime/parse/helper.rs @@ -19,26 +19,19 @@ use crate::pallet::parse::helper::MutItemAttrs; use quote::ToTokens; pub(crate) fn take_first_item_runtime_attr( - item: &mut impl MutItemAttrs, + item: &mut impl MutItemAttrs, ) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let attrs = if let Some(attrs) = item.mut_item_attrs() { - attrs - } else { - return Ok(None); - }; + let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; - if let Some(index) = attrs.iter().position(|attr| { - attr.path() - .segments - .first() - .map_or(false, |segment| segment.ident == "runtime") - }) { - let runtime_attr = attrs.remove(index); - Ok(Some(syn::parse2(runtime_attr.into_token_stream())?)) - } else { - Ok(None) - } + if let Some(index) = attrs.iter().position(|attr| { + 
attr.path().segments.first().map_or(false, |segment| segment.ident == "runtime") + }) { + let runtime_attr = attrs.remove(index); + Ok(Some(syn::parse2(runtime_attr.into_token_stream())?)) + } else { + Ok(None) + } } diff --git a/support/procedural-fork/src/runtime/parse/mod.rs b/support/procedural-fork/src/runtime/parse/mod.rs index c2b33fd76..893cb4726 100644 --- a/support/procedural-fork/src/runtime/parse/mod.rs +++ b/support/procedural-fork/src/runtime/parse/mod.rs @@ -32,229 +32,220 @@ use frame_support_procedural_tools::syn_ext as ext; use runtime_types::RuntimeType; mod keyword { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(runtime); - custom_keyword!(derive); - custom_keyword!(pallet_index); - custom_keyword!(disable_call); - custom_keyword!(disable_unsigned); + custom_keyword!(runtime); + custom_keyword!(derive); + custom_keyword!(pallet_index); + custom_keyword!(disable_call); + custom_keyword!(disable_unsigned); } enum RuntimeAttr { - Runtime(proc_macro2::Span), - Derive(proc_macro2::Span, Vec), - PalletIndex(proc_macro2::Span, u8), - DisableCall(proc_macro2::Span), - DisableUnsigned(proc_macro2::Span), + Runtime(proc_macro2::Span), + Derive(proc_macro2::Span, Vec), + PalletIndex(proc_macro2::Span, u8), + DisableCall(proc_macro2::Span), + DisableUnsigned(proc_macro2::Span), } impl RuntimeAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::Runtime(span) => *span, - Self::Derive(span, _) => *span, - Self::PalletIndex(span, _) => *span, - Self::DisableCall(span) => *span, - Self::DisableUnsigned(span) => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::Runtime(span) => *span, + Self::Derive(span, _) => *span, + Self::PalletIndex(span, _) => *span, + Self::DisableCall(span) => *span, + Self::DisableUnsigned(span) => *span, + } + } } impl syn::parse::Parse for RuntimeAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - 
syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::runtime) { - Ok(RuntimeAttr::Runtime( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::derive) { - let _ = content.parse::(); - let derive_content; - syn::parenthesized!(derive_content in content); - let runtime_types = - derive_content.parse::>()?; - let runtime_types = runtime_types.inner.into_iter().collect(); - Ok(RuntimeAttr::Derive(derive_content.span(), runtime_types)) - } else if lookahead.peek(keyword::pallet_index) { - let _ = content.parse::(); - let pallet_index_content; - syn::parenthesized!(pallet_index_content in content); - let pallet_index = pallet_index_content.parse::()?; - if !pallet_index.suffix().is_empty() { - let msg = "Number literal must not have a suffix"; - return Err(syn::Error::new(pallet_index.span(), msg)); - } - Ok(RuntimeAttr::PalletIndex( - pallet_index.span(), - pallet_index.base10_parse()?, - )) - } else if lookahead.peek(keyword::disable_call) { - Ok(RuntimeAttr::DisableCall( - content.parse::()?.span(), - )) - } else if lookahead.peek(keyword::disable_unsigned) { - Ok(RuntimeAttr::DisableUnsigned( - content.parse::()?.span(), - )) - } else { - Err(lookahead.error()) - } - } + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::runtime) { + Ok(RuntimeAttr::Runtime(content.parse::()?.span())) + } else if lookahead.peek(keyword::derive) { + let _ = content.parse::(); + let derive_content; + syn::parenthesized!(derive_content in content); + let runtime_types = + derive_content.parse::>()?; + let runtime_types = runtime_types.inner.into_iter().collect(); + Ok(RuntimeAttr::Derive(derive_content.span(), runtime_types)) + } else if lookahead.peek(keyword::pallet_index) { + let _ = 
content.parse::(); + let pallet_index_content; + syn::parenthesized!(pallet_index_content in content); + let pallet_index = pallet_index_content.parse::()?; + if !pallet_index.suffix().is_empty() { + let msg = "Number literal must not have a suffix"; + return Err(syn::Error::new(pallet_index.span(), msg)) + } + Ok(RuntimeAttr::PalletIndex(pallet_index.span(), pallet_index.base10_parse()?)) + } else if lookahead.peek(keyword::disable_call) { + Ok(RuntimeAttr::DisableCall(content.parse::()?.span())) + } else if lookahead.peek(keyword::disable_unsigned) { + Ok(RuntimeAttr::DisableUnsigned(content.parse::()?.span())) + } else { + Err(lookahead.error()) + } + } } #[derive(Debug, Clone)] pub enum AllPalletsDeclaration { - Implicit(ImplicitAllPalletsDeclaration), - Explicit(ExplicitAllPalletsDeclaration), + Implicit(ImplicitAllPalletsDeclaration), + Explicit(ExplicitAllPalletsDeclaration), } /// Declaration of a runtime with some pallet with implicit declaration of parts. #[derive(Debug, Clone)] pub struct ImplicitAllPalletsDeclaration { - pub name: Ident, - pub pallet_decls: Vec, - pub pallet_count: usize, + pub name: Ident, + pub pallet_decls: Vec, + pub pallet_count: usize, } /// Declaration of a runtime with all pallet having explicit declaration of parts. 
#[derive(Debug, Clone)] pub struct ExplicitAllPalletsDeclaration { - pub name: Ident, - pub pallets: Vec, + pub name: Ident, + pub pallets: Vec, } pub struct Def { - pub input: TokenStream2, - pub item: syn::ItemMod, - pub runtime_struct: runtime_struct::RuntimeStructDef, - pub pallets: AllPalletsDeclaration, - pub runtime_types: Vec, + pub input: TokenStream2, + pub item: syn::ItemMod, + pub runtime_struct: runtime_struct::RuntimeStructDef, + pub pallets: AllPalletsDeclaration, + pub runtime_types: Vec, } impl Def { - pub fn try_from(mut item: syn::ItemMod) -> syn::Result { - let input: TokenStream2 = item.to_token_stream(); - let item_span = item.span(); - let items = &mut item - .content - .as_mut() - .ok_or_else(|| { - let msg = "Invalid runtime definition, expected mod to be inlined."; - syn::Error::new(item_span, msg) - })? - .1; + pub fn try_from(mut item: syn::ItemMod) -> syn::Result { + let input: TokenStream2 = item.to_token_stream().into(); + let item_span = item.span(); + let items = &mut item + .content + .as_mut() + .ok_or_else(|| { + let msg = "Invalid runtime definition, expected mod to be inlined."; + syn::Error::new(item_span, msg) + })? + .1; - let mut runtime_struct = None; - let mut runtime_types = None; + let mut runtime_struct = None; + let mut runtime_types = None; - let mut indices = HashMap::new(); - let mut names = HashMap::new(); + let mut indices = HashMap::new(); + let mut names = HashMap::new(); - let mut pallet_decls = vec![]; - let mut pallets = vec![]; + let mut pallet_decls = vec![]; + let mut pallets = vec![]; - for item in items.iter_mut() { - let mut pallet_item = None; - let mut pallet_index = 0; + for item in items.iter_mut() { + let mut pallet_item = None; + let mut pallet_index = 0; - let mut disable_call = false; - let mut disable_unsigned = false; + let mut disable_call = false; + let mut disable_unsigned = false; - while let Some(runtime_attr) = - helper::take_first_item_runtime_attr::(item)? 
- { - match runtime_attr { - RuntimeAttr::Runtime(span) if runtime_struct.is_none() => { - let p = runtime_struct::RuntimeStructDef::try_from(span, item)?; - runtime_struct = Some(p); - } - RuntimeAttr::Derive(_, types) if runtime_types.is_none() => { - runtime_types = Some(types); - } - RuntimeAttr::PalletIndex(span, index) => { - pallet_index = index; - pallet_item = if let syn::Item::Type(item) = item { - Some(item.clone()) - } else { - let msg = "Invalid runtime::pallet_index, expected type definition"; - return Err(syn::Error::new(span, msg)); - }; - } - RuntimeAttr::DisableCall(_) => disable_call = true, - RuntimeAttr::DisableUnsigned(_) => disable_unsigned = true, - attr => { - let msg = "Invalid duplicated attribute"; - return Err(syn::Error::new(attr.span(), msg)); - } - } - } + while let Some(runtime_attr) = + helper::take_first_item_runtime_attr::(item)? + { + match runtime_attr { + RuntimeAttr::Runtime(span) if runtime_struct.is_none() => { + let p = runtime_struct::RuntimeStructDef::try_from(span, item)?; + runtime_struct = Some(p); + }, + RuntimeAttr::Derive(_, types) if runtime_types.is_none() => { + runtime_types = Some(types); + }, + RuntimeAttr::PalletIndex(span, index) => { + pallet_index = index; + pallet_item = if let syn::Item::Type(item) = item { + Some(item.clone()) + } else { + let msg = "Invalid runtime::pallet_index, expected type definition"; + return Err(syn::Error::new(span, msg)) + }; + }, + RuntimeAttr::DisableCall(_) => disable_call = true, + RuntimeAttr::DisableUnsigned(_) => disable_unsigned = true, + attr => { + let msg = "Invalid duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)) + }, + } + } - if let Some(pallet_item) = pallet_item { - match *pallet_item.ty.clone() { - syn::Type::Path(ref path) => { - let pallet_decl = - PalletDeclaration::try_from(item.span(), &pallet_item, path)?; + if let Some(pallet_item) = pallet_item { + match *pallet_item.ty.clone() { + syn::Type::Path(ref path) => { + let pallet_decl 
= + PalletDeclaration::try_from(item.span(), &pallet_item, path)?; - if let Some(used_pallet) = - names.insert(pallet_decl.name.clone(), pallet_decl.name.span()) - { - let msg = "Two pallets with the same name!"; + if let Some(used_pallet) = + names.insert(pallet_decl.name.clone(), pallet_decl.name.span()) + { + let msg = "Two pallets with the same name!"; - let mut err = syn::Error::new(used_pallet, msg); - err.combine(syn::Error::new(pallet_decl.name.span(), msg)); - return Err(err); - } + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet_decl.name.span(), &msg)); + return Err(err) + } - pallet_decls.push(pallet_decl); - } - syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) => { - let pallet = Pallet::try_from( - item.span(), - &pallet_item, - pallet_index, - disable_call, - disable_unsigned, - &bounds, - )?; + pallet_decls.push(pallet_decl); + }, + syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) => { + let pallet = Pallet::try_from( + item.span(), + &pallet_item, + pallet_index, + disable_call, + disable_unsigned, + &bounds, + )?; - if let Some(used_pallet) = indices.insert(pallet.index, pallet.name.clone()) - { - let msg = format!( + if let Some(used_pallet) = indices.insert(pallet.index, pallet.name.clone()) + { + let msg = format!( "Pallet indices are conflicting: Both pallets {} and {} are at index {}", used_pallet, pallet.name, pallet.index, ); - let mut err = syn::Error::new(used_pallet.span(), &msg); - err.combine(syn::Error::new(pallet.name.span(), msg)); - return Err(err); - } + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err) + } - pallets.push(pallet); - } - _ => continue, - } - } - } + pallets.push(pallet); + }, + _ => continue, + } + } + } - let name = item.ident.clone(); - let decl_count = pallet_decls.len(); - let pallets = if decl_count > 0 { - AllPalletsDeclaration::Implicit(ImplicitAllPalletsDeclaration { 
- name, - pallet_decls, - pallet_count: decl_count.saturating_add(pallets.len()), - }) - } else { - AllPalletsDeclaration::Explicit(ExplicitAllPalletsDeclaration { name, pallets }) - }; + let name = item.ident.clone(); + let decl_count = pallet_decls.len(); + let pallets = if decl_count > 0 { + AllPalletsDeclaration::Implicit(ImplicitAllPalletsDeclaration { + name, + pallet_decls, + pallet_count: decl_count.saturating_add(pallets.len()), + }) + } else { + AllPalletsDeclaration::Explicit(ExplicitAllPalletsDeclaration { name, pallets }) + }; - let def = Def { + let def = Def { input, item, runtime_struct: runtime_struct.ok_or_else(|| { @@ -270,6 +261,6 @@ impl Def { })?, }; - Ok(def) - } + Ok(def) + } } diff --git a/support/procedural-fork/src/runtime/parse/pallet.rs b/support/procedural-fork/src/runtime/parse/pallet.rs index 976aba764..d2f1857fb 100644 --- a/support/procedural-fork/src/runtime/parse/pallet.rs +++ b/support/procedural-fork/src/runtime/parse/pallet.rs @@ -20,85 +20,80 @@ use quote::ToTokens; use syn::{punctuated::Punctuated, spanned::Spanned, token, Error, Ident, PathArguments}; impl Pallet { - pub fn try_from( - attr_span: proc_macro2::Span, - item: &syn::ItemType, - pallet_index: u8, - disable_call: bool, - disable_unsigned: bool, - bounds: &Punctuated, - ) -> syn::Result { - let name = item.ident.clone(); + pub fn try_from( + attr_span: proc_macro2::Span, + item: &syn::ItemType, + pallet_index: u8, + disable_call: bool, + disable_unsigned: bool, + bounds: &Punctuated, + ) -> syn::Result { + let name = item.ident.clone(); - let mut pallet_path = None; - let mut pallet_parts = vec![]; + let mut pallet_path = None; + let mut pallet_parts = vec![]; - for (index, bound) in bounds.into_iter().enumerate() { - if let syn::TypeParamBound::Trait(syn::TraitBound { path, .. 
}) = bound { - if index == 0 { - pallet_path = Some(PalletPath { - inner: path.clone(), - }); - } else { - let pallet_part = syn::parse2::(bound.into_token_stream())?; - pallet_parts.push(pallet_part); - } - } else { - return Err(Error::new( - attr_span, - "Invalid pallet declaration, expected a path or a trait object", - )); - }; - } + for (index, bound) in bounds.into_iter().enumerate() { + if let syn::TypeParamBound::Trait(syn::TraitBound { path, .. }) = bound { + if index == 0 { + pallet_path = Some(PalletPath { inner: path.clone() }); + } else { + let pallet_part = syn::parse2::(bound.into_token_stream())?; + pallet_parts.push(pallet_part); + } + } else { + return Err(Error::new( + attr_span, + "Invalid pallet declaration, expected a path or a trait object", + )) + }; + } - let mut path = pallet_path.ok_or(Error::new( - attr_span, - "Invalid pallet declaration, expected a path or a trait object", - ))?; + let mut path = pallet_path.ok_or(Error::new( + attr_span, + "Invalid pallet declaration, expected a path or a trait object", + ))?; - let mut instance = None; - if let Some(segment) = path - .inner - .segments - .iter_mut() - .find(|seg| !seg.arguments.is_empty()) - { - if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { - args, .. - }) = segment.arguments.clone() - { - if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { - instance = Some(Ident::new( - &arg_path.to_token_stream().to_string(), - arg_path.span(), - )); - segment.arguments = PathArguments::None; - } - } - } + let mut instance = None; + if let Some(segment) = path.inner.segments.iter_mut().find(|seg| !seg.arguments.is_empty()) + { + if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { + args, .. 
+ }) = segment.arguments.clone() + { + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { + instance = + Some(Ident::new(&arg_path.to_token_stream().to_string(), arg_path.span())); + segment.arguments = PathArguments::None; + } + } + } - pallet_parts.retain(|part| { - if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { - false - } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = - (disable_unsigned, &part.keyword) - { - false - } else { - true - } - }); + pallet_parts = pallet_parts + .into_iter() + .filter(|part| { + if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { + false + } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = + (disable_unsigned, &part.keyword) + { + false + } else { + true + } + }) + .collect(); - let cfg_pattern = vec![]; + let cfg_pattern = vec![]; - Ok(Pallet { - is_expanded: true, - name, - index: pallet_index, - path, - instance, - cfg_pattern, - pallet_parts, - }) - } + Ok(Pallet { + is_expanded: true, + name, + index: pallet_index, + path, + instance, + cfg_pattern, + pallet_parts, + }) + } } diff --git a/support/procedural-fork/src/runtime/parse/pallet_decl.rs b/support/procedural-fork/src/runtime/parse/pallet_decl.rs index bb1246606..437a163cf 100644 --- a/support/procedural-fork/src/runtime/parse/pallet_decl.rs +++ b/support/procedural-fork/src/runtime/parse/pallet_decl.rs @@ -21,51 +21,40 @@ use syn::{spanned::Spanned, Attribute, Ident, PathArguments}; /// The declaration of a pallet. #[derive(Debug, Clone)] pub struct PalletDeclaration { - /// The name of the pallet, e.g.`System` in `System: frame_system`. - pub name: Ident, - /// Optional attributes tagged right above a pallet declaration. - pub attrs: Vec, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: syn::Path, - /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. 
- pub instance: Option, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Optional attributes tagged right above a pallet declaration. + pub attrs: Vec, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: syn::Path, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, } impl PalletDeclaration { - pub fn try_from( - _attr_span: proc_macro2::Span, - item: &syn::ItemType, - path: &syn::TypePath, - ) -> syn::Result { - let name = item.ident.clone(); - - let mut path = path.path.clone(); - - let mut instance = None; - if let Some(segment) = path - .segments - .iter_mut() - .find(|seg| !seg.arguments.is_empty()) - { - if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { - args, .. - }) = segment.arguments.clone() - { - if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { - instance = Some(Ident::new( - &arg_path.to_token_stream().to_string(), - arg_path.span(), - )); - segment.arguments = PathArguments::None; - } - } - } - - Ok(Self { - name, - path, - instance, - attrs: item.attrs.clone(), - }) - } + pub fn try_from( + _attr_span: proc_macro2::Span, + item: &syn::ItemType, + path: &syn::TypePath, + ) -> syn::Result { + let name = item.ident.clone(); + + let mut path = path.path.clone(); + + let mut instance = None; + if let Some(segment) = path.segments.iter_mut().find(|seg| !seg.arguments.is_empty()) { + if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { + args, .. 
+ }) = segment.arguments.clone() + { + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { + instance = + Some(Ident::new(&arg_path.to_token_stream().to_string(), arg_path.span())); + segment.arguments = PathArguments::None; + } + } + } + + Ok(Self { name, path, instance, attrs: item.attrs.clone() }) + } } diff --git a/support/procedural-fork/src/runtime/parse/runtime_struct.rs b/support/procedural-fork/src/runtime/parse/runtime_struct.rs index 7ddbdcfeb..8fa746ee8 100644 --- a/support/procedural-fork/src/runtime/parse/runtime_struct.rs +++ b/support/procedural-fork/src/runtime/parse/runtime_struct.rs @@ -17,22 +17,19 @@ use syn::spanned::Spanned; pub struct RuntimeStructDef { - pub ident: syn::Ident, - pub attr_span: proc_macro2::Span, + pub ident: syn::Ident, + pub attr_span: proc_macro2::Span, } impl RuntimeStructDef { - pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Struct(item) = item { - item - } else { - let msg = "Invalid runtime::runtime, expected struct definition"; - return Err(syn::Error::new(item.span(), msg)); - }; + pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Struct(item) = item { + item + } else { + let msg = "Invalid runtime::runtime, expected struct definition"; + return Err(syn::Error::new(item.span(), msg)) + }; - Ok(Self { - ident: item.ident.clone(), - attr_span, - }) - } + Ok(Self { ident: item.ident.clone(), attr_span }) + } } diff --git a/support/procedural-fork/src/runtime/parse/runtime_types.rs b/support/procedural-fork/src/runtime/parse/runtime_types.rs index 4d8c8358c..a4480e2a1 100644 --- a/support/procedural-fork/src/runtime/parse/runtime_types.rs +++ b/support/procedural-fork/src/runtime/parse/runtime_types.rs @@ -16,61 +16,61 @@ // limitations under the License. 
use syn::{ - parse::{Parse, ParseStream}, - Result, + parse::{Parse, ParseStream}, + Result, }; mod keyword { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(RuntimeCall); - custom_keyword!(RuntimeEvent); - custom_keyword!(RuntimeError); - custom_keyword!(RuntimeOrigin); - custom_keyword!(RuntimeFreezeReason); - custom_keyword!(RuntimeHoldReason); - custom_keyword!(RuntimeSlashReason); - custom_keyword!(RuntimeLockId); - custom_keyword!(RuntimeTask); + custom_keyword!(RuntimeCall); + custom_keyword!(RuntimeEvent); + custom_keyword!(RuntimeError); + custom_keyword!(RuntimeOrigin); + custom_keyword!(RuntimeFreezeReason); + custom_keyword!(RuntimeHoldReason); + custom_keyword!(RuntimeSlashReason); + custom_keyword!(RuntimeLockId); + custom_keyword!(RuntimeTask); } #[derive(Debug, Clone, PartialEq)] pub enum RuntimeType { - RuntimeCall(keyword::RuntimeCall), - RuntimeEvent(keyword::RuntimeEvent), - RuntimeError(keyword::RuntimeError), - RuntimeOrigin(keyword::RuntimeOrigin), - RuntimeFreezeReason(keyword::RuntimeFreezeReason), - RuntimeHoldReason(keyword::RuntimeHoldReason), - RuntimeSlashReason(keyword::RuntimeSlashReason), - RuntimeLockId(keyword::RuntimeLockId), - RuntimeTask(keyword::RuntimeTask), + RuntimeCall(keyword::RuntimeCall), + RuntimeEvent(keyword::RuntimeEvent), + RuntimeError(keyword::RuntimeError), + RuntimeOrigin(keyword::RuntimeOrigin), + RuntimeFreezeReason(keyword::RuntimeFreezeReason), + RuntimeHoldReason(keyword::RuntimeHoldReason), + RuntimeSlashReason(keyword::RuntimeSlashReason), + RuntimeLockId(keyword::RuntimeLockId), + RuntimeTask(keyword::RuntimeTask), } impl Parse for RuntimeType { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); - if lookahead.peek(keyword::RuntimeCall) { - Ok(Self::RuntimeCall(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeEvent) { - Ok(Self::RuntimeEvent(input.parse()?)) - 
} else if lookahead.peek(keyword::RuntimeError) { - Ok(Self::RuntimeError(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeOrigin) { - Ok(Self::RuntimeOrigin(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeFreezeReason) { - Ok(Self::RuntimeFreezeReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeHoldReason) { - Ok(Self::RuntimeHoldReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeSlashReason) { - Ok(Self::RuntimeSlashReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeLockId) { - Ok(Self::RuntimeLockId(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeTask) { - Ok(Self::RuntimeTask(input.parse()?)) - } else { - Err(lookahead.error()) - } - } + if lookahead.peek(keyword::RuntimeCall) { + Ok(Self::RuntimeCall(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeEvent) { + Ok(Self::RuntimeEvent(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeError) { + Ok(Self::RuntimeError(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeOrigin) { + Ok(Self::RuntimeOrigin(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeFreezeReason) { + Ok(Self::RuntimeFreezeReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeHoldReason) { + Ok(Self::RuntimeHoldReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeSlashReason) { + Ok(Self::RuntimeSlashReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeLockId) { + Ok(Self::RuntimeLockId(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeTask) { + Ok(Self::RuntimeTask(input.parse()?)) + } else { + Err(lookahead.error()) + } + } } diff --git a/support/procedural-fork/src/storage_alias.rs b/support/procedural-fork/src/storage_alias.rs index 7099239f9..06f62768f 100644 --- a/support/procedural-fork/src/storage_alias.rs +++ b/support/procedural-fork/src/storage_alias.rs @@ -22,688 +22,655 @@ use frame_support_procedural_tools::generate_access_from_frame_or_crate; use 
proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens}; use syn::{ - parenthesized, - parse::{Parse, ParseStream}, - punctuated::Punctuated, - spanned::Spanned, - token, - visit::Visit, - Attribute, Error, Ident, Result, Token, Type, TypeParam, Visibility, WhereClause, + parenthesized, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned, + token, + visit::Visit, + Attribute, Error, Ident, Result, Token, Type, TypeParam, Visibility, WhereClause, }; /// Extension trait for [`Type`]. trait TypeExt { - fn get_ident(&self) -> Option<&Ident>; - fn contains_ident(&self, ident: &Ident) -> bool; + fn get_ident(&self) -> Option<&Ident>; + fn contains_ident(&self, ident: &Ident) -> bool; } impl TypeExt for Type { - fn get_ident(&self) -> Option<&Ident> { - match self { - Type::Path(p) => match &p.qself { - Some(qself) => qself.ty.get_ident(), - None => p.path.get_ident(), - }, - _ => None, - } - } - - fn contains_ident(&self, ident: &Ident) -> bool { - struct ContainsIdent<'a> { - ident: &'a Ident, - found: bool, - } - impl<'a, 'ast> Visit<'ast> for ContainsIdent<'a> { - fn visit_ident(&mut self, i: &'ast Ident) { - if i == self.ident { - self.found = true; - } - } - } - - let mut visitor = ContainsIdent { - ident, - found: false, - }; - syn::visit::visit_type(&mut visitor, self); - visitor.found - } + fn get_ident(&self) -> Option<&Ident> { + match self { + Type::Path(p) => match &p.qself { + Some(qself) => qself.ty.get_ident(), + None => p.path.get_ident(), + }, + _ => None, + } + } + + fn contains_ident(&self, ident: &Ident) -> bool { + struct ContainsIdent<'a> { + ident: &'a Ident, + found: bool, + } + impl<'a, 'ast> Visit<'ast> for ContainsIdent<'a> { + fn visit_ident(&mut self, i: &'ast Ident) { + if i == self.ident { + self.found = true; + } + } + } + + let mut visitor = ContainsIdent { ident, found: false }; + syn::visit::visit_type(&mut visitor, self); + visitor.found + } } /// Represents generics which only support [`TypeParam`] 
separated by commas. struct SimpleGenerics { - lt_token: Token![<], - params: Punctuated, - gt_token: Token![>], + lt_token: Token![<], + params: Punctuated, + gt_token: Token![>], } impl SimpleGenerics { - /// Returns the generics for types declarations etc. - fn type_generics(&self) -> impl Iterator { - self.params.iter().map(|p| &p.ident) - } - - /// Returns the generics for the `impl` block. - fn impl_generics(&self) -> impl Iterator { - self.params.iter() - } + /// Returns the generics for types declarations etc. + fn type_generics(&self) -> impl Iterator { + self.params.iter().map(|p| &p.ident) + } + + /// Returns the generics for the `impl` block. + fn impl_generics(&self) -> impl Iterator { + self.params.iter() + } } impl Parse for SimpleGenerics { - fn parse(input: ParseStream<'_>) -> Result { - Ok(Self { - lt_token: input.parse()?, - params: Punctuated::parse_separated_nonempty(input)?, - gt_token: input.parse()?, - }) - } + fn parse(input: ParseStream<'_>) -> Result { + Ok(Self { + lt_token: input.parse()?, + params: Punctuated::parse_separated_nonempty(input)?, + gt_token: input.parse()?, + }) + } } impl ToTokens for SimpleGenerics { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.lt_token.to_tokens(tokens); - self.params.to_tokens(tokens); - self.gt_token.to_tokens(tokens); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lt_token.to_tokens(tokens); + self.params.to_tokens(tokens); + self.gt_token.to_tokens(tokens); + } } mod storage_types { - syn::custom_keyword!(StorageValue); - syn::custom_keyword!(StorageMap); - syn::custom_keyword!(CountedStorageMap); - syn::custom_keyword!(StorageDoubleMap); - syn::custom_keyword!(StorageNMap); + syn::custom_keyword!(StorageValue); + syn::custom_keyword!(StorageMap); + syn::custom_keyword!(CountedStorageMap); + syn::custom_keyword!(StorageDoubleMap); + syn::custom_keyword!(StorageNMap); } /// The types of prefixes the storage alias macro supports. 
mod prefix_types { - // Use the verbatim/unmodified input name as the prefix. - syn::custom_keyword!(verbatim); - // The input type is a pallet and its pallet name should be used as the prefix. - syn::custom_keyword!(pallet_name); - // The input type implements `Get<'static str>` and this `str` should be used as the prefix. - syn::custom_keyword!(dynamic); + // Use the verbatim/unmodified input name as the prefix. + syn::custom_keyword!(verbatim); + // The input type is a pallet and its pallet name should be used as the prefix. + syn::custom_keyword!(pallet_name); + // The input type implements `Get<'static str>` and this `str` should be used as the prefix. + syn::custom_keyword!(dynamic); } /// The supported storage types enum StorageType { - Value { - _kw: storage_types::StorageValue, - _lt_token: Token![<], - prefix: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - Map { - _kw: storage_types::StorageMap, - _lt_token: Token![<], - prefix: Type, - _hasher_comma: Token![,], - hasher_ty: Type, - _key_comma: Token![,], - key_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - CountedMap { - _kw: storage_types::CountedStorageMap, - _lt_token: Token![<], - prefix: Type, - _hasher_comma: Token![,], - hasher_ty: Type, - _key_comma: Token![,], - key_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - DoubleMap { - _kw: storage_types::StorageDoubleMap, - _lt_token: Token![<], - prefix: Type, - _hasher1_comma: Token![,], - hasher1_ty: Type, - _key1_comma: Token![,], - key1_ty: Type, - _hasher2_comma: Token![,], - hasher2_ty: Type, - _key2_comma: Token![,], - key2_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - 
_trailing_comma: Option, - _gt_token: Token![>], - }, - NMap { - _kw: storage_types::StorageNMap, - _lt_token: Token![<], - prefix: Type, - _paren_comma: Token![,], - _paren_token: token::Paren, - key_types: Punctuated, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, + Value { + _kw: storage_types::StorageValue, + _lt_token: Token![<], + prefix: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + Map { + _kw: storage_types::StorageMap, + _lt_token: Token![<], + prefix: Type, + _hasher_comma: Token![,], + hasher_ty: Type, + _key_comma: Token![,], + key_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + CountedMap { + _kw: storage_types::CountedStorageMap, + _lt_token: Token![<], + prefix: Type, + _hasher_comma: Token![,], + hasher_ty: Type, + _key_comma: Token![,], + key_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + DoubleMap { + _kw: storage_types::StorageDoubleMap, + _lt_token: Token![<], + prefix: Type, + _hasher1_comma: Token![,], + hasher1_ty: Type, + _key1_comma: Token![,], + key1_ty: Type, + _hasher2_comma: Token![,], + hasher2_ty: Type, + _key2_comma: Token![,], + key2_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + NMap { + _kw: storage_types::StorageNMap, + _lt_token: Token![<], + prefix: Type, + _paren_comma: Token![,], + _paren_token: token::Paren, + key_types: Punctuated, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, } impl StorageType { - /// Generate the 
actual type declaration. - fn generate_type_declaration( - &self, - crate_: &syn::Path, - storage_instance: &StorageInstance, - storage_name: &Ident, - storage_generics: Option<&SimpleGenerics>, - visibility: &Visibility, - attributes: &[Attribute], - ) -> TokenStream { - let storage_instance_generics = &storage_instance.generics; - let storage_instance = &storage_instance.name; - let attributes = attributes.iter(); - let storage_generics = storage_generics.map(|g| { - let generics = g.type_generics(); - - quote!( < #( #generics ),* > ) - }); - - match self { - Self::Value { - value_ty, - query_type, - .. - } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageValue< - #storage_instance #storage_instance_generics, - #value_ty - #query_type - >; - } - } - Self::CountedMap { - value_ty, - query_type, - hasher_ty, - key_ty, - .. - } - | Self::Map { - value_ty, - query_type, - hasher_ty, - key_ty, - .. - } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - let map_type = Ident::new( - match self { - Self::Map { .. } => "StorageMap", - _ => "CountedStorageMap", - }, - Span::call_site(), - ); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::#map_type< - #storage_instance #storage_instance_generics, - #hasher_ty, - #key_ty, - #value_ty - #query_type - >; - } - } - Self::DoubleMap { - value_ty, - query_type, - hasher1_ty, - key1_ty, - hasher2_ty, - key2_ty, - .. - } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - - quote! 
{ - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageDoubleMap< - #storage_instance #storage_instance_generics, - #hasher1_ty, - #key1_ty, - #hasher2_ty, - #key2_ty, - #value_ty - #query_type - >; - } - } - Self::NMap { - value_ty, - query_type, - key_types, - .. - } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - let key_types = key_types.iter(); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageNMap< - #storage_instance #storage_instance_generics, - ( #( #key_types ),* ), - #value_ty - #query_type - >; - } - } - } - } - - /// The prefix for this storage type. - fn prefix(&self) -> &Type { - match self { - Self::Value { prefix, .. } - | Self::Map { prefix, .. } - | Self::CountedMap { prefix, .. } - | Self::NMap { prefix, .. } - | Self::DoubleMap { prefix, .. } => prefix, - } - } + /// Generate the actual type declaration. + fn generate_type_declaration( + &self, + crate_: &syn::Path, + storage_instance: &StorageInstance, + storage_name: &Ident, + storage_generics: Option<&SimpleGenerics>, + visibility: &Visibility, + attributes: &[Attribute], + ) -> TokenStream { + let storage_instance_generics = &storage_instance.generics; + let storage_instance = &storage_instance.name; + let attributes = attributes.iter(); + let storage_generics = storage_generics.map(|g| { + let generics = g.type_generics(); + + quote!( < #( #generics ),* > ) + }); + + match self { + Self::Value { value_ty, query_type, .. } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageValue< + #storage_instance #storage_instance_generics, + #value_ty + #query_type + >; + } + }, + Self::CountedMap { value_ty, query_type, hasher_ty, key_ty, .. } | + Self::Map { value_ty, query_type, hasher_ty, key_ty, .. 
} => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + let map_type = Ident::new( + match self { + Self::Map { .. } => "StorageMap", + _ => "CountedStorageMap", + }, + Span::call_site(), + ); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::#map_type< + #storage_instance #storage_instance_generics, + #hasher_ty, + #key_ty, + #value_ty + #query_type + >; + } + }, + Self::DoubleMap { + value_ty, + query_type, + hasher1_ty, + key1_ty, + hasher2_ty, + key2_ty, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageDoubleMap< + #storage_instance #storage_instance_generics, + #hasher1_ty, + #key1_ty, + #hasher2_ty, + #key2_ty, + #value_ty + #query_type + >; + } + }, + Self::NMap { value_ty, query_type, key_types, .. } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + let key_types = key_types.iter(); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageNMap< + #storage_instance #storage_instance_generics, + ( #( #key_types ),* ), + #value_ty + #query_type + >; + } + }, + } + } + + /// The prefix for this storage type. + fn prefix(&self) -> &Type { + match self { + Self::Value { prefix, .. } | + Self::Map { prefix, .. } | + Self::CountedMap { prefix, .. } | + Self::NMap { prefix, .. } | + Self::DoubleMap { prefix, .. 
} => prefix, + } + } } impl Parse for StorageType { - fn parse(input: ParseStream<'_>) -> Result { - let lookahead = input.lookahead1(); - - let parse_query_type = |input: ParseStream<'_>| -> Result> { - if input.peek(Token![,]) && !input.peek2(Token![>]) { - Ok(Some((input.parse()?, input.parse()?))) - } else { - Ok(None) - } - }; - - if lookahead.peek(storage_types::StorageValue) { - Ok(Self::Value { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageMap) { - Ok(Self::Map { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher_comma: input.parse()?, - hasher_ty: input.parse()?, - _key_comma: input.parse()?, - key_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::CountedStorageMap) { - Ok(Self::CountedMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher_comma: input.parse()?, - hasher_ty: input.parse()?, - _key_comma: input.parse()?, - key_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageDoubleMap) { - Ok(Self::DoubleMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher1_comma: input.parse()?, - hasher1_ty: input.parse()?, - _key1_comma: input.parse()?, - key1_ty: input.parse()?, - _hasher2_comma: input.parse()?, - 
hasher2_ty: input.parse()?, - _key2_comma: input.parse()?, - key2_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageNMap) { - let content; - Ok(Self::NMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _paren_comma: input.parse()?, - _paren_token: parenthesized!(content in input), - key_types: Punctuated::parse_terminated(&content)?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream<'_>) -> Result { + let lookahead = input.lookahead1(); + + let parse_query_type = |input: ParseStream<'_>| -> Result> { + if input.peek(Token![,]) && !input.peek2(Token![>]) { + Ok(Some((input.parse()?, input.parse()?))) + } else { + Ok(None) + } + }; + + if lookahead.peek(storage_types::StorageValue) { + Ok(Self::Value { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageMap) { + Ok(Self::Map { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher_comma: input.parse()?, + hasher_ty: input.parse()?, + _key_comma: input.parse()?, + key_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if 
lookahead.peek(storage_types::CountedStorageMap) { + Ok(Self::CountedMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher_comma: input.parse()?, + hasher_ty: input.parse()?, + _key_comma: input.parse()?, + key_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageDoubleMap) { + Ok(Self::DoubleMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher1_comma: input.parse()?, + hasher1_ty: input.parse()?, + _key1_comma: input.parse()?, + key1_ty: input.parse()?, + _hasher2_comma: input.parse()?, + hasher2_ty: input.parse()?, + _key2_comma: input.parse()?, + key2_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageNMap) { + let content; + Ok(Self::NMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _paren_comma: input.parse()?, + _paren_token: parenthesized!(content in input), + key_types: Punctuated::parse_terminated(&content)?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else { + Err(lookahead.error()) + } + } } /// The input expected by this macro. 
struct Input { - attributes: Vec, - visibility: Visibility, - _type: Token![type], - storage_name: Ident, - storage_generics: Option, - where_clause: Option, - _equal: Token![=], - storage_type: StorageType, - _semicolon: Token![;], + attributes: Vec, + visibility: Visibility, + _type: Token![type], + storage_name: Ident, + storage_generics: Option, + where_clause: Option, + _equal: Token![=], + storage_type: StorageType, + _semicolon: Token![;], } impl Parse for Input { - fn parse(input: ParseStream<'_>) -> Result { - let attributes = input.call(Attribute::parse_outer)?; - let visibility = input.parse()?; - let _type = input.parse()?; - let storage_name = input.parse()?; - - let lookahead = input.lookahead1(); - let storage_generics = if lookahead.peek(Token![<]) { - Some(input.parse()?) - } else if lookahead.peek(Token![=]) { - None - } else { - return Err(lookahead.error()); - }; - - let lookahead = input.lookahead1(); - let where_clause = if lookahead.peek(Token![where]) { - Some(input.parse()?) - } else if lookahead.peek(Token![=]) { - None - } else { - return Err(lookahead.error()); - }; - - let _equal = input.parse()?; - - let storage_type = input.parse()?; - - let _semicolon = input.parse()?; - - Ok(Self { - attributes, - visibility, - _type, - storage_name, - storage_generics, - _equal, - storage_type, - where_clause, - _semicolon, - }) - } + fn parse(input: ParseStream<'_>) -> Result { + let attributes = input.call(Attribute::parse_outer)?; + let visibility = input.parse()?; + let _type = input.parse()?; + let storage_name = input.parse()?; + + let lookahead = input.lookahead1(); + let storage_generics = if lookahead.peek(Token![<]) { + Some(input.parse()?) + } else if lookahead.peek(Token![=]) { + None + } else { + return Err(lookahead.error()) + }; + + let lookahead = input.lookahead1(); + let where_clause = if lookahead.peek(Token![where]) { + Some(input.parse()?) 
+ } else if lookahead.peek(Token![=]) { + None + } else { + return Err(lookahead.error()) + }; + + let _equal = input.parse()?; + + let storage_type = input.parse()?; + + let _semicolon = input.parse()?; + + Ok(Self { + attributes, + visibility, + _type, + storage_name, + storage_generics, + _equal, + storage_type, + where_clause, + _semicolon, + }) + } } /// Defines which type of prefix the storage alias is using. #[derive(Clone, Copy)] enum PrefixType { - /// An appropriate prefix will be determined automatically. - /// - /// If generics are passed, this is assumed to be a pallet and the pallet name should be used. - /// Otherwise use the verbatim passed name as prefix. - Compatibility, - /// The provided ident/name will be used as the prefix. - Verbatim, - /// The provided type will be used to determine the prefix. This type must - /// implement `PalletInfoAccess` which specifies the proper name. This - /// name is then used as the prefix. - PalletName, - /// Uses the provided type implementing `Get<'static str>` to determine the prefix. - Dynamic, + /// An appropriate prefix will be determined automatically. + /// + /// If generics are passed, this is assumed to be a pallet and the pallet name should be used. + /// Otherwise use the verbatim passed name as prefix. + Compatibility, + /// The provided ident/name will be used as the prefix. + Verbatim, + /// The provided type will be used to determine the prefix. This type must + /// implement `PalletInfoAccess` which specifies the proper name. This + /// name is then used as the prefix. + PalletName, + /// Uses the provided type implementing `Get<'static str>` to determine the prefix. + Dynamic, } /// Implementation of the `storage_alias` attribute macro. 
pub fn storage_alias(attributes: TokenStream, input: TokenStream) -> Result { - let input = syn::parse2::(input)?; - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - - let prefix_type = if attributes.is_empty() { - PrefixType::Compatibility - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::Verbatim - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::PalletName - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::Dynamic - } else { - return Err(Error::new(attributes.span(), "Unknown attributes")); - }; - - let storage_instance = generate_storage_instance( - &crate_, - &input.storage_name, - input.storage_generics.as_ref(), - input.where_clause.as_ref(), - input.storage_type.prefix(), - &input.visibility, - matches!(input.storage_type, StorageType::CountedMap { .. }), - prefix_type, - )?; - - let definition = input.storage_type.generate_type_declaration( - &crate_, - &storage_instance, - &input.storage_name, - input.storage_generics.as_ref(), - &input.visibility, - &input.attributes, - ); - - let storage_instance_code = storage_instance.code; - - Ok(quote! { - #storage_instance_code - - #definition - }) + let input = syn::parse2::(input)?; + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + + let prefix_type = if attributes.is_empty() { + PrefixType::Compatibility + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::Verbatim + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::PalletName + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::Dynamic + } else { + return Err(Error::new(attributes.span(), "Unknown attributes")) + }; + + let storage_instance = generate_storage_instance( + &crate_, + &input.storage_name, + input.storage_generics.as_ref(), + input.where_clause.as_ref(), + input.storage_type.prefix(), + &input.visibility, + matches!(input.storage_type, StorageType::CountedMap { .. 
}), + prefix_type, + )?; + + let definition = input.storage_type.generate_type_declaration( + &crate_, + &storage_instance, + &input.storage_name, + input.storage_generics.as_ref(), + &input.visibility, + &input.attributes, + ); + + let storage_instance_code = storage_instance.code; + + Ok(quote! { + #storage_instance_code + + #definition + }) } /// The storage instance to use for the storage alias. struct StorageInstance { - name: Ident, - generics: TokenStream, - code: TokenStream, + name: Ident, + generics: TokenStream, + code: TokenStream, } /// Generate the [`StorageInstance`] for the storage alias. fn generate_storage_instance( - crate_: &syn::Path, - storage_name: &Ident, - storage_generics: Option<&SimpleGenerics>, - storage_where_clause: Option<&WhereClause>, - prefix: &Type, - visibility: &Visibility, - is_counted_map: bool, - prefix_type: PrefixType, + crate_: &syn::Path, + storage_name: &Ident, + storage_generics: Option<&SimpleGenerics>, + storage_where_clause: Option<&WhereClause>, + prefix: &Type, + visibility: &Visibility, + is_counted_map: bool, + prefix_type: PrefixType, ) -> Result { - if let Type::Infer(_) = prefix { - return Err(Error::new( - prefix.span(), - "`_` is not allowed as prefix by `storage_alias`.", - )); - } - - let impl_generics_used_by_prefix = storage_generics - .as_ref() - .map(|g| { - g.impl_generics() - .filter(|g| prefix.contains_ident(&g.ident)) - .collect::>() - }) - .unwrap_or_default(); - - let (pallet_prefix, impl_generics, type_generics) = match prefix_type { - PrefixType::Compatibility => { - if !impl_generics_used_by_prefix.is_empty() { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! 
{ - < #prefix as #crate_::traits::PalletInfoAccess>::name() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - } else if let Some(prefix) = prefix.get_ident() { - let prefix_str = prefix.to_string(); - - (quote!(#prefix_str), quote!(), quote!()) - } else { - return Err(Error::new_spanned( - prefix, - "If there are no generics, the prefix is only allowed to be an identifier.", - )); - } - } - PrefixType::Verbatim => { - let prefix_str = match prefix.get_ident() { - Some(p) => p.to_string(), - None => { - return Err(Error::new_spanned( - prefix, - "Prefix type `verbatim` requires that the prefix is an ident.", - )) - } - }; - - (quote!(#prefix_str), quote!(), quote!()) - } - PrefixType::PalletName => { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! { - <#prefix as #crate_::traits::PalletInfoAccess>::name() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - } - PrefixType::Dynamic => { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! { - <#prefix as #crate_::traits::Get<_>>::get() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - } - }; - - let where_clause = storage_where_clause.map(|w| quote!(#w)).unwrap_or_default(); - - let name_str = format!("{}_Storage_Instance", storage_name); - let name = Ident::new(&name_str, Span::call_site()); - let storage_name_str = storage_name.to_string(); - - let counter_code = is_counted_map.then(|| { - let counter_name = Ident::new(&counter_prefix(&name_str), Span::call_site()); - let counter_storage_name_str = counter_prefix(&storage_name_str); - let storage_prefix_hash = helper::two128_str(&counter_storage_name_str); - - quote! 
{ - #visibility struct #counter_name< #impl_generics >( - ::core::marker::PhantomData<(#type_generics)> - ) #where_clause; - - impl<#impl_generics> #crate_::traits::StorageInstance - for #counter_name< #type_generics > #where_clause - { - fn pallet_prefix() -> &'static str { - #pallet_prefix - } - - const STORAGE_PREFIX: &'static str = #counter_storage_name_str; - fn storage_prefix_hash() -> [u8; 16] { - #storage_prefix_hash - } - } - - impl<#impl_generics> #crate_::storage::types::CountedStorageMapInstance - for #name< #type_generics > #where_clause - { - type CounterPrefix = #counter_name < #type_generics >; - } - } - }); - - let storage_prefix_hash = helper::two128_str(&storage_name_str); - - // Implement `StorageInstance` trait. - let code = quote! { - #[allow(non_camel_case_types)] - #visibility struct #name< #impl_generics >( - ::core::marker::PhantomData<(#type_generics)> - ) #where_clause; - - impl<#impl_generics> #crate_::traits::StorageInstance - for #name< #type_generics > #where_clause - { - fn pallet_prefix() -> &'static str { - #pallet_prefix - } - - const STORAGE_PREFIX: &'static str = #storage_name_str; - fn storage_prefix_hash() -> [u8; 16] { - #storage_prefix_hash - } - } - - #counter_code - }; - - Ok(StorageInstance { - name, - code, - generics: quote!( < #type_generics > ), - }) + if let Type::Infer(_) = prefix { + return Err(Error::new(prefix.span(), "`_` is not allowed as prefix by `storage_alias`.")) + } + + let impl_generics_used_by_prefix = storage_generics + .as_ref() + .map(|g| { + g.impl_generics() + .filter(|g| prefix.contains_ident(&g.ident)) + .collect::>() + }) + .unwrap_or_default(); + + let (pallet_prefix, impl_generics, type_generics) = match prefix_type { + PrefixType::Compatibility => + if !impl_generics_used_by_prefix.is_empty() { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! 
{ + < #prefix as #crate_::traits::PalletInfoAccess>::name() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + } else if let Some(prefix) = prefix.get_ident() { + let prefix_str = prefix.to_string(); + + (quote!(#prefix_str), quote!(), quote!()) + } else { + return Err(Error::new_spanned( + prefix, + "If there are no generics, the prefix is only allowed to be an identifier.", + )) + }, + PrefixType::Verbatim => { + let prefix_str = match prefix.get_ident() { + Some(p) => p.to_string(), + None => + return Err(Error::new_spanned( + prefix, + "Prefix type `verbatim` requires that the prefix is an ident.", + )), + }; + + (quote!(#prefix_str), quote!(), quote!()) + }, + PrefixType::PalletName => { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! { + <#prefix as #crate_::traits::PalletInfoAccess>::name() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + }, + PrefixType::Dynamic => { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! { + <#prefix as #crate_::traits::Get<_>>::get() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + }, + }; + + let where_clause = storage_where_clause.map(|w| quote!(#w)).unwrap_or_default(); + + let name_str = format!("{}_Storage_Instance", storage_name); + let name = Ident::new(&name_str, Span::call_site()); + let storage_name_str = storage_name.to_string(); + + let counter_code = is_counted_map.then(|| { + let counter_name = Ident::new(&counter_prefix(&name_str), Span::call_site()); + let counter_storage_name_str = counter_prefix(&storage_name_str); + let storage_prefix_hash = helper::two128_str(&counter_storage_name_str); + + quote! 
{ + #visibility struct #counter_name< #impl_generics >( + ::core::marker::PhantomData<(#type_generics)> + ) #where_clause; + + impl<#impl_generics> #crate_::traits::StorageInstance + for #counter_name< #type_generics > #where_clause + { + fn pallet_prefix() -> &'static str { + #pallet_prefix + } + + const STORAGE_PREFIX: &'static str = #counter_storage_name_str; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + + impl<#impl_generics> #crate_::storage::types::CountedStorageMapInstance + for #name< #type_generics > #where_clause + { + type CounterPrefix = #counter_name < #type_generics >; + } + } + }); + + let storage_prefix_hash = helper::two128_str(&storage_name_str); + + // Implement `StorageInstance` trait. + let code = quote! { + #[allow(non_camel_case_types)] + #visibility struct #name< #impl_generics >( + ::core::marker::PhantomData<(#type_generics)> + ) #where_clause; + + impl<#impl_generics> #crate_::traits::StorageInstance + for #name< #type_generics > #where_clause + { + fn pallet_prefix() -> &'static str { + #pallet_prefix + } + + const STORAGE_PREFIX: &'static str = #storage_name_str; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + + #counter_code + }; + + Ok(StorageInstance { name, code, generics: quote!( < #type_generics > ) }) } diff --git a/support/procedural-fork/src/transactional.rs b/support/procedural-fork/src/transactional.rs index 73a841d9b..e9d4f84b7 100644 --- a/support/procedural-fork/src/transactional.rs +++ b/support/procedural-fork/src/transactional.rs @@ -21,50 +21,40 @@ use quote::quote; use syn::{ItemFn, Result}; pub fn transactional(_attr: TokenStream, input: TokenStream) -> Result { - let ItemFn { - attrs, - vis, - sig, - block, - } = syn::parse(input)?; - - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - let output = quote! 
{ - #(#attrs)* - #vis #sig { - use #crate_::storage::{with_transaction, TransactionOutcome}; - with_transaction(|| { - let r = (|| { #block })(); - if r.is_ok() { - TransactionOutcome::Commit(r) - } else { - TransactionOutcome::Rollback(r) - } - }) - } - }; - - Ok(output.into()) + let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let output = quote! { + #(#attrs)* + #vis #sig { + use #crate_::storage::{with_transaction, TransactionOutcome}; + with_transaction(|| { + let r = (|| { #block })(); + if r.is_ok() { + TransactionOutcome::Commit(r) + } else { + TransactionOutcome::Rollback(r) + } + }) + } + }; + + Ok(output.into()) } pub fn require_transactional(_attr: TokenStream, input: TokenStream) -> Result { - let ItemFn { - attrs, - vis, - sig, - block, - } = syn::parse(input)?; - - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - let output = quote! { - #(#attrs)* - #vis #sig { - if !#crate_::storage::transactional::is_transactional() { - return Err(#crate_::sp_runtime::TransactionalError::NoLayer.into()); - } - #block - } - }; - - Ok(output.into()) + let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let output = quote! 
{ + #(#attrs)* + #vis #sig { + if !#crate_::storage::transactional::is_transactional() { + return Err(#crate_::sp_runtime::TransactionalError::NoLayer.into()); + } + #block + } + }; + + Ok(output.into()) } diff --git a/support/procedural-fork/src/tt_macro.rs b/support/procedural-fork/src/tt_macro.rs index 3f280013f..d37127421 100644 --- a/support/procedural-fork/src/tt_macro.rs +++ b/support/procedural-fork/src/tt_macro.rs @@ -22,29 +22,29 @@ use proc_macro2::{Ident, TokenStream}; use quote::format_ident; struct CreateTtReturnMacroDef { - name: Ident, - args: Vec<(Ident, TokenStream)>, + name: Ident, + args: Vec<(Ident, TokenStream)>, } impl syn::parse::Parse for CreateTtReturnMacroDef { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let name = input.parse()?; - let _ = input.parse::()?; + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let name = input.parse()?; + let _ = input.parse::()?; - let mut args = Vec::new(); - while !input.is_empty() { - let mut value; - let key: Ident = input.parse()?; - let _ = input.parse::()?; - let _: syn::token::Bracket = syn::bracketed!(value in input); - let _: syn::token::Brace = syn::braced!(value in value); - let value: TokenStream = value.parse()?; + let mut args = Vec::new(); + while !input.is_empty() { + let mut value; + let key: Ident = input.parse()?; + let _ = input.parse::()?; + let _: syn::token::Bracket = syn::bracketed!(value in input); + let _: syn::token::Brace = syn::braced!(value in value); + let value: TokenStream = value.parse()?; - args.push((key, value)) - } + args.push((key, value)) + } - Ok(Self { name, args }) - } + Ok(Self { name, args }) + } } /// A proc macro that accepts a name and any number of key-value pairs, to be used to create a @@ -74,32 +74,32 @@ impl syn::parse::Parse for CreateTtReturnMacroDef { /// } /// ``` pub fn create_tt_return_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let CreateTtReturnMacroDef { name, args } = - 
syn::parse_macro_input!(input as CreateTtReturnMacroDef); + let CreateTtReturnMacroDef { name, args } = + syn::parse_macro_input!(input as CreateTtReturnMacroDef); - let (keys, values): (Vec<_>, Vec<_>) = args.into_iter().unzip(); - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let unique_name = format_ident!("{}_{}", name, count); + let (keys, values): (Vec<_>, Vec<_>) = args.into_iter().unzip(); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let unique_name = format_ident!("{}_{}", name, count); - let decl_macro = quote::quote! { - #[macro_export] - #[doc(hidden)] - macro_rules! #unique_name { - { - $caller:tt - $(your_tt_return = [{ $my_tt_macro:path }])? - } => { - $my_tt_return! { - $caller - #( - #keys = [{ #values }] - )* - } - } - } + let decl_macro = quote::quote! { + #[macro_export] + #[doc(hidden)] + macro_rules! #unique_name { + { + $caller:tt + $(your_tt_return = [{ $my_tt_macro:path }])? + } => { + $my_tt_return! { + $caller + #( + #keys = [{ #values }] + )* + } + } + } - pub use #unique_name as #name; - }; + pub use #unique_name as #name; + }; - decl_macro.into() + decl_macro.into() } From f0d85ac1f6f381cd097511a66511794f66d3e5ba Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 09:56:43 -0400 Subject: [PATCH 099/213] cargo fmt --all --- support/procedural-fork/src/benchmark.rs | 2117 +++++++++-------- .../src/construct_runtime/expand/call.rs | 393 +-- .../expand/composite_helper.rs | 132 +- .../src/construct_runtime/expand/config.rs | 208 +- .../construct_runtime/expand/freeze_reason.rs | 90 +- .../construct_runtime/expand/hold_reason.rs | 90 +- .../src/construct_runtime/expand/inherent.rs | 459 ++-- .../src/construct_runtime/expand/lock_id.rs | 72 +- .../src/construct_runtime/expand/metadata.rs | 399 ++-- .../src/construct_runtime/expand/origin.rs | 846 +++---- .../construct_runtime/expand/outer_enums.rs | 379 +-- .../construct_runtime/expand/slash_reason.rs | 72 +- 
.../src/construct_runtime/expand/task.rs | 212 +- .../src/construct_runtime/expand/unsigned.rs | 113 +- .../src/construct_runtime/mod.rs | 1010 ++++---- .../src/construct_runtime/parse.rs | 1257 +++++----- support/procedural-fork/src/crate_version.rs | 36 +- support/procedural-fork/src/derive_impl.rs | 385 +-- .../procedural-fork/src/dummy_part_checker.rs | 98 +- support/procedural-fork/src/dynamic_params.rs | 422 ++-- support/procedural-fork/src/key_prefix.rs | 142 +- .../procedural-fork/src/match_and_insert.rs | 244 +- support/procedural-fork/src/no_bound/clone.rs | 162 +- support/procedural-fork/src/no_bound/debug.rs | 186 +- .../procedural-fork/src/no_bound/default.rs | 139 +- support/procedural-fork/src/no_bound/ord.rs | 96 +- .../src/no_bound/partial_eq.rs | 214 +- .../src/no_bound/partial_ord.rs | 119 +- .../procedural-fork/src/pallet/expand/call.rs | 874 +++---- .../src/pallet/expand/composite.rs | 20 +- .../src/pallet/expand/config.rs | 120 +- .../src/pallet/expand/constants.rs | 172 +- .../src/pallet/expand/doc_only.rs | 152 +- .../src/pallet/expand/documentation.rs | 189 +- .../src/pallet/expand/error.rs | 298 +-- .../src/pallet/expand/event.rs | 303 +-- .../src/pallet/expand/genesis_build.rs | 50 +- .../src/pallet/expand/genesis_config.rs | 239 +- .../src/pallet/expand/hooks.rs | 588 ++--- .../src/pallet/expand/inherent.rs | 59 +- .../src/pallet/expand/instances.rs | 32 +- .../procedural-fork/src/pallet/expand/mod.rs | 124 +- .../src/pallet/expand/origin.rs | 59 +- .../src/pallet/expand/pallet_struct.rs | 524 ++-- .../src/pallet/expand/storage.rs | 1415 +++++------ .../src/pallet/expand/tasks.rs | 308 +-- .../src/pallet/expand/tt_default_parts.rs | 394 +-- .../src/pallet/expand/type_value.rs | 90 +- .../src/pallet/expand/validate_unsigned.rs | 60 +- .../src/pallet/expand/warnings.rs | 111 +- support/procedural-fork/src/pallet/mod.rs | 38 +- .../procedural-fork/src/pallet/parse/call.rs | 791 +++--- .../src/pallet/parse/composite.rs | 330 +-- 
.../src/pallet/parse/config.rs | 983 ++++---- .../procedural-fork/src/pallet/parse/error.rs | 148 +- .../procedural-fork/src/pallet/parse/event.rs | 206 +- .../src/pallet/parse/extra_constants.rs | 240 +- .../src/pallet/parse/genesis_build.rs | 69 +- .../src/pallet/parse/genesis_config.rs | 81 +- .../src/pallet/parse/helper.rs | 933 ++++---- .../procedural-fork/src/pallet/parse/hooks.rs | 104 +- .../src/pallet/parse/inherent.rs | 68 +- .../procedural-fork/src/pallet/parse/mod.rs | 1160 ++++----- .../src/pallet/parse/origin.rs | 80 +- .../src/pallet/parse/pallet_struct.rs | 220 +- .../src/pallet/parse/storage.rs | 1749 +++++++------- .../procedural-fork/src/pallet/parse/tasks.rs | 1489 ++++++------ .../src/pallet/parse/tests/mod.rs | 142 +- .../src/pallet/parse/tests/tasks.rs | 372 +-- .../src/pallet/parse/type_value.rs | 176 +- .../src/pallet/parse/validate_unsigned.rs | 64 +- support/procedural-fork/src/pallet_error.rs | 307 +-- .../procedural-fork/src/runtime/expand/mod.rs | 574 ++--- support/procedural-fork/src/runtime/mod.rs | 32 +- .../src/runtime/parse/helper.rs | 29 +- .../procedural-fork/src/runtime/parse/mod.rs | 365 +-- .../src/runtime/parse/pallet.rs | 146 +- .../src/runtime/parse/pallet_decl.rs | 77 +- .../src/runtime/parse/runtime_struct.rs | 25 +- .../src/runtime/parse/runtime_types.rs | 90 +- support/procedural-fork/src/storage_alias.rs | 1211 +++++----- support/procedural-fork/src/transactional.rs | 76 +- support/procedural-fork/src/tt_macro.rs | 82 +- 83 files changed, 14810 insertions(+), 13920 deletions(-) diff --git a/support/procedural-fork/src/benchmark.rs b/support/procedural-fork/src/benchmark.rs index 0a62c3f92..376200d6e 100644 --- a/support/procedural-fork/src/benchmark.rs +++ b/support/procedural-fork/src/benchmark.rs @@ -23,332 +23,369 @@ use proc_macro::TokenStream; use proc_macro2::{Ident, Span, TokenStream as TokenStream2}; use quote::{quote, ToTokens}; use syn::{ - parse::{Nothing, ParseStream}, - parse_quote, - 
punctuated::Punctuated, - spanned::Spanned, - token::{Comma, Gt, Lt, PathSep}, - Attribute, Error, Expr, ExprBlock, ExprCall, ExprPath, FnArg, Item, ItemFn, ItemMod, Pat, Path, - PathArguments, PathSegment, Result, ReturnType, Signature, Stmt, Token, Type, TypePath, - Visibility, WhereClause, + parse::{Nothing, ParseStream}, + parse_quote, + punctuated::Punctuated, + spanned::Spanned, + token::{Comma, Gt, Lt, PathSep}, + Attribute, Error, Expr, ExprBlock, ExprCall, ExprPath, FnArg, Item, ItemFn, ItemMod, Pat, Path, + PathArguments, PathSegment, Result, ReturnType, Signature, Stmt, Token, Type, TypePath, + Visibility, WhereClause, }; mod keywords { - use syn::custom_keyword; - - custom_keyword!(benchmark); - custom_keyword!(benchmarks); - custom_keyword!(block); - custom_keyword!(extra); - custom_keyword!(pov_mode); - custom_keyword!(extrinsic_call); - custom_keyword!(skip_meta); - custom_keyword!(BenchmarkError); - custom_keyword!(Result); - custom_keyword!(MaxEncodedLen); - custom_keyword!(Measured); - custom_keyword!(Ignored); - - pub const BENCHMARK_TOKEN: &str = stringify!(benchmark); - pub const BENCHMARKS_TOKEN: &str = stringify!(benchmarks); + use syn::custom_keyword; + + custom_keyword!(benchmark); + custom_keyword!(benchmarks); + custom_keyword!(block); + custom_keyword!(extra); + custom_keyword!(pov_mode); + custom_keyword!(extrinsic_call); + custom_keyword!(skip_meta); + custom_keyword!(BenchmarkError); + custom_keyword!(Result); + custom_keyword!(MaxEncodedLen); + custom_keyword!(Measured); + custom_keyword!(Ignored); + + pub const BENCHMARK_TOKEN: &str = stringify!(benchmark); + pub const BENCHMARKS_TOKEN: &str = stringify!(benchmarks); } /// This represents the raw parsed data for a param definition such as `x: Linear<10, 20>`. 
#[derive(Clone)] struct ParamDef { - name: String, - _typ: Type, - start: syn::GenericArgument, - end: syn::GenericArgument, + name: String, + _typ: Type, + start: syn::GenericArgument, + end: syn::GenericArgument, } /// Allows easy parsing of the `<10, 20>` component of `x: Linear<10, 20>`. #[derive(Parse)] struct RangeArgs { - _lt_token: Lt, - start: syn::GenericArgument, - _comma: Comma, - end: syn::GenericArgument, - _trailing_comma: Option, - _gt_token: Gt, + _lt_token: Lt, + start: syn::GenericArgument, + _comma: Comma, + end: syn::GenericArgument, + _trailing_comma: Option, + _gt_token: Gt, } #[derive(Clone, Debug)] struct BenchmarkAttrs { - skip_meta: bool, - extra: bool, - pov_mode: Option, + skip_meta: bool, + extra: bool, + pov_mode: Option, } /// Represents a single benchmark option enum BenchmarkAttr { - Extra, - SkipMeta, - /// How the PoV should be measured. - PoV(PovModeAttr), + Extra, + SkipMeta, + /// How the PoV should be measured. + PoV(PovModeAttr), } impl syn::parse::Parse for PovModeAttr { - fn parse(input: ParseStream) -> Result { - let _pov: keywords::pov_mode = input.parse()?; - let _eq: Token![=] = input.parse()?; - let root = PovEstimationMode::parse(input)?; - - let mut maybe_content = None; - let _ = || -> Result<()> { - let content; - syn::braced!(content in input); - maybe_content = Some(content); - Ok(()) - }(); - - let per_key = match maybe_content { - Some(content) => { - let per_key = Punctuated::::parse_terminated(&content)?; - per_key.into_iter().collect() - }, - None => Vec::new(), - }; - - Ok(Self { root, per_key }) - } + fn parse(input: ParseStream) -> Result { + let _pov: keywords::pov_mode = input.parse()?; + let _eq: Token![=] = input.parse()?; + let root = PovEstimationMode::parse(input)?; + + let mut maybe_content = None; + let _ = || -> Result<()> { + let content; + syn::braced!(content in input); + maybe_content = Some(content); + Ok(()) + }(); + + let per_key = match maybe_content { + Some(content) => { + let per_key 
= Punctuated::::parse_terminated(&content)?; + per_key.into_iter().collect() + } + None => Vec::new(), + }; + + Ok(Self { root, per_key }) + } } impl syn::parse::Parse for BenchmarkAttr { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - if lookahead.peek(keywords::extra) { - let _extra: keywords::extra = input.parse()?; - Ok(BenchmarkAttr::Extra) - } else if lookahead.peek(keywords::skip_meta) { - let _skip_meta: keywords::skip_meta = input.parse()?; - Ok(BenchmarkAttr::SkipMeta) - } else if lookahead.peek(keywords::pov_mode) { - PovModeAttr::parse(input).map(BenchmarkAttr::PoV) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::extra) { + let _extra: keywords::extra = input.parse()?; + Ok(BenchmarkAttr::Extra) + } else if lookahead.peek(keywords::skip_meta) { + let _skip_meta: keywords::skip_meta = input.parse()?; + Ok(BenchmarkAttr::SkipMeta) + } else if lookahead.peek(keywords::pov_mode) { + PovModeAttr::parse(input).map(BenchmarkAttr::PoV) + } else { + Err(lookahead.error()) + } + } } /// A `#[pov_mode = .. { .. }]` attribute. #[derive(Debug, Clone)] struct PovModeAttr { - /// The root mode for this benchmarks. - root: PovEstimationMode, - /// The pov-mode for a specific key. This overwrites `root` for this key. - per_key: Vec, + /// The root mode for this benchmarks. + root: PovEstimationMode, + /// The pov-mode for a specific key. This overwrites `root` for this key. + per_key: Vec, } /// A single key-value pair inside the `{}` of a `#[pov_mode = .. { .. }]` attribute. #[derive(Debug, Clone, derive_syn_parse::Parse)] struct PovModeKeyAttr { - /// A specific storage key for which to set the PoV mode. - key: Path, - _underscore: Token![:], - /// The PoV mode for this key. - mode: PovEstimationMode, + /// A specific storage key for which to set the PoV mode. 
+ key: Path, + _underscore: Token![:], + /// The PoV mode for this key. + mode: PovEstimationMode, } /// How the PoV should be estimated. #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum PovEstimationMode { - /// Use the maximal encoded length as provided by [`codec::MaxEncodedLen`]. - MaxEncodedLen, - /// Measure the accessed value size in the pallet benchmarking and add some trie overhead. - Measured, - /// Do not estimate the PoV size for this storage item or benchmark. - Ignored, + /// Use the maximal encoded length as provided by [`codec::MaxEncodedLen`]. + MaxEncodedLen, + /// Measure the accessed value size in the pallet benchmarking and add some trie overhead. + Measured, + /// Do not estimate the PoV size for this storage item or benchmark. + Ignored, } impl syn::parse::Parse for PovEstimationMode { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - if lookahead.peek(keywords::MaxEncodedLen) { - let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; - return Ok(PovEstimationMode::MaxEncodedLen) - } else if lookahead.peek(keywords::Measured) { - let _measured: keywords::Measured = input.parse()?; - return Ok(PovEstimationMode::Measured) - } else if lookahead.peek(keywords::Ignored) { - let _ignored: keywords::Ignored = input.parse()?; - return Ok(PovEstimationMode::Ignored) - } else { - return Err(lookahead.error()) - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::MaxEncodedLen) { + let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; + return Ok(PovEstimationMode::MaxEncodedLen); + } else if lookahead.peek(keywords::Measured) { + let _measured: keywords::Measured = input.parse()?; + return Ok(PovEstimationMode::Measured); + } else if lookahead.peek(keywords::Ignored) { + let _ignored: keywords::Ignored = input.parse()?; + return Ok(PovEstimationMode::Ignored); + } else { + return Err(lookahead.error()); + } + } } impl ToString for 
PovEstimationMode { - fn to_string(&self) -> String { - match self { - PovEstimationMode::MaxEncodedLen => "MaxEncodedLen".into(), - PovEstimationMode::Measured => "Measured".into(), - PovEstimationMode::Ignored => "Ignored".into(), - } - } + fn to_string(&self) -> String { + match self { + PovEstimationMode::MaxEncodedLen => "MaxEncodedLen".into(), + PovEstimationMode::Measured => "Measured".into(), + PovEstimationMode::Ignored => "Ignored".into(), + } + } } impl quote::ToTokens for PovEstimationMode { - fn to_tokens(&self, tokens: &mut TokenStream2) { - match self { - PovEstimationMode::MaxEncodedLen => tokens.extend(quote!(MaxEncodedLen)), - PovEstimationMode::Measured => tokens.extend(quote!(Measured)), - PovEstimationMode::Ignored => tokens.extend(quote!(Ignored)), - } - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + PovEstimationMode::MaxEncodedLen => tokens.extend(quote!(MaxEncodedLen)), + PovEstimationMode::Measured => tokens.extend(quote!(Measured)), + PovEstimationMode::Ignored => tokens.extend(quote!(Ignored)), + } + } } impl syn::parse::Parse for BenchmarkAttrs { - fn parse(input: ParseStream) -> syn::Result { - let mut extra = false; - let mut skip_meta = false; - let mut pov_mode = None; - let args = Punctuated::::parse_terminated(&input)?; - - for arg in args.into_iter() { - match arg { - BenchmarkAttr::Extra => { - if extra { - return Err(input.error("`extra` can only be specified once")) - } - extra = true; - }, - BenchmarkAttr::SkipMeta => { - if skip_meta { - return Err(input.error("`skip_meta` can only be specified once")) - } - skip_meta = true; - }, - BenchmarkAttr::PoV(mode) => { - if pov_mode.is_some() { - return Err(input.error("`pov_mode` can only be specified once")) - } - pov_mode = Some(mode); - }, - } - } - Ok(BenchmarkAttrs { extra, skip_meta, pov_mode }) - } + fn parse(input: ParseStream) -> syn::Result { + let mut extra = false; + let mut skip_meta = false; + let mut pov_mode = None; + let args = 
Punctuated::::parse_terminated(&input)?; + + for arg in args.into_iter() { + match arg { + BenchmarkAttr::Extra => { + if extra { + return Err(input.error("`extra` can only be specified once")); + } + extra = true; + } + BenchmarkAttr::SkipMeta => { + if skip_meta { + return Err(input.error("`skip_meta` can only be specified once")); + } + skip_meta = true; + } + BenchmarkAttr::PoV(mode) => { + if pov_mode.is_some() { + return Err(input.error("`pov_mode` can only be specified once")); + } + pov_mode = Some(mode); + } + } + } + Ok(BenchmarkAttrs { + extra, + skip_meta, + pov_mode, + }) + } } /// Represents the parsed extrinsic call for a benchmark #[derive(Clone)] enum BenchmarkCallDef { - ExtrinsicCall { origin: Expr, expr_call: ExprCall, attr_span: Span }, // #[extrinsic_call] - Block { block: ExprBlock, attr_span: Span }, // #[block] + ExtrinsicCall { + origin: Expr, + expr_call: ExprCall, + attr_span: Span, + }, // #[extrinsic_call] + Block { + block: ExprBlock, + attr_span: Span, + }, // #[block] } impl BenchmarkCallDef { - /// Returns the `span()` for attribute - fn attr_span(&self) -> Span { - match self { - BenchmarkCallDef::ExtrinsicCall { origin: _, expr_call: _, attr_span } => *attr_span, - BenchmarkCallDef::Block { block: _, attr_span } => *attr_span, - } - } + /// Returns the `span()` for attribute + fn attr_span(&self) -> Span { + match self { + BenchmarkCallDef::ExtrinsicCall { + origin: _, + expr_call: _, + attr_span, + } => *attr_span, + BenchmarkCallDef::Block { + block: _, + attr_span, + } => *attr_span, + } + } } /// Represents a parsed `#[benchmark]` or `#[instance_benchmark]` item. 
#[derive(Clone)] struct BenchmarkDef { - params: Vec, - setup_stmts: Vec, - call_def: BenchmarkCallDef, - verify_stmts: Vec, - last_stmt: Option, - fn_sig: Signature, - fn_vis: Visibility, - fn_attrs: Vec, + params: Vec, + setup_stmts: Vec, + call_def: BenchmarkCallDef, + verify_stmts: Vec, + last_stmt: Option, + fn_sig: Signature, + fn_vis: Visibility, + fn_attrs: Vec, } /// used to parse something compatible with `Result` #[derive(Parse)] struct ResultDef { - _result_kw: keywords::Result, - _lt: Token![<], - unit: Type, - _comma: Comma, - e_type: TypePath, - _gt: Token![>], + _result_kw: keywords::Result, + _lt: Token![<], + unit: Type, + _comma: Comma, + e_type: TypePath, + _gt: Token![>], } /// Ensures that `ReturnType` is a `Result<(), BenchmarkError>`, if specified fn ensure_valid_return_type(item_fn: &ItemFn) -> Result<()> { - if let ReturnType::Type(_, typ) = &item_fn.sig.output { - let non_unit = |span| return Err(Error::new(span, "expected `()`")); - let Type::Path(TypePath { path, qself: _ }) = &**typ else { - return Err(Error::new( + if let ReturnType::Type(_, typ) = &item_fn.sig.output { + let non_unit = |span| return Err(Error::new(span, "expected `()`")); + let Type::Path(TypePath { path, qself: _ }) = &**typ else { + return Err(Error::new( typ.span(), "Only `Result<(), BenchmarkError>` or a blank return type is allowed on benchmark function definitions", - )) - }; - let seg = path - .segments - .last() - .expect("to be parsed as a TypePath, it must have at least one segment; qed"); - let res: ResultDef = syn::parse2(seg.to_token_stream())?; - // ensure T in Result is () - let Type::Tuple(tup) = res.unit else { return non_unit(res.unit.span()) }; - if !tup.elems.is_empty() { - return non_unit(tup.span()) - } - let TypePath { path, qself: _ } = res.e_type; - let seg = path - .segments - .last() - .expect("to be parsed as a TypePath, it must have at least one segment; qed"); - syn::parse2::(seg.to_token_stream())?; - } - Ok(()) + )); + }; + let seg = 
path + .segments + .last() + .expect("to be parsed as a TypePath, it must have at least one segment; qed"); + let res: ResultDef = syn::parse2(seg.to_token_stream())?; + // ensure T in Result is () + let Type::Tuple(tup) = res.unit else { + return non_unit(res.unit.span()); + }; + if !tup.elems.is_empty() { + return non_unit(tup.span()); + } + let TypePath { path, qself: _ } = res.e_type; + let seg = path + .segments + .last() + .expect("to be parsed as a TypePath, it must have at least one segment; qed"); + syn::parse2::(seg.to_token_stream())?; + } + Ok(()) } /// Parses params such as `x: Linear<0, 1>` fn parse_params(item_fn: &ItemFn) -> Result> { - let mut params: Vec = Vec::new(); - for arg in &item_fn.sig.inputs { - let invalid_param = |span| { - return Err(Error::new( - span, - "Invalid benchmark function param. A valid example would be `x: Linear<5, 10>`.", - )) - }; - - let FnArg::Typed(arg) = arg else { return invalid_param(arg.span()) }; - let Pat::Ident(ident) = &*arg.pat else { return invalid_param(arg.span()) }; - - // check param name - let var_span = ident.span(); - let invalid_param_name = || { - return Err(Error::new( + let mut params: Vec = Vec::new(); + for arg in &item_fn.sig.inputs { + let invalid_param = |span| { + return Err(Error::new( + span, + "Invalid benchmark function param. 
A valid example would be `x: Linear<5, 10>`.", + )); + }; + + let FnArg::Typed(arg) = arg else { + return invalid_param(arg.span()); + }; + let Pat::Ident(ident) = &*arg.pat else { + return invalid_param(arg.span()); + }; + + // check param name + let var_span = ident.span(); + let invalid_param_name = || { + return Err(Error::new( var_span, "Benchmark parameter names must consist of a single lowercase letter (a-z) and no other characters.", )); - }; - let name = ident.ident.to_token_stream().to_string(); - if name.len() > 1 { - return invalid_param_name() - }; - let Some(name_char) = name.chars().next() else { return invalid_param_name() }; - if !name_char.is_alphabetic() || !name_char.is_lowercase() { - return invalid_param_name() - } - - // parse type - let typ = &*arg.ty; - let Type::Path(tpath) = typ else { return invalid_param(typ.span()) }; - let Some(segment) = tpath.path.segments.last() else { return invalid_param(typ.span()) }; - let args = segment.arguments.to_token_stream().into(); - let Ok(args) = syn::parse::(args) else { return invalid_param(typ.span()) }; - - params.push(ParamDef { name, _typ: typ.clone(), start: args.start, end: args.end }); - } - Ok(params) + }; + let name = ident.ident.to_token_stream().to_string(); + if name.len() > 1 { + return invalid_param_name(); + }; + let Some(name_char) = name.chars().next() else { + return invalid_param_name(); + }; + if !name_char.is_alphabetic() || !name_char.is_lowercase() { + return invalid_param_name(); + } + + // parse type + let typ = &*arg.ty; + let Type::Path(tpath) = typ else { + return invalid_param(typ.span()); + }; + let Some(segment) = tpath.path.segments.last() else { + return invalid_param(typ.span()); + }; + let args = segment.arguments.to_token_stream().into(); + let Ok(args) = syn::parse::(args) else { + return invalid_param(typ.span()); + }; + + params.push(ParamDef { + name, + _typ: typ.clone(), + start: args.start, + end: args.end, + }); + } + Ok(params) } /// Used in several places 
where the `#[extrinsic_call]` or `#[body]` annotation is missing fn missing_call(item_fn: &ItemFn) -> Result { - return Err(Error::new( + return Err(Error::new( item_fn.block.brace_token.span.join(), "No valid #[extrinsic_call] or #[block] annotation could be found in benchmark function body." )); @@ -358,8 +395,8 @@ fn missing_call(item_fn: &ItemFn) -> Result { /// returns them. Also handles parsing errors for invalid / extra call defs. AKA this is /// general handling for `#[extrinsic_call]` and `#[block]` fn parse_call_def(item_fn: &ItemFn) -> Result<(usize, BenchmarkCallDef)> { - // #[extrinsic_call] / #[block] handling - let call_defs = item_fn.block.stmts.iter().enumerate().filter_map(|(i, child)| { + // #[extrinsic_call] / #[block] handling + let call_defs = item_fn.block.stmts.iter().enumerate().filter_map(|(i, child)| { if let Stmt::Expr(Expr::Call(expr_call), _semi) = child { // #[extrinsic_call] case expr_call.attrs.iter().enumerate().find_map(|(k, attr)| { @@ -393,810 +430,850 @@ fn parse_call_def(item_fn: &ItemFn) -> Result<(usize, BenchmarkCallDef)> { None } }).collect::>>()?; - Ok(match &call_defs[..] { - [(i, call_def)] => (*i, call_def.clone()), // = 1 - [] => return missing_call(item_fn), - _ => - return Err(Error::new( - call_defs[1].1.attr_span(), - "Only one #[extrinsic_call] or #[block] attribute is allowed per benchmark.", - )), - }) + Ok(match &call_defs[..] { + [(i, call_def)] => (*i, call_def.clone()), // = 1 + [] => return missing_call(item_fn), + _ => { + return Err(Error::new( + call_defs[1].1.attr_span(), + "Only one #[extrinsic_call] or #[block] attribute is allowed per benchmark.", + )) + } + }) } impl BenchmarkDef { - /// Constructs a [`BenchmarkDef`] by traversing an existing [`ItemFn`] node. 
- pub fn from(item_fn: &ItemFn) -> Result { - let params = parse_params(item_fn)?; - ensure_valid_return_type(item_fn)?; - let (i, call_def) = parse_call_def(&item_fn)?; - - let (verify_stmts, last_stmt) = match item_fn.sig.output { - ReturnType::Default => - // no return type, last_stmt should be None - (Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len()]), None), - ReturnType::Type(_, _) => { - // defined return type, last_stmt should be Result<(), BenchmarkError> - // compatible and should not be included in verify_stmts - if i + 1 >= item_fn.block.stmts.len() { - return Err(Error::new( - item_fn.block.span(), - "Benchmark `#[block]` or `#[extrinsic_call]` item cannot be the \ + /// Constructs a [`BenchmarkDef`] by traversing an existing [`ItemFn`] node. + pub fn from(item_fn: &ItemFn) -> Result { + let params = parse_params(item_fn)?; + ensure_valid_return_type(item_fn)?; + let (i, call_def) = parse_call_def(&item_fn)?; + + let (verify_stmts, last_stmt) = match item_fn.sig.output { + ReturnType::Default => + // no return type, last_stmt should be None + { + ( + Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len()]), + None, + ) + } + ReturnType::Type(_, _) => { + // defined return type, last_stmt should be Result<(), BenchmarkError> + // compatible and should not be included in verify_stmts + if i + 1 >= item_fn.block.stmts.len() { + return Err(Error::new( + item_fn.block.span(), + "Benchmark `#[block]` or `#[extrinsic_call]` item cannot be the \ last statement of your benchmark function definition if you have \ defined a return type. You should return something compatible \ with Result<(), BenchmarkError> (i.e. 
`Ok(())`) as the last statement \ or change your signature to a blank return type.", - )) - } - let Some(stmt) = item_fn.block.stmts.last() else { return missing_call(item_fn) }; - ( - Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len() - 1]), - Some(stmt.clone()), - ) - }, - }; - - Ok(BenchmarkDef { - params, - setup_stmts: Vec::from(&item_fn.block.stmts[0..i]), - call_def, - verify_stmts, - last_stmt, - fn_sig: item_fn.sig.clone(), - fn_vis: item_fn.vis.clone(), - fn_attrs: item_fn.attrs.clone(), - }) - } + )); + } + let Some(stmt) = item_fn.block.stmts.last() else { + return missing_call(item_fn); + }; + ( + Vec::from(&item_fn.block.stmts[(i + 1)..item_fn.block.stmts.len() - 1]), + Some(stmt.clone()), + ) + } + }; + + Ok(BenchmarkDef { + params, + setup_stmts: Vec::from(&item_fn.block.stmts[0..i]), + call_def, + verify_stmts, + last_stmt, + fn_sig: item_fn.sig.clone(), + fn_vis: item_fn.vis.clone(), + fn_attrs: item_fn.attrs.clone(), + }) + } } /// Parses and expands a `#[benchmarks]` or `#[instance_benchmarks]` invocation pub fn benchmarks( - attrs: TokenStream, - tokens: TokenStream, - instance: bool, + attrs: TokenStream, + tokens: TokenStream, + instance: bool, ) -> syn::Result { - let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; - // gather module info - let module: ItemMod = syn::parse(tokens)?; - let mod_span = module.span(); - let where_clause = match syn::parse::(attrs.clone()) { - Ok(_) => quote!(), - Err(_) => syn::parse::(attrs)?.predicates.to_token_stream(), - }; - let mod_vis = module.vis; - let mod_name = module.ident; - - // consume #[benchmarks] attribute by excluding it from mod_attrs - let mod_attrs: Vec<&Attribute> = module - .attrs - .iter() - .filter(|attr| !attr.path().is_ident(keywords::BENCHMARKS_TOKEN)) - .collect(); - - let mut benchmark_names: Vec = Vec::new(); - let mut extra_benchmark_names: Vec = Vec::new(); - let mut skip_meta_benchmark_names: Vec = Vec::new(); - // Map benchmarks to PoV modes. 
- let mut pov_modes = Vec::new(); - - let (_brace, mut content) = - module.content.ok_or(syn::Error::new(mod_span, "Module cannot be empty!"))?; - - // find all function defs marked with #[benchmark] - let benchmark_fn_metas = content.iter_mut().filter_map(|stmt| { - // parse as a function def first - let Item::Fn(func) = stmt else { return None }; - - // find #[benchmark] attribute on function def - let benchmark_attr = - func.attrs.iter().find(|attr| attr.path().is_ident(keywords::BENCHMARK_TOKEN))?; - - Some((benchmark_attr.clone(), func.clone(), stmt)) - }); - - // parse individual benchmark defs and args - for (benchmark_attr, func, stmt) in benchmark_fn_metas { - // parse benchmark def - let benchmark_def = BenchmarkDef::from(&func)?; - - // record benchmark name - let name = &func.sig.ident; - benchmark_names.push(name.clone()); - - // Check if we need to parse any args - if benchmark_attr.meta.require_path_only().is_err() { - // parse any args provided to #[benchmark] - let benchmark_attrs: BenchmarkAttrs = benchmark_attr.parse_args()?; - - // record name sets - if benchmark_attrs.extra { - extra_benchmark_names.push(name.clone()); - } else if benchmark_attrs.skip_meta { - skip_meta_benchmark_names.push(name.clone()); - } - - if let Some(mode) = benchmark_attrs.pov_mode { - let mut modes = Vec::new(); - // We cannot expand strings here since it is no-std, but syn does not expand bytes. 
- let name = name.to_string(); - let m = mode.root.to_string(); - modes.push(quote!(("ALL".as_bytes().to_vec(), #m.as_bytes().to_vec()))); - - for attr in mode.per_key.iter() { - // syn always puts spaces in quoted paths: - let key = attr.key.clone().into_token_stream().to_string().replace(" ", ""); - let mode = attr.mode.to_string(); - modes.push(quote!((#key.as_bytes().to_vec(), #mode.as_bytes().to_vec()))); - } - - pov_modes.push( - quote!((#name.as_bytes().to_vec(), #krate::__private::vec![#(#modes),*])), - ); - } - } - - // expand benchmark - let expanded = expand_benchmark(benchmark_def, name, instance, where_clause.clone()); - - // replace original function def with expanded code - *stmt = Item::Verbatim(expanded); - } - - // generics - let type_use_generics = match instance { - false => quote!(T), - true => quote!(T, I), - }; - let type_impl_generics = match instance { - false => quote!(T: Config), - true => quote!(T: Config, I: 'static), - }; - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - - // benchmark name variables - let benchmark_names_str: Vec = benchmark_names.iter().map(|n| n.to_string()).collect(); - let extra_benchmark_names_str: Vec = - extra_benchmark_names.iter().map(|n| n.to_string()).collect(); - let skip_meta_benchmark_names_str: Vec = - skip_meta_benchmark_names.iter().map(|n| n.to_string()).collect(); - let mut selected_benchmark_mappings: Vec = Vec::new(); - let mut benchmarks_by_name_mappings: Vec = Vec::new(); - let test_idents: Vec = benchmark_names_str - .iter() - .map(|n| Ident::new(format!("test_benchmark_{}", n).as_str(), Span::call_site())) - .collect(); - for i in 0..benchmark_names.len() { - let name_ident = &benchmark_names[i]; - let name_str = &benchmark_names_str[i]; - let test_ident = &test_idents[i]; - selected_benchmark_mappings.push(quote!(#name_str => SelectedBenchmark::#name_ident)); - benchmarks_by_name_mappings.push(quote!(#name_str => Self::#test_ident())) - } - - let 
impl_test_function = content - .iter_mut() - .find_map(|item| { - let Item::Macro(item_macro) = item else { - return None; - }; - - if !item_macro - .mac - .path - .segments - .iter() - .any(|s| s.ident == "impl_benchmark_test_suite") - { - return None; - } - - let tokens = item_macro.mac.tokens.clone(); - *item = Item::Verbatim(quote! {}); - - Some(quote! { - impl_test_function!( - (#( {} #benchmark_names )*) - (#( #extra_benchmark_names )*) - (#( #skip_meta_benchmark_names )*) - #tokens - ); - }) - }) - .unwrap_or(quote! {}); - - // emit final quoted tokens - let res = quote! { - #(#mod_attrs) - * - #mod_vis mod #mod_name { - #(#content) - * - - #[allow(non_camel_case_types)] - enum SelectedBenchmark { - #(#benchmark_names), - * - } - - impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> for SelectedBenchmark where #where_clause { - fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { - match self { - #( - Self::#benchmark_names => { - <#benchmark_names as #krate::BenchmarkingSetup<#type_use_generics>>::components(&#benchmark_names) - } - ) - * - } - } - - fn instance( - &self, - components: &[(#krate::BenchmarkParameter, u32)], - verify: bool, - ) -> Result< - #krate::__private::Box Result<(), #krate::BenchmarkError>>, - #krate::BenchmarkError, - > { - match self { - #( - Self::#benchmark_names => { - <#benchmark_names as #krate::BenchmarkingSetup< - #type_use_generics - >>::instance(&#benchmark_names, components, verify) - } - ) - * - } - } - } - #[cfg(any(feature = "runtime-benchmarks", test))] - impl<#type_impl_generics> #krate::Benchmarking for Pallet<#type_use_generics> - where T: #frame_system::Config, #where_clause - { - fn benchmarks( - extra: bool, - ) -> #krate::__private::Vec<#krate::BenchmarkMetadata> { - let mut all_names = #krate::__private::vec![ - #(#benchmark_names_str), - * - ]; - if !extra { - let extra = [ - #(#extra_benchmark_names_str), - * - ]; - all_names.retain(|x| 
!extra.contains(x)); - } - let pov_modes: - #krate::__private::Vec<( - #krate::__private::Vec, - #krate::__private::Vec<( - #krate::__private::Vec, - #krate::__private::Vec - )>, - )> = #krate::__private::vec![ - #( #pov_modes ),* - ]; - all_names.into_iter().map(|benchmark| { - let selected_benchmark = match benchmark { - #(#selected_benchmark_mappings), - *, - _ => panic!("all benchmarks should be selectable") - }; - let components = >::components(&selected_benchmark); - let name = benchmark.as_bytes().to_vec(); - let modes = pov_modes.iter().find(|p| p.0 == name).map(|p| p.1.clone()); - - #krate::BenchmarkMetadata { - name: benchmark.as_bytes().to_vec(), - components, - pov_modes: modes.unwrap_or_default(), - } - }).collect::<#krate::__private::Vec<_>>() - } - - fn run_benchmark( - extrinsic: &[u8], - c: &[(#krate::BenchmarkParameter, u32)], - whitelist: &[#krate::__private::TrackedStorageKey], - verify: bool, - internal_repeats: u32, - ) -> Result<#krate::__private::Vec<#krate::BenchmarkResult>, #krate::BenchmarkError> { - let extrinsic = #krate::__private::str::from_utf8(extrinsic).map_err(|_| "`extrinsic` is not a valid utf-8 string!")?; - let selected_benchmark = match extrinsic { - #(#selected_benchmark_mappings), - *, - _ => return Err("Could not find extrinsic.".into()), - }; - let mut whitelist = whitelist.to_vec(); - let whitelisted_caller_key = <#frame_system::Account< - T, - > as #krate::__private::storage::StorageMap<_, _,>>::hashed_key_for( - #krate::whitelisted_caller::() - ); - whitelist.push(whitelisted_caller_key.into()); - let transactional_layer_key = #krate::__private::TrackedStorageKey::new( - #krate::__private::storage::transactional::TRANSACTION_LEVEL_KEY.into(), - ); - whitelist.push(transactional_layer_key); - // Whitelist the `:extrinsic_index`. 
- let extrinsic_index = #krate::__private::TrackedStorageKey::new( - #krate::__private::well_known_keys::EXTRINSIC_INDEX.into() - ); - whitelist.push(extrinsic_index); - // Whitelist the `:intrablock_entropy`. - let intrablock_entropy = #krate::__private::TrackedStorageKey::new( - #krate::__private::well_known_keys::INTRABLOCK_ENTROPY.into() - ); - whitelist.push(intrablock_entropy); - - #krate::benchmarking::set_whitelist(whitelist.clone()); - let mut results: #krate::__private::Vec<#krate::BenchmarkResult> = #krate::__private::Vec::new(); - - // Always do at least one internal repeat... - for _ in 0 .. internal_repeats.max(1) { - // Always reset the state after the benchmark. - #krate::__private::defer!(#krate::benchmarking::wipe_db()); - - // Set up the externalities environment for the setup we want to - // benchmark. - let closure_to_benchmark = < - SelectedBenchmark as #krate::BenchmarkingSetup<#type_use_generics> - >::instance(&selected_benchmark, c, verify)?; - - // Set the block number to at least 1 so events are deposited. - if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { - #frame_system::Pallet::::set_block_number(1u32.into()); - } - - // Commit the externalities to the database, flushing the DB cache. - // This will enable worst case scenario for reading from the database. - #krate::benchmarking::commit_db(); - - // Access all whitelisted keys to get them into the proof recorder since the - // recorder does now have a whitelist. - for key in &whitelist { - #krate::__private::storage::unhashed::get_raw(&key.key); - } - - // Reset the read/write counter so we don't count operations in the setup process. - #krate::benchmarking::reset_read_write_count(); - - // Time the extrinsic logic. 
- #krate::__private::log::trace!( - target: "benchmark", - "Start Benchmark: {} ({:?})", - extrinsic, - c - ); - - let start_pov = #krate::benchmarking::proof_size(); - let start_extrinsic = #krate::benchmarking::current_time(); - - closure_to_benchmark()?; - - let finish_extrinsic = #krate::benchmarking::current_time(); - let end_pov = #krate::benchmarking::proof_size(); - - // Calculate the diff caused by the benchmark. - let elapsed_extrinsic = finish_extrinsic.saturating_sub(start_extrinsic); - let diff_pov = match (start_pov, end_pov) { - (Some(start), Some(end)) => end.saturating_sub(start), - _ => Default::default(), - }; - - // Commit the changes to get proper write count - #krate::benchmarking::commit_db(); - #krate::__private::log::trace!( - target: "benchmark", - "End Benchmark: {} ns", elapsed_extrinsic - ); - let read_write_count = #krate::benchmarking::read_write_count(); - #krate::__private::log::trace!( - target: "benchmark", - "Read/Write Count {:?}", read_write_count - ); - - // Time the storage root recalculation. 
- let start_storage_root = #krate::benchmarking::current_time(); - #krate::__private::storage_root(#krate::__private::StateVersion::V1); - let finish_storage_root = #krate::benchmarking::current_time(); - let elapsed_storage_root = finish_storage_root - start_storage_root; - - let skip_meta = [ #(#skip_meta_benchmark_names_str),* ]; - let read_and_written_keys = if skip_meta.contains(&extrinsic) { - #krate::__private::vec![(b"Skipped Metadata".to_vec(), 0, 0, false)] - } else { - #krate::benchmarking::get_read_and_written_keys() - }; - - results.push(#krate::BenchmarkResult { - components: c.to_vec(), - extrinsic_time: elapsed_extrinsic, - storage_root_time: elapsed_storage_root, - reads: read_write_count.0, - repeat_reads: read_write_count.1, - writes: read_write_count.2, - repeat_writes: read_write_count.3, - proof_size: diff_pov, - keys: read_and_written_keys, - }); - } - - return Ok(results); - } - } - - #[cfg(test)] - impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { - /// Test a particular benchmark by name. - /// - /// This isn't called `test_benchmark_by_name` just in case some end-user eventually - /// writes a benchmark, itself called `by_name`; the function would be shadowed in - /// that case. - /// - /// This is generally intended to be used by child test modules such as those created - /// by the `impl_benchmark_test_suite` macro. However, it is not an error if a pallet - /// author chooses not to implement benchmarks. 
- #[allow(unused)] - fn test_bench_by_name(name: &[u8]) -> Result<(), #krate::BenchmarkError> { - let name = #krate::__private::str::from_utf8(name) - .map_err(|_| -> #krate::BenchmarkError { "`name` is not a valid utf8 string!".into() })?; - match name { - #(#benchmarks_by_name_mappings), - *, - _ => Err("Could not find test for requested benchmark.".into()), - } - } - } - - #impl_test_function - } - #mod_vis use #mod_name::*; - }; - Ok(res.into()) + let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; + // gather module info + let module: ItemMod = syn::parse(tokens)?; + let mod_span = module.span(); + let where_clause = match syn::parse::(attrs.clone()) { + Ok(_) => quote!(), + Err(_) => syn::parse::(attrs)? + .predicates + .to_token_stream(), + }; + let mod_vis = module.vis; + let mod_name = module.ident; + + // consume #[benchmarks] attribute by excluding it from mod_attrs + let mod_attrs: Vec<&Attribute> = module + .attrs + .iter() + .filter(|attr| !attr.path().is_ident(keywords::BENCHMARKS_TOKEN)) + .collect(); + + let mut benchmark_names: Vec = Vec::new(); + let mut extra_benchmark_names: Vec = Vec::new(); + let mut skip_meta_benchmark_names: Vec = Vec::new(); + // Map benchmarks to PoV modes. 
+ let mut pov_modes = Vec::new(); + + let (_brace, mut content) = module + .content + .ok_or(syn::Error::new(mod_span, "Module cannot be empty!"))?; + + // find all function defs marked with #[benchmark] + let benchmark_fn_metas = content.iter_mut().filter_map(|stmt| { + // parse as a function def first + let Item::Fn(func) = stmt else { return None }; + + // find #[benchmark] attribute on function def + let benchmark_attr = func + .attrs + .iter() + .find(|attr| attr.path().is_ident(keywords::BENCHMARK_TOKEN))?; + + Some((benchmark_attr.clone(), func.clone(), stmt)) + }); + + // parse individual benchmark defs and args + for (benchmark_attr, func, stmt) in benchmark_fn_metas { + // parse benchmark def + let benchmark_def = BenchmarkDef::from(&func)?; + + // record benchmark name + let name = &func.sig.ident; + benchmark_names.push(name.clone()); + + // Check if we need to parse any args + if benchmark_attr.meta.require_path_only().is_err() { + // parse any args provided to #[benchmark] + let benchmark_attrs: BenchmarkAttrs = benchmark_attr.parse_args()?; + + // record name sets + if benchmark_attrs.extra { + extra_benchmark_names.push(name.clone()); + } else if benchmark_attrs.skip_meta { + skip_meta_benchmark_names.push(name.clone()); + } + + if let Some(mode) = benchmark_attrs.pov_mode { + let mut modes = Vec::new(); + // We cannot expand strings here since it is no-std, but syn does not expand bytes. 
+ let name = name.to_string(); + let m = mode.root.to_string(); + modes.push(quote!(("ALL".as_bytes().to_vec(), #m.as_bytes().to_vec()))); + + for attr in mode.per_key.iter() { + // syn always puts spaces in quoted paths: + let key = attr + .key + .clone() + .into_token_stream() + .to_string() + .replace(" ", ""); + let mode = attr.mode.to_string(); + modes.push(quote!((#key.as_bytes().to_vec(), #mode.as_bytes().to_vec()))); + } + + pov_modes.push( + quote!((#name.as_bytes().to_vec(), #krate::__private::vec![#(#modes),*])), + ); + } + } + + // expand benchmark + let expanded = expand_benchmark(benchmark_def, name, instance, where_clause.clone()); + + // replace original function def with expanded code + *stmt = Item::Verbatim(expanded); + } + + // generics + let type_use_generics = match instance { + false => quote!(T), + true => quote!(T, I), + }; + let type_impl_generics = match instance { + false => quote!(T: Config), + true => quote!(T: Config, I: 'static), + }; + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + + // benchmark name variables + let benchmark_names_str: Vec = benchmark_names.iter().map(|n| n.to_string()).collect(); + let extra_benchmark_names_str: Vec = extra_benchmark_names + .iter() + .map(|n| n.to_string()) + .collect(); + let skip_meta_benchmark_names_str: Vec = skip_meta_benchmark_names + .iter() + .map(|n| n.to_string()) + .collect(); + let mut selected_benchmark_mappings: Vec = Vec::new(); + let mut benchmarks_by_name_mappings: Vec = Vec::new(); + let test_idents: Vec = benchmark_names_str + .iter() + .map(|n| Ident::new(format!("test_benchmark_{}", n).as_str(), Span::call_site())) + .collect(); + for i in 0..benchmark_names.len() { + let name_ident = &benchmark_names[i]; + let name_str = &benchmark_names_str[i]; + let test_ident = &test_idents[i]; + selected_benchmark_mappings.push(quote!(#name_str => SelectedBenchmark::#name_ident)); + benchmarks_by_name_mappings.push(quote!(#name_str => Self::#test_ident())) 
+ } + + let impl_test_function = content + .iter_mut() + .find_map(|item| { + let Item::Macro(item_macro) = item else { + return None; + }; + + if !item_macro + .mac + .path + .segments + .iter() + .any(|s| s.ident == "impl_benchmark_test_suite") + { + return None; + } + + let tokens = item_macro.mac.tokens.clone(); + *item = Item::Verbatim(quote! {}); + + Some(quote! { + impl_test_function!( + (#( {} #benchmark_names )*) + (#( #extra_benchmark_names )*) + (#( #skip_meta_benchmark_names )*) + #tokens + ); + }) + }) + .unwrap_or(quote! {}); + + // emit final quoted tokens + let res = quote! { + #(#mod_attrs) + * + #mod_vis mod #mod_name { + #(#content) + * + + #[allow(non_camel_case_types)] + enum SelectedBenchmark { + #(#benchmark_names), + * + } + + impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> for SelectedBenchmark where #where_clause { + fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { + match self { + #( + Self::#benchmark_names => { + <#benchmark_names as #krate::BenchmarkingSetup<#type_use_generics>>::components(&#benchmark_names) + } + ) + * + } + } + + fn instance( + &self, + components: &[(#krate::BenchmarkParameter, u32)], + verify: bool, + ) -> Result< + #krate::__private::Box Result<(), #krate::BenchmarkError>>, + #krate::BenchmarkError, + > { + match self { + #( + Self::#benchmark_names => { + <#benchmark_names as #krate::BenchmarkingSetup< + #type_use_generics + >>::instance(&#benchmark_names, components, verify) + } + ) + * + } + } + } + #[cfg(any(feature = "runtime-benchmarks", test))] + impl<#type_impl_generics> #krate::Benchmarking for Pallet<#type_use_generics> + where T: #frame_system::Config, #where_clause + { + fn benchmarks( + extra: bool, + ) -> #krate::__private::Vec<#krate::BenchmarkMetadata> { + let mut all_names = #krate::__private::vec![ + #(#benchmark_names_str), + * + ]; + if !extra { + let extra = [ + #(#extra_benchmark_names_str), + * + ]; + all_names.retain(|x| 
!extra.contains(x)); + } + let pov_modes: + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec + )>, + )> = #krate::__private::vec![ + #( #pov_modes ),* + ]; + all_names.into_iter().map(|benchmark| { + let selected_benchmark = match benchmark { + #(#selected_benchmark_mappings), + *, + _ => panic!("all benchmarks should be selectable") + }; + let components = >::components(&selected_benchmark); + let name = benchmark.as_bytes().to_vec(); + let modes = pov_modes.iter().find(|p| p.0 == name).map(|p| p.1.clone()); + + #krate::BenchmarkMetadata { + name: benchmark.as_bytes().to_vec(), + components, + pov_modes: modes.unwrap_or_default(), + } + }).collect::<#krate::__private::Vec<_>>() + } + + fn run_benchmark( + extrinsic: &[u8], + c: &[(#krate::BenchmarkParameter, u32)], + whitelist: &[#krate::__private::TrackedStorageKey], + verify: bool, + internal_repeats: u32, + ) -> Result<#krate::__private::Vec<#krate::BenchmarkResult>, #krate::BenchmarkError> { + let extrinsic = #krate::__private::str::from_utf8(extrinsic).map_err(|_| "`extrinsic` is not a valid utf-8 string!")?; + let selected_benchmark = match extrinsic { + #(#selected_benchmark_mappings), + *, + _ => return Err("Could not find extrinsic.".into()), + }; + let mut whitelist = whitelist.to_vec(); + let whitelisted_caller_key = <#frame_system::Account< + T, + > as #krate::__private::storage::StorageMap<_, _,>>::hashed_key_for( + #krate::whitelisted_caller::() + ); + whitelist.push(whitelisted_caller_key.into()); + let transactional_layer_key = #krate::__private::TrackedStorageKey::new( + #krate::__private::storage::transactional::TRANSACTION_LEVEL_KEY.into(), + ); + whitelist.push(transactional_layer_key); + // Whitelist the `:extrinsic_index`. 
+ let extrinsic_index = #krate::__private::TrackedStorageKey::new( + #krate::__private::well_known_keys::EXTRINSIC_INDEX.into() + ); + whitelist.push(extrinsic_index); + // Whitelist the `:intrablock_entropy`. + let intrablock_entropy = #krate::__private::TrackedStorageKey::new( + #krate::__private::well_known_keys::INTRABLOCK_ENTROPY.into() + ); + whitelist.push(intrablock_entropy); + + #krate::benchmarking::set_whitelist(whitelist.clone()); + let mut results: #krate::__private::Vec<#krate::BenchmarkResult> = #krate::__private::Vec::new(); + + // Always do at least one internal repeat... + for _ in 0 .. internal_repeats.max(1) { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); + + // Set up the externalities environment for the setup we want to + // benchmark. + let closure_to_benchmark = < + SelectedBenchmark as #krate::BenchmarkingSetup<#type_use_generics> + >::instance(&selected_benchmark, c, verify)?; + + // Set the block number to at least 1 so events are deposited. + if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + + // Commit the externalities to the database, flushing the DB cache. + // This will enable worst case scenario for reading from the database. + #krate::benchmarking::commit_db(); + + // Access all whitelisted keys to get them into the proof recorder since the + // recorder does now have a whitelist. + for key in &whitelist { + #krate::__private::storage::unhashed::get_raw(&key.key); + } + + // Reset the read/write counter so we don't count operations in the setup process. + #krate::benchmarking::reset_read_write_count(); + + // Time the extrinsic logic. 
+ #krate::__private::log::trace!( + target: "benchmark", + "Start Benchmark: {} ({:?})", + extrinsic, + c + ); + + let start_pov = #krate::benchmarking::proof_size(); + let start_extrinsic = #krate::benchmarking::current_time(); + + closure_to_benchmark()?; + + let finish_extrinsic = #krate::benchmarking::current_time(); + let end_pov = #krate::benchmarking::proof_size(); + + // Calculate the diff caused by the benchmark. + let elapsed_extrinsic = finish_extrinsic.saturating_sub(start_extrinsic); + let diff_pov = match (start_pov, end_pov) { + (Some(start), Some(end)) => end.saturating_sub(start), + _ => Default::default(), + }; + + // Commit the changes to get proper write count + #krate::benchmarking::commit_db(); + #krate::__private::log::trace!( + target: "benchmark", + "End Benchmark: {} ns", elapsed_extrinsic + ); + let read_write_count = #krate::benchmarking::read_write_count(); + #krate::__private::log::trace!( + target: "benchmark", + "Read/Write Count {:?}", read_write_count + ); + + // Time the storage root recalculation. 
+ let start_storage_root = #krate::benchmarking::current_time(); + #krate::__private::storage_root(#krate::__private::StateVersion::V1); + let finish_storage_root = #krate::benchmarking::current_time(); + let elapsed_storage_root = finish_storage_root - start_storage_root; + + let skip_meta = [ #(#skip_meta_benchmark_names_str),* ]; + let read_and_written_keys = if skip_meta.contains(&extrinsic) { + #krate::__private::vec![(b"Skipped Metadata".to_vec(), 0, 0, false)] + } else { + #krate::benchmarking::get_read_and_written_keys() + }; + + results.push(#krate::BenchmarkResult { + components: c.to_vec(), + extrinsic_time: elapsed_extrinsic, + storage_root_time: elapsed_storage_root, + reads: read_write_count.0, + repeat_reads: read_write_count.1, + writes: read_write_count.2, + repeat_writes: read_write_count.3, + proof_size: diff_pov, + keys: read_and_written_keys, + }); + } + + return Ok(results); + } + } + + #[cfg(test)] + impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { + /// Test a particular benchmark by name. + /// + /// This isn't called `test_benchmark_by_name` just in case some end-user eventually + /// writes a benchmark, itself called `by_name`; the function would be shadowed in + /// that case. + /// + /// This is generally intended to be used by child test modules such as those created + /// by the `impl_benchmark_test_suite` macro. However, it is not an error if a pallet + /// author chooses not to implement benchmarks. 
+ #[allow(unused)] + fn test_bench_by_name(name: &[u8]) -> Result<(), #krate::BenchmarkError> { + let name = #krate::__private::str::from_utf8(name) + .map_err(|_| -> #krate::BenchmarkError { "`name` is not a valid utf8 string!".into() })?; + match name { + #(#benchmarks_by_name_mappings), + *, + _ => Err("Could not find test for requested benchmark.".into()), + } + } + } + + #impl_test_function + } + #mod_vis use #mod_name::*; + }; + Ok(res.into()) } /// Prepares a [`Vec`] to be interpolated by [`quote!`] by creating easily-iterable /// arrays formatted in such a way that they can be interpolated directly. struct UnrolledParams { - param_ranges: Vec, - param_names: Vec, + param_ranges: Vec, + param_names: Vec, } impl UnrolledParams { - /// Constructs an [`UnrolledParams`] from a [`Vec`] - fn from(params: &Vec) -> UnrolledParams { - let param_ranges: Vec = params - .iter() - .map(|p| { - let name = Ident::new(&p.name, Span::call_site()); - let start = &p.start; - let end = &p.end; - quote!(#name, #start, #end) - }) - .collect(); - let param_names: Vec = params - .iter() - .map(|p| { - let name = Ident::new(&p.name, Span::call_site()); - quote!(#name) - }) - .collect(); - UnrolledParams { param_ranges, param_names } - } + /// Constructs an [`UnrolledParams`] from a [`Vec`] + fn from(params: &Vec) -> UnrolledParams { + let param_ranges: Vec = params + .iter() + .map(|p| { + let name = Ident::new(&p.name, Span::call_site()); + let start = &p.start; + let end = &p.end; + quote!(#name, #start, #end) + }) + .collect(); + let param_names: Vec = params + .iter() + .map(|p| { + let name = Ident::new(&p.name, Span::call_site()); + quote!(#name) + }) + .collect(); + UnrolledParams { + param_ranges, + param_names, + } + } } /// Performs expansion of an already-parsed [`BenchmarkDef`]. 
fn expand_benchmark( - benchmark_def: BenchmarkDef, - name: &Ident, - is_instance: bool, - where_clause: TokenStream2, + benchmark_def: BenchmarkDef, + name: &Ident, + is_instance: bool, + where_clause: TokenStream2, ) -> TokenStream2 { - // set up variables needed during quoting - let krate = match generate_access_from_frame_or_crate("frame-benchmarking") { - Ok(ident) => ident, - Err(err) => return err.to_compile_error().into(), - }; - let frame_system = match generate_access_from_frame_or_crate("frame-system") { - Ok(path) => path, - Err(err) => return err.to_compile_error().into(), - }; - let codec = quote!(#krate::__private::codec); - let traits = quote!(#krate::__private::traits); - let setup_stmts = benchmark_def.setup_stmts; - let verify_stmts = benchmark_def.verify_stmts; - let last_stmt = benchmark_def.last_stmt; - let test_ident = - Ident::new(format!("test_benchmark_{}", name.to_string()).as_str(), Span::call_site()); - - // unroll params (prepare for quoting) - let unrolled = UnrolledParams::from(&benchmark_def.params); - let param_names = unrolled.param_names; - let param_ranges = unrolled.param_ranges; - - let type_use_generics = match is_instance { - false => quote!(T), - true => quote!(T, I), - }; - - let type_impl_generics = match is_instance { - false => quote!(T: Config), - true => quote!(T: Config, I: 'static), - }; - - // used in the benchmarking impls - let (pre_call, post_call, fn_call_body) = match &benchmark_def.call_def { - BenchmarkCallDef::ExtrinsicCall { origin, expr_call, attr_span: _ } => { - let mut expr_call = expr_call.clone(); - - // remove first arg from expr_call - let mut final_args = Punctuated::::new(); - let args: Vec<&Expr> = expr_call.args.iter().collect(); - for arg in &args[1..] { - final_args.push((*(*arg)).clone()); - } - expr_call.args = final_args; - - let origin = match origin { - Expr::Cast(t) => { - let ty = t.ty.clone(); - quote! { - <::RuntimeOrigin as From<#ty>>::from(#origin); - } - }, - _ => quote! 
{ - #origin.into(); - }, - }; - - // determine call name (handles `_` and normal call syntax) - let expr_span = expr_call.span(); - let call_err = || { - syn::Error::new(expr_span, "Extrinsic call must be a function call or `_`") - .to_compile_error() - }; - let call_name = match *expr_call.func { - Expr::Path(expr_path) => { - // normal function call - let Some(segment) = expr_path.path.segments.last() else { return call_err() }; - segment.ident.to_string() - }, - Expr::Infer(_) => { - // `_` style - // replace `_` with fn name - name.to_string() - }, - _ => return call_err(), - }; - - // modify extrinsic call to be prefixed with "new_call_variant" - let call_name = format!("new_call_variant_{}", call_name); - let mut punct: Punctuated = Punctuated::new(); - punct.push(PathSegment { - arguments: PathArguments::None, - ident: Ident::new(call_name.as_str(), Span::call_site()), - }); - *expr_call.func = Expr::Path(ExprPath { - attrs: vec![], - qself: None, - path: Path { leading_colon: None, segments: punct }, - }); - let pre_call = quote! { - let __call = Call::<#type_use_generics>::#expr_call; - let __benchmarked_call_encoded = #codec::Encode::encode(&__call); - }; - let post_call = quote! { - let __call_decoded = as #codec::Decode> - ::decode(&mut &__benchmarked_call_encoded[..]) - .expect("call is encoded above, encoding must be correct"); - let __origin = #origin; - as #traits::UnfilteredDispatchable>::dispatch_bypass_filter( - __call_decoded, - __origin, - ) - }; - ( - // (pre_call, post_call, fn_call_body): - pre_call.clone(), - quote!(#post_call?;), - quote! 
{ - #pre_call - #post_call.unwrap(); - }, - ) - }, - BenchmarkCallDef::Block { block, attr_span: _ } => - (quote!(), quote!(#block), quote!(#block)), - }; - - let vis = benchmark_def.fn_vis; - - // remove #[benchmark] attribute - let fn_attrs = benchmark_def - .fn_attrs - .iter() - .filter(|attr| !attr.path().is_ident(keywords::BENCHMARK_TOKEN)); - - // modify signature generics, ident, and inputs, e.g: - // before: `fn bench(u: Linear<1, 100>) -> Result<(), BenchmarkError>` - // after: `fn _bench , I: 'static>(u: u32, verify: bool) -> Result<(), - // BenchmarkError>` - let mut sig = benchmark_def.fn_sig; - sig.generics = parse_quote!(<#type_impl_generics>); - if !where_clause.is_empty() { - sig.generics.where_clause = parse_quote!(where #where_clause); - } - sig.ident = - Ident::new(format!("_{}", name.to_token_stream().to_string()).as_str(), Span::call_site()); - let mut fn_param_inputs: Vec = - param_names.iter().map(|name| quote!(#name: u32)).collect(); - fn_param_inputs.push(quote!(verify: bool)); - sig.inputs = parse_quote!(#(#fn_param_inputs),*); - - // used in instance() impl - let impl_last_stmt = match &last_stmt { - Some(stmt) => quote!(#stmt), - None => quote!(Ok(())), - }; - let fn_attrs_clone = fn_attrs.clone(); - - let fn_def = quote! { - #( - #fn_attrs_clone - )* - #vis #sig { - #( - #setup_stmts - )* - #fn_call_body - if verify { - #( - #verify_stmts - )* - } - #last_stmt - } - }; - - // generate final quoted tokens - let res = quote! { - // benchmark function definition - #fn_def - - #[allow(non_camel_case_types)] - #( - #fn_attrs - )* - struct #name; - - #[allow(unused_variables)] - impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> - for #name where #where_clause { - fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { - #krate::__private::vec! 
[ - #( - (#krate::BenchmarkParameter::#param_ranges) - ),* - ] - } - - fn instance( - &self, - components: &[(#krate::BenchmarkParameter, u32)], - verify: bool - ) -> Result<#krate::__private::Box Result<(), #krate::BenchmarkError>>, #krate::BenchmarkError> { - #( - // prepare instance #param_names - let #param_names = components.iter() - .find(|&c| c.0 == #krate::BenchmarkParameter::#param_names) - .ok_or("Could not find component during benchmark preparation.")? - .1; - )* - - // benchmark setup code - #( - #setup_stmts - )* - #pre_call - Ok(#krate::__private::Box::new(move || -> Result<(), #krate::BenchmarkError> { - #post_call - if verify { - #( - #verify_stmts - )* - } - #impl_last_stmt - })) - } - } - - #[cfg(test)] - impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { - #[allow(unused)] - fn #test_ident() -> Result<(), #krate::BenchmarkError> { - let selected_benchmark = SelectedBenchmark::#name; - let components = < - SelectedBenchmark as #krate::BenchmarkingSetup - >::components(&selected_benchmark); - let execute_benchmark = | - c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> - | -> Result<(), #krate::BenchmarkError> { - // Always reset the state after the benchmark. - #krate::__private::defer!(#krate::benchmarking::wipe_db()); - - // Set up the benchmark, return execution + verification function. - let closure_to_verify = < - SelectedBenchmark as #krate::BenchmarkingSetup - >::instance(&selected_benchmark, &c, true)?; - - // Set the block number to at least 1 so events are deposited. 
- if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { - #frame_system::Pallet::::set_block_number(1u32.into()); - } - - // Run execution + verification - closure_to_verify() - }; - - if components.is_empty() { - execute_benchmark(Default::default())?; - } else { - let num_values: u32 = if let Ok(ev) = std::env::var("VALUES_PER_COMPONENT") { - ev.parse().map_err(|_| { - #krate::BenchmarkError::Stop( - "Could not parse env var `VALUES_PER_COMPONENT` as u32." - ) - })? - } else { - 6 - }; - - if num_values < 2 { - return Err("`VALUES_PER_COMPONENT` must be at least 2".into()); - } - - for (name, low, high) in components.clone().into_iter() { - // Test the lowest, highest (if its different from the lowest) - // and up to num_values-2 more equidistant values in between. - // For 0..10 and num_values=6 this would mean: [0, 2, 4, 6, 8, 10] - if high < low { - return Err("The start of a `ParamRange` must be less than or equal to the end".into()); - } - - let mut values = #krate::__private::vec![low]; - let diff = (high - low).min(num_values - 1); - let slope = (high - low) as f32 / diff as f32; - - for i in 1..=diff { - let value = ((low as f32 + slope * i as f32) as u32) - .clamp(low, high); - values.push(value); - } - - for component_value in values { - // Select the max value for all the other components. 
- let c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> = components - .iter() - .map(|(n, _, h)| - if *n == name { - (*n, component_value) - } else { - (*n, *h) - } - ) - .collect(); - - execute_benchmark(c)?; - } - } - } - return Ok(()); - } - } - }; - res + // set up variables needed during quoting + let krate = match generate_access_from_frame_or_crate("frame-benchmarking") { + Ok(ident) => ident, + Err(err) => return err.to_compile_error().into(), + }; + let frame_system = match generate_access_from_frame_or_crate("frame-system") { + Ok(path) => path, + Err(err) => return err.to_compile_error().into(), + }; + let codec = quote!(#krate::__private::codec); + let traits = quote!(#krate::__private::traits); + let setup_stmts = benchmark_def.setup_stmts; + let verify_stmts = benchmark_def.verify_stmts; + let last_stmt = benchmark_def.last_stmt; + let test_ident = Ident::new( + format!("test_benchmark_{}", name.to_string()).as_str(), + Span::call_site(), + ); + + // unroll params (prepare for quoting) + let unrolled = UnrolledParams::from(&benchmark_def.params); + let param_names = unrolled.param_names; + let param_ranges = unrolled.param_ranges; + + let type_use_generics = match is_instance { + false => quote!(T), + true => quote!(T, I), + }; + + let type_impl_generics = match is_instance { + false => quote!(T: Config), + true => quote!(T: Config, I: 'static), + }; + + // used in the benchmarking impls + let (pre_call, post_call, fn_call_body) = match &benchmark_def.call_def { + BenchmarkCallDef::ExtrinsicCall { + origin, + expr_call, + attr_span: _, + } => { + let mut expr_call = expr_call.clone(); + + // remove first arg from expr_call + let mut final_args = Punctuated::::new(); + let args: Vec<&Expr> = expr_call.args.iter().collect(); + for arg in &args[1..] { + final_args.push((*(*arg)).clone()); + } + expr_call.args = final_args; + + let origin = match origin { + Expr::Cast(t) => { + let ty = t.ty.clone(); + quote! 
{ + <::RuntimeOrigin as From<#ty>>::from(#origin); + } + } + _ => quote! { + #origin.into(); + }, + }; + + // determine call name (handles `_` and normal call syntax) + let expr_span = expr_call.span(); + let call_err = || { + syn::Error::new(expr_span, "Extrinsic call must be a function call or `_`") + .to_compile_error() + }; + let call_name = match *expr_call.func { + Expr::Path(expr_path) => { + // normal function call + let Some(segment) = expr_path.path.segments.last() else { + return call_err(); + }; + segment.ident.to_string() + } + Expr::Infer(_) => { + // `_` style + // replace `_` with fn name + name.to_string() + } + _ => return call_err(), + }; + + // modify extrinsic call to be prefixed with "new_call_variant" + let call_name = format!("new_call_variant_{}", call_name); + let mut punct: Punctuated = Punctuated::new(); + punct.push(PathSegment { + arguments: PathArguments::None, + ident: Ident::new(call_name.as_str(), Span::call_site()), + }); + *expr_call.func = Expr::Path(ExprPath { + attrs: vec![], + qself: None, + path: Path { + leading_colon: None, + segments: punct, + }, + }); + let pre_call = quote! { + let __call = Call::<#type_use_generics>::#expr_call; + let __benchmarked_call_encoded = #codec::Encode::encode(&__call); + }; + let post_call = quote! { + let __call_decoded = as #codec::Decode> + ::decode(&mut &__benchmarked_call_encoded[..]) + .expect("call is encoded above, encoding must be correct"); + let __origin = #origin; + as #traits::UnfilteredDispatchable>::dispatch_bypass_filter( + __call_decoded, + __origin, + ) + }; + ( + // (pre_call, post_call, fn_call_body): + pre_call.clone(), + quote!(#post_call?;), + quote! 
{ + #pre_call + #post_call.unwrap(); + }, + ) + } + BenchmarkCallDef::Block { + block, + attr_span: _, + } => (quote!(), quote!(#block), quote!(#block)), + }; + + let vis = benchmark_def.fn_vis; + + // remove #[benchmark] attribute + let fn_attrs = benchmark_def + .fn_attrs + .iter() + .filter(|attr| !attr.path().is_ident(keywords::BENCHMARK_TOKEN)); + + // modify signature generics, ident, and inputs, e.g: + // before: `fn bench(u: Linear<1, 100>) -> Result<(), BenchmarkError>` + // after: `fn _bench , I: 'static>(u: u32, verify: bool) -> Result<(), + // BenchmarkError>` + let mut sig = benchmark_def.fn_sig; + sig.generics = parse_quote!(<#type_impl_generics>); + if !where_clause.is_empty() { + sig.generics.where_clause = parse_quote!(where #where_clause); + } + sig.ident = Ident::new( + format!("_{}", name.to_token_stream().to_string()).as_str(), + Span::call_site(), + ); + let mut fn_param_inputs: Vec = + param_names.iter().map(|name| quote!(#name: u32)).collect(); + fn_param_inputs.push(quote!(verify: bool)); + sig.inputs = parse_quote!(#(#fn_param_inputs),*); + + // used in instance() impl + let impl_last_stmt = match &last_stmt { + Some(stmt) => quote!(#stmt), + None => quote!(Ok(())), + }; + let fn_attrs_clone = fn_attrs.clone(); + + let fn_def = quote! { + #( + #fn_attrs_clone + )* + #vis #sig { + #( + #setup_stmts + )* + #fn_call_body + if verify { + #( + #verify_stmts + )* + } + #last_stmt + } + }; + + // generate final quoted tokens + let res = quote! { + // benchmark function definition + #fn_def + + #[allow(non_camel_case_types)] + #( + #fn_attrs + )* + struct #name; + + #[allow(unused_variables)] + impl<#type_impl_generics> #krate::BenchmarkingSetup<#type_use_generics> + for #name where #where_clause { + fn components(&self) -> #krate::__private::Vec<(#krate::BenchmarkParameter, u32, u32)> { + #krate::__private::vec! 
[ + #( + (#krate::BenchmarkParameter::#param_ranges) + ),* + ] + } + + fn instance( + &self, + components: &[(#krate::BenchmarkParameter, u32)], + verify: bool + ) -> Result<#krate::__private::Box Result<(), #krate::BenchmarkError>>, #krate::BenchmarkError> { + #( + // prepare instance #param_names + let #param_names = components.iter() + .find(|&c| c.0 == #krate::BenchmarkParameter::#param_names) + .ok_or("Could not find component during benchmark preparation.")? + .1; + )* + + // benchmark setup code + #( + #setup_stmts + )* + #pre_call + Ok(#krate::__private::Box::new(move || -> Result<(), #krate::BenchmarkError> { + #post_call + if verify { + #( + #verify_stmts + )* + } + #impl_last_stmt + })) + } + } + + #[cfg(test)] + impl<#type_impl_generics> Pallet<#type_use_generics> where T: #frame_system::Config, #where_clause { + #[allow(unused)] + fn #test_ident() -> Result<(), #krate::BenchmarkError> { + let selected_benchmark = SelectedBenchmark::#name; + let components = < + SelectedBenchmark as #krate::BenchmarkingSetup + >::components(&selected_benchmark); + let execute_benchmark = | + c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> + | -> Result<(), #krate::BenchmarkError> { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); + + // Set up the benchmark, return execution + verification function. + let closure_to_verify = < + SelectedBenchmark as #krate::BenchmarkingSetup + >::instance(&selected_benchmark, &c, true)?; + + // Set the block number to at least 1 so events are deposited. 
+ if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + + // Run execution + verification + closure_to_verify() + }; + + if components.is_empty() { + execute_benchmark(Default::default())?; + } else { + let num_values: u32 = if let Ok(ev) = std::env::var("VALUES_PER_COMPONENT") { + ev.parse().map_err(|_| { + #krate::BenchmarkError::Stop( + "Could not parse env var `VALUES_PER_COMPONENT` as u32." + ) + })? + } else { + 6 + }; + + if num_values < 2 { + return Err("`VALUES_PER_COMPONENT` must be at least 2".into()); + } + + for (name, low, high) in components.clone().into_iter() { + // Test the lowest, highest (if its different from the lowest) + // and up to num_values-2 more equidistant values in between. + // For 0..10 and num_values=6 this would mean: [0, 2, 4, 6, 8, 10] + if high < low { + return Err("The start of a `ParamRange` must be less than or equal to the end".into()); + } + + let mut values = #krate::__private::vec![low]; + let diff = (high - low).min(num_values - 1); + let slope = (high - low) as f32 / diff as f32; + + for i in 1..=diff { + let value = ((low as f32 + slope * i as f32) as u32) + .clamp(low, high); + values.push(value); + } + + for component_value in values { + // Select the max value for all the other components. 
+ let c: #krate::__private::Vec<(#krate::BenchmarkParameter, u32)> = components + .iter() + .map(|(n, _, h)| + if *n == name { + (*n, component_value) + } else { + (*n, *h) + } + ) + .collect(); + + execute_benchmark(c)?; + } + } + } + return Ok(()); + } + } + }; + res } diff --git a/support/procedural-fork/src/construct_runtime/expand/call.rs b/support/procedural-fork/src/construct_runtime/expand/call.rs index b0041ccc0..7e8c2e856 100644 --- a/support/procedural-fork/src/construct_runtime/expand/call.rs +++ b/support/procedural-fork/src/construct_runtime/expand/call.rs @@ -22,202 +22,205 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_dispatch( - runtime: &Ident, - system_pallet: &Pallet, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + system_pallet: &Pallet, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut variant_defs = TokenStream::new(); - let mut variant_patterns = Vec::new(); - let mut query_call_part_macros = Vec::new(); - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let system_path = &system_pallet.path; - - let pallets_with_call = pallet_decls.iter().filter(|decl| decl.exists_part("Call")); - - for pallet_declaration in pallets_with_call { - let name = &pallet_declaration.name; - let path = &pallet_declaration.path; - let index = pallet_declaration.index; - let attr = - pallet_declaration.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - variant_defs.extend(quote! { - #attr - #[codec(index = #index)] - #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ), - }); - variant_patterns.push(quote!(RuntimeCall::#name(call))); - pallet_names.push(name); - pallet_attrs.push(attr); - query_call_part_macros.push(quote! 
{ - #path::__substrate_call_check::is_call_part_defined!(#name); - }); - } - - quote! { - #( #query_call_part_macros )* - - #[derive( - Clone, PartialEq, Eq, - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeCall { - #variant_defs - } - #[cfg(test)] - impl RuntimeCall { - /// Return a list of the module names together with their size in memory. - pub const fn sizes() -> &'static [( &'static str, usize )] { - use #scrate::dispatch::Callable; - use core::mem::size_of; - &[#( - #pallet_attrs - ( - stringify!(#pallet_names), - size_of::< <#pallet_names as Callable<#runtime>>::RuntimeCall >(), - ), - )*] - } - - /// Panics with diagnostic information if the size is greater than the given `limit`. - pub fn assert_size_under(limit: usize) { - let size = core::mem::size_of::(); - let call_oversize = size > limit; - if call_oversize { - println!("Size of `Call` is {} bytes (provided limit is {} bytes)", size, limit); - let mut sizes = Self::sizes().to_vec(); - sizes.sort_by_key(|x| -(x.1 as isize)); - for (i, &(name, size)) in sizes.iter().enumerate().take(5) { - println!("Offender #{}: {} at {} bytes", i + 1, name, size); - } - if let Some((_, next_size)) = sizes.get(5) { - println!("{} others of size {} bytes or less", sizes.len() - 5, next_size); - } - panic!( - "Size of `Call` is more than limit; use `Box` on complex parameter types to reduce the + let mut variant_defs = TokenStream::new(); + let mut variant_patterns = Vec::new(); + let mut query_call_part_macros = Vec::new(); + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let system_path = &system_pallet.path; + + let pallets_with_call = pallet_decls.iter().filter(|decl| decl.exists_part("Call")); + + for pallet_declaration in pallets_with_call { + let name = &pallet_declaration.name; + let path = &pallet_declaration.path; + let index = pallet_declaration.index; + 
let attr = + pallet_declaration + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + variant_defs.extend(quote! { + #attr + #[codec(index = #index)] + #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ), + }); + variant_patterns.push(quote!(RuntimeCall::#name(call))); + pallet_names.push(name); + pallet_attrs.push(attr); + query_call_part_macros.push(quote! { + #path::__substrate_call_check::is_call_part_defined!(#name); + }); + } + + quote! { + #( #query_call_part_macros )* + + #[derive( + Clone, PartialEq, Eq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeCall { + #variant_defs + } + #[cfg(test)] + impl RuntimeCall { + /// Return a list of the module names together with their size in memory. + pub const fn sizes() -> &'static [( &'static str, usize )] { + use #scrate::dispatch::Callable; + use core::mem::size_of; + &[#( + #pallet_attrs + ( + stringify!(#pallet_names), + size_of::< <#pallet_names as Callable<#runtime>>::RuntimeCall >(), + ), + )*] + } + + /// Panics with diagnostic information if the size is greater than the given `limit`. 
+ pub fn assert_size_under(limit: usize) { + let size = core::mem::size_of::(); + let call_oversize = size > limit; + if call_oversize { + println!("Size of `Call` is {} bytes (provided limit is {} bytes)", size, limit); + let mut sizes = Self::sizes().to_vec(); + sizes.sort_by_key(|x| -(x.1 as isize)); + for (i, &(name, size)) in sizes.iter().enumerate().take(5) { + println!("Offender #{}: {} at {} bytes", i + 1, name, size); + } + if let Some((_, next_size)) = sizes.get(5) { + println!("{} others of size {} bytes or less", sizes.len() - 5, next_size); + } + panic!( + "Size of `Call` is more than limit; use `Box` on complex parameter types to reduce the size of `Call`. If the limit is too strong, maybe consider providing a higher limit." - ); - } - } - } - impl #scrate::dispatch::GetDispatchInfo for RuntimeCall { - fn get_dispatch_info(&self) -> #scrate::dispatch::DispatchInfo { - match self { - #( - #pallet_attrs - #variant_patterns => call.get_dispatch_info(), - )* - } - } - } - - impl #scrate::dispatch::CheckIfFeeless for RuntimeCall { - type Origin = #system_path::pallet_prelude::OriginFor<#runtime>; - fn is_feeless(&self, origin: &Self::Origin) -> bool { - match self { - #( - #pallet_attrs - #variant_patterns => call.is_feeless(origin), - )* - } - } - } - - impl #scrate::traits::GetCallMetadata for RuntimeCall { - fn get_call_metadata(&self) -> #scrate::traits::CallMetadata { - use #scrate::traits::GetCallName; - match self { - #( - #pallet_attrs - #variant_patterns => { - let function_name = call.get_call_name(); - let pallet_name = stringify!(#pallet_names); - #scrate::traits::CallMetadata { function_name, pallet_name } - } - )* - } - } - - fn get_module_names() -> &'static [&'static str] { - &[#( - #pallet_attrs - stringify!(#pallet_names), - )*] - } - - fn get_call_names(module: &str) -> &'static [&'static str] { - use #scrate::{dispatch::Callable, traits::GetCallName}; - match module { - #( - #pallet_attrs - stringify!(#pallet_names) => - <<#pallet_names 
as Callable<#runtime>>::RuntimeCall - as GetCallName>::get_call_names(), - )* - _ => unreachable!(), - } - } - } - impl #scrate::__private::Dispatchable for RuntimeCall { - type RuntimeOrigin = RuntimeOrigin; - type Config = RuntimeCall; - type Info = #scrate::dispatch::DispatchInfo; - type PostInfo = #scrate::dispatch::PostDispatchInfo; - fn dispatch(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { - if !::filter_call(&origin, &self) { - return ::core::result::Result::Err( - #system_path::Error::<#runtime>::CallFiltered.into() - ); - } - - #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(self, origin) - } - } - impl #scrate::traits::UnfilteredDispatchable for RuntimeCall { - type RuntimeOrigin = RuntimeOrigin; - fn dispatch_bypass_filter(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { - match self { - #( - #pallet_attrs - #variant_patterns => - #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(call, origin), - )* - } - } - } - - #( - #pallet_attrs - impl #scrate::traits::IsSubType<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { - #[allow(unreachable_patterns)] - fn is_sub_type(&self) -> Option<&#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> { - match self { - #variant_patterns => Some(call), - // May be unreachable - _ => None, - } - } - } - - #pallet_attrs - impl From<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { - fn from(call: #scrate::dispatch::CallableCallFor<#pallet_names, #runtime>) -> Self { - #variant_patterns - } - } - )* - } + ); + } + } + } + impl #scrate::dispatch::GetDispatchInfo for RuntimeCall { + fn get_dispatch_info(&self) -> #scrate::dispatch::DispatchInfo { + match self { + #( + #pallet_attrs + #variant_patterns => call.get_dispatch_info(), + )* + } + } + } + + impl #scrate::dispatch::CheckIfFeeless for RuntimeCall { + type Origin = 
#system_path::pallet_prelude::OriginFor<#runtime>; + fn is_feeless(&self, origin: &Self::Origin) -> bool { + match self { + #( + #pallet_attrs + #variant_patterns => call.is_feeless(origin), + )* + } + } + } + + impl #scrate::traits::GetCallMetadata for RuntimeCall { + fn get_call_metadata(&self) -> #scrate::traits::CallMetadata { + use #scrate::traits::GetCallName; + match self { + #( + #pallet_attrs + #variant_patterns => { + let function_name = call.get_call_name(); + let pallet_name = stringify!(#pallet_names); + #scrate::traits::CallMetadata { function_name, pallet_name } + } + )* + } + } + + fn get_module_names() -> &'static [&'static str] { + &[#( + #pallet_attrs + stringify!(#pallet_names), + )*] + } + + fn get_call_names(module: &str) -> &'static [&'static str] { + use #scrate::{dispatch::Callable, traits::GetCallName}; + match module { + #( + #pallet_attrs + stringify!(#pallet_names) => + <<#pallet_names as Callable<#runtime>>::RuntimeCall + as GetCallName>::get_call_names(), + )* + _ => unreachable!(), + } + } + } + impl #scrate::__private::Dispatchable for RuntimeCall { + type RuntimeOrigin = RuntimeOrigin; + type Config = RuntimeCall; + type Info = #scrate::dispatch::DispatchInfo; + type PostInfo = #scrate::dispatch::PostDispatchInfo; + fn dispatch(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { + if !::filter_call(&origin, &self) { + return ::core::result::Result::Err( + #system_path::Error::<#runtime>::CallFiltered.into() + ); + } + + #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(self, origin) + } + } + impl #scrate::traits::UnfilteredDispatchable for RuntimeCall { + type RuntimeOrigin = RuntimeOrigin; + fn dispatch_bypass_filter(self, origin: RuntimeOrigin) -> #scrate::dispatch::DispatchResultWithPostInfo { + match self { + #( + #pallet_attrs + #variant_patterns => + #scrate::traits::UnfilteredDispatchable::dispatch_bypass_filter(call, origin), + )* + } + } + } + + #( + #pallet_attrs + impl 
#scrate::traits::IsSubType<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { + #[allow(unreachable_patterns)] + fn is_sub_type(&self) -> Option<&#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> { + match self { + #variant_patterns => Some(call), + // May be unreachable + _ => None, + } + } + } + + #pallet_attrs + impl From<#scrate::dispatch::CallableCallFor<#pallet_names, #runtime>> for RuntimeCall { + fn from(call: #scrate::dispatch::CallableCallFor<#pallet_names, #runtime>) -> Self { + #variant_patterns + } + } + )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs b/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs index 101a476fb..be6b2f085 100644 --- a/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs +++ b/support/procedural-fork/src/construct_runtime/expand/composite_helper.rs @@ -20,82 +20,82 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; pub(crate) fn expand_conversion_fn( - composite_name: &str, - path: &PalletPath, - instance: Option<&Ident>, - variant_name: &Ident, + composite_name: &str, + path: &PalletPath, + instance: Option<&Ident>, + variant_name: &Ident, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); - let runtime_composite_name = quote::format_ident!("Runtime{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); + let runtime_composite_name = quote::format_ident!("Runtime{}", composite_name); - if let Some(inst) = instance { - quote! { - impl From<#path::#composite_name<#path::#inst>> for #runtime_composite_name { - fn from(hr: #path::#composite_name<#path::#inst>) -> Self { - #runtime_composite_name::#variant_name(hr) - } - } - } - } else { - quote! 
{ - impl From<#path::#composite_name> for #runtime_composite_name { - fn from(hr: #path::#composite_name) -> Self { - #runtime_composite_name::#variant_name(hr) - } - } - } - } + if let Some(inst) = instance { + quote! { + impl From<#path::#composite_name<#path::#inst>> for #runtime_composite_name { + fn from(hr: #path::#composite_name<#path::#inst>) -> Self { + #runtime_composite_name::#variant_name(hr) + } + } + } + } else { + quote! { + impl From<#path::#composite_name> for #runtime_composite_name { + fn from(hr: #path::#composite_name) -> Self { + #runtime_composite_name::#variant_name(hr) + } + } + } + } } pub(crate) fn expand_variant( - composite_name: &str, - index: u8, - path: &PalletPath, - instance: Option<&Ident>, - variant_name: &Ident, + composite_name: &str, + index: u8, + path: &PalletPath, + instance: Option<&Ident>, + variant_name: &Ident, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); - if let Some(inst) = instance { - quote! { - #[codec(index = #index)] - #variant_name(#path::#composite_name<#path::#inst>), - } - } else { - quote! { - #[codec(index = #index)] - #variant_name(#path::#composite_name), - } - } + if let Some(inst) = instance { + quote! { + #[codec(index = #index)] + #variant_name(#path::#composite_name<#path::#inst>), + } + } else { + quote! { + #[codec(index = #index)] + #variant_name(#path::#composite_name), + } + } } pub(crate) fn expand_variant_count( - composite_name: &str, - path: &PalletPath, - instance: Option<&Ident>, + composite_name: &str, + path: &PalletPath, + instance: Option<&Ident>, ) -> TokenStream { - let composite_name = quote::format_ident!("{}", composite_name); + let composite_name = quote::format_ident!("{}", composite_name); - if let Some(inst) = instance { - quote! { - #path::#composite_name::<#path::#inst>::VARIANT_COUNT - } - } else { - // Wrapped `<`..`>` means: use default type parameter for enum. 
- // - // This is used for pallets without instance support or pallets with instance support when - // we don't specify instance: - // - // ``` - // pub struct Pallet{..} - // - // #[pallet::composite_enum] - // pub enum HoldReason {..} - // - // Pallet1: pallet_x, // <- default type parameter - // ``` - quote! { - <#path::#composite_name>::VARIANT_COUNT - } - } + if let Some(inst) = instance { + quote! { + #path::#composite_name::<#path::#inst>::VARIANT_COUNT + } + } else { + // Wrapped `<`..`>` means: use default type parameter for enum. + // + // This is used for pallets without instance support or pallets with instance support when + // we don't specify instance: + // + // ``` + // pub struct Pallet{..} + // + // #[pallet::composite_enum] + // pub enum HoldReason {..} + // + // Pallet1: pallet_x, // <- default type parameter + // ``` + quote! { + <#path::#composite_name>::VARIANT_COUNT + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/config.rs b/support/procedural-fork/src/construct_runtime/expand/config.rs index dbbe6ba6e..ff715e584 100644 --- a/support/procedural-fork/src/construct_runtime/expand/config.rs +++ b/support/procedural-fork/src/construct_runtime/expand/config.rs @@ -23,125 +23,135 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_config( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut types = TokenStream::new(); - let mut fields = TokenStream::new(); - let mut genesis_build_calls = TokenStream::new(); - let mut query_genesis_config_part_macros = Vec::new(); + let mut types = TokenStream::new(); + let mut fields = TokenStream::new(); + let mut genesis_build_calls = TokenStream::new(); + let mut query_genesis_config_part_macros = Vec::new(); - for decl in pallet_decls { - if let Some(pallet_entry) = decl.find_part("Config") { - let path = &decl.path; - let pallet_name = &decl.name; - let 
path_str = path.into_token_stream().to_string(); - let config = format_ident!("{}Config", pallet_name); - let field_name = - &Ident::new(&pallet_name.to_string().to_snake_case(), decl.name.span()); - let part_is_generic = !pallet_entry.generics.params.is_empty(); - let attr = &decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + for decl in pallet_decls { + if let Some(pallet_entry) = decl.find_part("Config") { + let path = &decl.path; + let pallet_name = &decl.name; + let path_str = path.into_token_stream().to_string(); + let config = format_ident!("{}Config", pallet_name); + let field_name = + &Ident::new(&pallet_name.to_string().to_snake_case(), decl.name.span()); + let part_is_generic = !pallet_entry.generics.params.is_empty(); + let attr = &decl + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - types.extend(expand_config_types(attr, runtime, decl, &config, part_is_generic)); - fields.extend(quote!(#attr pub #field_name: #config,)); - genesis_build_calls - .extend(expand_config_build_storage_call(scrate, &config, attr, field_name)); - query_genesis_config_part_macros.push(quote! { + types.extend(expand_config_types( + attr, + runtime, + decl, + &config, + part_is_generic, + )); + fields.extend(quote!(#attr pub #field_name: #config,)); + genesis_build_calls.extend(expand_config_build_storage_call( + scrate, &config, attr, field_name, + )); + query_genesis_config_part_macros.push(quote! 
{ #path::__substrate_genesis_config_check::is_genesis_config_defined!(#pallet_name); #[cfg(feature = "std")] #path::__substrate_genesis_config_check::is_std_enabled_for_genesis!(#pallet_name, #path_str); }); - } - } + } + } - quote! { - #( #query_genesis_config_part_macros )* + quote! { + #( #query_genesis_config_part_macros )* - #types + #types - use #scrate::__private::serde as __genesis_config_serde_import__; - #[derive(#scrate::__private::serde::Serialize, #scrate::__private::serde::Deserialize, Default)] - #[serde(rename_all = "camelCase")] - #[serde(deny_unknown_fields)] - #[serde(crate = "__genesis_config_serde_import__")] - pub struct RuntimeGenesisConfig { - #fields - } + use #scrate::__private::serde as __genesis_config_serde_import__; + #[derive(#scrate::__private::serde::Serialize, #scrate::__private::serde::Deserialize, Default)] + #[serde(rename_all = "camelCase")] + #[serde(deny_unknown_fields)] + #[serde(crate = "__genesis_config_serde_import__")] + pub struct RuntimeGenesisConfig { + #fields + } - #[cfg(any(feature = "std", test))] - impl #scrate::sp_runtime::BuildStorage for RuntimeGenesisConfig { - fn assimilate_storage( - &self, - storage: &mut #scrate::sp_runtime::Storage, - ) -> std::result::Result<(), String> { - #scrate::__private::BasicExternalities::execute_with_storage(storage, || { - ::build(&self); - Ok(()) - }) - } - } + #[cfg(any(feature = "std", test))] + impl #scrate::sp_runtime::BuildStorage for RuntimeGenesisConfig { + fn assimilate_storage( + &self, + storage: &mut #scrate::sp_runtime::Storage, + ) -> std::result::Result<(), String> { + #scrate::__private::BasicExternalities::execute_with_storage(storage, || { + ::build(&self); + Ok(()) + }) + } + } - impl #scrate::traits::BuildGenesisConfig for RuntimeGenesisConfig { - fn build(&self) { - #genesis_build_calls - ::on_genesis(); - } - } + impl #scrate::traits::BuildGenesisConfig for RuntimeGenesisConfig { + fn build(&self) { + #genesis_build_calls + ::on_genesis(); + } + } - /// 
Test the `Default` derive impl of the `RuntimeGenesisConfig`. - #[cfg(test)] - #[test] - fn test_genesis_config_builds() { - #scrate::__private::sp_io::TestExternalities::default().execute_with(|| { - ::build( - &RuntimeGenesisConfig::default() - ); - }); - } - } + /// Test the `Default` derive impl of the `RuntimeGenesisConfig`. + #[cfg(test)] + #[test] + fn test_genesis_config_builds() { + #scrate::__private::sp_io::TestExternalities::default().execute_with(|| { + ::build( + &RuntimeGenesisConfig::default() + ); + }); + } + } } fn expand_config_types( - attr: &TokenStream, - runtime: &Ident, - decl: &Pallet, - config: &Ident, - part_is_generic: bool, + attr: &TokenStream, + runtime: &Ident, + decl: &Pallet, + config: &Ident, + part_is_generic: bool, ) -> TokenStream { - let path = &decl.path; + let path = &decl.path; - match (decl.instance.as_ref(), part_is_generic) { - (Some(inst), true) => quote! { - #attr - pub type #config = #path::GenesisConfig<#runtime, #path::#inst>; - }, - (None, true) => quote! { - #attr - pub type #config = #path::GenesisConfig<#runtime>; - }, - (_, false) => quote! { - #attr - pub type #config = #path::GenesisConfig; - }, - } + match (decl.instance.as_ref(), part_is_generic) { + (Some(inst), true) => quote! { + #attr + pub type #config = #path::GenesisConfig<#runtime, #path::#inst>; + }, + (None, true) => quote! { + #attr + pub type #config = #path::GenesisConfig<#runtime>; + }, + (_, false) => quote! { + #attr + pub type #config = #path::GenesisConfig; + }, + } } fn expand_config_build_storage_call( - scrate: &TokenStream, - pallet_genesis_config: &Ident, - attr: &TokenStream, - field_name: &Ident, + scrate: &TokenStream, + pallet_genesis_config: &Ident, + attr: &TokenStream, + field_name: &Ident, ) -> TokenStream { - quote! { - #attr - <#pallet_genesis_config as #scrate::traits::BuildGenesisConfig>::build(&self.#field_name); - } + quote! 
{ + #attr + <#pallet_genesis_config as #scrate::traits::BuildGenesisConfig>::build(&self.#field_name); + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs index f12f99526..131c919ef 100644 --- a/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/freeze_reason.rs @@ -21,55 +21,55 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_freeze_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut freeze_reason_variants = Vec::new(); - let mut freeze_reason_variants_count = Vec::new(); - for decl in pallet_decls { - if let Some(_) = decl.find_part("FreezeReason") { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut freeze_reason_variants = Vec::new(); + let mut freeze_reason_variants_count = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("FreezeReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "FreezeReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "FreezeReason", + path, + instance, + variant_name, + )); - freeze_reason_variants.push(composite_helper::expand_variant( - "FreezeReason", - index, - path, - instance, - variant_name, - )); + freeze_reason_variants.push(composite_helper::expand_variant( + "FreezeReason", + index, + path, + instance, + variant_name, + )); - freeze_reason_variants_count.push(composite_helper::expand_variant_count( - "FreezeReason", - path, - instance, - )); - } - } + 
freeze_reason_variants_count.push(composite_helper::expand_variant_count( + "FreezeReason", + path, + instance, + )); + } + } - quote! { - /// A reason for placing a freeze on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeFreezeReason { - #( #freeze_reason_variants )* - } + quote! { + /// A reason for placing a freeze on funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeFreezeReason { + #( #freeze_reason_variants )* + } - impl #scrate::traits::VariantCount for RuntimeFreezeReason { - const VARIANT_COUNT: u32 = 0 #( + #freeze_reason_variants_count )*; - } + impl #scrate::traits::VariantCount for RuntimeFreezeReason { + const VARIANT_COUNT: u32 = 0 #( + #freeze_reason_variants_count )*; + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs index cdab92712..58870a321 100644 --- a/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/hold_reason.rs @@ -21,55 +21,55 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_hold_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut hold_reason_variants = Vec::new(); - let mut hold_reason_variants_count = Vec::new(); - for decl in pallet_decls { - if let Some(_) = decl.find_part("HoldReason") { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let 
instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut hold_reason_variants = Vec::new(); + let mut hold_reason_variants_count = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("HoldReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "HoldReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "HoldReason", + path, + instance, + variant_name, + )); - hold_reason_variants.push(composite_helper::expand_variant( - "HoldReason", - index, - path, - instance, - variant_name, - )); + hold_reason_variants.push(composite_helper::expand_variant( + "HoldReason", + index, + path, + instance, + variant_name, + )); - hold_reason_variants_count.push(composite_helper::expand_variant_count( - "HoldReason", - path, - instance, - )); - } - } + hold_reason_variants_count.push(composite_helper::expand_variant_count( + "HoldReason", + path, + instance, + )); + } + } - quote! { - /// A reason for placing a hold on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeHoldReason { - #( #hold_reason_variants )* - } + quote! { + /// A reason for placing a hold on funds. 
+ #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeHoldReason { + #( #hold_reason_variants )* + } - impl #scrate::traits::VariantCount for RuntimeHoldReason { - const VARIANT_COUNT: u32 = 0 #( + #hold_reason_variants_count )*; - } + impl #scrate::traits::VariantCount for RuntimeHoldReason { + const VARIANT_COUNT: u32 = 0 #( + #hold_reason_variants_count )*; + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/inherent.rs b/support/procedural-fork/src/construct_runtime/expand/inherent.rs index da483fa6c..b58d540fe 100644 --- a/support/procedural-fork/src/construct_runtime/expand/inherent.rs +++ b/support/procedural-fork/src/construct_runtime/expand/inherent.rs @@ -22,233 +22,236 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_inherent( - runtime: &Ident, - block: &TokenStream, - unchecked_extrinsic: &TokenStream, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + block: &TokenStream, + unchecked_extrinsic: &TokenStream, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let mut query_inherent_part_macros = Vec::new(); - - for pallet_decl in pallet_decls { - if pallet_decl.exists_part("Inherent") { - let name = &pallet_decl.name; - let path = &pallet_decl.path; - let attr = pallet_decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - pallet_names.push(name); - pallet_attrs.push(attr); - query_inherent_part_macros.push(quote! 
{ - #path::__substrate_inherent_check::is_inherent_part_defined!(#name); - }); - } - } - - quote! { - #( #query_inherent_part_macros )* - - trait InherentDataExt { - fn create_extrinsics(&self) -> - #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>; - fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult; - } - - impl InherentDataExt for #scrate::inherent::InherentData { - fn create_extrinsics(&self) -> - #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> - { - use #scrate::inherent::ProvideInherent; - - let mut inherents = #scrate::__private::sp_std::vec::Vec::new(); - - #( - #pallet_attrs - if let Some(inherent) = #pallet_names::create_inherent(self) { - let inherent = <#unchecked_extrinsic as #scrate::sp_runtime::traits::Extrinsic>::new( - inherent.into(), - None, - ).expect("Runtime UncheckedExtrinsic is not Opaque, so it has to return \ - `Some`; qed"); - - inherents.push(inherent); - } - )* - - inherents - } - - fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult { - use #scrate::inherent::{ProvideInherent, IsFatalError}; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - use #scrate::sp_runtime::traits::Block as _; - use #scrate::__private::{sp_inherents::Error, log}; - - let mut result = #scrate::inherent::CheckInherentsResult::new(); - - // This handle assume we abort on the first fatal error. 
- fn handle_put_error_result(res: Result<(), Error>) { - const LOG_TARGET: &str = "runtime::inherent"; - match res { - Ok(()) => (), - Err(Error::InherentDataExists(id)) => - log::debug!( - target: LOG_TARGET, - "Some error already reported for inherent {:?}, new non fatal \ - error is ignored", - id - ), - Err(Error::FatalErrorReported) => - log::error!( - target: LOG_TARGET, - "Fatal error already reported, unexpected considering there is \ - only one fatal error", - ), - Err(_) => - log::error!( - target: LOG_TARGET, - "Unexpected error from `put_error` operation", - ), - } - } - - for xt in block.extrinsics() { - // Inherents are before any other extrinsics. - // And signed extrinsics are not inherents. - if #scrate::sp_runtime::traits::Extrinsic::is_signed(xt).unwrap_or(false) { - break - } - - let mut is_inherent = false; - - #( - #pallet_attrs - { - let call = <#unchecked_extrinsic as ExtrinsicCall>::call(xt); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - if #pallet_names::is_inherent(call) { - is_inherent = true; - if let Err(e) = #pallet_names::check_inherent(call, self) { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - } - } - } - } - )* - - // Inherents are before any other extrinsics. - // No module marked it as inherent thus it is not. - if !is_inherent { - break - } - } - - #( - #pallet_attrs - match #pallet_names::is_inherent_required(self) { - Ok(Some(e)) => { - let found = block.extrinsics().iter().any(|xt| { - let is_signed = #scrate::sp_runtime::traits::Extrinsic::is_signed(xt) - .unwrap_or(false); - - if !is_signed { - let call = < - #unchecked_extrinsic as ExtrinsicCall - >::call(xt); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - #pallet_names::is_inherent(&call) - } else { - false - } - } else { - // Signed extrinsics are not inherents. 
- false - } - }); - - if !found { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - } - }, - Ok(None) => (), - Err(e) => { - handle_put_error_result(result.put_error( - #pallet_names::INHERENT_IDENTIFIER, &e - )); - if e.is_fatal_error() { - return result; - } - }, - } - )* - - result - } - } - - impl #scrate::traits::IsInherent<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> for #runtime { - fn is_inherent(ext: &<#block as #scrate::sp_runtime::traits::Block>::Extrinsic) -> bool { - use #scrate::inherent::ProvideInherent; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - - if #scrate::sp_runtime::traits::Extrinsic::is_signed(ext).unwrap_or(false) { - // Signed extrinsics are never inherents. - return false - } - - #( - #pallet_attrs - { - let call = <#unchecked_extrinsic as ExtrinsicCall>::call(ext); - if let Some(call) = IsSubType::<_>::is_sub_type(call) { - if <#pallet_names as ProvideInherent>::is_inherent(&call) { - return true; - } - } - } - )* - false - } - } - - impl #scrate::traits::EnsureInherentsAreFirst<#block> for #runtime { - fn ensure_inherents_are_first(block: &#block) -> Result { - use #scrate::inherent::ProvideInherent; - use #scrate::traits::{IsSubType, ExtrinsicCall}; - use #scrate::sp_runtime::traits::Block as _; - - let mut num_inherents = 0u32; - - for (i, xt) in block.extrinsics().iter().enumerate() { - if >::is_inherent(xt) { - if num_inherents != i as u32 { - return Err(i as u32); - } - - num_inherents += 1; // Safe since we are in an `enumerate` loop. 
- } - } - - Ok(num_inherents) - } - } - } + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let mut query_inherent_part_macros = Vec::new(); + + for pallet_decl in pallet_decls { + if pallet_decl.exists_part("Inherent") { + let name = &pallet_decl.name; + let path = &pallet_decl.path; + let attr = pallet_decl + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + pallet_names.push(name); + pallet_attrs.push(attr); + query_inherent_part_macros.push(quote! { + #path::__substrate_inherent_check::is_inherent_part_defined!(#name); + }); + } + } + + quote! { + #( #query_inherent_part_macros )* + + trait InherentDataExt { + fn create_extrinsics(&self) -> + #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>; + fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult; + } + + impl InherentDataExt for #scrate::inherent::InherentData { + fn create_extrinsics(&self) -> + #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> + { + use #scrate::inherent::ProvideInherent; + + let mut inherents = #scrate::__private::sp_std::vec::Vec::new(); + + #( + #pallet_attrs + if let Some(inherent) = #pallet_names::create_inherent(self) { + let inherent = <#unchecked_extrinsic as #scrate::sp_runtime::traits::Extrinsic>::new( + inherent.into(), + None, + ).expect("Runtime UncheckedExtrinsic is not Opaque, so it has to return \ + `Some`; qed"); + + inherents.push(inherent); + } + )* + + inherents + } + + fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult { + use #scrate::inherent::{ProvideInherent, IsFatalError}; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + use #scrate::sp_runtime::traits::Block as _; + use 
#scrate::__private::{sp_inherents::Error, log}; + + let mut result = #scrate::inherent::CheckInherentsResult::new(); + + // This handle assume we abort on the first fatal error. + fn handle_put_error_result(res: Result<(), Error>) { + const LOG_TARGET: &str = "runtime::inherent"; + match res { + Ok(()) => (), + Err(Error::InherentDataExists(id)) => + log::debug!( + target: LOG_TARGET, + "Some error already reported for inherent {:?}, new non fatal \ + error is ignored", + id + ), + Err(Error::FatalErrorReported) => + log::error!( + target: LOG_TARGET, + "Fatal error already reported, unexpected considering there is \ + only one fatal error", + ), + Err(_) => + log::error!( + target: LOG_TARGET, + "Unexpected error from `put_error` operation", + ), + } + } + + for xt in block.extrinsics() { + // Inherents are before any other extrinsics. + // And signed extrinsics are not inherents. + if #scrate::sp_runtime::traits::Extrinsic::is_signed(xt).unwrap_or(false) { + break + } + + let mut is_inherent = false; + + #( + #pallet_attrs + { + let call = <#unchecked_extrinsic as ExtrinsicCall>::call(xt); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + if #pallet_names::is_inherent(call) { + is_inherent = true; + if let Err(e) = #pallet_names::check_inherent(call, self) { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + } + } + } + } + )* + + // Inherents are before any other extrinsics. + // No module marked it as inherent thus it is not. 
+ if !is_inherent { + break + } + } + + #( + #pallet_attrs + match #pallet_names::is_inherent_required(self) { + Ok(Some(e)) => { + let found = block.extrinsics().iter().any(|xt| { + let is_signed = #scrate::sp_runtime::traits::Extrinsic::is_signed(xt) + .unwrap_or(false); + + if !is_signed { + let call = < + #unchecked_extrinsic as ExtrinsicCall + >::call(xt); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + #pallet_names::is_inherent(&call) + } else { + false + } + } else { + // Signed extrinsics are not inherents. + false + } + }); + + if !found { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + } + }, + Ok(None) => (), + Err(e) => { + handle_put_error_result(result.put_error( + #pallet_names::INHERENT_IDENTIFIER, &e + )); + if e.is_fatal_error() { + return result; + } + }, + } + )* + + result + } + } + + impl #scrate::traits::IsInherent<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> for #runtime { + fn is_inherent(ext: &<#block as #scrate::sp_runtime::traits::Block>::Extrinsic) -> bool { + use #scrate::inherent::ProvideInherent; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + + if #scrate::sp_runtime::traits::Extrinsic::is_signed(ext).unwrap_or(false) { + // Signed extrinsics are never inherents. 
+ return false + } + + #( + #pallet_attrs + { + let call = <#unchecked_extrinsic as ExtrinsicCall>::call(ext); + if let Some(call) = IsSubType::<_>::is_sub_type(call) { + if <#pallet_names as ProvideInherent>::is_inherent(&call) { + return true; + } + } + } + )* + false + } + } + + impl #scrate::traits::EnsureInherentsAreFirst<#block> for #runtime { + fn ensure_inherents_are_first(block: &#block) -> Result { + use #scrate::inherent::ProvideInherent; + use #scrate::traits::{IsSubType, ExtrinsicCall}; + use #scrate::sp_runtime::traits::Block as _; + + let mut num_inherents = 0u32; + + for (i, xt) in block.extrinsics().iter().enumerate() { + if >::is_inherent(xt) { + if num_inherents != i as u32 { + return Err(i as u32); + } + + num_inherents += 1; // Safe since we are in an `enumerate` loop. + } + } + + Ok(num_inherents) + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs index e67c0da00..67c2fb933 100644 --- a/support/procedural-fork/src/construct_runtime/expand/lock_id.rs +++ b/support/procedural-fork/src/construct_runtime/expand/lock_id.rs @@ -21,44 +21,44 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_lock_id(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut lock_id_variants = Vec::new(); - for decl in pallet_decls { - if let Some(_) = decl.find_part("LockId") { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut lock_id_variants = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("LockId") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "LockId", - path, - instance, - variant_name, - )); + 
conversion_fns.push(composite_helper::expand_conversion_fn( + "LockId", + path, + instance, + variant_name, + )); - lock_id_variants.push(composite_helper::expand_variant( - "LockId", - index, - path, - instance, - variant_name, - )); - } - } + lock_id_variants.push(composite_helper::expand_variant( + "LockId", + index, + path, + instance, + variant_name, + )); + } + } - quote! { - /// An identifier for each lock placed on funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeLockId { - #( #lock_id_variants )* - } + quote! { + /// An identifier for each lock placed on funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeLockId { + #( #lock_id_variants )* + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/metadata.rs b/support/procedural-fork/src/construct_runtime/expand/metadata.rs index 0e76f9a92..f98c719ca 100644 --- a/support/procedural-fork/src/construct_runtime/expand/metadata.rs +++ b/support/procedural-fork/src/construct_runtime/expand/metadata.rs @@ -22,237 +22,240 @@ use std::str::FromStr; use syn::Ident; pub fn expand_runtime_metadata( - runtime: &Ident, - pallet_declarations: &[Pallet], - scrate: &TokenStream, - extrinsic: &TokenStream, - system_path: &PalletPath, + runtime: &Ident, + pallet_declarations: &[Pallet], + scrate: &TokenStream, + extrinsic: &TokenStream, + system_path: &PalletPath, ) -> TokenStream { - let pallets = pallet_declarations - .iter() - .filter_map(|pallet_declaration| { - pallet_declaration.find_part("Pallet").map(|_| { - let 
filtered_names: Vec<_> = pallet_declaration - .pallet_parts() - .iter() - .filter(|part| part.name() != "Pallet") - .map(|part| part.name()) - .collect(); - (pallet_declaration, filtered_names) - }) - }) - .map(|(decl, filtered_names)| { - let name = &decl.name; - let index = &decl.index; - let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); - let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); - let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); - let constants = expand_pallet_metadata_constants(runtime, decl); - let errors = expand_pallet_metadata_errors(runtime, decl); - let docs = expand_pallet_metadata_docs(runtime, decl); - let attr = decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + let pallets = pallet_declarations + .iter() + .filter_map(|pallet_declaration| { + pallet_declaration.find_part("Pallet").map(|_| { + let filtered_names: Vec<_> = pallet_declaration + .pallet_parts() + .iter() + .filter(|part| part.name() != "Pallet") + .map(|part| part.name()) + .collect(); + (pallet_declaration, filtered_names) + }) + }) + .map(|(decl, filtered_names)| { + let name = &decl.name; + let index = &decl.index; + let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); + let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); + let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); + let constants = expand_pallet_metadata_constants(runtime, decl); + let errors = expand_pallet_metadata_errors(runtime, decl); + let docs = expand_pallet_metadata_docs(runtime, decl); + let attr = decl + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was 
successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - quote! { - #attr - #scrate::__private::metadata_ir::PalletMetadataIR { - name: stringify!(#name), - index: #index, - storage: #storage, - calls: #calls, - event: #event, - constants: #constants, - error: #errors, - docs: #docs, - } - } - }) - .collect::>(); + quote! { + #attr + #scrate::__private::metadata_ir::PalletMetadataIR { + name: stringify!(#name), + index: #index, + storage: #storage, + calls: #calls, + event: #event, + constants: #constants, + error: #errors, + docs: #docs, + } + } + }) + .collect::>(); - quote! { - impl #runtime { - fn metadata_ir() -> #scrate::__private::metadata_ir::MetadataIR { - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. - // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. - // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` - // is called. - // - // `Deref` needs a reference for resolving the function call. - let rt = #runtime; + quote! { + impl #runtime { + fn metadata_ir() -> #scrate::__private::metadata_ir::MetadataIR { + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. 
+ // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. + // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` + // is called. + // + // `Deref` needs a reference for resolving the function call. + let rt = #runtime; - let ty = #scrate::__private::scale_info::meta_type::<#extrinsic>(); - let address_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureAddress - >(); - let call_ty = #scrate::__private::scale_info::meta_type::< - <#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::Call - >(); - let signature_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::Signature - >(); - let extra_ty = #scrate::__private::scale_info::meta_type::< - <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureExtra - >(); + let ty = #scrate::__private::scale_info::meta_type::<#extrinsic>(); + let address_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as 
#scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureAddress + >(); + let call_ty = #scrate::__private::scale_info::meta_type::< + <#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::Call + >(); + let signature_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::Signature + >(); + let extra_ty = #scrate::__private::scale_info::meta_type::< + <<#extrinsic as #scrate::sp_runtime::traits::Extrinsic>::SignaturePayload as #scrate::sp_runtime::traits::SignaturePayload>::SignatureExtra + >(); - #scrate::__private::metadata_ir::MetadataIR { - pallets: #scrate::__private::sp_std::vec![ #(#pallets),* ], - extrinsic: #scrate::__private::metadata_ir::ExtrinsicMetadataIR { - ty, - version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - address_ty, - call_ty, - signature_ty, - extra_ty, - signed_extensions: < - < - #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata - >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension - >::metadata() - .into_iter() - .map(|meta| #scrate::__private::metadata_ir::SignedExtensionMetadataIR { - identifier: meta.identifier, - ty: meta.ty, - additional_signed: meta.additional_signed, - }) - .collect(), - }, - ty: #scrate::__private::scale_info::meta_type::<#runtime>(), - apis: (&rt).runtime_metadata(), - outer_enums: #scrate::__private::metadata_ir::OuterEnumsIR { - call_enum_ty: #scrate::__private::scale_info::meta_type::< - <#runtime as #system_path::Config>::RuntimeCall - >(), - event_enum_ty: #scrate::__private::scale_info::meta_type::(), - error_enum_ty: #scrate::__private::scale_info::meta_type::(), - } - } - } + #scrate::__private::metadata_ir::MetadataIR { + pallets: #scrate::__private::sp_std::vec![ #(#pallets),* ], + extrinsic: #scrate::__private::metadata_ir::ExtrinsicMetadataIR { + ty, + version: 
<#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + address_ty, + call_ty, + signature_ty, + extra_ty, + signed_extensions: < + < + #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata + >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension + >::metadata() + .into_iter() + .map(|meta| #scrate::__private::metadata_ir::SignedExtensionMetadataIR { + identifier: meta.identifier, + ty: meta.ty, + additional_signed: meta.additional_signed, + }) + .collect(), + }, + ty: #scrate::__private::scale_info::meta_type::<#runtime>(), + apis: (&rt).runtime_metadata(), + outer_enums: #scrate::__private::metadata_ir::OuterEnumsIR { + call_enum_ty: #scrate::__private::scale_info::meta_type::< + <#runtime as #system_path::Config>::RuntimeCall + >(), + event_enum_ty: #scrate::__private::scale_info::meta_type::(), + error_enum_ty: #scrate::__private::scale_info::meta_type::(), + } + } + } - pub fn metadata() -> #scrate::__private::metadata::RuntimeMetadataPrefixed { - // Note: this always returns the V14 version. The runtime API function - // must be deprecated. - #scrate::__private::metadata_ir::into_v14(#runtime::metadata_ir()) - } + pub fn metadata() -> #scrate::__private::metadata::RuntimeMetadataPrefixed { + // Note: this always returns the V14 version. The runtime API function + // must be deprecated. 
+ #scrate::__private::metadata_ir::into_v14(#runtime::metadata_ir()) + } - pub fn metadata_at_version(version: u32) -> Option<#scrate::__private::OpaqueMetadata> { - #scrate::__private::metadata_ir::into_version(#runtime::metadata_ir(), version).map(|prefixed| { - #scrate::__private::OpaqueMetadata::new(prefixed.into()) - }) - } + pub fn metadata_at_version(version: u32) -> Option<#scrate::__private::OpaqueMetadata> { + #scrate::__private::metadata_ir::into_version(#runtime::metadata_ir(), version).map(|prefixed| { + #scrate::__private::OpaqueMetadata::new(prefixed.into()) + }) + } - pub fn metadata_versions() -> #scrate::__private::sp_std::vec::Vec { - #scrate::__private::metadata_ir::supported_versions() - } - } - } + pub fn metadata_versions() -> #scrate::__private::sp_std::vec::Vec { + #scrate::__private::metadata_ir::supported_versions() + } + } + } } fn expand_pallet_metadata_storage( - filtered_names: &[&'static str], - runtime: &Ident, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Storage") { - let instance = decl.instance.as_ref().into_iter(); - let path = &decl.path; + if filtered_names.contains(&"Storage") { + let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; - quote! { - Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) - } - } else { - quote!(None) - } + quote! { + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_calls( - filtered_names: &[&'static str], - runtime: &Ident, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Call") { - let instance = decl.instance.as_ref().into_iter(); - let path = &decl.path; + if filtered_names.contains(&"Call") { + let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; - quote! 
{ - Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) - } - } else { - quote!(None) - } + quote! { + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_events( - filtered_names: &[&'static str], - runtime: &Ident, - scrate: &TokenStream, - decl: &Pallet, + filtered_names: &[&'static str], + runtime: &Ident, + scrate: &TokenStream, + decl: &Pallet, ) -> TokenStream { - if filtered_names.contains(&"Event") { - let path = &decl.path; - let part_is_generic = !decl - .find_part("Event") - .expect("Event part exists; qed") - .generics - .params - .is_empty(); - let pallet_event = match (decl.instance.as_ref(), part_is_generic) { - (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), - (Some(inst), false) => quote!(#path::Event::<#path::#inst>), - (None, true) => quote!(#path::Event::<#runtime>), - (None, false) => quote!(#path::Event), - }; + if filtered_names.contains(&"Event") { + let path = &decl.path; + let part_is_generic = !decl + .find_part("Event") + .expect("Event part exists; qed") + .generics + .params + .is_empty(); + let pallet_event = match (decl.instance.as_ref(), part_is_generic) { + (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), + (Some(inst), false) => quote!(#path::Event::<#path::#inst>), + (None, true) => quote!(#path::Event::<#runtime>), + (None, false) => quote!(#path::Event), + }; - quote! { - Some( - #scrate::__private::metadata_ir::PalletEventMetadataIR { - ty: #scrate::__private::scale_info::meta_type::<#pallet_event>() - } - ) - } - } else { - quote!(None) - } + quote! 
{ + Some( + #scrate::__private::metadata_ir::PalletEventMetadataIR { + ty: #scrate::__private::scale_info::meta_type::<#pallet_event>() + } + ) + } + } else { + quote!(None) + } } fn expand_pallet_metadata_constants(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() - } + quote! { + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() + } } fn expand_pallet_metadata_errors(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() - } + quote! { + #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() + } } fn expand_pallet_metadata_docs(runtime: &Ident, decl: &Pallet) -> TokenStream { - let path = &decl.path; - let instance = decl.instance.as_ref().into_iter(); + let path = &decl.path; + let instance = decl.instance.as_ref().into_iter(); - quote! { - #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_documentation_metadata() - } + quote! 
{ + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_documentation_metadata() + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/origin.rs b/support/procedural-fork/src/construct_runtime/expand/origin.rs index 83049919d..2d50777bf 100644 --- a/support/procedural-fork/src/construct_runtime/expand/origin.rs +++ b/support/procedural-fork/src/construct_runtime/expand/origin.rs @@ -22,434 +22,448 @@ use std::str::FromStr; use syn::{Generics, Ident}; pub fn expand_outer_origin( - runtime: &Ident, - system_pallet: &Pallet, - pallets: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + system_pallet: &Pallet, + pallets: &[Pallet], + scrate: &TokenStream, ) -> syn::Result { - let mut caller_variants = TokenStream::new(); - let mut pallet_conversions = TokenStream::new(); - let mut query_origin_part_macros = Vec::new(); - - for pallet_decl in pallets.iter().filter(|pallet| pallet.name != SYSTEM_PALLET_NAME) { - if let Some(pallet_entry) = pallet_decl.find_part("Origin") { - let instance = pallet_decl.instance.as_ref(); - let index = pallet_decl.index; - let generics = &pallet_entry.generics; - let name = &pallet_decl.name; - let path = &pallet_decl.path; - - if instance.is_some() && generics.params.is_empty() { - let msg = format!( - "Instantiable pallet with no generic `Origin` cannot \ + let mut caller_variants = TokenStream::new(); + let mut pallet_conversions = TokenStream::new(); + let mut query_origin_part_macros = Vec::new(); + + for pallet_decl in pallets + .iter() + .filter(|pallet| pallet.name != SYSTEM_PALLET_NAME) + { + if let Some(pallet_entry) = pallet_decl.find_part("Origin") { + let instance = pallet_decl.instance.as_ref(); + let index = pallet_decl.index; + let generics = &pallet_entry.generics; + let name = &pallet_decl.name; + let path = &pallet_decl.path; + + if instance.is_some() && generics.params.is_empty() { + let msg = format!( + "Instantiable pallet with no generic `Origin` cannot \ be constructed: pallet `{}` must 
have generic `Origin`", - name - ); - return Err(syn::Error::new(name.span(), msg)) - } - - caller_variants.extend(expand_origin_caller_variant( - runtime, - pallet_decl, - index, - instance, - generics, - )); - pallet_conversions.extend(expand_origin_pallet_conversions( - scrate, - runtime, - pallet_decl, - instance, - generics, - )); - query_origin_part_macros.push(quote! { - #path::__substrate_origin_check::is_origin_part_defined!(#name); - }); - } - } - - let system_path = &system_pallet.path; - - let system_index = system_pallet.index; - - let system_path_name = system_path.module_name(); - - let doc_string = get_intra_doc_string( - "Origin is always created with the base filter configured in", - &system_path_name, - ); - - let doc_string_none_origin = - get_intra_doc_string("Create with system none origin and", &system_path_name); - - let doc_string_root_origin = - get_intra_doc_string("Create with system root origin and", &system_path_name); - - let doc_string_signed_origin = - get_intra_doc_string("Create with system signed origin and", &system_path_name); - - let doc_string_runtime_origin = - get_intra_doc_string("Convert to runtime origin, using as filter:", &system_path_name); - - let doc_string_runtime_origin_with_caller = get_intra_doc_string( - "Convert to runtime origin with caller being system signed or none and use filter", - &system_path_name, - ); - - Ok(quote! { - #( #query_origin_part_macros )* - - /// The runtime origin type representing the origin of a call. 
- /// - #[doc = #doc_string] - #[derive(Clone)] - pub struct RuntimeOrigin { - pub caller: OriginCaller, - filter: #scrate::__private::sp_std::rc::Rc::RuntimeCall) -> bool>>, - } - - #[cfg(not(feature = "std"))] - impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { - fn fmt( - &self, - fmt: &mut #scrate::__private::sp_std::fmt::Formatter, - ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { - fmt.write_str("") - } - } - - #[cfg(feature = "std")] - impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { - fn fmt( - &self, - fmt: &mut #scrate::__private::sp_std::fmt::Formatter, - ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { - fmt.debug_struct("Origin") - .field("caller", &self.caller) - .field("filter", &"[function ptr]") - .finish() - } - } - - impl #scrate::traits::OriginTrait for RuntimeOrigin { - type Call = <#runtime as #system_path::Config>::RuntimeCall; - type PalletsOrigin = OriginCaller; - type AccountId = <#runtime as #system_path::Config>::AccountId; - - fn add_filter(&mut self, filter: impl Fn(&Self::Call) -> bool + 'static) { - let f = self.filter.clone(); - - self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(move |call| { - f(call) && filter(call) - })); - } - - fn reset_filter(&mut self) { - let filter = < - <#runtime as #system_path::Config>::BaseCallFilter - as #scrate::traits::Contains<<#runtime as #system_path::Config>::RuntimeCall> - >::contains; - - self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(filter)); - } - - fn set_caller_from(&mut self, other: impl Into) { - self.caller = other.into().caller; - } - - fn filter_call(&self, call: &Self::Call) -> bool { - match self.caller { - // Root bypasses all filters - OriginCaller::system(#system_path::Origin::<#runtime>::Root) => true, - _ => (self.filter)(call), - } - } - - fn caller(&self) -> &Self::PalletsOrigin { - &self.caller - } - - fn into_caller(self) -> 
Self::PalletsOrigin { - self.caller - } - - fn try_with_caller( - mut self, - f: impl FnOnce(Self::PalletsOrigin) -> Result, - ) -> Result { - match f(self.caller) { - Ok(r) => Ok(r), - Err(caller) => { self.caller = caller; Err(self) } - } - } - - fn none() -> Self { - #system_path::RawOrigin::None.into() - } - - fn root() -> Self { - #system_path::RawOrigin::Root.into() - } - - fn signed(by: Self::AccountId) -> Self { - #system_path::RawOrigin::Signed(by).into() - } - } - - #[derive( - Clone, PartialEq, Eq, #scrate::__private::RuntimeDebug, #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, #scrate::__private::scale_info::TypeInfo, #scrate::__private::codec::MaxEncodedLen, - )] - #[allow(non_camel_case_types)] - pub enum OriginCaller { - #[codec(index = #system_index)] - system(#system_path::Origin<#runtime>), - #caller_variants - #[allow(dead_code)] - Void(#scrate::__private::Void) - } - - // For backwards compatibility and ease of accessing these functions. - #[allow(dead_code)] - impl RuntimeOrigin { - #[doc = #doc_string_none_origin] - pub fn none() -> Self { - ::none() - } - - #[doc = #doc_string_root_origin] - pub fn root() -> Self { - ::root() - } - - #[doc = #doc_string_signed_origin] - pub fn signed(by: <#runtime as #system_path::Config>::AccountId) -> Self { - ::signed(by) - } - } - - impl From<#system_path::Origin<#runtime>> for OriginCaller { - fn from(x: #system_path::Origin<#runtime>) -> Self { - OriginCaller::system(x) - } - } - - impl #scrate::traits::CallerTrait<<#runtime as #system_path::Config>::AccountId> for OriginCaller { - fn into_system(self) -> Option<#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { - match self { - OriginCaller::system(x) => Some(x), - _ => None, - } - } - fn as_system_ref(&self) -> Option<&#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { - match &self { - OriginCaller::system(o) => Some(o), - _ => None, - } - } - } - - impl TryFrom for 
#system_path::Origin<#runtime> { - type Error = OriginCaller; - fn try_from(x: OriginCaller) - -> #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, OriginCaller> - { - if let OriginCaller::system(l) = x { - Ok(l) - } else { - Err(x) - } - } - } - - impl From<#system_path::Origin<#runtime>> for RuntimeOrigin { - - #[doc = #doc_string_runtime_origin] - fn from(x: #system_path::Origin<#runtime>) -> Self { - let o: OriginCaller = x.into(); - o.into() - } - } - - impl From for RuntimeOrigin { - fn from(x: OriginCaller) -> Self { - let mut o = RuntimeOrigin { - caller: x, - filter: #scrate::__private::sp_std::rc::Rc::new(Box::new(|_| true)), - }; - - #scrate::traits::OriginTrait::reset_filter(&mut o); - - o - } - } - - impl From for #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { - /// NOTE: converting to pallet origin loses the origin filter information. - fn from(val: RuntimeOrigin) -> Self { - if let OriginCaller::system(l) = val.caller { - Ok(l) - } else { - Err(val) - } - } - } - impl From::AccountId>> for RuntimeOrigin { - #[doc = #doc_string_runtime_origin_with_caller] - fn from(x: Option<<#runtime as #system_path::Config>::AccountId>) -> Self { - <#system_path::Origin<#runtime>>::from(x).into() - } - } - - #pallet_conversions - }) + name + ); + return Err(syn::Error::new(name.span(), msg)); + } + + caller_variants.extend(expand_origin_caller_variant( + runtime, + pallet_decl, + index, + instance, + generics, + )); + pallet_conversions.extend(expand_origin_pallet_conversions( + scrate, + runtime, + pallet_decl, + instance, + generics, + )); + query_origin_part_macros.push(quote! 
{ + #path::__substrate_origin_check::is_origin_part_defined!(#name); + }); + } + } + + let system_path = &system_pallet.path; + + let system_index = system_pallet.index; + + let system_path_name = system_path.module_name(); + + let doc_string = get_intra_doc_string( + "Origin is always created with the base filter configured in", + &system_path_name, + ); + + let doc_string_none_origin = + get_intra_doc_string("Create with system none origin and", &system_path_name); + + let doc_string_root_origin = + get_intra_doc_string("Create with system root origin and", &system_path_name); + + let doc_string_signed_origin = + get_intra_doc_string("Create with system signed origin and", &system_path_name); + + let doc_string_runtime_origin = get_intra_doc_string( + "Convert to runtime origin, using as filter:", + &system_path_name, + ); + + let doc_string_runtime_origin_with_caller = get_intra_doc_string( + "Convert to runtime origin with caller being system signed or none and use filter", + &system_path_name, + ); + + Ok(quote! { + #( #query_origin_part_macros )* + + /// The runtime origin type representing the origin of a call. 
+ /// + #[doc = #doc_string] + #[derive(Clone)] + pub struct RuntimeOrigin { + pub caller: OriginCaller, + filter: #scrate::__private::sp_std::rc::Rc::RuntimeCall) -> bool>>, + } + + #[cfg(not(feature = "std"))] + impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + fn fmt( + &self, + fmt: &mut #scrate::__private::sp_std::fmt::Formatter, + ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt.write_str("") + } + } + + #[cfg(feature = "std")] + impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + fn fmt( + &self, + fmt: &mut #scrate::__private::sp_std::fmt::Formatter, + ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt.debug_struct("Origin") + .field("caller", &self.caller) + .field("filter", &"[function ptr]") + .finish() + } + } + + impl #scrate::traits::OriginTrait for RuntimeOrigin { + type Call = <#runtime as #system_path::Config>::RuntimeCall; + type PalletsOrigin = OriginCaller; + type AccountId = <#runtime as #system_path::Config>::AccountId; + + fn add_filter(&mut self, filter: impl Fn(&Self::Call) -> bool + 'static) { + let f = self.filter.clone(); + + self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(move |call| { + f(call) && filter(call) + })); + } + + fn reset_filter(&mut self) { + let filter = < + <#runtime as #system_path::Config>::BaseCallFilter + as #scrate::traits::Contains<<#runtime as #system_path::Config>::RuntimeCall> + >::contains; + + self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(filter)); + } + + fn set_caller_from(&mut self, other: impl Into) { + self.caller = other.into().caller; + } + + fn filter_call(&self, call: &Self::Call) -> bool { + match self.caller { + // Root bypasses all filters + OriginCaller::system(#system_path::Origin::<#runtime>::Root) => true, + _ => (self.filter)(call), + } + } + + fn caller(&self) -> &Self::PalletsOrigin { + &self.caller + } + + fn into_caller(self) -> 
Self::PalletsOrigin { + self.caller + } + + fn try_with_caller( + mut self, + f: impl FnOnce(Self::PalletsOrigin) -> Result, + ) -> Result { + match f(self.caller) { + Ok(r) => Ok(r), + Err(caller) => { self.caller = caller; Err(self) } + } + } + + fn none() -> Self { + #system_path::RawOrigin::None.into() + } + + fn root() -> Self { + #system_path::RawOrigin::Root.into() + } + + fn signed(by: Self::AccountId) -> Self { + #system_path::RawOrigin::Signed(by).into() + } + } + + #[derive( + Clone, PartialEq, Eq, #scrate::__private::RuntimeDebug, #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, #scrate::__private::scale_info::TypeInfo, #scrate::__private::codec::MaxEncodedLen, + )] + #[allow(non_camel_case_types)] + pub enum OriginCaller { + #[codec(index = #system_index)] + system(#system_path::Origin<#runtime>), + #caller_variants + #[allow(dead_code)] + Void(#scrate::__private::Void) + } + + // For backwards compatibility and ease of accessing these functions. + #[allow(dead_code)] + impl RuntimeOrigin { + #[doc = #doc_string_none_origin] + pub fn none() -> Self { + ::none() + } + + #[doc = #doc_string_root_origin] + pub fn root() -> Self { + ::root() + } + + #[doc = #doc_string_signed_origin] + pub fn signed(by: <#runtime as #system_path::Config>::AccountId) -> Self { + ::signed(by) + } + } + + impl From<#system_path::Origin<#runtime>> for OriginCaller { + fn from(x: #system_path::Origin<#runtime>) -> Self { + OriginCaller::system(x) + } + } + + impl #scrate::traits::CallerTrait<<#runtime as #system_path::Config>::AccountId> for OriginCaller { + fn into_system(self) -> Option<#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { + match self { + OriginCaller::system(x) => Some(x), + _ => None, + } + } + fn as_system_ref(&self) -> Option<&#system_path::RawOrigin<<#runtime as #system_path::Config>::AccountId>> { + match &self { + OriginCaller::system(o) => Some(o), + _ => None, + } + } + } + + impl TryFrom for 
#system_path::Origin<#runtime> { + type Error = OriginCaller; + fn try_from(x: OriginCaller) + -> #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, OriginCaller> + { + if let OriginCaller::system(l) = x { + Ok(l) + } else { + Err(x) + } + } + } + + impl From<#system_path::Origin<#runtime>> for RuntimeOrigin { + + #[doc = #doc_string_runtime_origin] + fn from(x: #system_path::Origin<#runtime>) -> Self { + let o: OriginCaller = x.into(); + o.into() + } + } + + impl From for RuntimeOrigin { + fn from(x: OriginCaller) -> Self { + let mut o = RuntimeOrigin { + caller: x, + filter: #scrate::__private::sp_std::rc::Rc::new(Box::new(|_| true)), + }; + + #scrate::traits::OriginTrait::reset_filter(&mut o); + + o + } + } + + impl From for #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { + /// NOTE: converting to pallet origin loses the origin filter information. + fn from(val: RuntimeOrigin) -> Self { + if let OriginCaller::system(l) = val.caller { + Ok(l) + } else { + Err(val) + } + } + } + impl From::AccountId>> for RuntimeOrigin { + #[doc = #doc_string_runtime_origin_with_caller] + fn from(x: Option<<#runtime as #system_path::Config>::AccountId>) -> Self { + <#system_path::Origin<#runtime>>::from(x).into() + } + } + + #pallet_conversions + }) } fn expand_origin_caller_variant( - runtime: &Ident, - pallet: &Pallet, - index: u8, - instance: Option<&Ident>, - generics: &Generics, + runtime: &Ident, + pallet: &Pallet, + index: u8, + instance: Option<&Ident>, + generics: &Generics, ) -> TokenStream { - let part_is_generic = !generics.params.is_empty(); - let variant_name = &pallet.name; - let path = &pallet.path; - let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); - - match instance { - Some(inst) if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#runtime, #path::#inst>), - }, - Some(inst) => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#path::#inst>), - }, - None if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin<#runtime>), - }, - None => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::Origin), - }, - } + let part_is_generic = !generics.params.is_empty(); + let variant_name = &pallet.name; + let path = &pallet.path; + let attr = pallet + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + match instance { + Some(inst) if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#runtime, #path::#inst>), + }, + Some(inst) => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#path::#inst>), + }, + None if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::Origin<#runtime>), + }, + None => quote! 
{ + #attr + #[codec(index = #index)] + #variant_name(#path::Origin), + }, + } } fn expand_origin_pallet_conversions( - scrate: &TokenStream, - runtime: &Ident, - pallet: &Pallet, - instance: Option<&Ident>, - generics: &Generics, + scrate: &TokenStream, + runtime: &Ident, + pallet: &Pallet, + instance: Option<&Ident>, + generics: &Generics, ) -> TokenStream { - let path = &pallet.path; - let variant_name = &pallet.name; - - let part_is_generic = !generics.params.is_empty(); - let pallet_origin = match instance { - Some(inst) if part_is_generic => quote!(#path::Origin<#runtime, #path::#inst>), - Some(inst) => quote!(#path::Origin<#path::#inst>), - None if part_is_generic => quote!(#path::Origin<#runtime>), - None => quote!(#path::Origin), - }; - - let doc_string = get_intra_doc_string(" Convert to runtime origin using", &path.module_name()); - let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); - - quote! { - #attr - impl From<#pallet_origin> for OriginCaller { - fn from(x: #pallet_origin) -> Self { - OriginCaller::#variant_name(x) - } - } - - #attr - impl From<#pallet_origin> for RuntimeOrigin { - #[doc = #doc_string] - fn from(x: #pallet_origin) -> Self { - let x: OriginCaller = x.into(); - x.into() - } - } - - #attr - impl From for #scrate::__private::sp_std::result::Result<#pallet_origin, RuntimeOrigin> { - /// NOTE: converting to pallet origin loses the origin filter information. 
- fn from(val: RuntimeOrigin) -> Self { - if let OriginCaller::#variant_name(l) = val.caller { - Ok(l) - } else { - Err(val) - } - } - } - - #attr - impl TryFrom for #pallet_origin { - type Error = OriginCaller; - fn try_from( - x: OriginCaller, - ) -> #scrate::__private::sp_std::result::Result<#pallet_origin, OriginCaller> { - if let OriginCaller::#variant_name(l) = x { - Ok(l) - } else { - Err(x) - } - } - } - - #attr - impl<'a> TryFrom<&'a OriginCaller> for &'a #pallet_origin { - type Error = (); - fn try_from( - x: &'a OriginCaller, - ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { - if let OriginCaller::#variant_name(l) = x { - Ok(&l) - } else { - Err(()) - } - } - } - - #attr - impl<'a> TryFrom<&'a RuntimeOrigin> for &'a #pallet_origin { - type Error = (); - fn try_from( - x: &'a RuntimeOrigin, - ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { - if let OriginCaller::#variant_name(l) = &x.caller { - Ok(&l) - } else { - Err(()) - } - } - } - } + let path = &pallet.path; + let variant_name = &pallet.name; + + let part_is_generic = !generics.params.is_empty(); + let pallet_origin = match instance { + Some(inst) if part_is_generic => quote!(#path::Origin<#runtime, #path::#inst>), + Some(inst) => quote!(#path::Origin<#path::#inst>), + None if part_is_generic => quote!(#path::Origin<#runtime>), + None => quote!(#path::Origin), + }; + + let doc_string = get_intra_doc_string(" Convert to runtime origin using", &path.module_name()); + let attr = pallet + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); + + quote! 
{ + #attr + impl From<#pallet_origin> for OriginCaller { + fn from(x: #pallet_origin) -> Self { + OriginCaller::#variant_name(x) + } + } + + #attr + impl From<#pallet_origin> for RuntimeOrigin { + #[doc = #doc_string] + fn from(x: #pallet_origin) -> Self { + let x: OriginCaller = x.into(); + x.into() + } + } + + #attr + impl From for #scrate::__private::sp_std::result::Result<#pallet_origin, RuntimeOrigin> { + /// NOTE: converting to pallet origin loses the origin filter information. + fn from(val: RuntimeOrigin) -> Self { + if let OriginCaller::#variant_name(l) = val.caller { + Ok(l) + } else { + Err(val) + } + } + } + + #attr + impl TryFrom for #pallet_origin { + type Error = OriginCaller; + fn try_from( + x: OriginCaller, + ) -> #scrate::__private::sp_std::result::Result<#pallet_origin, OriginCaller> { + if let OriginCaller::#variant_name(l) = x { + Ok(l) + } else { + Err(x) + } + } + } + + #attr + impl<'a> TryFrom<&'a OriginCaller> for &'a #pallet_origin { + type Error = (); + fn try_from( + x: &'a OriginCaller, + ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + if let OriginCaller::#variant_name(l) = x { + Ok(&l) + } else { + Err(()) + } + } + } + + #attr + impl<'a> TryFrom<&'a RuntimeOrigin> for &'a #pallet_origin { + type Error = (); + fn try_from( + x: &'a RuntimeOrigin, + ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + if let OriginCaller::#variant_name(l) = &x.caller { + Ok(&l) + } else { + Err(()) + } + } + } + } } // Get the actual documentation using the doc information and system path name fn get_intra_doc_string(doc_info: &str, system_path_name: &String) -> String { - format!(" {} [`{}::Config::BaseCallFilter`].", doc_info, system_path_name) + format!( + " {} [`{}::Config::BaseCallFilter`].", + doc_info, system_path_name + ) } diff --git a/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs b/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs index 
80b242ccb..28e39c7a2 100644 --- a/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs +++ b/support/procedural-fork/src/construct_runtime/expand/outer_enums.rs @@ -24,37 +24,37 @@ use syn::{Generics, Ident}; /// Represents the types supported for creating an outer enum. #[derive(Clone, Copy, PartialEq)] pub enum OuterEnumType { - /// Collects the Event enums from all pallets. - Event, - /// Collects the Error enums from all pallets. - Error, + /// Collects the Event enums from all pallets. + Event, + /// Collects the Error enums from all pallets. + Error, } impl OuterEnumType { - /// The name of the structure this enum represents. - fn struct_name(&self) -> &str { - match self { - OuterEnumType::Event => "RuntimeEvent", - OuterEnumType::Error => "RuntimeError", - } - } + /// The name of the structure this enum represents. + fn struct_name(&self) -> &str { + match self { + OuterEnumType::Event => "RuntimeEvent", + OuterEnumType::Error => "RuntimeError", + } + } - /// The name of the variant (ie `Event` or `Error`). - fn variant_name(&self) -> &str { - match self { - OuterEnumType::Event => "Event", - OuterEnumType::Error => "Error", - } - } + /// The name of the variant (ie `Event` or `Error`). + fn variant_name(&self) -> &str { + match self { + OuterEnumType::Event => "Event", + OuterEnumType::Error => "Error", + } + } } impl ToTokens for OuterEnumType { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - OuterEnumType::Event => quote!(Event).to_tokens(tokens), - OuterEnumType::Error => quote!(Error).to_tokens(tokens), - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + OuterEnumType::Event => quote!(Event).to_tokens(tokens), + OuterEnumType::Error => quote!(Error).to_tokens(tokens), + } + } } /// Create an outer enum that encapsulates all pallets as variants. @@ -84,196 +84,207 @@ impl ToTokens for OuterEnumType { /// /// Notice that the pallet index is preserved using the `#[codec(index = ..)]` attribute. 
pub fn expand_outer_enum( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, - enum_ty: OuterEnumType, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, + enum_ty: OuterEnumType, ) -> syn::Result { - // Stores all pallet variants. - let mut enum_variants = TokenStream::new(); - // Generates the enum conversion between the `Runtime` outer enum and the pallet's enum. - let mut enum_conversions = TokenStream::new(); - // Specific for events to query via `is_event_part_defined!`. - let mut query_enum_part_macros = Vec::new(); + // Stores all pallet variants. + let mut enum_variants = TokenStream::new(); + // Generates the enum conversion between the `Runtime` outer enum and the pallet's enum. + let mut enum_conversions = TokenStream::new(); + // Specific for events to query via `is_event_part_defined!`. + let mut query_enum_part_macros = Vec::new(); - let enum_name_str = enum_ty.variant_name(); - let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); + let enum_name_str = enum_ty.variant_name(); + let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); - for pallet_decl in pallet_decls { - let Some(pallet_entry) = pallet_decl.find_part(enum_name_str) else { continue }; + for pallet_decl in pallet_decls { + let Some(pallet_entry) = pallet_decl.find_part(enum_name_str) else { + continue; + }; - let path = &pallet_decl.path; - let pallet_name = &pallet_decl.name; - let index = pallet_decl.index; - let instance = pallet_decl.instance.as_ref(); - let generics = &pallet_entry.generics; + let path = &pallet_decl.path; + let pallet_name = &pallet_decl.name; + let index = pallet_decl.index; + let instance = pallet_decl.instance.as_ref(); + let generics = &pallet_entry.generics; - if instance.is_some() && generics.params.is_empty() { - let msg = format!( - "Instantiable pallet with no generic `{}` cannot \ + if instance.is_some() && generics.params.is_empty() { + let msg = format!( + "Instantiable 
pallet with no generic `{}` cannot \ be constructed: pallet `{}` must have generic `{}`", - enum_name_str, pallet_name, enum_name_str, - ); - return Err(syn::Error::new(pallet_name.span(), msg)) - } + enum_name_str, pallet_name, enum_name_str, + ); + return Err(syn::Error::new(pallet_name.span(), msg)); + } - let part_is_generic = !generics.params.is_empty(); - let pallet_enum = match (instance, part_is_generic) { - (Some(inst), true) => quote!(#path::#enum_ty::<#runtime, #path::#inst>), - (Some(inst), false) => quote!(#path::#enum_ty::<#path::#inst>), - (None, true) => quote!(#path::#enum_ty::<#runtime>), - (None, false) => quote!(#path::#enum_ty), - }; + let part_is_generic = !generics.params.is_empty(); + let pallet_enum = match (instance, part_is_generic) { + (Some(inst), true) => quote!(#path::#enum_ty::<#runtime, #path::#inst>), + (Some(inst), false) => quote!(#path::#enum_ty::<#path::#inst>), + (None, true) => quote!(#path::#enum_ty::<#runtime>), + (None, false) => quote!(#path::#enum_ty), + }; - enum_variants.extend(expand_enum_variant( - runtime, - pallet_decl, - index, - instance, - generics, - enum_ty, - )); - enum_conversions.extend(expand_enum_conversion( - pallet_decl, - &pallet_enum, - &enum_name_ident, - )); + enum_variants.extend(expand_enum_variant( + runtime, + pallet_decl, + index, + instance, + generics, + enum_ty, + )); + enum_conversions.extend(expand_enum_conversion( + pallet_decl, + &pallet_enum, + &enum_name_ident, + )); - if enum_ty == OuterEnumType::Event { - query_enum_part_macros.push(quote! { - #path::__substrate_event_check::is_event_part_defined!(#pallet_name); - }); - } - } + if enum_ty == OuterEnumType::Event { + query_enum_part_macros.push(quote! { + #path::__substrate_event_check::is_event_part_defined!(#pallet_name); + }); + } + } - // Derives specific for the event. - let event_custom_derives = - if enum_ty == OuterEnumType::Event { quote!(Clone, PartialEq, Eq,) } else { quote!() }; + // Derives specific for the event. 
+ let event_custom_derives = if enum_ty == OuterEnumType::Event { + quote!(Clone, PartialEq, Eq,) + } else { + quote!() + }; - // Implementation specific for errors. - let error_custom_impl = generate_error_impl(scrate, enum_ty); + // Implementation specific for errors. + let error_custom_impl = generate_error_impl(scrate, enum_ty); - Ok(quote! { - #( #query_enum_part_macros )* + Ok(quote! { + #( #query_enum_part_macros )* - #[derive( - #event_custom_derives - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - #[allow(non_camel_case_types)] - pub enum #enum_name_ident { - #enum_variants - } + #[derive( + #event_custom_derives + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + #[allow(non_camel_case_types)] + pub enum #enum_name_ident { + #enum_variants + } - #enum_conversions + #enum_conversions - #error_custom_impl - }) + #error_custom_impl + }) } fn expand_enum_variant( - runtime: &Ident, - pallet: &Pallet, - index: u8, - instance: Option<&Ident>, - generics: &Generics, - enum_ty: OuterEnumType, + runtime: &Ident, + pallet: &Pallet, + index: u8, + instance: Option<&Ident>, + generics: &Generics, + enum_ty: OuterEnumType, ) -> TokenStream { - let path = &pallet.path; - let variant_name = &pallet.name; - let part_is_generic = !generics.params.is_empty(); - let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); + let path = &pallet.path; + let variant_name = &pallet.name; + let part_is_generic = !generics.params.is_empty(); + let attr = pallet + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - match instance { - Some(inst) if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#runtime, #path::#inst>), - }, - Some(inst) => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#path::#inst>), - }, - None if part_is_generic => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty<#runtime>), - }, - None => quote! { - #attr - #[codec(index = #index)] - #variant_name(#path::#enum_ty), - }, - } + match instance { + Some(inst) if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#runtime, #path::#inst>), + }, + Some(inst) => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#path::#inst>), + }, + None if part_is_generic => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty<#runtime>), + }, + None => quote! { + #attr + #[codec(index = #index)] + #variant_name(#path::#enum_ty), + }, + } } fn expand_enum_conversion( - pallet: &Pallet, - pallet_enum: &TokenStream, - enum_name_ident: &Ident, + pallet: &Pallet, + pallet_enum: &TokenStream, + enum_name_ident: &Ident, ) -> TokenStream { - let variant_name = &pallet.name; - let attr = pallet.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! 
{ - #acc - #attr - } - }); + let variant_name = &pallet.name; + let attr = pallet + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - quote! { - #attr - impl From<#pallet_enum> for #enum_name_ident { - fn from(x: #pallet_enum) -> Self { - #enum_name_ident - ::#variant_name(x) - } - } - #attr - impl TryInto<#pallet_enum> for #enum_name_ident { - type Error = (); + quote! { + #attr + impl From<#pallet_enum> for #enum_name_ident { + fn from(x: #pallet_enum) -> Self { + #enum_name_ident + ::#variant_name(x) + } + } + #attr + impl TryInto<#pallet_enum> for #enum_name_ident { + type Error = (); - fn try_into(self) -> ::core::result::Result<#pallet_enum, Self::Error> { - match self { - Self::#variant_name(evt) => Ok(evt), - _ => Err(()), - } - } - } - } + fn try_into(self) -> ::core::result::Result<#pallet_enum, Self::Error> { + match self { + Self::#variant_name(evt) => Ok(evt), + _ => Err(()), + } + } + } + } } fn generate_error_impl(scrate: &TokenStream, enum_ty: OuterEnumType) -> TokenStream { - // Implementation is specific to `Error`s. - if enum_ty == OuterEnumType::Event { - return quote! {} - } + // Implementation is specific to `Error`s. + if enum_ty == OuterEnumType::Event { + return quote! {}; + } - let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); + let enum_name_ident = Ident::new(enum_ty.struct_name(), Span::call_site()); - quote! { - impl #enum_name_ident { - /// Optionally convert the `DispatchError` into the `RuntimeError`. - /// - /// Returns `Some` if the error matches the `DispatchError::Module` variant, otherwise `None`. - pub fn from_dispatch_error(err: #scrate::sp_runtime::DispatchError) -> Option { - let #scrate::sp_runtime::DispatchError::Module(module_error) = err else { return None }; + quote! 
{ + impl #enum_name_ident { + /// Optionally convert the `DispatchError` into the `RuntimeError`. + /// + /// Returns `Some` if the error matches the `DispatchError::Module` variant, otherwise `None`. + pub fn from_dispatch_error(err: #scrate::sp_runtime::DispatchError) -> Option { + let #scrate::sp_runtime::DispatchError::Module(module_error) = err else { return None }; - let bytes = #scrate::__private::codec::Encode::encode(&module_error); - #scrate::__private::codec::Decode::decode(&mut &bytes[..]).ok() - } - } - } + let bytes = #scrate::__private::codec::Encode::encode(&module_error); + #scrate::__private::codec::Decode::decode(&mut &bytes[..]).ok() + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs index 892b842b1..0695d8102 100644 --- a/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs +++ b/support/procedural-fork/src/construct_runtime/expand/slash_reason.rs @@ -21,44 +21,44 @@ use proc_macro2::TokenStream; use quote::quote; pub fn expand_outer_slash_reason(pallet_decls: &[Pallet], scrate: &TokenStream) -> TokenStream { - let mut conversion_fns = Vec::new(); - let mut slash_reason_variants = Vec::new(); - for decl in pallet_decls { - if let Some(_) = decl.find_part("SlashReason") { - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - let instance = decl.instance.as_ref(); + let mut conversion_fns = Vec::new(); + let mut slash_reason_variants = Vec::new(); + for decl in pallet_decls { + if let Some(_) = decl.find_part("SlashReason") { + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + let instance = decl.instance.as_ref(); - conversion_fns.push(composite_helper::expand_conversion_fn( - "SlashReason", - path, - instance, - variant_name, - )); + conversion_fns.push(composite_helper::expand_conversion_fn( + "SlashReason", + path, + instance, + variant_name, + )); 
- slash_reason_variants.push(composite_helper::expand_variant( - "SlashReason", - index, - path, - instance, - variant_name, - )); - } - } + slash_reason_variants.push(composite_helper::expand_variant( + "SlashReason", + index, + path, + instance, + variant_name, + )); + } + } - quote! { - /// A reason for slashing funds. - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeSlashReason { - #( #slash_reason_variants )* - } + quote! { + /// A reason for slashing funds. + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeSlashReason { + #( #slash_reason_variants )* + } - #( #conversion_fns )* - } + #( #conversion_fns )* + } } diff --git a/support/procedural-fork/src/construct_runtime/expand/task.rs b/support/procedural-fork/src/construct_runtime/expand/task.rs index 6531c0e9e..94a5f52bb 100644 --- a/support/procedural-fork/src/construct_runtime/expand/task.rs +++ b/support/procedural-fork/src/construct_runtime/expand/task.rs @@ -21,111 +21,111 @@ use quote::quote; /// Expands aggregate `RuntimeTask` enum. pub fn expand_outer_task( - runtime_name: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream2, + runtime_name: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let mut from_impls = Vec::new(); - let mut task_variants = Vec::new(); - let mut variant_names = Vec::new(); - let mut task_paths = Vec::new(); - for decl in pallet_decls { - if decl.find_part("Task").is_none() { - continue - } - - let variant_name = &decl.name; - let path = &decl.path; - let index = decl.index; - - from_impls.push(quote! 
{ - impl From<#path::Task<#runtime_name>> for RuntimeTask { - fn from(hr: #path::Task<#runtime_name>) -> Self { - RuntimeTask::#variant_name(hr) - } - } - - impl TryInto<#path::Task<#runtime_name>> for RuntimeTask { - type Error = (); - - fn try_into(self) -> Result<#path::Task<#runtime_name>, Self::Error> { - match self { - RuntimeTask::#variant_name(hr) => Ok(hr), - _ => Err(()), - } - } - } - }); - - task_variants.push(quote! { - #[codec(index = #index)] - #variant_name(#path::Task<#runtime_name>), - }); - - variant_names.push(quote!(#variant_name)); - - task_paths.push(quote!(#path::Task)); - } - - let prelude = quote!(#scrate::traits::tasks::__private); - - const INCOMPLETE_MATCH_QED: &'static str = - "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; - - let output = quote! { - /// An aggregation of all `Task` enums across all pallets included in the current runtime. - #[derive( - Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, - #scrate::__private::codec::Decode, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - pub enum RuntimeTask { - #( #task_variants )* - } - - #[automatically_derived] - impl #scrate::traits::Task for RuntimeTask { - type Enumeration = #prelude::IntoIter; - - fn is_valid(&self) -> bool { - match self { - #(RuntimeTask::#variant_names(val) => val.is_valid(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn run(&self) -> Result<(), #scrate::traits::tasks::__private::DispatchError> { - match self { - #(RuntimeTask::#variant_names(val) => val.run(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn weight(&self) -> #scrate::pallet_prelude::Weight { - match self { - #(RuntimeTask::#variant_names(val) => val.weight(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn task_index(&self) -> u32 { - match self { - #(RuntimeTask::#variant_names(val) => val.task_index(),)* - _ => unreachable!(#INCOMPLETE_MATCH_QED), - } - } - - fn 
iter() -> Self::Enumeration { - let mut all_tasks = Vec::new(); - #(all_tasks.extend(#task_paths::iter().map(RuntimeTask::from).collect::>());)* - all_tasks.into_iter() - } - } - - #( #from_impls )* - }; - - output + let mut from_impls = Vec::new(); + let mut task_variants = Vec::new(); + let mut variant_names = Vec::new(); + let mut task_paths = Vec::new(); + for decl in pallet_decls { + if decl.find_part("Task").is_none() { + continue; + } + + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + + from_impls.push(quote! { + impl From<#path::Task<#runtime_name>> for RuntimeTask { + fn from(hr: #path::Task<#runtime_name>) -> Self { + RuntimeTask::#variant_name(hr) + } + } + + impl TryInto<#path::Task<#runtime_name>> for RuntimeTask { + type Error = (); + + fn try_into(self) -> Result<#path::Task<#runtime_name>, Self::Error> { + match self { + RuntimeTask::#variant_name(hr) => Ok(hr), + _ => Err(()), + } + } + } + }); + + task_variants.push(quote! { + #[codec(index = #index)] + #variant_name(#path::Task<#runtime_name>), + }); + + variant_names.push(quote!(#variant_name)); + + task_paths.push(quote!(#path::Task)); + } + + let prelude = quote!(#scrate::traits::tasks::__private); + + const INCOMPLETE_MATCH_QED: &'static str = + "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; + + let output = quote! { + /// An aggregation of all `Task` enums across all pallets included in the current runtime. 
+ #[derive( + Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeTask { + #( #task_variants )* + } + + #[automatically_derived] + impl #scrate::traits::Task for RuntimeTask { + type Enumeration = #prelude::IntoIter; + + fn is_valid(&self) -> bool { + match self { + #(RuntimeTask::#variant_names(val) => val.is_valid(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn run(&self) -> Result<(), #scrate::traits::tasks::__private::DispatchError> { + match self { + #(RuntimeTask::#variant_names(val) => val.run(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn weight(&self) -> #scrate::pallet_prelude::Weight { + match self { + #(RuntimeTask::#variant_names(val) => val.weight(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn task_index(&self) -> u32 { + match self { + #(RuntimeTask::#variant_names(val) => val.task_index(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn iter() -> Self::Enumeration { + let mut all_tasks = Vec::new(); + #(all_tasks.extend(#task_paths::iter().map(RuntimeTask::from).collect::>());)* + all_tasks.into_iter() + } + } + + #( #from_impls )* + }; + + output } diff --git a/support/procedural-fork/src/construct_runtime/expand/unsigned.rs b/support/procedural-fork/src/construct_runtime/expand/unsigned.rs index 33aadba0d..109f7081c 100644 --- a/support/procedural-fork/src/construct_runtime/expand/unsigned.rs +++ b/support/procedural-fork/src/construct_runtime/expand/unsigned.rs @@ -22,68 +22,71 @@ use std::str::FromStr; use syn::Ident; pub fn expand_outer_validate_unsigned( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream, ) -> TokenStream { - let mut pallet_names = Vec::new(); - let mut pallet_attrs = Vec::new(); - let mut query_validate_unsigned_part_macros = 
Vec::new(); + let mut pallet_names = Vec::new(); + let mut pallet_attrs = Vec::new(); + let mut query_validate_unsigned_part_macros = Vec::new(); - for pallet_decl in pallet_decls { - if pallet_decl.exists_part("ValidateUnsigned") { - let name = &pallet_decl.name; - let path = &pallet_decl.path; - let attr = pallet_decl.cfg_pattern.iter().fold(TokenStream::new(), |acc, pattern| { - let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }); + for pallet_decl in pallet_decls { + if pallet_decl.exists_part("ValidateUnsigned") { + let name = &pallet_decl.name; + let path = &pallet_decl.path; + let attr = pallet_decl + .cfg_pattern + .iter() + .fold(TokenStream::new(), |acc, pattern| { + let attr = TokenStream::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }); - pallet_names.push(name); - pallet_attrs.push(attr); - query_validate_unsigned_part_macros.push(quote! { + pallet_names.push(name); + pallet_attrs.push(attr); + query_validate_unsigned_part_macros.push(quote! { #path::__substrate_validate_unsigned_check::is_validate_unsigned_part_defined!(#name); }); - } - } + } + } - quote! { - #( #query_validate_unsigned_part_macros )* + quote! { + #( #query_validate_unsigned_part_macros )* - impl #scrate::unsigned::ValidateUnsigned for #runtime { - type Call = RuntimeCall; + impl #scrate::unsigned::ValidateUnsigned for #runtime { + type Call = RuntimeCall; - fn pre_dispatch(call: &Self::Call) -> Result<(), #scrate::unsigned::TransactionValidityError> { - #[allow(unreachable_patterns)] - match call { - #( - #pallet_attrs - RuntimeCall::#pallet_names(inner_call) => #pallet_names::pre_dispatch(inner_call), - )* - // pre-dispatch should not stop inherent extrinsics, validation should prevent - // including arbitrary (non-inherent) extrinsics to blocks. 
- _ => Ok(()), - } - } + fn pre_dispatch(call: &Self::Call) -> Result<(), #scrate::unsigned::TransactionValidityError> { + #[allow(unreachable_patterns)] + match call { + #( + #pallet_attrs + RuntimeCall::#pallet_names(inner_call) => #pallet_names::pre_dispatch(inner_call), + )* + // pre-dispatch should not stop inherent extrinsics, validation should prevent + // including arbitrary (non-inherent) extrinsics to blocks. + _ => Ok(()), + } + } - fn validate_unsigned( - #[allow(unused_variables)] - source: #scrate::unsigned::TransactionSource, - call: &Self::Call, - ) -> #scrate::unsigned::TransactionValidity { - #[allow(unreachable_patterns)] - match call { - #( - #pallet_attrs - RuntimeCall::#pallet_names(inner_call) => #pallet_names::validate_unsigned(source, inner_call), - )* - _ => #scrate::unsigned::UnknownTransaction::NoUnsignedValidator.into(), - } - } - } - } + fn validate_unsigned( + #[allow(unused_variables)] + source: #scrate::unsigned::TransactionSource, + call: &Self::Call, + ) -> #scrate::unsigned::TransactionValidity { + #[allow(unreachable_patterns)] + match call { + #( + #pallet_attrs + RuntimeCall::#pallet_names(inner_call) => #pallet_names::validate_unsigned(source, inner_call), + )* + _ => #scrate::unsigned::UnknownTransaction::NoUnsignedValidator.into(), + } + } + } + } } diff --git a/support/procedural-fork/src/construct_runtime/mod.rs b/support/procedural-fork/src/construct_runtime/mod.rs index b083abbb2..de688b3d6 100644 --- a/support/procedural-fork/src/construct_runtime/mod.rs +++ b/support/procedural-fork/src/construct_runtime/mod.rs @@ -214,7 +214,7 @@ pub(crate) mod parse; use crate::pallet::parse::helper::two128_str; use cfg_expr::Predicate; use frame_support_procedural_tools::{ - generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, + generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, }; use itertools::Itertools; use parse::{ExplicitRuntimeDeclaration, 
ImplicitRuntimeDeclaration, Pallet, RuntimeDeclaration}; @@ -230,48 +230,48 @@ const SYSTEM_PALLET_NAME: &str = "System"; /// Implementation of `construct_runtime` macro. Either expand to some code which will call /// `construct_runtime` again, or expand to the final runtime definition. pub fn construct_runtime(input: TokenStream) -> TokenStream { - let input_copy = input.clone(); - let definition = syn::parse_macro_input!(input as RuntimeDeclaration); - - let (check_pallet_number_res, res) = match definition { - RuntimeDeclaration::Implicit(implicit_def) => ( - check_pallet_number(input_copy.clone().into(), implicit_def.pallets.len()), - construct_runtime_implicit_to_explicit(input_copy.into(), implicit_def), - ), - RuntimeDeclaration::Explicit(explicit_decl) => ( - check_pallet_number(input_copy.clone().into(), explicit_decl.pallets.len()), - construct_runtime_explicit_to_explicit_expanded(input_copy.into(), explicit_decl), - ), - RuntimeDeclaration::ExplicitExpanded(explicit_decl) => ( - check_pallet_number(input_copy.into(), explicit_decl.pallets.len()), - construct_runtime_final_expansion(explicit_decl), - ), - }; - - let res = res.unwrap_or_else(|e| e.to_compile_error()); - - // We want to provide better error messages to the user and thus, handle the error here - // separately. If there is an error, we print the error and still generate all of the code to - // get in overall less errors for the user. - let res = if let Err(error) = check_pallet_number_res { - let error = error.to_compile_error(); - - quote! 
{ - #error - - #res - } - } else { - res - }; - - let res = expander::Expander::new("construct_runtime") - .dry(std::env::var("EXPAND_MACROS").is_err()) - .verbose(true) - .write_to_out_dir(res) - .expect("Does not fail because of IO in OUT_DIR; qed"); - - res.into() + let input_copy = input.clone(); + let definition = syn::parse_macro_input!(input as RuntimeDeclaration); + + let (check_pallet_number_res, res) = match definition { + RuntimeDeclaration::Implicit(implicit_def) => ( + check_pallet_number(input_copy.clone().into(), implicit_def.pallets.len()), + construct_runtime_implicit_to_explicit(input_copy.into(), implicit_def), + ), + RuntimeDeclaration::Explicit(explicit_decl) => ( + check_pallet_number(input_copy.clone().into(), explicit_decl.pallets.len()), + construct_runtime_explicit_to_explicit_expanded(input_copy.into(), explicit_decl), + ), + RuntimeDeclaration::ExplicitExpanded(explicit_decl) => ( + check_pallet_number(input_copy.into(), explicit_decl.pallets.len()), + construct_runtime_final_expansion(explicit_decl), + ), + }; + + let res = res.unwrap_or_else(|e| e.to_compile_error()); + + // We want to provide better error messages to the user and thus, handle the error here + // separately. If there is an error, we print the error and still generate all of the code to + // get in overall less errors for the user. + let res = if let Err(error) = check_pallet_number_res { + let error = error.to_compile_error(); + + quote! { + #error + + #res + } + } else { + res + }; + + let res = expander::Expander::new("construct_runtime") + .dry(std::env::var("EXPAND_MACROS").is_err()) + .verbose(true) + .write_to_out_dir(res) + .expect("Does not fail because of IO in OUT_DIR; qed"); + + res.into() } /// All pallets that have implicit pallet parts (ie `System: frame_system`) are @@ -282,30 +282,37 @@ pub fn construct_runtime(input: TokenStream) -> TokenStream { /// /// For more details, please refer to the root documentation. 
fn construct_runtime_implicit_to_explicit( - input: TokenStream2, - definition: ImplicitRuntimeDeclaration, + input: TokenStream2, + definition: ImplicitRuntimeDeclaration, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let mut expansion = quote::quote!( - #frame_support::construct_runtime! { #input } - ); - for pallet in definition.pallets.iter().filter(|pallet| pallet.pallet_parts.is_none()) { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(::<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_default_parts }] - your_tt_return = [{ #frame_support::__private::tt_return }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name: #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let mut expansion = quote::quote!( + #frame_support::construct_runtime! { #input } + ); + for pallet in definition + .pallets + .iter() + .filter(|pallet| pallet.pallet_parts.is_none()) + { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet + .instance + .as_ref() + .map(|instance| quote::quote!(::<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_default_parts }] + your_tt_return = [{ #frame_support::__private::tt_return }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name: #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } /// All pallets that have @@ -318,264 +325,283 @@ fn construct_runtime_implicit_to_explicit( /// /// For more details, please refer to the root documentation. 
fn construct_runtime_explicit_to_explicit_expanded( - input: TokenStream2, - definition: ExplicitRuntimeDeclaration, + input: TokenStream2, + definition: ExplicitRuntimeDeclaration, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let mut expansion = quote::quote!( - #frame_support::construct_runtime! { #input } - ); - for pallet in definition.pallets.iter().filter(|pallet| !pallet.is_expanded) { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(::<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_extra_parts }] - your_tt_return = [{ #frame_support::__private::tt_return }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name: #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let mut expansion = quote::quote!( + #frame_support::construct_runtime! { #input } + ); + for pallet in definition + .pallets + .iter() + .filter(|pallet| !pallet.is_expanded) + { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet + .instance + .as_ref() + .map(|instance| quote::quote!(::<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_extra_parts }] + your_tt_return = [{ #frame_support::__private::tt_return }] + ~~> #frame_support::match_and_insert! { + target = [{ #expansion }] + pattern = [{ #pallet_name: #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } /// All pallets have explicit definition of parts, this will expand to the runtime declaration. 
fn construct_runtime_final_expansion( - definition: ExplicitRuntimeDeclaration, + definition: ExplicitRuntimeDeclaration, ) -> Result { - let ExplicitRuntimeDeclaration { name, pallets, pallets_token, where_section } = definition; - - let system_pallet = - pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { - syn::Error::new( - pallets_token.span.join(), - "`System` pallet declaration is missing. \ + let ExplicitRuntimeDeclaration { + name, + pallets, + pallets_token, + where_section, + } = definition; + + let system_pallet = pallets + .iter() + .find(|decl| decl.name == SYSTEM_PALLET_NAME) + .ok_or_else(|| { + syn::Error::new( + pallets_token.span.join(), + "`System` pallet declaration is missing. \ Please add this line: `System: frame_system,`", - ) - })?; - if !system_pallet.cfg_pattern.is_empty() { - return Err(syn::Error::new( - system_pallet.name.span(), - "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", - )) - } - - let features = pallets - .iter() - .filter_map(|decl| { - (!decl.cfg_pattern.is_empty()).then(|| { - decl.cfg_pattern.iter().flat_map(|attr| { - attr.predicates().filter_map(|pred| match pred { - Predicate::Feature(feat) => Some(feat), - Predicate::Test => Some("test"), - _ => None, - }) - }) - }) - }) - .flatten() - .collect::>(); - - let hidden_crate_name = "construct_runtime"; - let scrate = generate_crate_access(hidden_crate_name, "frame-support"); - let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let block = quote!(<#name as #frame_system::Config>::Block); - let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); - - let outer_event = - expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Event)?; - let outer_error = - expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Error)?; - - let 
outer_origin = expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?; - let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); - let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); - - let dispatch = expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate); - let tasks = expand::expand_outer_task(&name, &pallets, &scrate); - let metadata = expand::expand_runtime_metadata( - &name, - &pallets, - &scrate, - &unchecked_extrinsic, - &system_pallet.path, - ); - let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); - let inherent = - expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); - let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); - let freeze_reason = expand::expand_outer_freeze_reason(&pallets, &scrate); - let hold_reason = expand::expand_outer_hold_reason(&pallets, &scrate); - let lock_id = expand::expand_outer_lock_id(&pallets, &scrate); - let slash_reason = expand::expand_outer_slash_reason(&pallets, &scrate); - let integrity_test = decl_integrity_test(&scrate); - let static_assertions = decl_static_assertions(&name, &pallets, &scrate); - - let warning = where_section.map_or(None, |where_section| { - Some( - proc_macro_warning::Warning::new_deprecated("WhereSection") - .old("use a `where` clause in `construct_runtime`") - .new( - "use `frame_system::Config` to set the `Block` type and delete this clause. 
+ ) + })?; + if !system_pallet.cfg_pattern.is_empty() { + return Err(syn::Error::new( + system_pallet.name.span(), + "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", + )); + } + + let features = pallets + .iter() + .filter_map(|decl| { + (!decl.cfg_pattern.is_empty()).then(|| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) + }) + .flatten() + .collect::>(); + + let hidden_crate_name = "construct_runtime"; + let scrate = generate_crate_access(hidden_crate_name, "frame-support"); + let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let block = quote!(<#name as #frame_system::Config>::Block); + let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); + + let outer_event = + expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Event)?; + let outer_error = + expand::expand_outer_enum(&name, &pallets, &scrate, expand::OuterEnumType::Error)?; + + let outer_origin = expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?; + let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); + let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); + + let dispatch = expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate); + let tasks = expand::expand_outer_task(&name, &pallets, &scrate); + let metadata = expand::expand_runtime_metadata( + &name, + &pallets, + &scrate, + &unchecked_extrinsic, + &system_pallet.path, + ); + let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); + let inherent = + expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); + let validate_unsigned = expand::expand_outer_validate_unsigned(&name, 
&pallets, &scrate); + let freeze_reason = expand::expand_outer_freeze_reason(&pallets, &scrate); + let hold_reason = expand::expand_outer_hold_reason(&pallets, &scrate); + let lock_id = expand::expand_outer_lock_id(&pallets, &scrate); + let slash_reason = expand::expand_outer_slash_reason(&pallets, &scrate); + let integrity_test = decl_integrity_test(&scrate); + let static_assertions = decl_static_assertions(&name, &pallets, &scrate); + + let warning = where_section.map_or(None, |where_section| { + Some( + proc_macro_warning::Warning::new_deprecated("WhereSection") + .old("use a `where` clause in `construct_runtime`") + .new( + "use `frame_system::Config` to set the `Block` type and delete this clause. It is planned to be removed in December 2023", - ) - .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) - .span(where_section.span) - .build_or_panic(), - ) - }); - - let res = quote!( - #warning - - #scrate_decl - - // Prevent UncheckedExtrinsic to print unused warning. - const _: () = { - #[allow(unused)] - type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; - }; - - #[derive( - Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, - #scrate::__private::scale_info::TypeInfo - )] - pub struct #name; - impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { - type RuntimeBlock = #block; - } - - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. - // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. 
- // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` - // is called. - - #[doc(hidden)] - trait InternalConstructRuntime { - #[inline(always)] - fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { - Default::default() - } - } - #[doc(hidden)] - impl InternalConstructRuntime for &#name {} + ) + .help_links(&["https://github.com/paritytech/substrate/pull/14437"]) + .span(where_section.span) + .build_or_panic(), + ) + }); + + let res = quote!( + #warning + + #scrate_decl + + // Prevent UncheckedExtrinsic to print unused warning. + const _: () = { + #[allow(unused)] + type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + }; - #outer_event + #[derive( + Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + pub struct #name; + impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { + type RuntimeBlock = #block; + } - #outer_error + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. + // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `construct_runtime!` may be called without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro construct_runtime!). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. 
+ // `InternalConstructRuntime` is implemented by the `construct_runtime!` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `construct_runtime!` + // is called. - #outer_origin + #[doc(hidden)] + trait InternalConstructRuntime { + #[inline(always)] + fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + Default::default() + } + } + #[doc(hidden)] + impl InternalConstructRuntime for &#name {} - #all_pallets + #outer_event - #pallet_to_index + #outer_error - #dispatch + #outer_origin - #tasks + #all_pallets - #metadata + #pallet_to_index - #outer_config + #dispatch - #inherent + #tasks - #validate_unsigned + #metadata - #freeze_reason + #outer_config - #hold_reason + #inherent - #lock_id + #validate_unsigned - #slash_reason + #freeze_reason - #integrity_test + #hold_reason - #static_assertions - ); + #lock_id - Ok(res) + #slash_reason + + #integrity_test + + #static_assertions + ); + + Ok(res) } pub(crate) fn decl_all_pallets<'a>( - runtime: &'a Ident, - pallet_declarations: impl Iterator, - features: &HashSet<&str>, + runtime: &'a Ident, + pallet_declarations: impl Iterator, + features: &HashSet<&str>, ) -> TokenStream2 { - let mut types = TokenStream2::new(); - - // Every feature set to the pallet names that should be included by this feature set. 
- let mut features_to_names = features - .iter() - .map(|f| *f) - .powerset() - .map(|feat| (HashSet::from_iter(feat), Vec::new())) - .collect::, Vec<_>)>>(); - - for pallet_declaration in pallet_declarations { - let type_name = &pallet_declaration.name; - let pallet = &pallet_declaration.path; - let mut generics = vec![quote!(#runtime)]; - generics.extend(pallet_declaration.instance.iter().map(|name| quote!(#pallet::#name))); - let mut attrs = Vec::new(); - for cfg in &pallet_declaration.cfg_pattern { - let feat = format!("#[cfg({})]\n", cfg.original()); - attrs.extend(TokenStream2::from_str(&feat).expect("was parsed successfully; qed")); - } - let type_decl = quote!( - #(#attrs)* - pub type #type_name = #pallet::Pallet <#(#generics),*>; - ); - types.extend(type_decl); - - if pallet_declaration.cfg_pattern.is_empty() { - for (_, names) in features_to_names.iter_mut() { - names.push(&pallet_declaration.name); - } - } else { - for (feature_set, names) in &mut features_to_names { - // Rust tidbit: if we have multiple `#[cfg]` feature on the same item, then the - // predicates listed in all `#[cfg]` attributes are effectively joined by `and()`, - // meaning that all of them must match in order to activate the item - let is_feature_active = pallet_declaration.cfg_pattern.iter().all(|expr| { - expr.eval(|pred| match pred { - Predicate::Feature(f) => feature_set.contains(f), - Predicate::Test => feature_set.contains(&"test"), - _ => false, - }) - }); - - if is_feature_active { - names.push(&pallet_declaration.name); - } - } - } - } - - // All possible features. This will be used below for the empty feature set. - let mut all_features = features_to_names - .iter() - .flat_map(|f| f.0.iter().cloned()) - .collect::>(); - let attribute_to_names = features_to_names + let mut types = TokenStream2::new(); + + // Every feature set to the pallet names that should be included by this feature set. 
+ let mut features_to_names = features + .iter() + .map(|f| *f) + .powerset() + .map(|feat| (HashSet::from_iter(feat), Vec::new())) + .collect::, Vec<_>)>>(); + + for pallet_declaration in pallet_declarations { + let type_name = &pallet_declaration.name; + let pallet = &pallet_declaration.path; + let mut generics = vec![quote!(#runtime)]; + generics.extend( + pallet_declaration + .instance + .iter() + .map(|name| quote!(#pallet::#name)), + ); + let mut attrs = Vec::new(); + for cfg in &pallet_declaration.cfg_pattern { + let feat = format!("#[cfg({})]\n", cfg.original()); + attrs.extend(TokenStream2::from_str(&feat).expect("was parsed successfully; qed")); + } + let type_decl = quote!( + #(#attrs)* + pub type #type_name = #pallet::Pallet <#(#generics),*>; + ); + types.extend(type_decl); + + if pallet_declaration.cfg_pattern.is_empty() { + for (_, names) in features_to_names.iter_mut() { + names.push(&pallet_declaration.name); + } + } else { + for (feature_set, names) in &mut features_to_names { + // Rust tidbit: if we have multiple `#[cfg]` feature on the same item, then the + // predicates listed in all `#[cfg]` attributes are effectively joined by `and()`, + // meaning that all of them must match in order to activate the item + let is_feature_active = pallet_declaration.cfg_pattern.iter().all(|expr| { + expr.eval(|pred| match pred { + Predicate::Feature(f) => feature_set.contains(f), + Predicate::Test => feature_set.contains(&"test"), + _ => false, + }) + }); + + if is_feature_active { + names.push(&pallet_declaration.name); + } + } + } + } + + // All possible features. This will be used below for the empty feature set. 
+ let mut all_features = features_to_names + .iter() + .flat_map(|f| f.0.iter().cloned()) + .collect::>(); + let attribute_to_names = features_to_names .into_iter() .map(|(mut features, names)| { // If this is the empty feature set, it needs to be changed to negate all available @@ -598,212 +624,222 @@ pub(crate) fn decl_all_pallets<'a>( }) .collect::>(); - let all_pallets_without_system = attribute_to_names.iter().map(|(attr, names)| { - let names = names.iter().filter(|n| **n != SYSTEM_PALLET_NAME); - quote! { - #attr - /// All pallets included in the runtime as a nested tuple of types. - /// Excludes the System pallet. - pub type AllPalletsWithoutSystem = ( #(#names,)* ); - } - }); - - let all_pallets_with_system = attribute_to_names.iter().map(|(attr, names)| { - quote! { - #attr - /// All pallets included in the runtime as a nested tuple of types. - pub type AllPalletsWithSystem = ( #(#names,)* ); - } - }); - - quote!( - #types - - #( #all_pallets_with_system )* - - #( #all_pallets_without_system )* - ) + let all_pallets_without_system = attribute_to_names.iter().map(|(attr, names)| { + let names = names.iter().filter(|n| **n != SYSTEM_PALLET_NAME); + quote! { + #attr + /// All pallets included in the runtime as a nested tuple of types. + /// Excludes the System pallet. + pub type AllPalletsWithoutSystem = ( #(#names,)* ); + } + }); + + let all_pallets_with_system = attribute_to_names.iter().map(|(attr, names)| { + quote! { + #attr + /// All pallets included in the runtime as a nested tuple of types. 
+ pub type AllPalletsWithSystem = ( #(#names,)* ); + } + }); + + quote!( + #types + + #( #all_pallets_with_system )* + + #( #all_pallets_without_system )* + ) } pub(crate) fn decl_pallet_runtime_setup( - runtime: &Ident, - pallet_declarations: &[Pallet], - scrate: &TokenStream2, + runtime: &Ident, + pallet_declarations: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let names = pallet_declarations.iter().map(|d| &d.name).collect::>(); - let name_strings = pallet_declarations.iter().map(|d| d.name.to_string()); - let name_hashes = pallet_declarations.iter().map(|d| two128_str(&d.name.to_string())); - let module_names = pallet_declarations.iter().map(|d| d.path.module_name()); - let indices = pallet_declarations.iter().map(|pallet| pallet.index as usize); - let pallet_structs = pallet_declarations - .iter() - .map(|pallet| { - let path = &pallet.path; - match pallet.instance.as_ref() { - Some(inst) => quote!(#path::Pallet<#runtime, #path::#inst>), - None => quote!(#path::Pallet<#runtime>), - } - }) - .collect::>(); - let pallet_attrs = pallet_declarations - .iter() - .map(|pallet| { - pallet.cfg_pattern.iter().fold(TokenStream2::new(), |acc, pattern| { - let attr = TokenStream2::from_str(&format!("#[cfg({})]", pattern.original())) - .expect("was successfully parsed before; qed"); - quote! { - #acc - #attr - } - }) - }) - .collect::>(); - - quote!( - /// Provides an implementation of `PalletInfo` to provide information - /// about the pallet setup in the runtime. - pub struct PalletInfo; - - impl #scrate::traits::PalletInfo for PalletInfo { - - fn index() -> Option { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#indices) - } - )* - - None - } - - fn name() -> Option<&'static str> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#name_strings) - } - )* - - None - } - - fn name_hash() -> Option<[u8; 16]> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#name_hashes) - } - )* - - None - } - - fn module_name() -> Option<&'static str> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some(#module_names) - } - )* - - None - } - - fn crate_version() -> Option<#scrate::traits::CrateVersion> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); - #( - #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { - return Some( - <#pallet_structs as #scrate::traits::PalletInfoAccess>::crate_version() - ) - } - )* - - None - } - } - ) + let names = pallet_declarations + .iter() + .map(|d| &d.name) + .collect::>(); + let name_strings = pallet_declarations.iter().map(|d| d.name.to_string()); + let name_hashes = pallet_declarations + .iter() + .map(|d| two128_str(&d.name.to_string())); + let module_names = pallet_declarations.iter().map(|d| d.path.module_name()); + let indices = pallet_declarations + .iter() + .map(|pallet| pallet.index as usize); + let pallet_structs = pallet_declarations + .iter() + .map(|pallet| { + let path = &pallet.path; + match pallet.instance.as_ref() { + Some(inst) => quote!(#path::Pallet<#runtime, #path::#inst>), + None => quote!(#path::Pallet<#runtime>), + } + }) + .collect::>(); + let pallet_attrs = pallet_declarations + .iter() + .map(|pallet| { + pallet + .cfg_pattern + .iter() + .fold(TokenStream2::new(), |acc, pattern| { + let attr = TokenStream2::from_str(&format!("#[cfg({})]", pattern.original())) + .expect("was successfully parsed before; qed"); + quote! { + #acc + #attr + } + }) + }) + .collect::>(); + + quote!( + /// Provides an implementation of `PalletInfo` to provide information + /// about the pallet setup in the runtime. + pub struct PalletInfo; + + impl #scrate::traits::PalletInfo for PalletInfo { + + fn index() -> Option { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#indices) + } + )* + + None + } + + fn name() -> Option<&'static str> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#name_strings) + } + )* + + None + } + + fn name_hash() -> Option<[u8; 16]> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#name_hashes) + } + )* + + None + } + + fn module_name() -> Option<&'static str> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some(#module_names) + } + )* + + None + } + + fn crate_version() -> Option<#scrate::traits::CrateVersion> { + let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + #( + #pallet_attrs + if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + return Some( + <#pallet_structs as #scrate::traits::PalletInfoAccess>::crate_version() + ) + } + )* + + None + } + } + ) } pub(crate) fn decl_integrity_test(scrate: &TokenStream2) -> TokenStream2 { - quote!( - #[cfg(test)] - mod __construct_runtime_integrity_test { - use super::*; - - #[test] - pub fn runtime_integrity_tests() { - #scrate::__private::sp_tracing::try_init_simple(); - ::integrity_test(); - } - } - ) + quote!( + #[cfg(test)] + mod __construct_runtime_integrity_test { + use super::*; + + #[test] + pub fn runtime_integrity_tests() { + #scrate::__private::sp_tracing::try_init_simple(); + ::integrity_test(); + } + } + ) } pub(crate) fn decl_static_assertions( - runtime: &Ident, - pallet_decls: &[Pallet], - scrate: &TokenStream2, + runtime: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, ) -> TokenStream2 { - let error_encoded_size_check = pallet_decls.iter().map(|decl| { - let path = &decl.path; - let assert_message = format!( - "The maximum encoded size of the error type in the `{}` pallet exceeds \ + let error_encoded_size_check = pallet_decls.iter().map(|decl| { + let path = &decl.path; + let assert_message = format!( + "The maximum encoded size of the error type in the `{}` pallet exceeds \ `MAX_MODULE_ERROR_ENCODED_SIZE`", - decl.name, - ); - - quote! { - #scrate::__private::tt_call! { - macro = [{ #path::tt_error_token }] - your_tt_return = [{ #scrate::__private::tt_return }] - ~~> #scrate::assert_error_encoded_size! { - path = [{ #path }] - runtime = [{ #runtime }] - assert_message = [{ #assert_message }] - } - } - } - }); - - quote! { - #(#error_encoded_size_check)* - } + decl.name, + ); + + quote! { + #scrate::__private::tt_call! { + macro = [{ #path::tt_error_token }] + your_tt_return = [{ #scrate::__private::tt_return }] + ~~> #scrate::assert_error_encoded_size! 
{ + path = [{ #path }] + runtime = [{ #runtime }] + assert_message = [{ #assert_message }] + } + } + } + }); + + quote! { + #(#error_encoded_size_check)* + } } pub(crate) fn check_pallet_number(input: TokenStream2, pallet_num: usize) -> Result<()> { - let max_pallet_num = { - if cfg!(feature = "tuples-96") { - 96 - } else if cfg!(feature = "tuples-128") { - 128 - } else { - 64 - } - }; - - if pallet_num > max_pallet_num { - let no_feature = max_pallet_num == 128; - return Err(syn::Error::new( - input.span(), - format!( - "{} To increase this limit, enable the tuples-{} feature of [frame_support]. {}", - "The number of pallets exceeds the maximum number of tuple elements.", - max_pallet_num + 32, - if no_feature { - "If the feature does not exist - it needs to be implemented." - } else { - "" - }, - ), - )) - } - - Ok(()) + let max_pallet_num = { + if cfg!(feature = "tuples-96") { + 96 + } else if cfg!(feature = "tuples-128") { + 128 + } else { + 64 + } + }; + + if pallet_num > max_pallet_num { + let no_feature = max_pallet_num == 128; + return Err(syn::Error::new( + input.span(), + format!( + "{} To increase this limit, enable the tuples-{} feature of [frame_support]. {}", + "The number of pallets exceeds the maximum number of tuple elements.", + max_pallet_num + 32, + if no_feature { + "If the feature does not exist - it needs to be implemented." 
+ } else { + "" + }, + ), + )); + } + + Ok(()) } diff --git a/support/procedural-fork/src/construct_runtime/parse.rs b/support/procedural-fork/src/construct_runtime/parse.rs index 31866c787..173a8dd12 100644 --- a/support/procedural-fork/src/construct_runtime/parse.rs +++ b/support/procedural-fork/src/construct_runtime/parse.rs @@ -20,34 +20,34 @@ use proc_macro2::{Span, TokenStream}; use quote::ToTokens; use std::collections::{HashMap, HashSet}; use syn::{ - ext::IdentExt, - parse::{Parse, ParseStream}, - punctuated::Punctuated, - spanned::Spanned, - token, Attribute, Error, Ident, Path, Result, Token, + ext::IdentExt, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned, + token, Attribute, Error, Ident, Path, Result, Token, }; mod keyword { - syn::custom_keyword!(Block); - syn::custom_keyword!(NodeBlock); - syn::custom_keyword!(UncheckedExtrinsic); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(Call); - syn::custom_keyword!(Storage); - syn::custom_keyword!(Event); - syn::custom_keyword!(Error); - syn::custom_keyword!(Config); - syn::custom_keyword!(Origin); - syn::custom_keyword!(Inherent); - syn::custom_keyword!(ValidateUnsigned); - syn::custom_keyword!(FreezeReason); - syn::custom_keyword!(HoldReason); - syn::custom_keyword!(Task); - syn::custom_keyword!(LockId); - syn::custom_keyword!(SlashReason); - syn::custom_keyword!(exclude_parts); - syn::custom_keyword!(use_parts); - syn::custom_keyword!(expanded); + syn::custom_keyword!(Block); + syn::custom_keyword!(NodeBlock); + syn::custom_keyword!(UncheckedExtrinsic); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(Call); + syn::custom_keyword!(Storage); + syn::custom_keyword!(Event); + syn::custom_keyword!(Error); + syn::custom_keyword!(Config); + syn::custom_keyword!(Origin); + syn::custom_keyword!(Inherent); + syn::custom_keyword!(ValidateUnsigned); + syn::custom_keyword!(FreezeReason); + syn::custom_keyword!(HoldReason); + syn::custom_keyword!(Task); + 
syn::custom_keyword!(LockId); + syn::custom_keyword!(SlashReason); + syn::custom_keyword!(exclude_parts); + syn::custom_keyword!(use_parts); + syn::custom_keyword!(expanded); } /// Declaration of a runtime. @@ -57,266 +57,298 @@ mod keyword { /// implicit. #[derive(Debug)] pub enum RuntimeDeclaration { - Implicit(ImplicitRuntimeDeclaration), - Explicit(ExplicitRuntimeDeclaration), - ExplicitExpanded(ExplicitRuntimeDeclaration), + Implicit(ImplicitRuntimeDeclaration), + Explicit(ExplicitRuntimeDeclaration), + ExplicitExpanded(ExplicitRuntimeDeclaration), } /// Declaration of a runtime with some pallet with implicit declaration of parts. #[derive(Debug)] pub struct ImplicitRuntimeDeclaration { - pub name: Ident, - pub where_section: Option, - pub pallets: Vec, + pub name: Ident, + pub where_section: Option, + pub pallets: Vec, } /// Declaration of a runtime with all pallet having explicit declaration of parts. #[derive(Debug)] pub struct ExplicitRuntimeDeclaration { - pub name: Ident, - pub where_section: Option, - pub pallets: Vec, - pub pallets_token: token::Brace, + pub name: Ident, + pub where_section: Option, + pub pallets: Vec, + pub pallets_token: token::Brace, } impl Parse for RuntimeDeclaration { - fn parse(input: ParseStream) -> Result { - input.parse::()?; - - // Support either `enum` or `struct`. - if input.peek(Token![struct]) { - input.parse::()?; - } else { - input.parse::()?; - } - - let name = input.parse::()?; - let where_section = if input.peek(token::Where) { Some(input.parse()?) } else { None }; - let pallets = - input.parse::>>()?; - let pallets_token = pallets.token; - - match convert_pallets(pallets.content.inner.into_iter().collect())? 
{ - PalletsConversion::Implicit(pallets) => - Ok(RuntimeDeclaration::Implicit(ImplicitRuntimeDeclaration { - name, - where_section, - pallets, - })), - PalletsConversion::Explicit(pallets) => - Ok(RuntimeDeclaration::Explicit(ExplicitRuntimeDeclaration { - name, - where_section, - pallets, - pallets_token, - })), - PalletsConversion::ExplicitExpanded(pallets) => - Ok(RuntimeDeclaration::ExplicitExpanded(ExplicitRuntimeDeclaration { - name, - where_section, - pallets, - pallets_token, - })), - } - } + fn parse(input: ParseStream) -> Result { + input.parse::()?; + + // Support either `enum` or `struct`. + if input.peek(Token![struct]) { + input.parse::()?; + } else { + input.parse::()?; + } + + let name = input.parse::()?; + let where_section = if input.peek(token::Where) { + Some(input.parse()?) + } else { + None + }; + let pallets = + input.parse::>>()?; + let pallets_token = pallets.token; + + match convert_pallets(pallets.content.inner.into_iter().collect())? { + PalletsConversion::Implicit(pallets) => { + Ok(RuntimeDeclaration::Implicit(ImplicitRuntimeDeclaration { + name, + where_section, + pallets, + })) + } + PalletsConversion::Explicit(pallets) => { + Ok(RuntimeDeclaration::Explicit(ExplicitRuntimeDeclaration { + name, + where_section, + pallets, + pallets_token, + })) + } + PalletsConversion::ExplicitExpanded(pallets) => Ok( + RuntimeDeclaration::ExplicitExpanded(ExplicitRuntimeDeclaration { + name, + where_section, + pallets, + pallets_token, + }), + ), + } + } } #[derive(Debug)] pub struct WhereSection { - pub span: Span, - pub block: syn::TypePath, - pub node_block: syn::TypePath, - pub unchecked_extrinsic: syn::TypePath, + pub span: Span, + pub block: syn::TypePath, + pub node_block: syn::TypePath, + pub unchecked_extrinsic: syn::TypePath, } impl Parse for WhereSection { - fn parse(input: ParseStream) -> Result { - input.parse::()?; - - let mut definitions = Vec::new(); - while !input.peek(token::Brace) { - let definition: WhereDefinition = 
input.parse()?; - definitions.push(definition); - if !input.peek(Token![,]) { - if !input.peek(token::Brace) { - return Err(input.error("Expected `,` or `{`")) - } - break - } - input.parse::()?; - } - let block = remove_kind(input, WhereKind::Block, &mut definitions)?.value; - let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; - let unchecked_extrinsic = - remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?.value; - if let Some(WhereDefinition { ref kind_span, ref kind, .. }) = definitions.first() { - let msg = format!( - "`{:?}` was declared above. Please use exactly one declaration for `{:?}`.", - kind, kind - ); - return Err(Error::new(*kind_span, msg)) - } - Ok(Self { span: input.span(), block, node_block, unchecked_extrinsic }) - } + fn parse(input: ParseStream) -> Result { + input.parse::()?; + + let mut definitions = Vec::new(); + while !input.peek(token::Brace) { + let definition: WhereDefinition = input.parse()?; + definitions.push(definition); + if !input.peek(Token![,]) { + if !input.peek(token::Brace) { + return Err(input.error("Expected `,` or `{`")); + } + break; + } + input.parse::()?; + } + let block = remove_kind(input, WhereKind::Block, &mut definitions)?.value; + let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; + let unchecked_extrinsic = + remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?.value; + if let Some(WhereDefinition { + ref kind_span, + ref kind, + .. + }) = definitions.first() + { + let msg = format!( + "`{:?}` was declared above. 
Please use exactly one declaration for `{:?}`.", + kind, kind + ); + return Err(Error::new(*kind_span, msg)); + } + Ok(Self { + span: input.span(), + block, + node_block, + unchecked_extrinsic, + }) + } } #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum WhereKind { - Block, - NodeBlock, - UncheckedExtrinsic, + Block, + NodeBlock, + UncheckedExtrinsic, } #[derive(Debug)] pub struct WhereDefinition { - pub kind_span: Span, - pub kind: WhereKind, - pub value: syn::TypePath, + pub kind_span: Span, + pub kind: WhereKind, + pub value: syn::TypePath, } impl Parse for WhereDefinition { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - let (kind_span, kind) = if lookahead.peek(keyword::Block) { - (input.parse::()?.span(), WhereKind::Block) - } else if lookahead.peek(keyword::NodeBlock) { - (input.parse::()?.span(), WhereKind::NodeBlock) - } else if lookahead.peek(keyword::UncheckedExtrinsic) { - (input.parse::()?.span(), WhereKind::UncheckedExtrinsic) - } else { - return Err(lookahead.error()) - }; - - Ok(Self { - kind_span, - kind, - value: { - let _: Token![=] = input.parse()?; - input.parse()? - }, - }) - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + let (kind_span, kind) = if lookahead.peek(keyword::Block) { + (input.parse::()?.span(), WhereKind::Block) + } else if lookahead.peek(keyword::NodeBlock) { + ( + input.parse::()?.span(), + WhereKind::NodeBlock, + ) + } else if lookahead.peek(keyword::UncheckedExtrinsic) { + ( + input.parse::()?.span(), + WhereKind::UncheckedExtrinsic, + ) + } else { + return Err(lookahead.error()); + }; + + Ok(Self { + kind_span, + kind, + value: { + let _: Token![=] = input.parse()?; + input.parse()? + }, + }) + } } /// The declaration of a pallet. #[derive(Debug, Clone)] pub struct PalletDeclaration { - /// Is this pallet fully expanded? - pub is_expanded: bool, - /// The name of the pallet, e.g.`System` in `System: frame_system`. 
- pub name: Ident, - /// Optional attributes tagged right above a pallet declaration. - pub attrs: Vec, - /// Optional fixed index, e.g. `MyPallet ... = 3,`. - pub index: Option, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: PalletPath, - /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. - pub instance: Option, - /// The declared pallet parts, - /// e.g. `Some([Pallet, Call])` for `System: system::{Pallet, Call}` - /// or `None` for `System: system`. - pub pallet_parts: Option>, - /// The specified parts, either use_parts or exclude_parts. - pub specified_parts: SpecifiedParts, + /// Is this pallet fully expanded? + pub is_expanded: bool, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Optional attributes tagged right above a pallet declaration. + pub attrs: Vec, + /// Optional fixed index, e.g. `MyPallet ... = 3,`. + pub index: Option, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: PalletPath, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, + /// The declared pallet parts, + /// e.g. `Some([Pallet, Call])` for `System: system::{Pallet, Call}` + /// or `None` for `System: system`. + pub pallet_parts: Option>, + /// The specified parts, either use_parts or exclude_parts. + pub specified_parts: SpecifiedParts, } /// The possible declaration of pallet parts to use. #[derive(Debug, Clone)] pub enum SpecifiedParts { - /// Use all the pallet parts except those specified. - Exclude(Vec), - /// Use only the specified pallet parts. - Use(Vec), - /// Use the all the pallet parts. - All, + /// Use all the pallet parts except those specified. + Exclude(Vec), + /// Use only the specified pallet parts. + Use(Vec), + /// Use the all the pallet parts. 
+ All, } impl Parse for PalletDeclaration { - fn parse(input: ParseStream) -> Result { - let attrs = input.call(Attribute::parse_outer)?; - - let name = input.parse()?; - let _: Token![:] = input.parse()?; - let path = input.parse()?; - - // Parse for instance. - let instance = if input.peek(Token![::]) && input.peek3(Token![<]) { - let _: Token![::] = input.parse()?; - let _: Token![<] = input.parse()?; - let res = Some(input.parse()?); - let _: Token![>] = input.parse()?; - res - } else if !(input.peek(Token![::]) && input.peek3(token::Brace)) && - !input.peek(keyword::expanded) && - !input.peek(keyword::exclude_parts) && - !input.peek(keyword::use_parts) && - !input.peek(Token![=]) && - !input.peek(Token![,]) && - !input.is_empty() - { - return Err(input.error( + fn parse(input: ParseStream) -> Result { + let attrs = input.call(Attribute::parse_outer)?; + + let name = input.parse()?; + let _: Token![:] = input.parse()?; + let path = input.parse()?; + + // Parse for instance. + let instance = if input.peek(Token![::]) && input.peek3(Token![<]) { + let _: Token![::] = input.parse()?; + let _: Token![<] = input.parse()?; + let res = Some(input.parse()?); + let _: Token![>] = input.parse()?; + res + } else if !(input.peek(Token![::]) && input.peek3(token::Brace)) + && !input.peek(keyword::expanded) + && !input.peek(keyword::exclude_parts) + && !input.peek(keyword::use_parts) + && !input.peek(Token![=]) + && !input.peek(Token![,]) + && !input.is_empty() + { + return Err(input.error( "Unexpected tokens, expected one of `::$ident` `::{`, `exclude_parts`, `use_parts`, `=`, `,`", )); - } else { - None - }; - - // Check if the pallet is fully expanded. - let (is_expanded, extra_parts) = if input.peek(keyword::expanded) { - let _: keyword::expanded = input.parse()?; - let _: Token![::] = input.parse()?; - (true, parse_pallet_parts(input)?) 
- } else { - (false, vec![]) - }; - - // Parse for explicit parts - let pallet_parts = if input.peek(Token![::]) && input.peek3(token::Brace) { - let _: Token![::] = input.parse()?; - let mut parts = parse_pallet_parts(input)?; - parts.extend(extra_parts.into_iter()); - Some(parts) - } else if !input.peek(keyword::exclude_parts) && - !input.peek(keyword::use_parts) && - !input.peek(Token![=]) && - !input.peek(Token![,]) && - !input.is_empty() - { - return Err(input.error( - "Unexpected tokens, expected one of `::{`, `exclude_parts`, `use_parts`, `=`, `,`", - )) - } else { - is_expanded.then_some(extra_parts) - }; - - // Parse for specified parts - let specified_parts = if input.peek(keyword::exclude_parts) { - let _: keyword::exclude_parts = input.parse()?; - SpecifiedParts::Exclude(parse_pallet_parts_no_generic(input)?) - } else if input.peek(keyword::use_parts) { - let _: keyword::use_parts = input.parse()?; - SpecifiedParts::Use(parse_pallet_parts_no_generic(input)?) - } else if !input.peek(Token![=]) && !input.peek(Token![,]) && !input.is_empty() { - return Err(input.error("Unexpected tokens, expected one of `exclude_parts`, `=`, `,`")) - } else { - SpecifiedParts::All - }; - - // Parse for pallet index - let index = if input.peek(Token![=]) { - input.parse::()?; - let index = input.parse::()?; - let index = index.base10_parse::()?; - Some(index) - } else if !input.peek(Token![,]) && !input.is_empty() { - return Err(input.error("Unexpected tokens, expected one of `=`, `,`")) - } else { - None - }; - - Ok(Self { is_expanded, attrs, name, path, instance, pallet_parts, specified_parts, index }) - } + } else { + None + }; + + // Check if the pallet is fully expanded. + let (is_expanded, extra_parts) = if input.peek(keyword::expanded) { + let _: keyword::expanded = input.parse()?; + let _: Token![::] = input.parse()?; + (true, parse_pallet_parts(input)?) 
+ } else { + (false, vec![]) + }; + + // Parse for explicit parts + let pallet_parts = if input.peek(Token![::]) && input.peek3(token::Brace) { + let _: Token![::] = input.parse()?; + let mut parts = parse_pallet_parts(input)?; + parts.extend(extra_parts.into_iter()); + Some(parts) + } else if !input.peek(keyword::exclude_parts) + && !input.peek(keyword::use_parts) + && !input.peek(Token![=]) + && !input.peek(Token![,]) + && !input.is_empty() + { + return Err(input.error( + "Unexpected tokens, expected one of `::{`, `exclude_parts`, `use_parts`, `=`, `,`", + )); + } else { + is_expanded.then_some(extra_parts) + }; + + // Parse for specified parts + let specified_parts = if input.peek(keyword::exclude_parts) { + let _: keyword::exclude_parts = input.parse()?; + SpecifiedParts::Exclude(parse_pallet_parts_no_generic(input)?) + } else if input.peek(keyword::use_parts) { + let _: keyword::use_parts = input.parse()?; + SpecifiedParts::Use(parse_pallet_parts_no_generic(input)?) + } else if !input.peek(Token![=]) && !input.peek(Token![,]) && !input.is_empty() { + return Err(input.error("Unexpected tokens, expected one of `exclude_parts`, `=`, `,`")); + } else { + SpecifiedParts::All + }; + + // Parse for pallet index + let index = if input.peek(Token![=]) { + input.parse::()?; + let index = input.parse::()?; + let index = index.base10_parse::()?; + Some(index) + } else if !input.peek(Token![,]) && !input.is_empty() { + return Err(input.error("Unexpected tokens, expected one of `=`, `,`")); + } else { + None + }; + + Ok(Self { + is_expanded, + attrs, + name, + path, + instance, + pallet_parts, + specified_parts, + index, + }) + } } /// A struct representing a path to a pallet. 
`PalletPath` is almost identical to the standard @@ -325,303 +357,312 @@ impl Parse for PalletDeclaration { /// - Path segments can only consist of identifiers separated by colons #[derive(Debug, Clone)] pub struct PalletPath { - pub inner: Path, + pub inner: Path, } impl PalletPath { - pub fn module_name(&self) -> String { - self.inner.segments.iter().fold(String::new(), |mut acc, segment| { - if !acc.is_empty() { - acc.push_str("::"); - } - acc.push_str(&segment.ident.to_string()); - acc - }) - } + pub fn module_name(&self) -> String { + self.inner + .segments + .iter() + .fold(String::new(), |mut acc, segment| { + if !acc.is_empty() { + acc.push_str("::"); + } + acc.push_str(&segment.ident.to_string()); + acc + }) + } } impl Parse for PalletPath { - fn parse(input: ParseStream) -> Result { - let mut res = - PalletPath { inner: Path { leading_colon: None, segments: Punctuated::new() } }; - - let lookahead = input.lookahead1(); - if lookahead.peek(Token![crate]) || - lookahead.peek(Token![self]) || - lookahead.peek(Token![super]) || - lookahead.peek(Ident) - { - let ident = input.call(Ident::parse_any)?; - res.inner.segments.push(ident.into()); - } else { - return Err(lookahead.error()) - } - - while input.peek(Token![::]) && input.peek3(Ident) { - input.parse::()?; - let ident = input.parse::()?; - res.inner.segments.push(ident.into()); - } - Ok(res) - } + fn parse(input: ParseStream) -> Result { + let mut res = PalletPath { + inner: Path { + leading_colon: None, + segments: Punctuated::new(), + }, + }; + + let lookahead = input.lookahead1(); + if lookahead.peek(Token![crate]) + || lookahead.peek(Token![self]) + || lookahead.peek(Token![super]) + || lookahead.peek(Ident) + { + let ident = input.call(Ident::parse_any)?; + res.inner.segments.push(ident.into()); + } else { + return Err(lookahead.error()); + } + + while input.peek(Token![::]) && input.peek3(Ident) { + input.parse::()?; + let ident = input.parse::()?; + res.inner.segments.push(ident.into()); + } + 
Ok(res) + } } impl quote::ToTokens for PalletPath { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.inner.to_tokens(tokens); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + self.inner.to_tokens(tokens); + } } /// Parse [`PalletPart`]'s from a braces enclosed list that is split by commas, e.g. /// /// `{ Call, Event }` fn parse_pallet_parts(input: ParseStream) -> Result> { - let pallet_parts: ext::Braces> = input.parse()?; + let pallet_parts: ext::Braces> = input.parse()?; - let mut resolved = HashSet::new(); - for part in pallet_parts.content.inner.iter() { - if !resolved.insert(part.name()) { - let msg = format!( - "`{}` was already declared before. Please remove the duplicate declaration", - part.name(), - ); - return Err(Error::new(part.keyword.span(), msg)) - } - } + let mut resolved = HashSet::new(); + for part in pallet_parts.content.inner.iter() { + if !resolved.insert(part.name()) { + let msg = format!( + "`{}` was already declared before. Please remove the duplicate declaration", + part.name(), + ); + return Err(Error::new(part.keyword.span(), msg)); + } + } - Ok(pallet_parts.content.inner.into_iter().collect()) + Ok(pallet_parts.content.inner.into_iter().collect()) } #[derive(Debug, Clone)] pub enum PalletPartKeyword { - Pallet(keyword::Pallet), - Call(keyword::Call), - Storage(keyword::Storage), - Event(keyword::Event), - Error(keyword::Error), - Config(keyword::Config), - Origin(keyword::Origin), - Inherent(keyword::Inherent), - ValidateUnsigned(keyword::ValidateUnsigned), - FreezeReason(keyword::FreezeReason), - HoldReason(keyword::HoldReason), - Task(keyword::Task), - LockId(keyword::LockId), - SlashReason(keyword::SlashReason), + Pallet(keyword::Pallet), + Call(keyword::Call), + Storage(keyword::Storage), + Event(keyword::Event), + Error(keyword::Error), + Config(keyword::Config), + Origin(keyword::Origin), + Inherent(keyword::Inherent), + ValidateUnsigned(keyword::ValidateUnsigned), + FreezeReason(keyword::FreezeReason), + 
HoldReason(keyword::HoldReason), + Task(keyword::Task), + LockId(keyword::LockId), + SlashReason(keyword::SlashReason), } impl Parse for PalletPartKeyword { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - - if lookahead.peek(keyword::Pallet) { - Ok(Self::Pallet(input.parse()?)) - } else if lookahead.peek(keyword::Call) { - Ok(Self::Call(input.parse()?)) - } else if lookahead.peek(keyword::Storage) { - Ok(Self::Storage(input.parse()?)) - } else if lookahead.peek(keyword::Event) { - Ok(Self::Event(input.parse()?)) - } else if lookahead.peek(keyword::Error) { - Ok(Self::Error(input.parse()?)) - } else if lookahead.peek(keyword::Config) { - Ok(Self::Config(input.parse()?)) - } else if lookahead.peek(keyword::Origin) { - Ok(Self::Origin(input.parse()?)) - } else if lookahead.peek(keyword::Inherent) { - Ok(Self::Inherent(input.parse()?)) - } else if lookahead.peek(keyword::ValidateUnsigned) { - Ok(Self::ValidateUnsigned(input.parse()?)) - } else if lookahead.peek(keyword::FreezeReason) { - Ok(Self::FreezeReason(input.parse()?)) - } else if lookahead.peek(keyword::HoldReason) { - Ok(Self::HoldReason(input.parse()?)) - } else if lookahead.peek(keyword::Task) { - Ok(Self::Task(input.parse()?)) - } else if lookahead.peek(keyword::LockId) { - Ok(Self::LockId(input.parse()?)) - } else if lookahead.peek(keyword::SlashReason) { - Ok(Self::SlashReason(input.parse()?)) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + + if lookahead.peek(keyword::Pallet) { + Ok(Self::Pallet(input.parse()?)) + } else if lookahead.peek(keyword::Call) { + Ok(Self::Call(input.parse()?)) + } else if lookahead.peek(keyword::Storage) { + Ok(Self::Storage(input.parse()?)) + } else if lookahead.peek(keyword::Event) { + Ok(Self::Event(input.parse()?)) + } else if lookahead.peek(keyword::Error) { + Ok(Self::Error(input.parse()?)) + } else if lookahead.peek(keyword::Config) { + 
Ok(Self::Config(input.parse()?)) + } else if lookahead.peek(keyword::Origin) { + Ok(Self::Origin(input.parse()?)) + } else if lookahead.peek(keyword::Inherent) { + Ok(Self::Inherent(input.parse()?)) + } else if lookahead.peek(keyword::ValidateUnsigned) { + Ok(Self::ValidateUnsigned(input.parse()?)) + } else if lookahead.peek(keyword::FreezeReason) { + Ok(Self::FreezeReason(input.parse()?)) + } else if lookahead.peek(keyword::HoldReason) { + Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(keyword::Task) { + Ok(Self::Task(input.parse()?)) + } else if lookahead.peek(keyword::LockId) { + Ok(Self::LockId(input.parse()?)) + } else if lookahead.peek(keyword::SlashReason) { + Ok(Self::SlashReason(input.parse()?)) + } else { + Err(lookahead.error()) + } + } } impl PalletPartKeyword { - /// Returns the name of `Self`. - fn name(&self) -> &'static str { - match self { - Self::Pallet(_) => "Pallet", - Self::Call(_) => "Call", - Self::Storage(_) => "Storage", - Self::Event(_) => "Event", - Self::Error(_) => "Error", - Self::Config(_) => "Config", - Self::Origin(_) => "Origin", - Self::Inherent(_) => "Inherent", - Self::ValidateUnsigned(_) => "ValidateUnsigned", - Self::FreezeReason(_) => "FreezeReason", - Self::HoldReason(_) => "HoldReason", - Self::Task(_) => "Task", - Self::LockId(_) => "LockId", - Self::SlashReason(_) => "SlashReason", - } - } - - /// Returns `true` if this pallet part is allowed to have generic arguments. - fn allows_generic(&self) -> bool { - Self::all_generic_arg().iter().any(|n| *n == self.name()) - } - - /// Returns the names of all pallet parts that allow to have a generic argument. - fn all_generic_arg() -> &'static [&'static str] { - &["Event", "Error", "Origin", "Config", "Task"] - } + /// Returns the name of `Self`. 
+ fn name(&self) -> &'static str { + match self { + Self::Pallet(_) => "Pallet", + Self::Call(_) => "Call", + Self::Storage(_) => "Storage", + Self::Event(_) => "Event", + Self::Error(_) => "Error", + Self::Config(_) => "Config", + Self::Origin(_) => "Origin", + Self::Inherent(_) => "Inherent", + Self::ValidateUnsigned(_) => "ValidateUnsigned", + Self::FreezeReason(_) => "FreezeReason", + Self::HoldReason(_) => "HoldReason", + Self::Task(_) => "Task", + Self::LockId(_) => "LockId", + Self::SlashReason(_) => "SlashReason", + } + } + + /// Returns `true` if this pallet part is allowed to have generic arguments. + fn allows_generic(&self) -> bool { + Self::all_generic_arg().iter().any(|n| *n == self.name()) + } + + /// Returns the names of all pallet parts that allow to have a generic argument. + fn all_generic_arg() -> &'static [&'static str] { + &["Event", "Error", "Origin", "Config", "Task"] + } } impl ToTokens for PalletPartKeyword { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - Self::Pallet(inner) => inner.to_tokens(tokens), - Self::Call(inner) => inner.to_tokens(tokens), - Self::Storage(inner) => inner.to_tokens(tokens), - Self::Event(inner) => inner.to_tokens(tokens), - Self::Error(inner) => inner.to_tokens(tokens), - Self::Config(inner) => inner.to_tokens(tokens), - Self::Origin(inner) => inner.to_tokens(tokens), - Self::Inherent(inner) => inner.to_tokens(tokens), - Self::ValidateUnsigned(inner) => inner.to_tokens(tokens), - Self::FreezeReason(inner) => inner.to_tokens(tokens), - Self::HoldReason(inner) => inner.to_tokens(tokens), - Self::Task(inner) => inner.to_tokens(tokens), - Self::LockId(inner) => inner.to_tokens(tokens), - Self::SlashReason(inner) => inner.to_tokens(tokens), - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Pallet(inner) => inner.to_tokens(tokens), + Self::Call(inner) => inner.to_tokens(tokens), + Self::Storage(inner) => inner.to_tokens(tokens), + Self::Event(inner) => 
inner.to_tokens(tokens), + Self::Error(inner) => inner.to_tokens(tokens), + Self::Config(inner) => inner.to_tokens(tokens), + Self::Origin(inner) => inner.to_tokens(tokens), + Self::Inherent(inner) => inner.to_tokens(tokens), + Self::ValidateUnsigned(inner) => inner.to_tokens(tokens), + Self::FreezeReason(inner) => inner.to_tokens(tokens), + Self::HoldReason(inner) => inner.to_tokens(tokens), + Self::Task(inner) => inner.to_tokens(tokens), + Self::LockId(inner) => inner.to_tokens(tokens), + Self::SlashReason(inner) => inner.to_tokens(tokens), + } + } } #[derive(Debug, Clone)] pub struct PalletPart { - pub keyword: PalletPartKeyword, - pub generics: syn::Generics, + pub keyword: PalletPartKeyword, + pub generics: syn::Generics, } impl Parse for PalletPart { - fn parse(input: ParseStream) -> Result { - let keyword: PalletPartKeyword = input.parse()?; - - let generics: syn::Generics = input.parse()?; - if !generics.params.is_empty() && !keyword.allows_generic() { - let valid_generics = PalletPart::format_names(PalletPartKeyword::all_generic_arg()); - let msg = format!( - "`{}` is not allowed to have generics. \ + fn parse(input: ParseStream) -> Result { + let keyword: PalletPartKeyword = input.parse()?; + + let generics: syn::Generics = input.parse()?; + if !generics.params.is_empty() && !keyword.allows_generic() { + let valid_generics = PalletPart::format_names(PalletPartKeyword::all_generic_arg()); + let msg = format!( + "`{}` is not allowed to have generics. 
\ Only the following pallets are allowed to have generics: {}.", - keyword.name(), - valid_generics, - ); - return Err(syn::Error::new(keyword.span(), msg)) - } + keyword.name(), + valid_generics, + ); + return Err(syn::Error::new(keyword.span(), msg)); + } - Ok(Self { keyword, generics }) - } + Ok(Self { keyword, generics }) + } } impl PalletPart { - pub fn format_names(names: &[&'static str]) -> String { - let res: Vec<_> = names.iter().map(|s| format!("`{}`", s)).collect(); - res.join(", ") - } + pub fn format_names(names: &[&'static str]) -> String { + let res: Vec<_> = names.iter().map(|s| format!("`{}`", s)).collect(); + res.join(", ") + } - /// The name of this pallet part. - pub fn name(&self) -> &'static str { - self.keyword.name() - } + /// The name of this pallet part. + pub fn name(&self) -> &'static str { + self.keyword.name() + } } fn remove_kind( - input: ParseStream, - kind: WhereKind, - definitions: &mut Vec, + input: ParseStream, + kind: WhereKind, + definitions: &mut Vec, ) -> Result { - if let Some(pos) = definitions.iter().position(|d| d.kind == kind) { - Ok(definitions.remove(pos)) - } else { - let msg = format!( - "Missing associated type for `{:?}`. Add `{:?}` = ... to where section.", - kind, kind - ); - Err(input.error(msg)) - } + if let Some(pos) = definitions.iter().position(|d| d.kind == kind) { + Ok(definitions.remove(pos)) + } else { + let msg = format!( + "Missing associated type for `{:?}`. Add `{:?}` = ... to where section.", + kind, kind + ); + Err(input.error(msg)) + } } /// The declaration of a part without its generics #[derive(Debug, Clone)] pub struct PalletPartNoGeneric { - keyword: PalletPartKeyword, + keyword: PalletPartKeyword, } impl Parse for PalletPartNoGeneric { - fn parse(input: ParseStream) -> Result { - Ok(Self { keyword: input.parse()? 
}) - } + fn parse(input: ParseStream) -> Result { + Ok(Self { + keyword: input.parse()?, + }) + } } /// Parse [`PalletPartNoGeneric`]'s from a braces enclosed list that is split by commas, e.g. /// /// `{ Call, Event }` fn parse_pallet_parts_no_generic(input: ParseStream) -> Result> { - let pallet_parts: ext::Braces> = - input.parse()?; + let pallet_parts: ext::Braces> = + input.parse()?; - let mut resolved = HashSet::new(); - for part in pallet_parts.content.inner.iter() { - if !resolved.insert(part.keyword.name()) { - let msg = format!( - "`{}` was already declared before. Please remove the duplicate declaration", - part.keyword.name(), - ); - return Err(Error::new(part.keyword.span(), msg)) - } - } + let mut resolved = HashSet::new(); + for part in pallet_parts.content.inner.iter() { + if !resolved.insert(part.keyword.name()) { + let msg = format!( + "`{}` was already declared before. Please remove the duplicate declaration", + part.keyword.name(), + ); + return Err(Error::new(part.keyword.span(), msg)); + } + } - Ok(pallet_parts.content.inner.into_iter().collect()) + Ok(pallet_parts.content.inner.into_iter().collect()) } /// The final definition of a pallet with the resulting fixed index and explicit parts. #[derive(Debug, Clone)] pub struct Pallet { - /// Is this pallet fully expanded? - pub is_expanded: bool, - /// The name of the pallet, e.g.`System` in `System: frame_system`. - pub name: Ident, - /// Either automatically inferred, or defined (e.g. `MyPallet ... = 3,`). - pub index: u8, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: PalletPath, - /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. - pub instance: Option, - /// The pallet parts to use for the pallet. - pub pallet_parts: Vec, - /// Expressions specified inside of a #[cfg] attribute. - pub cfg_pattern: Vec, + /// Is this pallet fully expanded? 
+ pub is_expanded: bool, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Either automatically inferred, or defined (e.g. `MyPallet ... = 3,`). + pub index: u8, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: PalletPath, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, + /// The pallet parts to use for the pallet. + pub pallet_parts: Vec, + /// Expressions specified inside of a #[cfg] attribute. + pub cfg_pattern: Vec, } impl Pallet { - /// Get resolved pallet parts - pub fn pallet_parts(&self) -> &[PalletPart] { - &self.pallet_parts - } + /// Get resolved pallet parts + pub fn pallet_parts(&self) -> &[PalletPart] { + &self.pallet_parts + } - /// Find matching parts - pub fn find_part(&self, name: &str) -> Option<&PalletPart> { - self.pallet_parts.iter().find(|part| part.name() == name) - } + /// Find matching parts + pub fn find_part(&self, name: &str) -> Option<&PalletPart> { + self.pallet_parts.iter().find(|part| part.name() == name) + } - /// Return whether pallet contains part - pub fn exists_part(&self, name: &str) -> bool { - self.find_part(name).is_some() - } + /// Return whether pallet contains part + pub fn exists_part(&self, name: &str) -> bool { + self.find_part(name).is_some() + } } /// Result of a conversion of a declaration of pallets. @@ -634,26 +675,26 @@ impl Pallet { /// +----------+ +----------+ +------------------+ /// ``` enum PalletsConversion { - /// Pallets implicitly declare parts. - /// - /// `System: frame_system`. - Implicit(Vec), - /// Pallets explicitly declare parts. - /// - /// `System: frame_system::{Pallet, Call}` - /// - /// However, for backwards compatibility with Polkadot/Kusama - /// we must propagate some other parts to the pallet by default. - Explicit(Vec), - /// Pallets explicitly declare parts that are fully expanded. 
- /// - /// This is the end state that contains extra parts included by - /// default by Substrate. - /// - /// `System: frame_system expanded::{Error} ::{Pallet, Call}` - /// - /// For this example, the `Pallet`, `Call` and `Error` parts are collected. - ExplicitExpanded(Vec), + /// Pallets implicitly declare parts. + /// + /// `System: frame_system`. + Implicit(Vec), + /// Pallets explicitly declare parts. + /// + /// `System: frame_system::{Pallet, Call}` + /// + /// However, for backwards compatibility with Polkadot/Kusama + /// we must propagate some other parts to the pallet by default. + Explicit(Vec), + /// Pallets explicitly declare parts that are fully expanded. + /// + /// This is the end state that contains extra parts included by + /// default by Substrate. + /// + /// `System: frame_system expanded::{Error} ::{Pallet, Call}` + /// + /// For this example, the `Pallet`, `Call` and `Error` parts are collected. + ExplicitExpanded(Vec), } /// Convert from the parsed pallet declaration to their final information. @@ -662,125 +703,137 @@ enum PalletsConversion { /// pallet using same rules as rust for fieldless enum. I.e. implicit are assigned number /// incrementally from last explicit or 0. 
fn convert_pallets(pallets: Vec) -> syn::Result { - if pallets.iter().any(|pallet| pallet.pallet_parts.is_none()) { - return Ok(PalletsConversion::Implicit(pallets)) - } - - let mut indices = HashMap::new(); - let mut last_index: Option = None; - let mut names = HashMap::new(); - let mut is_expanded = true; - - let pallets = pallets - .into_iter() - .map(|pallet| { - let final_index = match pallet.index { - Some(i) => i, - None => last_index.map_or(Some(0), |i| i.checked_add(1)).ok_or_else(|| { - let msg = "Pallet index doesn't fit into u8, index is 256"; - syn::Error::new(pallet.name.span(), msg) - })?, - }; - - last_index = Some(final_index); - - if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { - let msg = format!( - "Pallet indices are conflicting: Both pallets {} and {} are at index {}", - used_pallet, pallet.name, final_index, - ); - let mut err = syn::Error::new(used_pallet.span(), &msg); - err.combine(syn::Error::new(pallet.name.span(), msg)); - return Err(err) - } - - if let Some(used_pallet) = names.insert(pallet.name.clone(), pallet.name.span()) { - let msg = "Two pallets with the same name!"; - - let mut err = syn::Error::new(used_pallet, &msg); - err.combine(syn::Error::new(pallet.name.span(), &msg)); - return Err(err) - } - - let mut pallet_parts = pallet.pallet_parts.expect("Checked above"); - - let available_parts = - pallet_parts.iter().map(|part| part.keyword.name()).collect::>(); - - // Check parts are correctly specified - match &pallet.specified_parts { - SpecifiedParts::Exclude(parts) | SpecifiedParts::Use(parts) => - for part in parts { - if !available_parts.contains(part.keyword.name()) { - let msg = format!( - "Invalid pallet part specified, the pallet `{}` doesn't have the \ + if pallets.iter().any(|pallet| pallet.pallet_parts.is_none()) { + return Ok(PalletsConversion::Implicit(pallets)); + } + + let mut indices = HashMap::new(); + let mut last_index: Option = None; + let mut names = HashMap::new(); + let mut 
is_expanded = true; + + let pallets = pallets + .into_iter() + .map(|pallet| { + let final_index = match pallet.index { + Some(i) => i, + None => last_index + .map_or(Some(0), |i| i.checked_add(1)) + .ok_or_else(|| { + let msg = "Pallet index doesn't fit into u8, index is 256"; + syn::Error::new(pallet.name.span(), msg) + })?, + }; + + last_index = Some(final_index); + + if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { + let msg = format!( + "Pallet indices are conflicting: Both pallets {} and {} are at index {}", + used_pallet, pallet.name, final_index, + ); + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err); + } + + if let Some(used_pallet) = names.insert(pallet.name.clone(), pallet.name.span()) { + let msg = "Two pallets with the same name!"; + + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet.name.span(), &msg)); + return Err(err); + } + + let mut pallet_parts = pallet.pallet_parts.expect("Checked above"); + + let available_parts = pallet_parts + .iter() + .map(|part| part.keyword.name()) + .collect::>(); + + // Check parts are correctly specified + match &pallet.specified_parts { + SpecifiedParts::Exclude(parts) | SpecifiedParts::Use(parts) => { + for part in parts { + if !available_parts.contains(part.keyword.name()) { + let msg = format!( + "Invalid pallet part specified, the pallet `{}` doesn't have the \ `{}` part. Available parts are: {}.", - pallet.name, - part.keyword.name(), - pallet_parts.iter().fold(String::new(), |fold, part| { - if fold.is_empty() { - format!("`{}`", part.keyword.name()) - } else { - format!("{}, `{}`", fold, part.keyword.name()) - } - }) - ); - return Err(syn::Error::new(part.keyword.span(), msg)) - } - }, - SpecifiedParts::All => (), - } - - // Set only specified parts. 
- match pallet.specified_parts { - SpecifiedParts::Exclude(excluded_parts) => pallet_parts.retain(|part| { - !excluded_parts - .iter() - .any(|excluded_part| excluded_part.keyword.name() == part.keyword.name()) - }), - SpecifiedParts::Use(used_parts) => pallet_parts.retain(|part| { - used_parts.iter().any(|use_part| use_part.keyword.name() == part.keyword.name()) - }), - SpecifiedParts::All => (), - } - - let cfg_pattern = pallet - .attrs - .iter() - .map(|attr| { - if attr.path().segments.first().map_or(false, |s| s.ident != "cfg") { - let msg = "Unsupported attribute, only #[cfg] is supported on pallet \ + pallet.name, + part.keyword.name(), + pallet_parts.iter().fold(String::new(), |fold, part| { + if fold.is_empty() { + format!("`{}`", part.keyword.name()) + } else { + format!("{}, `{}`", fold, part.keyword.name()) + } + }) + ); + return Err(syn::Error::new(part.keyword.span(), msg)); + } + } + } + SpecifiedParts::All => (), + } + + // Set only specified parts. + match pallet.specified_parts { + SpecifiedParts::Exclude(excluded_parts) => pallet_parts.retain(|part| { + !excluded_parts + .iter() + .any(|excluded_part| excluded_part.keyword.name() == part.keyword.name()) + }), + SpecifiedParts::Use(used_parts) => pallet_parts.retain(|part| { + used_parts + .iter() + .any(|use_part| use_part.keyword.name() == part.keyword.name()) + }), + SpecifiedParts::All => (), + } + + let cfg_pattern = pallet + .attrs + .iter() + .map(|attr| { + if attr + .path() + .segments + .first() + .map_or(false, |s| s.ident != "cfg") + { + let msg = "Unsupported attribute, only #[cfg] is supported on pallet \ declarations in `construct_runtime`"; - return Err(syn::Error::new(attr.span(), msg)) - } - - attr.parse_args_with(|input: syn::parse::ParseStream| { - // Required, otherwise the parse stream doesn't advance and will result in - // an error. 
- let input = input.parse::()?; - cfg_expr::Expression::parse(&input.to_string()) - .map_err(|e| syn::Error::new(attr.span(), e.to_string())) - }) - }) - .collect::>>()?; - - is_expanded &= pallet.is_expanded; - - Ok(Pallet { - is_expanded: pallet.is_expanded, - name: pallet.name, - index: final_index, - path: pallet.path, - instance: pallet.instance, - cfg_pattern, - pallet_parts, - }) - }) - .collect::>>()?; - - if is_expanded { - Ok(PalletsConversion::ExplicitExpanded(pallets)) - } else { - Ok(PalletsConversion::Explicit(pallets)) - } + return Err(syn::Error::new(attr.span(), msg)); + } + + attr.parse_args_with(|input: syn::parse::ParseStream| { + // Required, otherwise the parse stream doesn't advance and will result in + // an error. + let input = input.parse::()?; + cfg_expr::Expression::parse(&input.to_string()) + .map_err(|e| syn::Error::new(attr.span(), e.to_string())) + }) + }) + .collect::>>()?; + + is_expanded &= pallet.is_expanded; + + Ok(Pallet { + is_expanded: pallet.is_expanded, + name: pallet.name, + index: final_index, + path: pallet.path, + instance: pallet.instance, + cfg_pattern, + pallet_parts, + }) + }) + .collect::>>()?; + + if is_expanded { + Ok(PalletsConversion::ExplicitExpanded(pallets)) + } else { + Ok(PalletsConversion::Explicit(pallets)) + } } diff --git a/support/procedural-fork/src/crate_version.rs b/support/procedural-fork/src/crate_version.rs index 8c8975a42..63e7c7279 100644 --- a/support/procedural-fork/src/crate_version.rs +++ b/support/procedural-fork/src/crate_version.rs @@ -24,31 +24,31 @@ use syn::{Error, Result}; /// Create an error that will be shown by rustc at the call site of the macro. fn create_error(message: &str) -> Error { - Error::new(Span::call_site(), message) + Error::new(Span::call_site(), message) } /// Implementation of the `crate_to_crate_version!` macro. 
pub fn crate_to_crate_version(input: proc_macro::TokenStream) -> Result { - if !input.is_empty() { - return Err(create_error("No arguments expected!")) - } + if !input.is_empty() { + return Err(create_error("No arguments expected!")); + } - let major_version = get_cargo_env_var::("CARGO_PKG_VERSION_MAJOR") - .map_err(|_| create_error("Major version needs to fit into `u16`"))?; + let major_version = get_cargo_env_var::("CARGO_PKG_VERSION_MAJOR") + .map_err(|_| create_error("Major version needs to fit into `u16`"))?; - let minor_version = get_cargo_env_var::("CARGO_PKG_VERSION_MINOR") - .map_err(|_| create_error("Minor version needs to fit into `u8`"))?; + let minor_version = get_cargo_env_var::("CARGO_PKG_VERSION_MINOR") + .map_err(|_| create_error("Minor version needs to fit into `u8`"))?; - let patch_version = get_cargo_env_var::("CARGO_PKG_VERSION_PATCH") - .map_err(|_| create_error("Patch version needs to fit into `u8`"))?; + let patch_version = get_cargo_env_var::("CARGO_PKG_VERSION_PATCH") + .map_err(|_| create_error("Patch version needs to fit into `u8`"))?; - let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let crate_ = generate_access_from_frame_or_crate("frame-support")?; - Ok(quote::quote! { - #crate_::traits::CrateVersion { - major: #major_version, - minor: #minor_version, - patch: #patch_version, - } - }) + Ok(quote::quote! 
{ + #crate_::traits::CrateVersion { + major: #major_version, + minor: #minor_version, + patch: #patch_version, + } + }) } diff --git a/support/procedural-fork/src/derive_impl.rs b/support/procedural-fork/src/derive_impl.rs index 54755f116..e91f9c534 100644 --- a/support/procedural-fork/src/derive_impl.rs +++ b/support/procedural-fork/src/derive_impl.rs @@ -23,65 +23,67 @@ use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens}; use std::collections::HashSet; use syn::{ - parse2, parse_quote, spanned::Spanned, token, Ident, ImplItem, ItemImpl, Path, Result, Token, + parse2, parse_quote, spanned::Spanned, token, Ident, ImplItem, ItemImpl, Path, Result, Token, }; mod keyword { - syn::custom_keyword!(inject_runtime_type); - syn::custom_keyword!(no_aggregated_types); + syn::custom_keyword!(inject_runtime_type); + syn::custom_keyword!(no_aggregated_types); } #[derive(derive_syn_parse::Parse, PartialEq, Eq)] pub enum PalletAttrType { - #[peek(keyword::inject_runtime_type, name = "inject_runtime_type")] - RuntimeType(keyword::inject_runtime_type), + #[peek(keyword::inject_runtime_type, name = "inject_runtime_type")] + RuntimeType(keyword::inject_runtime_type), } #[derive(derive_syn_parse::Parse)] pub struct PalletAttr { - _pound: Token![#], - #[bracket] - _bracket: token::Bracket, - #[inside(_bracket)] - typ: PalletAttrType, + _pound: Token![#], + #[bracket] + _bracket: token::Bracket, + #[inside(_bracket)] + typ: PalletAttrType, } fn is_runtime_type(item: &syn::ImplItemType) -> bool { - item.attrs.iter().any(|attr| { - if let Ok(PalletAttr { typ: PalletAttrType::RuntimeType(_), .. }) = - parse2::(attr.into_token_stream()) - { - return true - } - false - }) + item.attrs.iter().any(|attr| { + if let Ok(PalletAttr { + typ: PalletAttrType::RuntimeType(_), + .. 
+ }) = parse2::(attr.into_token_stream()) + { + return true; + } + false + }) } #[derive(Parse, Debug)] pub struct DeriveImplAttrArgs { - pub default_impl_path: Path, - _as: Option, - #[parse_if(_as.is_some())] - pub disambiguation_path: Option, - _comma: Option, - #[parse_if(_comma.is_some())] - pub no_aggregated_types: Option, + pub default_impl_path: Path, + _as: Option, + #[parse_if(_as.is_some())] + pub disambiguation_path: Option, + _comma: Option, + #[parse_if(_comma.is_some())] + pub no_aggregated_types: Option, } impl ForeignPath for DeriveImplAttrArgs { - fn foreign_path(&self) -> &Path { - &self.default_impl_path - } + fn foreign_path(&self) -> &Path { + &self.default_impl_path + } } impl ToTokens for DeriveImplAttrArgs { - fn to_tokens(&self, tokens: &mut TokenStream2) { - tokens.extend(self.default_impl_path.to_token_stream()); - tokens.extend(self._as.to_token_stream()); - tokens.extend(self.disambiguation_path.to_token_stream()); - tokens.extend(self._comma.to_token_stream()); - tokens.extend(self.no_aggregated_types.to_token_stream()); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + tokens.extend(self.default_impl_path.to_token_stream()); + tokens.extend(self._as.to_token_stream()); + tokens.extend(self.disambiguation_path.to_token_stream()); + tokens.extend(self._comma.to_token_stream()); + tokens.extend(self.no_aggregated_types.to_token_stream()); + } } /// Gets the [`Ident`] representation of the given [`ImplItem`], if one exists. Otherwise @@ -90,13 +92,13 @@ impl ToTokens for DeriveImplAttrArgs { /// Used by [`combine_impls`] to determine whether we can compare [`ImplItem`]s by [`Ident`] /// or not. 
fn impl_item_ident(impl_item: &ImplItem) -> Option<&Ident> { - match impl_item { - ImplItem::Const(item) => Some(&item.ident), - ImplItem::Fn(item) => Some(&item.sig.ident), - ImplItem::Type(item) => Some(&item.ident), - ImplItem::Macro(item) => item.mac.path.get_ident(), - _ => None, - } + match impl_item { + ImplItem::Const(item) => Some(&item.ident), + ImplItem::Fn(item) => Some(&item.sig.ident), + ImplItem::Type(item) => Some(&item.ident), + ImplItem::Macro(item) => item.mac.path.get_ident(), + _ => None, + } } /// The real meat behind `derive_impl`. Takes in a `local_impl`, which is the impl for which we @@ -112,64 +114,68 @@ fn impl_item_ident(impl_item: &ImplItem) -> Option<&Ident> { /// into `local_impl`. Items that lack an ident and also exist verbatim in `local_impl` are not /// copied over. fn combine_impls( - local_impl: ItemImpl, - foreign_impl: ItemImpl, - default_impl_path: Path, - disambiguation_path: Path, - inject_runtime_types: bool, + local_impl: ItemImpl, + foreign_impl: ItemImpl, + default_impl_path: Path, + disambiguation_path: Path, + inject_runtime_types: bool, ) -> ItemImpl { - let (existing_local_keys, existing_unsupported_items): (HashSet, HashSet) = - local_impl - .items - .iter() - .cloned() - .partition(|impl_item| impl_item_ident(impl_item).is_some()); - let existing_local_keys: HashSet = existing_local_keys - .into_iter() - .filter_map(|item| impl_item_ident(&item).cloned()) - .collect(); - let mut final_impl = local_impl; - let extended_items = foreign_impl.items.into_iter().filter_map(|item| { - if let Some(ident) = impl_item_ident(&item) { - if existing_local_keys.contains(&ident) { - // do not copy colliding items that have an ident - return None - } - if let ImplItem::Type(typ) = item.clone() { - let cfg_attrs = typ - .attrs - .iter() - .filter(|attr| attr.path().get_ident().map_or(false, |ident| ident == "cfg")) - .map(|attr| attr.to_token_stream()); - if is_runtime_type(&typ) { - let item: ImplItem = if inject_runtime_types { 
- parse_quote! { - #( #cfg_attrs )* - type #ident = #ident; - } - } else { - item - }; - return Some(item) - } - // modify and insert uncolliding type items - let modified_item: ImplItem = parse_quote! { - #( #cfg_attrs )* - type #ident = <#default_impl_path as #disambiguation_path>::#ident; - }; - return Some(modified_item) - } - // copy uncolliding non-type items that have an ident - Some(item) - } else { - // do not copy colliding items that lack an ident - (!existing_unsupported_items.contains(&item)) - // copy uncolliding items without an ident verbatim - .then_some(item) - } - }); - final_impl.items.extend(extended_items); - final_impl + let (existing_local_keys, existing_unsupported_items): (HashSet, HashSet) = + local_impl + .items + .iter() + .cloned() + .partition(|impl_item| impl_item_ident(impl_item).is_some()); + let existing_local_keys: HashSet = existing_local_keys + .into_iter() + .filter_map(|item| impl_item_ident(&item).cloned()) + .collect(); + let mut final_impl = local_impl; + let extended_items = foreign_impl.items.into_iter().filter_map(|item| { + if let Some(ident) = impl_item_ident(&item) { + if existing_local_keys.contains(&ident) { + // do not copy colliding items that have an ident + return None; + } + if let ImplItem::Type(typ) = item.clone() { + let cfg_attrs = typ + .attrs + .iter() + .filter(|attr| { + attr.path() + .get_ident() + .map_or(false, |ident| ident == "cfg") + }) + .map(|attr| attr.to_token_stream()); + if is_runtime_type(&typ) { + let item: ImplItem = if inject_runtime_types { + parse_quote! { + #( #cfg_attrs )* + type #ident = #ident; + } + } else { + item + }; + return Some(item); + } + // modify and insert uncolliding type items + let modified_item: ImplItem = parse_quote! 
{ + #( #cfg_attrs )* + type #ident = <#default_impl_path as #disambiguation_path>::#ident; + }; + return Some(modified_item); + } + // copy uncolliding non-type items that have an ident + Some(item) + } else { + // do not copy colliding items that lack an ident + (!existing_unsupported_items.contains(&item)) + // copy uncolliding items without an ident verbatim + .then_some(item) + } + }); + final_impl.items.extend(extended_items); + final_impl } /// Computes the disambiguation path for the `derive_impl` attribute macro. @@ -178,25 +184,26 @@ fn combine_impls( /// disambiguation is used as is. If not, we infer the disambiguation path from the /// `foreign_impl_path` and the computed scope. fn compute_disambiguation_path( - disambiguation_path: Option, - foreign_impl: ItemImpl, - default_impl_path: Path, + disambiguation_path: Option, + foreign_impl: ItemImpl, + default_impl_path: Path, ) -> Result { - match (disambiguation_path, foreign_impl.clone().trait_) { - (Some(disambiguation_path), _) => Ok(disambiguation_path), - (None, Some((_, foreign_impl_path, _))) => - if default_impl_path.segments.len() > 1 { - let scope = default_impl_path.segments.first(); - Ok(parse_quote!(#scope :: #foreign_impl_path)) - } else { - Ok(foreign_impl_path) - }, - _ => Err(syn::Error::new( - default_impl_path.span(), - "Impl statement must have a defined type being implemented \ + match (disambiguation_path, foreign_impl.clone().trait_) { + (Some(disambiguation_path), _) => Ok(disambiguation_path), + (None, Some((_, foreign_impl_path, _))) => { + if default_impl_path.segments.len() > 1 { + let scope = default_impl_path.segments.first(); + Ok(parse_quote!(#scope :: #foreign_impl_path)) + } else { + Ok(foreign_impl_path) + } + } + _ => Err(syn::Error::new( + default_impl_path.span(), + "Impl statement must have a defined type being implemented \ for a defined type such as `impl A for B`", - )), - } + )), + } } /// Internal implementation behind 
[`#[derive_impl(..)]`](`macro@crate::derive_impl`). @@ -211,93 +218,99 @@ fn compute_disambiguation_path( /// `disambiguation_path`: the module path of the external trait we will use to qualify /// defaults imported from the external `impl` statement pub fn derive_impl( - default_impl_path: TokenStream2, - foreign_tokens: TokenStream2, - local_tokens: TokenStream2, - disambiguation_path: Option, - no_aggregated_types: Option, + default_impl_path: TokenStream2, + foreign_tokens: TokenStream2, + local_tokens: TokenStream2, + disambiguation_path: Option, + no_aggregated_types: Option, ) -> Result { - let local_impl = parse2::(local_tokens)?; - let foreign_impl = parse2::(foreign_tokens)?; - let default_impl_path = parse2::(default_impl_path)?; + let local_impl = parse2::(local_tokens)?; + let foreign_impl = parse2::(foreign_tokens)?; + let default_impl_path = parse2::(default_impl_path)?; - let disambiguation_path = compute_disambiguation_path( - disambiguation_path, - foreign_impl.clone(), - default_impl_path.clone(), - )?; + let disambiguation_path = compute_disambiguation_path( + disambiguation_path, + foreign_impl.clone(), + default_impl_path.clone(), + )?; - // generate the combined impl - let combined_impl = combine_impls( - local_impl, - foreign_impl, - default_impl_path, - disambiguation_path, - no_aggregated_types.is_none(), - ); + // generate the combined impl + let combined_impl = combine_impls( + local_impl, + foreign_impl, + default_impl_path, + disambiguation_path, + no_aggregated_types.is_none(), + ); - Ok(quote!(#combined_impl)) + Ok(quote!(#combined_impl)) } #[test] fn test_derive_impl_attr_args_parsing() { - parse2::(quote!( - some::path::TestDefaultConfig as some::path::DefaultConfig - )) - .unwrap(); - parse2::(quote!( - frame_system::prelude::testing::TestDefaultConfig as DefaultConfig - )) - .unwrap(); - parse2::(quote!(Something as some::path::DefaultConfig)).unwrap(); - parse2::(quote!(Something as DefaultConfig)).unwrap(); - 
parse2::(quote!(DefaultConfig)).unwrap(); - assert!(parse2::(quote!()).is_err()); - assert!(parse2::(quote!(Config Config)).is_err()); + parse2::(quote!( + some::path::TestDefaultConfig as some::path::DefaultConfig + )) + .unwrap(); + parse2::(quote!( + frame_system::prelude::testing::TestDefaultConfig as DefaultConfig + )) + .unwrap(); + parse2::(quote!(Something as some::path::DefaultConfig)).unwrap(); + parse2::(quote!(Something as DefaultConfig)).unwrap(); + parse2::(quote!(DefaultConfig)).unwrap(); + assert!(parse2::(quote!()).is_err()); + assert!(parse2::(quote!(Config Config)).is_err()); } #[test] fn test_runtime_type_with_doc() { - trait TestTrait { - type Test; - } - #[allow(unused)] - struct TestStruct; - let p = parse2::(quote!( - impl TestTrait for TestStruct { - /// Some doc - #[inject_runtime_type] - type Test = u32; - } - )) - .unwrap(); - for item in p.items { - if let ImplItem::Type(typ) = item { - assert_eq!(is_runtime_type(&typ), true); - } - } + trait TestTrait { + type Test; + } + #[allow(unused)] + struct TestStruct; + let p = parse2::(quote!( + impl TestTrait for TestStruct { + /// Some doc + #[inject_runtime_type] + type Test = u32; + } + )) + .unwrap(); + for item in p.items { + if let ImplItem::Type(typ) = item { + assert_eq!(is_runtime_type(&typ), true); + } + } } #[test] fn test_disambiguation_path() { - let foreign_impl: ItemImpl = parse_quote!(impl SomeTrait for SomeType {}); - let default_impl_path: Path = parse_quote!(SomeScope::SomeType); + let foreign_impl: ItemImpl = parse_quote!(impl SomeTrait for SomeType {}); + let default_impl_path: Path = parse_quote!(SomeScope::SomeType); - // disambiguation path is specified - let disambiguation_path = compute_disambiguation_path( - Some(parse_quote!(SomeScope::SomePath)), - foreign_impl.clone(), - default_impl_path.clone(), - ); - assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeScope::SomePath)); + // disambiguation path is specified + let disambiguation_path = 
compute_disambiguation_path( + Some(parse_quote!(SomeScope::SomePath)), + foreign_impl.clone(), + default_impl_path.clone(), + ); + assert_eq!( + disambiguation_path.unwrap(), + parse_quote!(SomeScope::SomePath) + ); - // disambiguation path is not specified and the default_impl_path has more than one segment - let disambiguation_path = - compute_disambiguation_path(None, foreign_impl.clone(), default_impl_path.clone()); - assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeScope::SomeTrait)); + // disambiguation path is not specified and the default_impl_path has more than one segment + let disambiguation_path = + compute_disambiguation_path(None, foreign_impl.clone(), default_impl_path.clone()); + assert_eq!( + disambiguation_path.unwrap(), + parse_quote!(SomeScope::SomeTrait) + ); - // disambiguation path is not specified and the default_impl_path has only one segment - let disambiguation_path = - compute_disambiguation_path(None, foreign_impl.clone(), parse_quote!(SomeType)); - assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeTrait)); + // disambiguation path is not specified and the default_impl_path has only one segment + let disambiguation_path = + compute_disambiguation_path(None, foreign_impl.clone(), parse_quote!(SomeType)); + assert_eq!(disambiguation_path.unwrap(), parse_quote!(SomeTrait)); } diff --git a/support/procedural-fork/src/dummy_part_checker.rs b/support/procedural-fork/src/dummy_part_checker.rs index 34d9a3e23..6bed541d1 100644 --- a/support/procedural-fork/src/dummy_part_checker.rs +++ b/support/procedural-fork/src/dummy_part_checker.rs @@ -19,61 +19,63 @@ use crate::COUNTER; use proc_macro::TokenStream; pub fn generate_dummy_part_checker(input: TokenStream) -> TokenStream { - if !input.is_empty() { - return syn::Error::new(proc_macro2::Span::call_site(), "No arguments expected") - .to_compile_error() - .into() - } + if !input.is_empty() { + return syn::Error::new(proc_macro2::Span::call_site(), "No arguments expected") + 
.to_compile_error() + .into(); + } - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let no_op_macro_ident = - syn::Ident::new(&format!("__dummy_part_checker_{}", count), proc_macro2::Span::call_site()); + let no_op_macro_ident = syn::Ident::new( + &format!("__dummy_part_checker_{}", count), + proc_macro2::Span::call_site(), + ); - quote::quote!( - #[macro_export] - #[doc(hidden)] - macro_rules! #no_op_macro_ident { - ( $( $tt:tt )* ) => {}; - } + quote::quote!( + #[macro_export] + #[doc(hidden)] + macro_rules! #no_op_macro_ident { + ( $( $tt:tt )* ) => {}; + } - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #no_op_macro_ident as is_std_enabled_for_genesis; - } + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #no_op_macro_ident as is_std_enabled_for_genesis; + } - #[doc(hidden)] - pub mod __substrate_event_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_event_part_defined; - } + #[doc(hidden)] + pub mod __substrate_event_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_event_part_defined; + } - #[doc(hidden)] - pub mod __substrate_inherent_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_inherent_part_defined; - } + #[doc(hidden)] + pub mod __substrate_inherent_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_inherent_part_defined; + } - #[doc(hidden)] - pub mod __substrate_validate_unsigned_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_validate_unsigned_part_defined; - } + #[doc(hidden)] + pub mod __substrate_validate_unsigned_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_validate_unsigned_part_defined; + } - #[doc(hidden)] - pub mod __substrate_call_check { - #[doc(hidden)] - pub 
use #no_op_macro_ident as is_call_part_defined; - } + #[doc(hidden)] + pub mod __substrate_call_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_call_part_defined; + } - #[doc(hidden)] - pub mod __substrate_origin_check { - #[doc(hidden)] - pub use #no_op_macro_ident as is_origin_part_defined; - } - ) - .into() + #[doc(hidden)] + pub mod __substrate_origin_check { + #[doc(hidden)] + pub use #no_op_macro_ident as is_origin_part_defined; + } + ) + .into() } diff --git a/support/procedural-fork/src/dynamic_params.rs b/support/procedural-fork/src/dynamic_params.rs index 29399a885..70a18bf34 100644 --- a/support/procedural-fork/src/dynamic_params.rs +++ b/support/procedural-fork/src/dynamic_params.rs @@ -26,123 +26,132 @@ use syn::{parse2, spanned::Spanned, visit_mut, visit_mut::VisitMut, Result, Toke /// Parse and expand a `#[dynamic_params(..)]` module. pub fn dynamic_params(attr: TokenStream, item: TokenStream) -> Result { - DynamicParamModAttr::parse(attr, item).map(ToTokens::into_token_stream) + DynamicParamModAttr::parse(attr, item).map(ToTokens::into_token_stream) } /// Parse and expand `#[dynamic_pallet_params(..)]` attribute. pub fn dynamic_pallet_params(attr: TokenStream, item: TokenStream) -> Result { - DynamicPalletParamAttr::parse(attr, item).map(ToTokens::into_token_stream) + DynamicPalletParamAttr::parse(attr, item).map(ToTokens::into_token_stream) } /// Parse and expand `#[dynamic_aggregated_params_internal]` attribute. pub fn dynamic_aggregated_params_internal( - _attr: TokenStream, - item: TokenStream, + _attr: TokenStream, + item: TokenStream, ) -> Result { - parse2::(item).map(ToTokens::into_token_stream) + parse2::(item).map(ToTokens::into_token_stream) } /// A top `#[dynamic_params(..)]` attribute together with a mod. 
#[derive(derive_syn_parse::Parse)] pub struct DynamicParamModAttr { - params_mod: syn::ItemMod, - meta: DynamicParamModAttrMeta, + params_mod: syn::ItemMod, + meta: DynamicParamModAttrMeta, } /// The inner meta of a `#[dynamic_params(..)]` attribute. #[derive(derive_syn_parse::Parse)] pub struct DynamicParamModAttrMeta { - name: syn::Ident, - _comma: Option, - #[parse_if(_comma.is_some())] - params_pallet: Option, + name: syn::Ident, + _comma: Option, + #[parse_if(_comma.is_some())] + params_pallet: Option, } impl DynamicParamModAttr { - pub fn parse(attr: TokenStream, item: TokenStream) -> Result { - let params_mod = parse2(item)?; - let meta = parse2(attr)?; - Ok(Self { params_mod, meta }) - } - - pub fn inner_mods(&self) -> Vec { - self.params_mod.content.as_ref().map_or(Vec::new(), |(_, items)| { - items - .iter() - .filter_map(|i| match i { - syn::Item::Mod(m) => Some(m), - _ => None, - }) - .cloned() - .collect() - }) - } + pub fn parse(attr: TokenStream, item: TokenStream) -> Result { + let params_mod = parse2(item)?; + let meta = parse2(attr)?; + Ok(Self { params_mod, meta }) + } + + pub fn inner_mods(&self) -> Vec { + self.params_mod + .content + .as_ref() + .map_or(Vec::new(), |(_, items)| { + items + .iter() + .filter_map(|i| match i { + syn::Item::Mod(m) => Some(m), + _ => None, + }) + .cloned() + .collect() + }) + } } impl ToTokens for DynamicParamModAttr { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let (mut params_mod, name) = (self.params_mod.clone(), &self.meta.name); - let dynam_params_ident = ¶ms_mod.ident; - - let mut quoted_enum = quote! 
{}; - for m in self.inner_mods() { - let aggregate_name = - syn::Ident::new(&m.ident.to_string().to_class_case(), m.ident.span()); - let mod_name = &m.ident; - - let mut attrs = m.attrs.clone(); - attrs.retain(|attr| !attr.path().is_ident("dynamic_pallet_params")); - if let Err(err) = ensure_codec_index(&attrs, m.span()) { - tokens.extend(err.into_compile_error()); - return - } - - quoted_enum.extend(quote! { - #(#attrs)* - #aggregate_name(#dynam_params_ident::#mod_name::Parameters), - }); - } - - // Inject the outer args into the inner `#[dynamic_pallet_params(..)]` attribute. - if let Some(params_pallet) = &self.meta.params_pallet { - MacroInjectArgs { runtime_params: name.clone(), params_pallet: params_pallet.clone() } - .visit_item_mod_mut(&mut params_mod); - } - - tokens.extend(quote! { - #params_mod - - #[#scrate::dynamic_params::dynamic_aggregated_params_internal] - pub enum #name { - #quoted_enum - } - }); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let (mut params_mod, name) = (self.params_mod.clone(), &self.meta.name); + let dynam_params_ident = ¶ms_mod.ident; + + let mut quoted_enum = quote! {}; + for m in self.inner_mods() { + let aggregate_name = + syn::Ident::new(&m.ident.to_string().to_class_case(), m.ident.span()); + let mod_name = &m.ident; + + let mut attrs = m.attrs.clone(); + attrs.retain(|attr| !attr.path().is_ident("dynamic_pallet_params")); + if let Err(err) = ensure_codec_index(&attrs, m.span()) { + tokens.extend(err.into_compile_error()); + return; + } + + quoted_enum.extend(quote! { + #(#attrs)* + #aggregate_name(#dynam_params_ident::#mod_name::Parameters), + }); + } + + // Inject the outer args into the inner `#[dynamic_pallet_params(..)]` attribute. 
+ if let Some(params_pallet) = &self.meta.params_pallet { + MacroInjectArgs { + runtime_params: name.clone(), + params_pallet: params_pallet.clone(), + } + .visit_item_mod_mut(&mut params_mod); + } + + tokens.extend(quote! { + #params_mod + + #[#scrate::dynamic_params::dynamic_aggregated_params_internal] + pub enum #name { + #quoted_enum + } + }); + } } /// Ensure there is a `#[codec(index = ..)]` attribute. fn ensure_codec_index(attrs: &Vec, span: Span) -> Result<()> { - let mut found = false; - - for attr in attrs.iter() { - if attr.path().is_ident("codec") { - let meta: syn::ExprAssign = attr.parse_args()?; - if meta.left.to_token_stream().to_string() == "index" { - found = true; - break - } - } - } - - if !found { - Err(syn::Error::new(span, "Missing explicit `#[codec(index = ..)]` attribute")) - } else { - Ok(()) - } + let mut found = false; + + for attr in attrs.iter() { + if attr.path().is_ident("codec") { + let meta: syn::ExprAssign = attr.parse_args()?; + if meta.left.to_token_stream().to_string() == "index" { + found = true; + break; + } + } + } + + if !found { + Err(syn::Error::new( + span, + "Missing explicit `#[codec(index = ..)]` attribute", + )) + } else { + Ok(()) + } } /// Used to inject arguments into the inner `#[dynamic_pallet_params(..)]` attribute. @@ -150,110 +159,125 @@ fn ensure_codec_index(attrs: &Vec, span: Span) -> Result<()> { /// This allows the outer `#[dynamic_params(..)]` attribute to specify some arguments that don't /// need to be repeated every time. struct MacroInjectArgs { - runtime_params: syn::Ident, - params_pallet: syn::Type, + runtime_params: syn::Ident, + params_pallet: syn::Type, } impl VisitMut for MacroInjectArgs { - fn visit_item_mod_mut(&mut self, item: &mut syn::ItemMod) { - // Check if the mod has a `#[dynamic_pallet_params(..)]` attribute. 
- let attr = item.attrs.iter_mut().find(|attr| attr.path().is_ident("dynamic_pallet_params")); - - if let Some(attr) = attr { - match &attr.meta { - syn::Meta::Path(path) => - assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params"), - _ => (), - } - - let runtime_params = &self.runtime_params; - let params_pallet = &self.params_pallet; - - attr.meta = syn::parse2::(quote! { - dynamic_pallet_params(#runtime_params, #params_pallet) - }) - .unwrap() - .into(); - } - - visit_mut::visit_item_mod_mut(self, item); - } + fn visit_item_mod_mut(&mut self, item: &mut syn::ItemMod) { + // Check if the mod has a `#[dynamic_pallet_params(..)]` attribute. + let attr = item + .attrs + .iter_mut() + .find(|attr| attr.path().is_ident("dynamic_pallet_params")); + + if let Some(attr) = attr { + match &attr.meta { + syn::Meta::Path(path) => { + assert_eq!(path.to_token_stream().to_string(), "dynamic_pallet_params") + } + _ => (), + } + + let runtime_params = &self.runtime_params; + let params_pallet = &self.params_pallet; + + attr.meta = syn::parse2::(quote! { + dynamic_pallet_params(#runtime_params, #params_pallet) + }) + .unwrap() + .into(); + } + + visit_mut::visit_item_mod_mut(self, item); + } } /// The helper attribute of a `#[dynamic_pallet_params(runtime_params, params_pallet)]` /// attribute. #[derive(derive_syn_parse::Parse)] pub struct DynamicPalletParamAttr { - inner_mod: syn::ItemMod, - meta: DynamicPalletParamAttrMeta, + inner_mod: syn::ItemMod, + meta: DynamicPalletParamAttrMeta, } /// The inner meta of a `#[dynamic_pallet_params(..)]` attribute. #[derive(derive_syn_parse::Parse)] pub struct DynamicPalletParamAttrMeta { - runtime_params: syn::Ident, - _comma: Token![,], - parameter_pallet: syn::Type, + runtime_params: syn::Ident, + _comma: Token![,], + parameter_pallet: syn::Type, } impl DynamicPalletParamAttr { - pub fn parse(attr: TokenStream, item: TokenStream) -> Result { - Ok(Self { inner_mod: parse2(item)?, meta: parse2(attr)? 
}) - } - - pub fn statics(&self) -> Vec { - self.inner_mod.content.as_ref().map_or(Vec::new(), |(_, items)| { - items - .iter() - .filter_map(|i| match i { - syn::Item::Static(s) => Some(s), - _ => None, - }) - .cloned() - .collect() - }) - } + pub fn parse(attr: TokenStream, item: TokenStream) -> Result { + Ok(Self { + inner_mod: parse2(item)?, + meta: parse2(attr)?, + }) + } + + pub fn statics(&self) -> Vec { + self.inner_mod + .content + .as_ref() + .map_or(Vec::new(), |(_, items)| { + items + .iter() + .filter_map(|i| match i { + syn::Item::Static(s) => Some(s), + _ => None, + }) + .cloned() + .collect() + }) + } } impl ToTokens for DynamicPalletParamAttr { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let (params_mod, parameter_pallet, runtime_params) = - (&self.inner_mod, &self.meta.parameter_pallet, &self.meta.runtime_params); - - let aggregate_name = - syn::Ident::new(¶ms_mod.ident.to_string().to_class_case(), params_mod.ident.span()); - let (mod_name, vis) = (¶ms_mod.ident, ¶ms_mod.vis); - let statics = self.statics(); - - let (mut key_names, mut key_values, mut defaults, mut attrs, mut value_types): ( - Vec<_>, - Vec<_>, - Vec<_>, - Vec<_>, - Vec<_>, - ) = Default::default(); - - for s in statics.iter() { - if let Err(err) = ensure_codec_index(&s.attrs, s.span()) { - tokens.extend(err.into_compile_error()); - return - } - - key_names.push(&s.ident); - key_values.push(format_ident!("{}Value", &s.ident)); - defaults.push(&s.expr); - attrs.push(&s.attrs); - value_types.push(&s.ty); - } - - let key_ident = syn::Ident::new("ParametersKey", params_mod.ident.span()); - let value_ident = syn::Ident::new("ParametersValue", params_mod.ident.span()); - let runtime_key_ident = format_ident!("{}Key", runtime_params); - let runtime_value_ident = format_ident!("{}Value", runtime_params); - - tokens.extend(quote! 
{ + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let (params_mod, parameter_pallet, runtime_params) = ( + &self.inner_mod, + &self.meta.parameter_pallet, + &self.meta.runtime_params, + ); + + let aggregate_name = syn::Ident::new( + ¶ms_mod.ident.to_string().to_class_case(), + params_mod.ident.span(), + ); + let (mod_name, vis) = (¶ms_mod.ident, ¶ms_mod.vis); + let statics = self.statics(); + + let (mut key_names, mut key_values, mut defaults, mut attrs, mut value_types): ( + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + ) = Default::default(); + + for s in statics.iter() { + if let Err(err) = ensure_codec_index(&s.attrs, s.span()) { + tokens.extend(err.into_compile_error()); + return; + } + + key_names.push(&s.ident); + key_values.push(format_ident!("{}Value", &s.ident)); + defaults.push(&s.expr); + attrs.push(&s.attrs); + value_types.push(&s.ty); + } + + let key_ident = syn::Ident::new("ParametersKey", params_mod.ident.span()); + let value_ident = syn::Ident::new("ParametersValue", params_mod.ident.span()); + let runtime_key_ident = format_ident!("{}Key", runtime_params); + let runtime_value_ident = format_ident!("{}Value", runtime_params); + + tokens.extend(quote! 
{ pub mod #mod_name { use super::*; @@ -426,44 +450,44 @@ impl ToTokens for DynamicPalletParamAttr { )* } }); - } + } } #[derive(derive_syn_parse::Parse)] pub struct DynamicParamAggregatedEnum { - aggregated_enum: syn::ItemEnum, + aggregated_enum: syn::ItemEnum, } impl ToTokens for DynamicParamAggregatedEnum { - fn to_tokens(&self, tokens: &mut TokenStream) { - let scrate = match crate_access() { - Ok(path) => path, - Err(err) => return tokens.extend(err), - }; - let params_enum = &self.aggregated_enum; - let (name, vis) = (¶ms_enum.ident, ¶ms_enum.vis); - - let (mut indices, mut param_names, mut param_types): (Vec<_>, Vec<_>, Vec<_>) = - Default::default(); - let mut attributes = Vec::new(); - for (i, variant) in params_enum.variants.iter().enumerate() { - indices.push(i); - param_names.push(&variant.ident); - attributes.push(&variant.attrs); - - param_types.push(match &variant.fields { + fn to_tokens(&self, tokens: &mut TokenStream) { + let scrate = match crate_access() { + Ok(path) => path, + Err(err) => return tokens.extend(err), + }; + let params_enum = &self.aggregated_enum; + let (name, vis) = (¶ms_enum.ident, ¶ms_enum.vis); + + let (mut indices, mut param_names, mut param_types): (Vec<_>, Vec<_>, Vec<_>) = + Default::default(); + let mut attributes = Vec::new(); + for (i, variant) in params_enum.variants.iter().enumerate() { + indices.push(i); + param_names.push(&variant.ident); + attributes.push(&variant.attrs); + + param_types.push(match &variant.fields { syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => &fields.unnamed[0].ty, _ => { *tokens = quote! 
{ compile_error!("Only unnamed enum variants with one inner item are supported") }; return }, }); - } + } - let params_key_ident = format_ident!("{}Key", params_enum.ident); - let params_value_ident = format_ident!("{}Value", params_enum.ident); + let params_key_ident = format_ident!("{}Key", params_enum.ident); + let params_value_ident = format_ident!("{}Value", params_enum.ident); - tokens.extend(quote! { + tokens.extend(quote! { #[doc(hidden)] #[derive( Clone, @@ -554,10 +578,10 @@ impl ToTokens for DynamicParamAggregatedEnum { } )* }); - } + } } /// Get access to the current crate and convert the error to a compile error. fn crate_access() -> core::result::Result { - generate_access_from_frame_or_crate("frame-support").map_err(|e| e.to_compile_error()) + generate_access_from_frame_or_crate("frame-support").map_err(|e| e.to_compile_error()) } diff --git a/support/procedural-fork/src/key_prefix.rs b/support/procedural-fork/src/key_prefix.rs index 7f1ab6866..aea60ce3b 100644 --- a/support/procedural-fork/src/key_prefix.rs +++ b/support/procedural-fork/src/key_prefix.rs @@ -23,82 +23,84 @@ use syn::{Ident, Result}; const MAX_IDENTS: usize = 18; pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result { - if !input.is_empty() { - return Err(syn::Error::new(Span::call_site(), "No arguments expected")) - } - - let mut all_trait_impls = TokenStream::new(); - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - - for i in 2..=MAX_IDENTS { - let current_tuple = (0..i) - .map(|n| Ident::new(&format!("Tuple{}", n), Span::call_site())) - .collect::>(); - - for prefix_count in 1..i { - let (prefixes, suffixes) = current_tuple.split_at(prefix_count); - - let hashers = current_tuple - .iter() - .map(|ident| format_ident!("Hasher{}", ident)) - .collect::>(); - let kargs = - prefixes.iter().map(|ident| format_ident!("KArg{}", ident)).collect::>(); - let partial_keygen = generate_keygen(prefixes); - let suffix_keygen = 
generate_keygen(suffixes); - let suffix_tuple = generate_tuple(suffixes); - - let trait_impls = quote! { - impl< - #(#current_tuple: FullCodec + StaticTypeInfo,)* - #(#hashers: StorageHasher,)* - #(#kargs: EncodeLike<#prefixes>),* - > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { - type Suffix = #suffix_tuple; - - fn partial_key(prefix: ( #( #kargs, )* )) -> Vec { - <#partial_keygen>::final_key(prefix) - } - } - - impl< - #(#current_tuple: FullCodec + StaticTypeInfo,)* - #(#hashers: ReversibleStorageHasher,)* - #(#kargs: EncodeLike<#prefixes>),* - > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { - fn decode_partial_key(key_material: &[u8]) -> Result< - Self::Suffix, - #frame_support::__private::codec::Error, - > { - <#suffix_keygen>::decode_final_key(key_material).map(|k| k.0) - } - } - }; - - all_trait_impls.extend(trait_impls); - } - } - - Ok(all_trait_impls) + if !input.is_empty() { + return Err(syn::Error::new(Span::call_site(), "No arguments expected")); + } + + let mut all_trait_impls = TokenStream::new(); + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + + for i in 2..=MAX_IDENTS { + let current_tuple = (0..i) + .map(|n| Ident::new(&format!("Tuple{}", n), Span::call_site())) + .collect::>(); + + for prefix_count in 1..i { + let (prefixes, suffixes) = current_tuple.split_at(prefix_count); + + let hashers = current_tuple + .iter() + .map(|ident| format_ident!("Hasher{}", ident)) + .collect::>(); + let kargs = prefixes + .iter() + .map(|ident| format_ident!("KArg{}", ident)) + .collect::>(); + let partial_keygen = generate_keygen(prefixes); + let suffix_keygen = generate_keygen(suffixes); + let suffix_tuple = generate_tuple(suffixes); + + let trait_impls = quote! 
{ + impl< + #(#current_tuple: FullCodec + StaticTypeInfo,)* + #(#hashers: StorageHasher,)* + #(#kargs: EncodeLike<#prefixes>),* + > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { + type Suffix = #suffix_tuple; + + fn partial_key(prefix: ( #( #kargs, )* )) -> Vec { + <#partial_keygen>::final_key(prefix) + } + } + + impl< + #(#current_tuple: FullCodec + StaticTypeInfo,)* + #(#hashers: ReversibleStorageHasher,)* + #(#kargs: EncodeLike<#prefixes>),* + > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { + fn decode_partial_key(key_material: &[u8]) -> Result< + Self::Suffix, + #frame_support::__private::codec::Error, + > { + <#suffix_keygen>::decode_final_key(key_material).map(|k| k.0) + } + } + }; + + all_trait_impls.extend(trait_impls); + } + } + + Ok(all_trait_impls) } fn generate_tuple(idents: &[Ident]) -> TokenStream { - if idents.len() == 1 { - idents[0].to_token_stream() - } else { - quote!((#(#idents),*)) - } + if idents.len() == 1 { + idents[0].to_token_stream() + } else { + quote!((#(#idents),*)) + } } fn generate_keygen(idents: &[Ident]) -> TokenStream { - if idents.len() == 1 { - let key = &idents[0]; - let hasher = format_ident!("Hasher{}", key); + if idents.len() == 1 { + let key = &idents[0]; + let hasher = format_ident!("Hasher{}", key); - quote!(Key<#hasher, #key>) - } else { - let hashers = idents.iter().map(|ident| format_ident!("Hasher{}", ident)); + quote!(Key<#hasher, #key>) + } else { + let hashers = idents.iter().map(|ident| format_ident!("Hasher{}", ident)); - quote!((#(Key<#hashers, #idents>),*)) - } + quote!((#(Key<#hashers, #idents>),*)) + } } diff --git a/support/procedural-fork/src/match_and_insert.rs b/support/procedural-fork/src/match_and_insert.rs index aa9cc56d7..a80b6e95f 100644 --- a/support/procedural-fork/src/match_and_insert.rs +++ b/support/procedural-fork/src/match_and_insert.rs @@ -22,138 +22,152 @@ use std::iter::once; use syn::spanned::Spanned; mod keyword { 
- syn::custom_keyword!(target); - syn::custom_keyword!(pattern); - syn::custom_keyword!(tokens); + syn::custom_keyword!(target); + syn::custom_keyword!(pattern); + syn::custom_keyword!(tokens); } pub fn match_and_insert(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let MatchAndInsertDef { pattern, tokens, target } = - syn::parse_macro_input!(input as MatchAndInsertDef); - - match expand_in_stream(&pattern, &mut Some(tokens), target) { - Ok(stream) => stream.into(), - Err(err) => err.to_compile_error().into(), - } + let MatchAndInsertDef { + pattern, + tokens, + target, + } = syn::parse_macro_input!(input as MatchAndInsertDef); + + match expand_in_stream(&pattern, &mut Some(tokens), target) { + Ok(stream) => stream.into(), + Err(err) => err.to_compile_error().into(), + } } struct MatchAndInsertDef { - // Token stream to search and insert tokens into. - target: TokenStream, - // Pattern to match against, this is ensured to have no TokenTree::Group nor TokenTree::Literal - // (i.e. contains only Punct or Ident), and not being empty. - pattern: Vec, - // Token stream to insert after the match pattern. - tokens: TokenStream, + // Token stream to search and insert tokens into. + target: TokenStream, + // Pattern to match against, this is ensured to have no TokenTree::Group nor TokenTree::Literal + // (i.e. contains only Punct or Ident), and not being empty. + pattern: Vec, + // Token stream to insert after the match pattern. 
+ tokens: TokenStream, } impl syn::parse::Parse for MatchAndInsertDef { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut target; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(target in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(target in target); - let target = target.parse()?; - - let mut pattern; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(pattern in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(pattern in pattern); - let pattern = pattern.parse::()?.into_iter().collect::>(); - - if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Group(_))) { - return Err(syn::Error::new(t.span(), "Unexpected group token tree")) - } - if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Literal(_))) { - return Err(syn::Error::new(t.span(), "Unexpected literal token tree")) - } - - if pattern.is_empty() { - return Err(syn::Error::new(Span::call_site(), "empty match pattern is invalid")) - } - - let mut tokens; - let _ = input.parse::()?; - let _ = input.parse::()?; - let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(tokens in input); - let _replace_with_brace: syn::token::Brace = syn::braced!(tokens in tokens); - let tokens = tokens.parse()?; - - Ok(Self { tokens, pattern, target }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut target; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(target in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(target in target); + let target = target.parse()?; + + let mut pattern; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(pattern in input); + let _replace_with_brace: syn::token::Brace = 
syn::braced!(pattern in pattern); + let pattern = pattern + .parse::()? + .into_iter() + .collect::>(); + + if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Group(_))) { + return Err(syn::Error::new(t.span(), "Unexpected group token tree")); + } + if let Some(t) = pattern.iter().find(|t| matches!(t, TokenTree::Literal(_))) { + return Err(syn::Error::new(t.span(), "Unexpected literal token tree")); + } + + if pattern.is_empty() { + return Err(syn::Error::new( + Span::call_site(), + "empty match pattern is invalid", + )); + } + + let mut tokens; + let _ = input.parse::()?; + let _ = input.parse::()?; + let _replace_with_bracket: syn::token::Bracket = syn::bracketed!(tokens in input); + let _replace_with_brace: syn::token::Brace = syn::braced!(tokens in tokens); + let tokens = tokens.parse()?; + + Ok(Self { + tokens, + pattern, + target, + }) + } } // Insert `tokens` after the first matching `pattern`. // `tokens` must be some (Option is used for internal simplification). // `pattern` must not be empty and should only contain Ident or Punct. 
fn expand_in_stream( - pattern: &[TokenTree], - tokens: &mut Option, - stream: TokenStream, + pattern: &[TokenTree], + tokens: &mut Option, + stream: TokenStream, ) -> syn::Result { - assert!( - tokens.is_some(), - "`tokens` must be some, Option is used because `tokens` is used only once" - ); - assert!( - !pattern.is_empty(), - "`pattern` must not be empty, otherwise there is nothing to match against" - ); - - let stream_span = stream.span(); - let mut stream = stream.into_iter(); - let mut extended = TokenStream::new(); - let mut match_cursor = 0; - - while let Some(token) = stream.next() { - match token { - TokenTree::Group(group) => { - match_cursor = 0; - let group_stream = group.stream(); - match expand_in_stream(pattern, tokens, group_stream) { - Ok(s) => { - extended.extend(once(TokenTree::Group(Group::new(group.delimiter(), s)))); - extended.extend(stream); - return Ok(extended) - }, - Err(_) => { - extended.extend(once(TokenTree::Group(group))); - }, - } - }, - other => { - advance_match_cursor(&other, pattern, &mut match_cursor); - - extended.extend(once(other)); - - if match_cursor == pattern.len() { - extended - .extend(once(tokens.take().expect("tokens is used to replace only once"))); - extended.extend(stream); - return Ok(extended) - } - }, - } - } - // if we reach this point, it means the stream is empty and we haven't found a matching pattern - let msg = format!("Cannot find pattern `{:?}` in given token stream", pattern); - Err(syn::Error::new(stream_span, msg)) + assert!( + tokens.is_some(), + "`tokens` must be some, Option is used because `tokens` is used only once" + ); + assert!( + !pattern.is_empty(), + "`pattern` must not be empty, otherwise there is nothing to match against" + ); + + let stream_span = stream.span(); + let mut stream = stream.into_iter(); + let mut extended = TokenStream::new(); + let mut match_cursor = 0; + + while let Some(token) = stream.next() { + match token { + TokenTree::Group(group) => { + match_cursor = 0; + let 
group_stream = group.stream(); + match expand_in_stream(pattern, tokens, group_stream) { + Ok(s) => { + extended.extend(once(TokenTree::Group(Group::new(group.delimiter(), s)))); + extended.extend(stream); + return Ok(extended); + } + Err(_) => { + extended.extend(once(TokenTree::Group(group))); + } + } + } + other => { + advance_match_cursor(&other, pattern, &mut match_cursor); + + extended.extend(once(other)); + + if match_cursor == pattern.len() { + extended.extend(once( + tokens.take().expect("tokens is used to replace only once"), + )); + extended.extend(stream); + return Ok(extended); + } + } + } + } + // if we reach this point, it means the stream is empty and we haven't found a matching pattern + let msg = format!("Cannot find pattern `{:?}` in given token stream", pattern); + Err(syn::Error::new(stream_span, msg)) } fn advance_match_cursor(other: &TokenTree, pattern: &[TokenTree], match_cursor: &mut usize) { - use TokenTree::{Ident, Punct}; - - let does_match_other_pattern = match (other, &pattern[*match_cursor]) { - (Ident(i1), Ident(i2)) => i1 == i2, - (Punct(p1), Punct(p2)) => p1.as_char() == p2.as_char(), - _ => false, - }; - - if does_match_other_pattern { - *match_cursor += 1; - } else { - *match_cursor = 0; - } + use TokenTree::{Ident, Punct}; + + let does_match_other_pattern = match (other, &pattern[*match_cursor]) { + (Ident(i1), Ident(i2)) => i1 == i2, + (Punct(p1), Punct(p2)) => p1.as_char() == p2.as_char(), + _ => false, + }; + + if does_match_other_pattern { + *match_cursor += 1; + } else { + *match_cursor = 0; + } } diff --git a/support/procedural-fork/src/no_bound/clone.rs b/support/procedural-fork/src/no_bound/clone.rs index 346bf450f..17039bdc8 100644 --- a/support/procedural-fork/src/no_bound/clone.rs +++ b/support/procedural-fork/src/no_bound/clone.rs @@ -19,89 +19,93 @@ use syn::spanned::Spanned; /// Derive Clone but do not bound any generic. 
pub fn derive_clone_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named.named.iter().map(|i| &i.ident).map(|i| { - quote::quote_spanned!(i.span() => - #i: ::core::clone::Clone::clone(&self.#i) - ) - }); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named.named.iter().map(|i| &i.ident).map(|i| { + quote::quote_spanned!(i.span() => + #i: ::core::clone::Clone::clone(&self.#i) + ) + }); - quote::quote!( Self { #( #fields, )* } ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = - unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map(|i| { - quote::quote_spanned!(i.span() => - ::core::clone::Clone::clone(&self.#i) - ) - }); + quote::quote!( Self { #( #fields, )* } ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| { + quote::quote_spanned!(i.span() => + ::core::clone::Clone::clone(&self.#i) + ) + }); - quote::quote!( Self ( #( #fields, )* ) ) - }, - syn::Fields::Unit => { - quote::quote!(Self) - }, - }, - syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - match &variant.fields { - syn::Fields::Named(named) => { - let captured = named.named.iter().map(|i| &i.ident); - let cloned = captured.clone().map(|i| { - ::quote::quote_spanned!(i.span() => - #i: ::core::clone::Clone::clone(#i) - ) - }); - 
quote::quote!( - Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} - ) - }, - syn::Fields::Unnamed(unnamed) => { - let captured = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let cloned = captured.clone().map(|i| { - quote::quote_spanned!(i.span() => - ::core::clone::Clone::clone(#i) - ) - }); - quote::quote!( - Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) - ) - }, - syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), - } - }); + quote::quote!( Self ( #( #fields, )* ) ) + } + syn::Fields::Unit => { + quote::quote!(Self) + } + }, + syn::Data::Enum(enum_) => { + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let cloned = captured.clone().map(|i| { + ::quote::quote_spanned!(i.span() => + #i: ::core::clone::Clone::clone(#i) + ) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} + ) + } + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let cloned = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + ::core::clone::Clone::clone(#i) + ) + }); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) + ) + } + syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), + } + }); - quote::quote!(match self { - #( #variants, )* - }) - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(CloneNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!(match self { + #( #variants, )* + }) + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(CloneNoBound)`"; + return 
syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::clone::Clone for #name #ty_generics #where_clause { - fn clone(&self) -> Self { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::clone::Clone for #name #ty_generics #where_clause { + fn clone(&self) -> Self { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/debug.rs b/support/procedural-fork/src/no_bound/debug.rs index a1b3f4f0d..8034bb5ec 100644 --- a/support/procedural-fork/src/no_bound/debug.rs +++ b/support/procedural-fork/src/no_bound/debug.rs @@ -19,103 +19,103 @@ use syn::spanned::Spanned; /// Derive Debug but do not bound any generics. pub fn derive_debug_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let input_ident = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let input_ident = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = - named.named.iter().map(|i| &i.ident).map( - |i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) ), - ); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = + named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) ), + ); - quote::quote!( - fmt.debug_struct(stringify!(#input_ident)) - #( #fields )* - .finish() - ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - 
.enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => .field(&self.#i) )); + quote::quote!( + fmt.debug_struct(stringify!(#input_ident)) + #( #fields )* + .finish() + ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => .field(&self.#i) )); - quote::quote!( - fmt.debug_tuple(stringify!(#input_ident)) - #( #fields )* - .finish() - ) - }, - syn::Fields::Unit => quote::quote!(fmt.write_str(stringify!(#input_ident))), - }, - syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - let full_variant_str = format!("{}::{}", input_ident, ident); - match &variant.fields { - syn::Fields::Named(named) => { - let captured = named.named.iter().map(|i| &i.ident); - let debugged = captured.clone().map(|i| { - quote::quote_spanned!(i.span() => - .field(stringify!(#i), &#i) - ) - }); - quote::quote!( - Self::#ident { #( ref #captured, )* } => { - fmt.debug_struct(#full_variant_str) - #( #debugged )* - .finish() - } - ) - }, - syn::Fields::Unnamed(unnamed) => { - let captured = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let debugged = captured - .clone() - .map(|i| quote::quote_spanned!(i.span() => .field(&#i))); - quote::quote!( - Self::#ident ( #( ref #captured, )* ) => { - fmt.debug_tuple(#full_variant_str) - #( #debugged )* - .finish() - } - ) - }, - syn::Fields::Unit => quote::quote!( - Self::#ident => fmt.write_str(#full_variant_str) - ), - } - }); + quote::quote!( + fmt.debug_tuple(stringify!(#input_ident)) + #( #fields )* + .finish() + ) + } + syn::Fields::Unit => quote::quote!(fmt.write_str(stringify!(#input_ident))), + }, + syn::Data::Enum(enum_) => { + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + let full_variant_str = format!("{}::{}", 
input_ident, ident); + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let debugged = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + .field(stringify!(#i), &#i) + ) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => { + fmt.debug_struct(#full_variant_str) + #( #debugged )* + .finish() + } + ) + } + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let debugged = captured + .clone() + .map(|i| quote::quote_spanned!(i.span() => .field(&#i))); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => { + fmt.debug_tuple(#full_variant_str) + #( #debugged )* + .finish() + } + ) + } + syn::Fields::Unit => quote::quote!( + Self::#ident => fmt.write_str(#full_variant_str) + ), + } + }); - quote::quote!(match *self { - #( #variants, )* - }) - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(DebugNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!(match *self { + #( #variants, )* + }) + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(DebugNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::fmt::Debug for #input_ident #ty_generics #where_clause { - fn fmt(&self, fmt: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::fmt::Debug for #input_ident #ty_generics #where_clause { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/default.rs b/support/procedural-fork/src/no_bound/default.rs 
index 0524247d2..1c0d90531 100644 --- a/support/procedural-fork/src/no_bound/default.rs +++ b/support/procedural-fork/src/no_bound/default.rs @@ -21,51 +21,57 @@ use syn::{spanned::Spanned, Data, DeriveInput, Fields}; /// Derive Default but do not bound any generic. pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as DeriveInput); - - let name = &input.ident; - - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - - let impl_ = match input.data { - Data::Struct(struct_) => match struct_.fields { - Fields::Named(named) => { - let fields = named.named.iter().map(|field| &field.ident).map(|ident| { - quote_spanned! {ident.span() => - #ident: ::core::default::Default::default() - } - }); - - quote!(Self { #( #fields, )* }) - }, - Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().map(|field| { - quote_spanned! {field.span()=> - ::core::default::Default::default() - } - }); - - quote!(Self( #( #fields, )* )) - }, - Fields::Unit => { - quote!(Self) - }, - }, - Data::Enum(enum_) => { - if enum_.variants.is_empty() { - return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") - .to_compile_error() - .into() - } - - // all #[default] attrs with the variant they're on; i.e. a var - let default_variants = enum_ - .variants - .into_iter() - .filter(|variant| variant.attrs.iter().any(|attr| attr.path().is_ident("default"))) - .collect::>(); - - match &*default_variants { + let input = syn::parse_macro_input!(input as DeriveInput); + + let name = &input.ident; + + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let impl_ = + match input.data { + Data::Struct(struct_) => match struct_.fields { + Fields::Named(named) => { + let fields = named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! 
{ident.span() => + #ident: ::core::default::Default::default() + } + }); + + quote!(Self { #( #fields, )* }) + } + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! {field.span()=> + ::core::default::Default::default() + } + }); + + quote!(Self( #( #fields, )* )) + } + Fields::Unit => { + quote!(Self) + } + }, + Data::Enum(enum_) => { + if enum_.variants.is_empty() { + return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") + .to_compile_error() + .into(); + } + + // all #[default] attrs with the variant they're on; i.e. a var + let default_variants = enum_ + .variants + .into_iter() + .filter(|variant| { + variant + .attrs + .iter() + .any(|attr| attr.path().is_ident("default")) + }) + .collect::>(); + + match &*default_variants { [] => return syn::Error::new( name.clone().span(), "no default declared, make a variant default by placing `#[default]` above it", @@ -137,25 +143,26 @@ pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::To return err.into_compile_error().into() }, } - }, - Data::Union(union_) => - return syn::Error::new_spanned( - union_.union_token, - "Union type not supported by `derive(DefaultNoBound)`", - ) - .to_compile_error() - .into(), - }; - - quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::default::Default for #name #ty_generics #where_clause { - fn default() -> Self { - #impl_ - } - } - }; - ) - .into() + } + Data::Union(union_) => { + return syn::Error::new_spanned( + union_.union_token, + "Union type not supported by `derive(DefaultNoBound)`", + ) + .to_compile_error() + .into() + } + }; + + quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::default::Default for #name #ty_generics #where_clause { + fn default() -> Self { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/ord.rs b/support/procedural-fork/src/no_bound/ord.rs index 
b24d27c04..20f30eb9d 100644 --- a/support/procedural-fork/src/no_bound/ord.rs +++ b/support/procedural-fork/src/no_bound/ord.rs @@ -19,57 +19,57 @@ use syn::spanned::Spanned; /// Derive Ord but do not bound any generic. pub fn derive_ord_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input: syn::DeriveInput = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; + let input: syn::DeriveInput = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named - .named - .iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named + .named + .iter() + .map(|i| &i.ident) + .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); - quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); + quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => self.#i.cmp(&other.#i) )); - quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) - }, - syn::Fields::Unit => { - 
quote::quote!(core::cmp::Ordering::Equal) - }, - }, - syn::Data::Enum(_) => { - let msg = "Enum type not supported by `derive(OrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(OrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!( core::cmp::Ordering::Equal #( .then_with(|| #fields) )* ) + } + syn::Fields::Unit => { + quote::quote!(core::cmp::Ordering::Equal) + } + }, + syn::Data::Enum(_) => { + let msg = "Enum type not supported by `derive(OrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(OrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - impl #impl_generics core::cmp::Ord for #name #ty_generics #where_clause { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + impl #impl_generics core::cmp::Ord for #name #ty_generics #where_clause { + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/partial_eq.rs b/support/procedural-fork/src/no_bound/partial_eq.rs index a1be71a96..8833f6e5f 100644 --- a/support/procedural-fork/src/no_bound/partial_eq.rs +++ b/support/procedural-fork/src/no_bound/partial_eq.rs @@ -19,119 +19,119 @@ use syn::spanned::Spanned; /// Derive PartialEq but do not bound any generic. 
pub fn derive_partial_eq_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = syn::parse_macro_input!(input as syn::DeriveInput); + let input = syn::parse_macro_input!(input as syn::DeriveInput); - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => match struct_.fields { - syn::Fields::Named(named) => { - let fields = named - .named - .iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); + let impl_ = match input.data { + syn::Data::Struct(struct_) => match struct_.fields { + syn::Fields::Named(named) => { + let fields = named + .named + .iter() + .map(|i| &i.ident) + .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); - quote::quote!( true #( && #fields )* ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); + quote::quote!( true #( && #fields )* ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, _)| syn::Index::from(i)) + .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); - quote::quote!( true #( && #fields )* ) - }, - syn::Fields::Unit => { - quote::quote!(true) - }, - }, - syn::Data::Enum(enum_) => { - let variants = - enum_.variants.iter().map(|variant| { - let ident = &variant.ident; - match &variant.fields { - syn::Fields::Named(named) => { - let names = named.named.iter().map(|i| &i.ident); - let other_names = names.clone().enumerate().map(|(n, ident)| { - syn::Ident::new(&format!("_{}", n), ident.span()) - }); + quote::quote!( true #( && #fields )* ) + } + syn::Fields::Unit => { + quote::quote!(true) + } + 
}, + syn::Data::Enum(enum_) => { + let variants = + enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let names = named.named.iter().map(|i| &i.ident); + let other_names = names.clone().enumerate().map(|(n, ident)| { + syn::Ident::new(&format!("_{}", n), ident.span()) + }); - let capture = names.clone(); - let other_capture = names - .clone() - .zip(other_names.clone()) - .map(|(i, other_i)| quote::quote!(#i: #other_i)); - let eq = names.zip(other_names).map( - |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), - ); - quote::quote!( - ( - Self::#ident { #( #capture, )* }, - Self::#ident { #( #other_capture, )* }, - ) => true #( && #eq )* - ) - }, - syn::Fields::Unnamed(unnamed) => { - let names = unnamed - .unnamed - .iter() - .enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let other_names = - unnamed.unnamed.iter().enumerate().map(|(i, f)| { - syn::Ident::new(&format!("_{}_other", i), f.span()) - }); - let eq = names.clone().zip(other_names.clone()).map( - |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), - ); - quote::quote!( - ( - Self::#ident ( #( #names, )* ), - Self::#ident ( #( #other_names, )* ), - ) => true #( && #eq )* - ) - }, - syn::Fields::Unit => quote::quote!( (Self::#ident, Self::#ident) => true ), - } - }); + let capture = names.clone(); + let other_capture = names + .clone() + .zip(other_names.clone()) + .map(|(i, other_i)| quote::quote!(#i: #other_i)); + let eq = names.zip(other_names).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); + quote::quote!( + ( + Self::#ident { #( #capture, )* }, + Self::#ident { #( #other_capture, )* }, + ) => true #( && #eq )* + ) + } + syn::Fields::Unnamed(unnamed) => { + let names = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let other_names = + 
unnamed.unnamed.iter().enumerate().map(|(i, f)| { + syn::Ident::new(&format!("_{}_other", i), f.span()) + }); + let eq = names.clone().zip(other_names.clone()).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); + quote::quote!( + ( + Self::#ident ( #( #names, )* ), + Self::#ident ( #( #other_names, )* ), + ) => true #( && #eq )* + ) + } + syn::Fields::Unit => quote::quote!( (Self::#ident, Self::#ident) => true ), + } + }); - let mut different_variants = vec![]; - for (i, i_variant) in enum_.variants.iter().enumerate() { - for (j, j_variant) in enum_.variants.iter().enumerate() { - if i != j { - let i_ident = &i_variant.ident; - let j_ident = &j_variant.ident; - different_variants.push(quote::quote!( - (Self::#i_ident { .. }, Self::#j_ident { .. }) => false - )) - } - } - } + let mut different_variants = vec![]; + for (i, i_variant) in enum_.variants.iter().enumerate() { + for (j, j_variant) in enum_.variants.iter().enumerate() { + if i != j { + let i_ident = &i_variant.ident; + let j_ident = &j_variant.ident; + different_variants.push(quote::quote!( + (Self::#i_ident { .. }, Self::#j_ident { .. 
}) => false + )) + } + } + } - quote::quote!( match (self, other) { - #( #variants, )* - #( #different_variants, )* - }) - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(PartialEqNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!( match (self, other) { + #( #variants, )* + #( #different_variants, )* + }) + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(PartialEqNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - #[automatically_derived] - impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause { - fn eq(&self, other: &Self) -> bool { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + #[automatically_derived] + impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause { + fn eq(&self, other: &Self) -> bool { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/no_bound/partial_ord.rs b/support/procedural-fork/src/no_bound/partial_ord.rs index 86aa42be9..c73199d4e 100644 --- a/support/procedural-fork/src/no_bound/partial_ord.rs +++ b/support/procedural-fork/src/no_bound/partial_ord.rs @@ -19,71 +19,72 @@ use syn::spanned::Spanned; /// Derive PartialOrd but do not bound any generic. 
pub fn derive_partial_ord_no_bound(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input: syn::DeriveInput = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; + let input: syn::DeriveInput = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = match input.data { - syn::Data::Struct(struct_) => - match struct_.fields { - syn::Fields::Named(named) => { - let fields = - named.named.iter().map(|i| &i.ident).map( - |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), - ); + let impl_ = + match input.data { + syn::Data::Struct(struct_) => { + match struct_.fields { + syn::Fields::Named(named) => { + let fields = named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), + ); - quote::quote!( - Some(core::cmp::Ordering::Equal) - #( - .and_then(|order| { - let next_order = #fields?; - Some(order.then(next_order)) - }) - )* - ) - }, - syn::Fields::Unnamed(unnamed) => { - let fields = + quote::quote!( + Some(core::cmp::Ordering::Equal) + #( + .and_then(|order| { + let next_order = #fields?; + Some(order.then(next_order)) + }) + )* + ) + } + syn::Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map( |i| quote::quote_spanned!(i.span() => self.#i.partial_cmp(&other.#i)), ); - quote::quote!( - Some(core::cmp::Ordering::Equal) - #( - .and_then(|order| { - let next_order = #fields?; - Some(order.then(next_order)) - }) - )* - ) - }, - syn::Fields::Unit => { - quote::quote!(Some(core::cmp::Ordering::Equal)) - }, - }, - syn::Data::Enum(_) => { - let msg = "Enum type not supported by 
`derive(PartialOrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - syn::Data::Union(_) => { - let msg = "Union type not supported by `derive(PartialOrdNoBound)`"; - return syn::Error::new(input.span(), msg).to_compile_error().into() - }, - }; + quote::quote!( + Some(core::cmp::Ordering::Equal) + #( + .and_then(|order| { + let next_order = #fields?; + Some(order.then(next_order)) + }) + )* + ) + } + syn::Fields::Unit => { + quote::quote!(Some(core::cmp::Ordering::Equal)) + } + } + } + syn::Data::Enum(_) => { + let msg = "Enum type not supported by `derive(PartialOrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + syn::Data::Union(_) => { + let msg = "Union type not supported by `derive(PartialOrdNoBound)`"; + return syn::Error::new(input.span(), msg).to_compile_error().into(); + } + }; - quote::quote!( - const _: () = { - impl #impl_generics core::cmp::PartialOrd for #name #ty_generics #where_clause { - fn partial_cmp(&self, other: &Self) -> Option { - #impl_ - } - } - }; - ) - .into() + quote::quote!( + const _: () = { + impl #impl_generics core::cmp::PartialOrd for #name #ty_generics #where_clause { + fn partial_cmp(&self, other: &Self) -> Option { + #impl_ + } + } + }; + ) + .into() } diff --git a/support/procedural-fork/src/pallet/expand/call.rs b/support/procedural-fork/src/pallet/expand/call.rs index f395872c8..a39e81fd1 100644 --- a/support/procedural-fork/src/pallet/expand/call.rs +++ b/support/procedural-fork/src/pallet/expand/call.rs @@ -16,12 +16,12 @@ // limitations under the License. 
use crate::{ - pallet::{ - expand::warnings::{weight_constant_warning, weight_witness_warning}, - parse::call::CallWeightDef, - Def, - }, - COUNTER, + pallet::{ + expand::warnings::{weight_constant_warning, weight_witness_warning}, + parse::call::CallWeightDef, + Def, + }, + COUNTER, }; use proc_macro2::TokenStream as TokenStream2; use proc_macro_warning::Warning; @@ -32,45 +32,56 @@ use syn::spanned::Spanned; /// * Generate enum call and implement various trait on it. /// * Implement Callable and call_function on `Pallet` pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { - let (span, where_clause, methods, docs) = match def.call.as_ref() { - Some(call) => { - let span = call.attr_span; - let where_clause = call.where_clause.clone(); - let methods = call.methods.clone(); - let docs = call.docs.clone(); - - (span, where_clause, methods, docs) - }, - None => (def.item.span(), def.config.where_clause.clone(), Vec::new(), Vec::new()), - }; - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_generics(span); - let type_decl_bounded_gen = &def.type_decl_bounded_generics(span); - let type_use_gen = &def.type_use_generics(span); - let call_ident = syn::Ident::new("Call", span); - let pallet_ident = &def.pallet_struct.pallet; - - let fn_name = methods.iter().map(|method| &method.name).collect::>(); - let call_index = methods.iter().map(|method| method.call_index).collect::>(); - let new_call_variant_fn_name = fn_name - .iter() - .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) - .collect::>(); - - let new_call_variant_doc = fn_name - .iter() - .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) - .collect::>(); - - let mut call_index_warnings = Vec::new(); - // Emit a warning for each call that is missing `call_index` when not in dev-mode. 
- for method in &methods { - if method.explicit_call_index || def.dev_mode { - continue - } - - let warning = Warning::new_deprecated("ImplicitCallIndex") + let (span, where_clause, methods, docs) = match def.call.as_ref() { + Some(call) => { + let span = call.attr_span; + let where_clause = call.where_clause.clone(); + let methods = call.methods.clone(); + let docs = call.docs.clone(); + + (span, where_clause, methods, docs) + } + None => ( + def.item.span(), + def.config.where_clause.clone(), + Vec::new(), + Vec::new(), + ), + }; + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let type_impl_gen = &def.type_impl_generics(span); + let type_decl_bounded_gen = &def.type_decl_bounded_generics(span); + let type_use_gen = &def.type_use_generics(span); + let call_ident = syn::Ident::new("Call", span); + let pallet_ident = &def.pallet_struct.pallet; + + let fn_name = methods + .iter() + .map(|method| &method.name) + .collect::>(); + let call_index = methods + .iter() + .map(|method| method.call_index) + .collect::>(); + let new_call_variant_fn_name = fn_name + .iter() + .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) + .collect::>(); + + let new_call_variant_doc = fn_name + .iter() + .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) + .collect::>(); + + let mut call_index_warnings = Vec::new(); + // Emit a warning for each call that is missing `call_index` when not in dev-mode. 
+ for method in &methods { + if method.explicit_call_index || def.dev_mode { + continue; + } + + let warning = Warning::new_deprecated("ImplicitCallIndex") .index(call_index_warnings.len()) .old("use implicit call indices") .new("ensure that all calls have a `pallet::call_index` attribute or put the pallet into `dev` mode") @@ -80,373 +91,408 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { ]) .span(method.name.span()) .build_or_panic(); - call_index_warnings.push(warning); - } - - let mut fn_weight = Vec::::new(); - let mut weight_warnings = Vec::new(); - for method in &methods { - match &method.weight { - CallWeightDef::DevModeDefault => fn_weight.push(syn::parse_quote!(0)), - CallWeightDef::Immediate(e) => { - weight_constant_warning(e, def.dev_mode, &mut weight_warnings); - weight_witness_warning(method, def.dev_mode, &mut weight_warnings); - - fn_weight.push(e.into_token_stream()); - }, - CallWeightDef::Inherited => { - let pallet_weight = def - .call - .as_ref() - .expect("we have methods; we have calls; qed") - .inherited_call_weight - .as_ref() - .expect("the parser prevents this"); - - // Expand `<::WeightInfo>::call_name()`. 
- let t = &pallet_weight.typename; - let n = &method.name; - fn_weight.push(quote!({ < #t > :: #n () })); - }, - } - } - debug_assert_eq!(fn_weight.len(), methods.len()); - - let fn_doc = methods.iter().map(|method| &method.docs).collect::>(); - - let args_name = methods - .iter() - .map(|method| method.args.iter().map(|(_, name, _)| name.clone()).collect::>()) - .collect::>(); - - let args_name_stripped = methods - .iter() - .map(|method| { - method - .args - .iter() - .map(|(_, name, _)| { - syn::Ident::new(name.to_string().trim_start_matches('_'), name.span()) - }) - .collect::>() - }) - .collect::>(); - - let make_args_name_pattern = |ref_tok| { - args_name - .iter() - .zip(args_name_stripped.iter()) - .map(|(args_name, args_name_stripped)| { - args_name - .iter() - .zip(args_name_stripped) - .map(|(args_name, args_name_stripped)| { - if args_name == args_name_stripped { - quote::quote!( #ref_tok #args_name ) - } else { - quote::quote!( #args_name_stripped: #ref_tok #args_name ) - } - }) - .collect::>() - }) - .collect::>() - }; - - let args_name_pattern = make_args_name_pattern(None); - let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); - - let args_type = methods - .iter() - .map(|method| method.args.iter().map(|(_, _, type_)| type_.clone()).collect::>()) - .collect::>(); - - let args_compact_attr = methods.iter().map(|method| { - method - .args - .iter() - .map(|(is_compact, _, type_)| { - if *is_compact { - quote::quote_spanned!(type_.span() => #[codec(compact)] ) - } else { - quote::quote!() - } - }) - .collect::>() - }); - - let default_docs = - [syn::parse_quote!(r"Contains a variant per dispatchable extrinsic that this pallet has.")]; - let docs = if docs.is_empty() { &default_docs[..] } else { &docs[..] }; - - let maybe_compile_error = if def.call.is_none() { - quote::quote! 
{ - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::call] defined, perhaps you should remove `Call` from \ - construct_runtime?", - )); - } - } else { - proc_macro2::TokenStream::new() - }; - - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = syn::Ident::new(&format!("__is_call_part_defined_{}", count), span); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; - - // Wrap all calls inside of storage layers - if let Some(syn::Item::Impl(item_impl)) = def - .call - .as_ref() - .map(|c| &mut def.item.content.as_mut().expect("Checked by def parser").1[c.index]) - { - item_impl.items.iter_mut().for_each(|i| { - if let syn::ImplItem::Fn(method) = i { - let block = &method.block; - method.block = syn::parse_quote! {{ - // We execute all dispatchable in a new storage layer, allowing them - // to return an error at any point, and undoing any storage changes. - #frame_support::storage::with_storage_layer(|| #block) - }}; - } - }); - } - - // Extracts #[allow] attributes, necessary so that we don't run into compiler warnings - let maybe_allow_attrs = methods - .iter() - .map(|method| { - method - .attrs - .iter() - .find(|attr| attr.path().is_ident("allow")) - .map_or(proc_macro2::TokenStream::new(), |attr| attr.to_token_stream()) - }) - .collect::>(); - - let cfg_attrs = methods - .iter() - .map(|method| { - let attrs = - method.cfg_attrs.iter().map(|attr| attr.to_token_stream()).collect::>(); - quote::quote!( #( #attrs )* ) - }) - .collect::>(); - - let feeless_check = methods.iter().map(|method| &method.feeless_check).collect::>(); - let feeless_check_result = - feeless_check.iter().zip(args_name.iter()).map(|(feeless_check, arg_name)| { - if let Some(feeless_check) = feeless_check { - quote::quote!(#feeless_check(origin, #( #arg_name, )*)) - } else { - quote::quote!(false) - } - }); - - quote::quote_spanned!(span => - #[doc(hidden)] - mod warnings { - #( - 
#call_index_warnings - )* - #( - #weight_warnings - )* - } - - #[allow(unused_imports)] - #[doc(hidden)] - pub mod __substrate_call_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - }; - } - - #[doc(hidden)] - pub use #macro_ident as is_call_part_defined; - } - - #( #[doc = #docs] )* - #[derive( - #frame_support::RuntimeDebugNoBound, - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - )] - #[codec(encode_bound())] - #[codec(decode_bound())] - #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] - #[allow(non_camel_case_types)] - pub enum #call_ident<#type_decl_bounded_gen> #where_clause { - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData<(#type_use_gen,)>, - #frame_support::Never, - ), - #( - #cfg_attrs - #( #[doc = #fn_doc] )* - #[codec(index = #call_index)] - #fn_name { - #( - #[allow(missing_docs)] - #args_compact_attr #args_name_stripped: #args_type - ),* - }, - )* - } - - impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { - #( - #cfg_attrs - #[doc = #new_call_variant_doc] - pub fn #new_call_variant_fn_name( - #( #args_name_stripped: #args_type ),* - ) -> Self { - Self::#fn_name { - #( #args_name_stripped ),* - } - } - )* - } - - impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo - for #call_ident<#type_use_gen> - #where_clause - { - fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { - match *self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern_ref, )* } => { - let __pallet_base_weight = #fn_weight; - - let __pallet_weight = < - dyn #frame_support::dispatch::WeighData<( #( & #args_type, )* )> - >::weigh_data(&__pallet_base_weight, ( #( #args_name, )* )); - - let __pallet_class = < - dyn 
#frame_support::dispatch::ClassifyDispatch< - ( #( & #args_type, )* ) - > - >::classify_dispatch(&__pallet_base_weight, ( #( #args_name, )* )); - - let __pallet_pays_fee = < - dyn #frame_support::dispatch::PaysFee<( #( & #args_type, )* )> - >::pays_fee(&__pallet_base_weight, ( #( #args_name, )* )); - - #frame_support::dispatch::DispatchInfo { - weight: __pallet_weight, - class: __pallet_class, - pays_fee: __pallet_pays_fee, - } - }, - )* - Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), - } - } - } - - impl<#type_impl_gen> #frame_support::dispatch::CheckIfFeeless for #call_ident<#type_use_gen> - #where_clause - { - type Origin = #frame_system::pallet_prelude::OriginFor; - #[allow(unused_variables)] - fn is_feeless(&self, origin: &Self::Origin) -> bool { - match *self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern_ref, )* } => { - #feeless_check_result - }, - )* - Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), - } - } - } - - impl<#type_impl_gen> #frame_support::traits::GetCallName for #call_ident<#type_use_gen> - #where_clause - { - fn get_call_name(&self) -> &'static str { - match *self { - #( #cfg_attrs Self::#fn_name { .. } => stringify!(#fn_name), )* - Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), - } - } - - fn get_call_names() -> &'static [&'static str] { - &[ #( #cfg_attrs stringify!(#fn_name), )* ] - } - } - - impl<#type_impl_gen> #frame_support::traits::GetCallIndex for #call_ident<#type_use_gen> - #where_clause - { - fn get_call_index(&self) -> u8 { - match *self { - #( #cfg_attrs Self::#fn_name { .. 
} => #call_index, )* - Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), - } - } - - fn get_call_indices() -> &'static [u8] { - &[ #( #cfg_attrs #call_index, )* ] - } - } - - impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable - for #call_ident<#type_use_gen> - #where_clause - { - type RuntimeOrigin = #frame_system::pallet_prelude::OriginFor; - fn dispatch_bypass_filter( - self, - origin: Self::RuntimeOrigin - ) -> #frame_support::dispatch::DispatchResultWithPostInfo { - #frame_support::dispatch_context::run_in_context(|| { - match self { - #( - #cfg_attrs - Self::#fn_name { #( #args_name_pattern, )* } => { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!(stringify!(#fn_name)) - ); - #maybe_allow_attrs - <#pallet_ident<#type_use_gen>>::#fn_name(origin, #( #args_name, )* ) - .map(Into::into).map_err(Into::into) - }, - )* - Self::__Ignore(_, _) => { - let _ = origin; // Use origin for empty Call enum - unreachable!("__PhantomItem cannot be used."); - }, - } - }) - } - } - - impl<#type_impl_gen> #frame_support::dispatch::Callable for #pallet_ident<#type_use_gen> - #where_clause - { - type RuntimeCall = #call_ident<#type_use_gen>; - } - - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { - #[allow(dead_code)] - #[doc(hidden)] - pub fn call_functions() -> #frame_support::__private::metadata_ir::PalletCallMetadataIR { - #frame_support::__private::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() - } - } - ) + call_index_warnings.push(warning); + } + + let mut fn_weight = Vec::::new(); + let mut weight_warnings = Vec::new(); + for method in &methods { + match &method.weight { + CallWeightDef::DevModeDefault => fn_weight.push(syn::parse_quote!(0)), + CallWeightDef::Immediate(e) => { + weight_constant_warning(e, def.dev_mode, &mut weight_warnings); + weight_witness_warning(method, def.dev_mode, &mut weight_warnings); + + 
fn_weight.push(e.into_token_stream()); + } + CallWeightDef::Inherited => { + let pallet_weight = def + .call + .as_ref() + .expect("we have methods; we have calls; qed") + .inherited_call_weight + .as_ref() + .expect("the parser prevents this"); + + // Expand `<::WeightInfo>::call_name()`. + let t = &pallet_weight.typename; + let n = &method.name; + fn_weight.push(quote!({ < #t > :: #n () })); + } + } + } + debug_assert_eq!(fn_weight.len(), methods.len()); + + let fn_doc = methods + .iter() + .map(|method| &method.docs) + .collect::>(); + + let args_name = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, name, _)| name.clone()) + .collect::>() + }) + .collect::>(); + + let args_name_stripped = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, name, _)| { + syn::Ident::new(name.to_string().trim_start_matches('_'), name.span()) + }) + .collect::>() + }) + .collect::>(); + + let make_args_name_pattern = |ref_tok| { + args_name + .iter() + .zip(args_name_stripped.iter()) + .map(|(args_name, args_name_stripped)| { + args_name + .iter() + .zip(args_name_stripped) + .map(|(args_name, args_name_stripped)| { + if args_name == args_name_stripped { + quote::quote!( #ref_tok #args_name ) + } else { + quote::quote!( #args_name_stripped: #ref_tok #args_name ) + } + }) + .collect::>() + }) + .collect::>() + }; + + let args_name_pattern = make_args_name_pattern(None); + let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); + + let args_type = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, _, type_)| type_.clone()) + .collect::>() + }) + .collect::>(); + + let args_compact_attr = methods.iter().map(|method| { + method + .args + .iter() + .map(|(is_compact, _, type_)| { + if *is_compact { + quote::quote_spanned!(type_.span() => #[codec(compact)] ) + } else { + quote::quote!() + } + }) + .collect::>() + }); + + let default_docs = [syn::parse_quote!( + r"Contains a variant per 
dispatchable extrinsic that this pallet has." + )]; + let docs = if docs.is_empty() { + &default_docs[..] + } else { + &docs[..] + }; + + let maybe_compile_error = if def.call.is_none() { + quote::quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::call] defined, perhaps you should remove `Call` from \ + construct_runtime?", + )); + } + } else { + proc_macro2::TokenStream::new() + }; + + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = syn::Ident::new(&format!("__is_call_part_defined_{}", count), span); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { + "never" + } else { + "always" + }; + + // Wrap all calls inside of storage layers + if let Some(syn::Item::Impl(item_impl)) = def + .call + .as_ref() + .map(|c| &mut def.item.content.as_mut().expect("Checked by def parser").1[c.index]) + { + item_impl.items.iter_mut().for_each(|i| { + if let syn::ImplItem::Fn(method) = i { + let block = &method.block; + method.block = syn::parse_quote! {{ + // We execute all dispatchable in a new storage layer, allowing them + // to return an error at any point, and undoing any storage changes. 
+ #frame_support::storage::with_storage_layer(|| #block) + }}; + } + }); + } + + // Extracts #[allow] attributes, necessary so that we don't run into compiler warnings + let maybe_allow_attrs = methods + .iter() + .map(|method| { + method + .attrs + .iter() + .find(|attr| attr.path().is_ident("allow")) + .map_or(proc_macro2::TokenStream::new(), |attr| { + attr.to_token_stream() + }) + }) + .collect::>(); + + let cfg_attrs = methods + .iter() + .map(|method| { + let attrs = method + .cfg_attrs + .iter() + .map(|attr| attr.to_token_stream()) + .collect::>(); + quote::quote!( #( #attrs )* ) + }) + .collect::>(); + + let feeless_check = methods + .iter() + .map(|method| &method.feeless_check) + .collect::>(); + let feeless_check_result = + feeless_check + .iter() + .zip(args_name.iter()) + .map(|(feeless_check, arg_name)| { + if let Some(feeless_check) = feeless_check { + quote::quote!(#feeless_check(origin, #( #arg_name, )*)) + } else { + quote::quote!(false) + } + }); + + quote::quote_spanned!(span => + #[doc(hidden)] + mod warnings { + #( + #call_index_warnings + )* + #( + #weight_warnings + )* + } + + #[allow(unused_imports)] + #[doc(hidden)] + pub mod __substrate_call_check { + #[macro_export] + #[doc(hidden)] + macro_rules! 
#macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + }; + } + + #[doc(hidden)] + pub use #macro_ident as is_call_part_defined; + } + + #( #[doc = #docs] )* + #[derive( + #frame_support::RuntimeDebugNoBound, + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] + #[allow(non_camel_case_types)] + pub enum #call_ident<#type_decl_bounded_gen> #where_clause { + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData<(#type_use_gen,)>, + #frame_support::Never, + ), + #( + #cfg_attrs + #( #[doc = #fn_doc] )* + #[codec(index = #call_index)] + #fn_name { + #( + #[allow(missing_docs)] + #args_compact_attr #args_name_stripped: #args_type + ),* + }, + )* + } + + impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { + #( + #cfg_attrs + #[doc = #new_call_variant_doc] + pub fn #new_call_variant_fn_name( + #( #args_name_stripped: #args_type ),* + ) -> Self { + Self::#fn_name { + #( #args_name_stripped ),* + } + } + )* + } + + impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo + for #call_ident<#type_use_gen> + #where_clause + { + fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { + match *self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern_ref, )* } => { + let __pallet_base_weight = #fn_weight; + + let __pallet_weight = < + dyn #frame_support::dispatch::WeighData<( #( & #args_type, )* )> + >::weigh_data(&__pallet_base_weight, ( #( #args_name, )* )); + + let __pallet_class = < + dyn #frame_support::dispatch::ClassifyDispatch< + ( #( & #args_type, )* ) + > + >::classify_dispatch(&__pallet_base_weight, ( #( #args_name, )* )); + + let __pallet_pays_fee = < + dyn 
#frame_support::dispatch::PaysFee<( #( & #args_type, )* )> + >::pays_fee(&__pallet_base_weight, ( #( #args_name, )* )); + + #frame_support::dispatch::DispatchInfo { + weight: __pallet_weight, + class: __pallet_class, + pays_fee: __pallet_pays_fee, + } + }, + )* + Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), + } + } + } + + impl<#type_impl_gen> #frame_support::dispatch::CheckIfFeeless for #call_ident<#type_use_gen> + #where_clause + { + type Origin = #frame_system::pallet_prelude::OriginFor; + #[allow(unused_variables)] + fn is_feeless(&self, origin: &Self::Origin) -> bool { + match *self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern_ref, )* } => { + #feeless_check_result + }, + )* + Self::__Ignore(_, _) => unreachable!("__Ignore cannot be used"), + } + } + } + + impl<#type_impl_gen> #frame_support::traits::GetCallName for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_name(&self) -> &'static str { + match *self { + #( #cfg_attrs Self::#fn_name { .. } => stringify!(#fn_name), )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_names() -> &'static [&'static str] { + &[ #( #cfg_attrs stringify!(#fn_name), )* ] + } + } + + impl<#type_impl_gen> #frame_support::traits::GetCallIndex for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_index(&self) -> u8 { + match *self { + #( #cfg_attrs Self::#fn_name { .. 
} => #call_index, )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_indices() -> &'static [u8] { + &[ #( #cfg_attrs #call_index, )* ] + } + } + + impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable + for #call_ident<#type_use_gen> + #where_clause + { + type RuntimeOrigin = #frame_system::pallet_prelude::OriginFor; + fn dispatch_bypass_filter( + self, + origin: Self::RuntimeOrigin + ) -> #frame_support::dispatch::DispatchResultWithPostInfo { + #frame_support::dispatch_context::run_in_context(|| { + match self { + #( + #cfg_attrs + Self::#fn_name { #( #args_name_pattern, )* } => { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!(stringify!(#fn_name)) + ); + #maybe_allow_attrs + <#pallet_ident<#type_use_gen>>::#fn_name(origin, #( #args_name, )* ) + .map(Into::into).map_err(Into::into) + }, + )* + Self::__Ignore(_, _) => { + let _ = origin; // Use origin for empty Call enum + unreachable!("__PhantomItem cannot be used."); + }, + } + }) + } + } + + impl<#type_impl_gen> #frame_support::dispatch::Callable for #pallet_ident<#type_use_gen> + #where_clause + { + type RuntimeCall = #call_ident<#type_use_gen>; + } + + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { + #[allow(dead_code)] + #[doc(hidden)] + pub fn call_functions() -> #frame_support::__private::metadata_ir::PalletCallMetadataIR { + #frame_support::__private::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/composite.rs b/support/procedural-fork/src/pallet/expand/composite.rs index d449afe8f..49c0ad675 100644 --- a/support/procedural-fork/src/pallet/expand/composite.rs +++ b/support/procedural-fork/src/pallet/expand/composite.rs @@ -20,21 +20,21 @@ use proc_macro2::TokenStream; /// Expands `composite_enum` and adds the `VariantCount` implementation for it. 
pub fn expand_composites(def: &mut Def) -> TokenStream { - let mut expand = quote::quote!(); - let frame_support = &def.frame_support; + let mut expand = quote::quote!(); + let frame_support = &def.frame_support; - for composite in &def.composites { - let name = &composite.ident; - let (impl_generics, ty_generics, where_clause) = composite.generics.split_for_impl(); - let variants_count = composite.variant_count; + for composite in &def.composites { + let name = &composite.ident; + let (impl_generics, ty_generics, where_clause) = composite.generics.split_for_impl(); + let variants_count = composite.variant_count; - // add `VariantCount` implementation for `composite_enum` - expand.extend(quote::quote_spanned!(composite.attr_span => + // add `VariantCount` implementation for `composite_enum` + expand.extend(quote::quote_spanned!(composite.attr_span => impl #impl_generics #frame_support::traits::VariantCount for #name #ty_generics #where_clause { const VARIANT_COUNT: u32 = #variants_count; } )); - } + } - expand + expand } diff --git a/support/procedural-fork/src/pallet/expand/config.rs b/support/procedural-fork/src/pallet/expand/config.rs index 5cf4035a8..836c74ae7 100644 --- a/support/procedural-fork/src/pallet/expand/config.rs +++ b/support/procedural-fork/src/pallet/expand/config.rs @@ -23,20 +23,20 @@ use syn::{parse_quote, Item}; /// /// * Generate default rust doc pub fn expand_config(def: &mut Def) -> TokenStream { - let config = &def.config; - let config_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[config.index]; - if let Item::Trait(item) = item { - item - } else { - unreachable!("Checked by config parser") - } - }; + let config = &def.config; + let config_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[config.index]; + if let Item::Trait(item) = item { + item + } else { + unreachable!("Checked by config parser") + } + }; - config_item.attrs.insert( - 0, - parse_quote!( - #[doc 
= r" + config_item.attrs.insert( + 0, + parse_quote!( + #[doc = r" Configuration trait of this pallet. The main purpose of this trait is to act as an interface between this pallet and the runtime in @@ -44,54 +44,54 @@ which it is embedded in. A type, function, or constant in this trait is essentia configured by the runtime that includes this pallet. Consequently, a runtime that wants to include this pallet must implement this trait." - ] - ), - ); + ] + ), + ); - // we only emit `DefaultConfig` if there are trait items, so an empty `DefaultConfig` is - // impossible consequently. - match &config.default_sub_trait { - Some(default_sub_trait) if default_sub_trait.items.len() > 0 => { - let trait_items = &default_sub_trait - .items - .iter() - .map(|item| { - if item.1 { - if let syn::TraitItem::Type(item) = item.0.clone() { - let mut item = item.clone(); - item.bounds.clear(); - syn::TraitItem::Type(item) - } else { - item.0.clone() - } - } else { - item.0.clone() - } - }) - .collect::>(); + // we only emit `DefaultConfig` if there are trait items, so an empty `DefaultConfig` is + // impossible consequently. + match &config.default_sub_trait { + Some(default_sub_trait) if default_sub_trait.items.len() > 0 => { + let trait_items = &default_sub_trait + .items + .iter() + .map(|item| { + if item.1 { + if let syn::TraitItem::Type(item) = item.0.clone() { + let mut item = item.clone(); + item.bounds.clear(); + syn::TraitItem::Type(item) + } else { + item.0.clone() + } + } else { + item.0.clone() + } + }) + .collect::>(); - let type_param_bounds = if default_sub_trait.has_system { - let system = &def.frame_system; - quote::quote!(: #system::DefaultConfig) - } else { - quote::quote!() - }; + let type_param_bounds = if default_sub_trait.has_system { + let system = &def.frame_system; + quote::quote!(: #system::DefaultConfig) + } else { + quote::quote!() + }; - quote!( - /// Based on [`Config`]. 
Auto-generated by - /// [`#[pallet::config(with_default)]`](`frame_support::pallet_macros::config`). - /// Can be used in tandem with - /// [`#[register_default_config]`](`frame_support::register_default_config`) and - /// [`#[derive_impl]`](`frame_support::derive_impl`) to derive test config traits - /// based on existing pallet config traits in a safe and developer-friendly way. - /// - /// See [here](`frame_support::pallet_macros::config`) for more information and caveats about - /// the auto-generated `DefaultConfig` trait and how it is generated. - pub trait DefaultConfig #type_param_bounds { - #(#trait_items)* - } - ) - }, - _ => Default::default(), - } + quote!( + /// Based on [`Config`]. Auto-generated by + /// [`#[pallet::config(with_default)]`](`frame_support::pallet_macros::config`). + /// Can be used in tandem with + /// [`#[register_default_config]`](`frame_support::register_default_config`) and + /// [`#[derive_impl]`](`frame_support::derive_impl`) to derive test config traits + /// based on existing pallet config traits in a safe and developer-friendly way. + /// + /// See [here](`frame_support::pallet_macros::config`) for more information and caveats about + /// the auto-generated `DefaultConfig` trait and how it is generated. + pub trait DefaultConfig #type_param_bounds { + #(#trait_items)* + } + ) + } + _ => Default::default(), + } } diff --git a/support/procedural-fork/src/pallet/expand/constants.rs b/support/procedural-fork/src/pallet/expand/constants.rs index 57fa8b7f3..5153ccf49 100644 --- a/support/procedural-fork/src/pallet/expand/constants.rs +++ b/support/procedural-fork/src/pallet/expand/constants.rs @@ -18,91 +18,99 @@ use crate::pallet::Def; struct ConstDef { - /// Name of the associated type. - pub ident: syn::Ident, - /// The type in Get, e.g. 
`u32` in `type Foo: Get;`, but `Self` is replaced by `T` - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, - /// default_byte implementation - pub default_byte_impl: proc_macro2::TokenStream, - /// Constant name for Metadata (optional) - pub metadata_name: Option, + /// Name of the associated type. + pub ident: syn::Ident, + /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, + /// default_byte implementation + pub default_byte_impl: proc_macro2::TokenStream, + /// Constant name for Metadata (optional) + pub metadata_name: Option, } /// /// * Impl fn module_constant_metadata for pallet. pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - let pallet_ident = &def.pallet_struct.pallet; - let trait_use_gen = &def.trait_use_generics(proc_macro2::Span::call_site()); - - let mut where_clauses = vec![&def.config.where_clause]; - where_clauses.extend(def.extra_constants.iter().map(|d| &d.where_clause)); - let completed_where_clause = super::merge_where_clauses(&where_clauses); - - let config_consts = def.config.consts_metadata.iter().map(|const_| { - let ident = &const_.ident; - let const_type = &const_.type_; - - ConstDef { - ident: const_.ident.clone(), - type_: const_.type_.clone(), - doc: const_.doc.clone(), - default_byte_impl: quote::quote!( - let value = <::#ident as - #frame_support::traits::Get<#const_type>>::get(); - #frame_support::__private::codec::Encode::encode(&value) - ), - metadata_name: None, - } - }); - - let extra_consts = def.extra_constants.iter().flat_map(|d| &d.extra_constants).map(|const_| { - let ident = &const_.ident; - - ConstDef { - ident: const_.ident.clone(), - type_: const_.type_.clone(), - doc: const_.doc.clone(), - 
default_byte_impl: quote::quote!( - let value = >::#ident(); - #frame_support::__private::codec::Encode::encode(&value) - ), - metadata_name: const_.metadata_name.clone(), - } - }); - - let consts = config_consts.chain(extra_consts).map(|const_| { - let const_type = &const_.type_; - let ident_str = format!("{}", const_.metadata_name.unwrap_or(const_.ident)); - - let no_docs = vec![]; - let doc = if cfg!(feature = "no-metadata-docs") { &no_docs } else { &const_.doc }; - - let default_byte_impl = &const_.default_byte_impl; - - quote::quote!({ - #frame_support::__private::metadata_ir::PalletConstantMetadataIR { - name: #ident_str, - ty: #frame_support::__private::scale_info::meta_type::<#const_type>(), - value: { #default_byte_impl }, - docs: #frame_support::__private::sp_std::vec![ #( #doc ),* ], - } - }) - }); - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ - - #[doc(hidden)] - pub fn pallet_constants_metadata() - -> #frame_support::__private::sp_std::vec::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> - { - #frame_support::__private::sp_std::vec![ #( #consts ),* ] - } - } - ) + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + let pallet_ident = &def.pallet_struct.pallet; + let trait_use_gen = &def.trait_use_generics(proc_macro2::Span::call_site()); + + let mut where_clauses = vec![&def.config.where_clause]; + where_clauses.extend(def.extra_constants.iter().map(|d| &d.where_clause)); + let completed_where_clause = super::merge_where_clauses(&where_clauses); + + let config_consts = def.config.consts_metadata.iter().map(|const_| { + let ident = &const_.ident; + let const_type = &const_.type_; + + ConstDef { + ident: const_.ident.clone(), + type_: const_.type_.clone(), + doc: const_.doc.clone(), + default_byte_impl: quote::quote!( + let value = 
<::#ident as + #frame_support::traits::Get<#const_type>>::get(); + #frame_support::__private::codec::Encode::encode(&value) + ), + metadata_name: None, + } + }); + + let extra_consts = def + .extra_constants + .iter() + .flat_map(|d| &d.extra_constants) + .map(|const_| { + let ident = &const_.ident; + + ConstDef { + ident: const_.ident.clone(), + type_: const_.type_.clone(), + doc: const_.doc.clone(), + default_byte_impl: quote::quote!( + let value = >::#ident(); + #frame_support::__private::codec::Encode::encode(&value) + ), + metadata_name: const_.metadata_name.clone(), + } + }); + + let consts = config_consts.chain(extra_consts).map(|const_| { + let const_type = &const_.type_; + let ident_str = format!("{}", const_.metadata_name.unwrap_or(const_.ident)); + + let no_docs = vec![]; + let doc = if cfg!(feature = "no-metadata-docs") { + &no_docs + } else { + &const_.doc + }; + + let default_byte_impl = &const_.default_byte_impl; + + quote::quote!({ + #frame_support::__private::metadata_ir::PalletConstantMetadataIR { + name: #ident_str, + ty: #frame_support::__private::scale_info::meta_type::<#const_type>(), + value: { #default_byte_impl }, + docs: #frame_support::__private::sp_std::vec![ #( #doc ),* ], + } + }) + }); + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ + + #[doc(hidden)] + pub fn pallet_constants_metadata() + -> #frame_support::__private::sp_std::vec::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> + { + #frame_support::__private::sp_std::vec![ #( #consts ),* ] + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/doc_only.rs b/support/procedural-fork/src/pallet/expand/doc_only.rs index 621a051ac..3e60e9a9b 100644 --- a/support/procedural-fork/src/pallet/expand/doc_only.rs +++ b/support/procedural-fork/src/pallet/expand/doc_only.rs @@ -20,84 +20,84 @@ use proc_macro2::Span; use crate::pallet::Def; pub fn expand_doc_only(def: &mut Def) -> proc_macro2::TokenStream { - let 
dispatchables = if let Some(call_def) = &def.call { - let type_impl_generics = def.type_impl_generics(Span::call_site()); - call_def - .methods - .iter() - .map(|method| { - let name = &method.name; - let args = &method - .args - .iter() - .map(|(_, arg_name, arg_type)| quote::quote!( #arg_name: #arg_type, )) - .collect::(); - let docs = &method.docs; + let dispatchables = if let Some(call_def) = &def.call { + let type_impl_generics = def.type_impl_generics(Span::call_site()); + call_def + .methods + .iter() + .map(|method| { + let name = &method.name; + let args = &method + .args + .iter() + .map(|(_, arg_name, arg_type)| quote::quote!( #arg_name: #arg_type, )) + .collect::(); + let docs = &method.docs; - let real = format!(" [`Pallet::{}`].", name); - quote::quote!( - #( #[doc = #docs] )* - /// - /// # Warning: Doc-Only - /// - /// This function is an automatically generated, and is doc-only, uncallable - /// stub. See the real version in - #[ doc = #real ] - pub fn #name<#type_impl_generics>(#args) { unreachable!(); } - ) - }) - .collect::() - } else { - quote::quote!() - }; + let real = format!(" [`Pallet::{}`].", name); + quote::quote!( + #( #[doc = #docs] )* + /// + /// # Warning: Doc-Only + /// + /// This function is an automatically generated, and is doc-only, uncallable + /// stub. See the real version in + #[ doc = #real ] + pub fn #name<#type_impl_generics>(#args) { unreachable!(); } + ) + }) + .collect::() + } else { + quote::quote!() + }; - let storage_types = def - .storages - .iter() - .map(|storage| { - let storage_name = &storage.ident; - let storage_type_docs = &storage.docs; - let real = format!("[`pallet::{}`].", storage_name); - quote::quote!( - #( #[doc = #storage_type_docs] )* - /// - /// # Warning: Doc-Only - /// - /// This type is automatically generated, and is doc-only. 
See the real version in - #[ doc = #real ] - pub struct #storage_name(); - ) - }) - .collect::(); + let storage_types = def + .storages + .iter() + .map(|storage| { + let storage_name = &storage.ident; + let storage_type_docs = &storage.docs; + let real = format!("[`pallet::{}`].", storage_name); + quote::quote!( + #( #[doc = #storage_type_docs] )* + /// + /// # Warning: Doc-Only + /// + /// This type is automatically generated, and is doc-only. See the real version in + #[ doc = #real ] + pub struct #storage_name(); + ) + }) + .collect::(); - quote::quote!( - /// Auto-generated docs-only module listing all (public and private) defined storage types - /// for this pallet. - /// - /// # Warning: Doc-Only - /// - /// Members of this module cannot be used directly and are only provided for documentation - /// purposes. - /// - /// To see the actual storage type, find a struct with the same name at the root of the - /// pallet, in the list of [*Type Definitions*](../index.html#types). - #[cfg(doc)] - pub mod storage_types { - use super::*; - #storage_types - } + quote::quote!( + /// Auto-generated docs-only module listing all (public and private) defined storage types + /// for this pallet. + /// + /// # Warning: Doc-Only + /// + /// Members of this module cannot be used directly and are only provided for documentation + /// purposes. + /// + /// To see the actual storage type, find a struct with the same name at the root of the + /// pallet, in the list of [*Type Definitions*](../index.html#types). + #[cfg(doc)] + pub mod storage_types { + use super::*; + #storage_types + } - /// Auto-generated docs-only module listing all defined dispatchables for this pallet. - /// - /// # Warning: Doc-Only - /// - /// Members of this module cannot be used directly and are only provided for documentation - /// purposes. To see the real version of each dispatchable, look for them in [`Pallet`] or - /// [`Call`]. 
- #[cfg(doc)] - pub mod dispatchables { - use super::*; - #dispatchables - } - ) + /// Auto-generated docs-only module listing all defined dispatchables for this pallet. + /// + /// # Warning: Doc-Only + /// + /// Members of this module cannot be used directly and are only provided for documentation + /// purposes. To see the real version of each dispatchable, look for them in [`Pallet`] or + /// [`Call`]. + #[cfg(doc)] + pub mod dispatchables { + use super::*; + #dispatchables + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/documentation.rs b/support/procedural-fork/src/pallet/expand/documentation.rs index ec19f889a..adc4f7ce9 100644 --- a/support/procedural-fork/src/pallet/expand/documentation.rs +++ b/support/procedural-fork/src/pallet/expand/documentation.rs @@ -28,12 +28,12 @@ const PALLET_DOC: &'static str = "pallet_doc"; /// Supported format: /// `#[pallet_doc(PATH)]`: The path of the file from which the documentation is loaded fn parse_pallet_doc_value(attr: &Attribute) -> syn::Result { - let lit: syn::LitStr = attr.parse_args().map_err(|_| { + let lit: syn::LitStr = attr.parse_args().map_err(|_| { let msg = "The `pallet_doc` received an unsupported argument. 
Supported format: `pallet_doc(\"PATH\")`"; syn::Error::new(attr.span(), msg) })?; - Ok(DocMetaValue::Path(lit)) + Ok(DocMetaValue::Path(lit)) } /// Get the value from the `doc` comment attribute: @@ -42,46 +42,49 @@ fn parse_pallet_doc_value(attr: &Attribute) -> syn::Result { /// - `#[doc = "A doc string"]`: Documentation as a string literal /// - `#[doc = include_str!(PATH)]`: Documentation obtained from a path fn parse_doc_value(attr: &Attribute) -> syn::Result> { - if !attr.path().is_ident(DOC) { - return Ok(None) - } - - let meta = attr.meta.require_name_value()?; - - match &meta.value { - syn::Expr::Lit(lit) => Ok(Some(DocMetaValue::Lit(lit.lit.clone()))), - syn::Expr::Macro(mac) if mac.mac.path.is_ident("include_str") => - Ok(Some(DocMetaValue::Path(mac.mac.parse_body()?))), - _ => - Err(syn::Error::new(attr.span(), "Expected `= \"docs\"` or `= include_str!(\"PATH\")`")), - } + if !attr.path().is_ident(DOC) { + return Ok(None); + } + + let meta = attr.meta.require_name_value()?; + + match &meta.value { + syn::Expr::Lit(lit) => Ok(Some(DocMetaValue::Lit(lit.lit.clone()))), + syn::Expr::Macro(mac) if mac.mac.path.is_ident("include_str") => { + Ok(Some(DocMetaValue::Path(mac.mac.parse_body()?))) + } + _ => Err(syn::Error::new( + attr.span(), + "Expected `= \"docs\"` or `= include_str!(\"PATH\")`", + )), + } } /// Supported documentation tokens. #[derive(Debug)] enum DocMetaValue { - /// Documentation with string literals. - /// - /// `#[doc = "Lit"]` - Lit(Lit), - /// Documentation with `include_str!` macro. - /// - /// The string literal represents the file `PATH`. - /// - /// `#[doc = include_str!(PATH)]` - Path(LitStr), + /// Documentation with string literals. + /// + /// `#[doc = "Lit"]` + Lit(Lit), + /// Documentation with `include_str!` macro. + /// + /// The string literal represents the file `PATH`. 
+ /// + /// `#[doc = include_str!(PATH)]` + Path(LitStr), } impl ToTokens for DocMetaValue { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - DocMetaValue::Lit(lit) => lit.to_tokens(tokens), - DocMetaValue::Path(path_lit) => { - let decl = quote::quote!(include_str!(#path_lit)); - tokens.extend(decl) - }, - } - } + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + DocMetaValue::Lit(lit) => lit.to_tokens(tokens), + DocMetaValue::Path(path_lit) => { + let decl = quote::quote!(include_str!(#path_lit)); + tokens.extend(decl) + } + } + } } /// Extract the documentation from the given pallet definition @@ -110,63 +113,63 @@ impl ToTokens for DocMetaValue { /// Unlike the `doc` attribute, the documentation provided to the `proc_macro` attribute is /// not added to the pallet. pub fn expand_documentation(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - let pallet_ident = &def.pallet_struct.pallet; - let where_clauses = &def.config.where_clause; - - // TODO: Use [drain_filter](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter) when it is stable. - - // The `pallet_doc` attributes are excluded from the generation of the pallet, - // but they are included in the runtime metadata. - let mut pallet_docs = Vec::with_capacity(def.item.attrs.len()); - let mut index = 0; - while index < def.item.attrs.len() { - let attr = &def.item.attrs[index]; - if attr.path().get_ident().map_or(false, |i| *i == PALLET_DOC) { - pallet_docs.push(def.item.attrs.remove(index)); - // Do not increment the index, we have just removed the - // element from the attributes. - continue - } - - index += 1; - } - - // Capture the `#[doc = include_str!("../README.md")]` and `#[doc = "Documentation"]`. 
- let docs = match def - .item - .attrs - .iter() - .filter_map(|v| parse_doc_value(v).transpose()) - .collect::>>() - { - Ok(r) => r, - Err(err) => return err.into_compile_error(), - }; - - // Capture the `#[pallet_doc("../README.md")]`. - let pallet_docs = match pallet_docs - .into_iter() - .map(|attr| parse_pallet_doc_value(&attr)) - .collect::>>() - { - Ok(docs) => docs, - Err(err) => return err.into_compile_error(), - }; - - let docs = docs.iter().chain(pallet_docs.iter()); - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clauses{ - - #[doc(hidden)] - pub fn pallet_documentation_metadata() - -> #frame_support::__private::sp_std::vec::Vec<&'static str> - { - #frame_support::__private::sp_std::vec![ #( #docs ),* ] - } - } - ) + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + let pallet_ident = &def.pallet_struct.pallet; + let where_clauses = &def.config.where_clause; + + // TODO: Use [drain_filter](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.drain_filter) when it is stable. + + // The `pallet_doc` attributes are excluded from the generation of the pallet, + // but they are included in the runtime metadata. + let mut pallet_docs = Vec::with_capacity(def.item.attrs.len()); + let mut index = 0; + while index < def.item.attrs.len() { + let attr = &def.item.attrs[index]; + if attr.path().get_ident().map_or(false, |i| *i == PALLET_DOC) { + pallet_docs.push(def.item.attrs.remove(index)); + // Do not increment the index, we have just removed the + // element from the attributes. + continue; + } + + index += 1; + } + + // Capture the `#[doc = include_str!("../README.md")]` and `#[doc = "Documentation"]`. 
+ let docs = match def + .item + .attrs + .iter() + .filter_map(|v| parse_doc_value(v).transpose()) + .collect::>>() + { + Ok(r) => r, + Err(err) => return err.into_compile_error(), + }; + + // Capture the `#[pallet_doc("../README.md")]`. + let pallet_docs = match pallet_docs + .into_iter() + .map(|attr| parse_pallet_doc_value(&attr)) + .collect::>>() + { + Ok(docs) => docs, + Err(err) => return err.into_compile_error(), + }; + + let docs = docs.iter().chain(pallet_docs.iter()); + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clauses{ + + #[doc(hidden)] + pub fn pallet_documentation_metadata() + -> #frame_support::__private::sp_std::vec::Vec<&'static str> + { + #frame_support::__private::sp_std::vec![ #( #docs ),* ] + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/error.rs b/support/procedural-fork/src/pallet/expand/error.rs index 72fb6e923..e2c3f680c 100644 --- a/support/procedural-fork/src/pallet/expand/error.rs +++ b/support/procedural-fork/src/pallet/expand/error.rs @@ -16,11 +16,11 @@ // limitations under the License. use crate::{ - pallet::{ - parse::error::{VariantDef, VariantField}, - Def, - }, - COUNTER, + pallet::{ + parse::error::{VariantDef, VariantField}, + Def, + }, + COUNTER, }; use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; @@ -29,49 +29,49 @@ use syn::spanned::Spanned; /// /// * impl various trait on Error pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let error_token_unique_id = - syn::Ident::new(&format!("__tt_error_token_{}", count), def.item.span()); - - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let config_where_clause = &def.config.where_clause; - - let error = if let Some(error) = &def.error { - error - } else { - return quote::quote! { - #[macro_export] - #[doc(hidden)] - macro_rules! 
#error_token_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - } - }; - } - - pub use #error_token_unique_id as tt_error_token; - } - }; - - let error_ident = &error.error; - let type_impl_gen = &def.type_impl_generics(error.attr_span); - let type_use_gen = &def.type_use_generics(error.attr_span); - - let phantom_variant: syn::Variant = syn::parse_quote!( - #[doc(hidden)] - #[codec(skip)] - __Ignore( - #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)>, - #frame_support::Never, - ) - ); - - let as_str_matches = error.variants.iter().map( + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let error_token_unique_id = + syn::Ident::new(&format!("__tt_error_token_{}", count), def.item.span()); + + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let config_where_clause = &def.config.where_clause; + + let error = if let Some(error) = &def.error { + error + } else { + return quote::quote! { + #[macro_export] + #[doc(hidden)] + macro_rules! #error_token_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! 
{ + $caller + } + }; + } + + pub use #error_token_unique_id as tt_error_token; + }; + }; + + let error_ident = &error.error; + let type_impl_gen = &def.type_impl_generics(error.attr_span); + let type_use_gen = &def.type_use_generics(error.attr_span); + + let phantom_variant: syn::Variant = syn::parse_quote!( + #[doc(hidden)] + #[codec(skip)] + __Ignore( + #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)>, + #frame_support::Never, + ) + ); + + let as_str_matches = error.variants.iter().map( |VariantDef { ident: variant, field: field_ty, docs: _, cfg_attrs }| { let variant_str = variant.to_string(); let cfg_attrs = cfg_attrs.iter().map(|attr| attr.to_token_stream()); @@ -89,103 +89,107 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { }, ); - let error_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; - if let syn::Item::Enum(item) = item { - item - } else { - unreachable!("Checked by error parser") - } - }; - - error_item.variants.insert(0, phantom_variant); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; - - // derive TypeInfo for error metadata - error_item.attrs.push(syn::parse_quote! 
{ - #[derive( - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - #frame_support::PalletError, - )] - }); - error_item.attrs.push(syn::parse_quote!( - #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] - )); - - if get_doc_literals(&error_item.attrs).is_empty() { - error_item.attrs.push(syn::parse_quote!( - #[doc = "The `Error` enum of this pallet."] - )); - } - - quote::quote_spanned!(error.attr_span => - impl<#type_impl_gen> #frame_support::__private::sp_std::fmt::Debug for #error_ident<#type_use_gen> - #config_where_clause - { - fn fmt(&self, f: &mut #frame_support::__private::sp_std::fmt::Formatter<'_>) - -> #frame_support::__private::sp_std::fmt::Result - { - f.write_str(self.as_str()) - } - } - - impl<#type_impl_gen> #error_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn as_str(&self) -> &'static str { - match &self { - Self::__Ignore(_, _) => unreachable!("`__Ignore` can never be constructed"), - #( #as_str_matches )* - } - } - } - - impl<#type_impl_gen> From<#error_ident<#type_use_gen>> for &'static str - #config_where_clause - { - fn from(err: #error_ident<#type_use_gen>) -> &'static str { - err.as_str() - } - } - - impl<#type_impl_gen> From<#error_ident<#type_use_gen>> - for #frame_support::sp_runtime::DispatchError - #config_where_clause - { - fn from(err: #error_ident<#type_use_gen>) -> Self { - use #frame_support::__private::codec::Encode; - let index = < - ::PalletInfo - as #frame_support::traits::PalletInfo - >::index::>() - .expect("Every active module has an index in the runtime; qed") as u8; - let mut encoded = err.encode(); - encoded.resize(#frame_support::MAX_MODULE_ERROR_ENCODED_SIZE, 0); - - #frame_support::sp_runtime::DispatchError::Module(#frame_support::sp_runtime::ModuleError { - index, - error: TryInto::try_into(encoded).expect("encoded error is resized to be equal to the maximum encoded error size; 
qed"), - message: Some(err.as_str()), - }) - } - } - - #[macro_export] - #[doc(hidden)] - macro_rules! #error_token_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - error = [{ #error_ident }] - } - }; - } - - pub use #error_token_unique_id as tt_error_token; - ) + let error_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; + if let syn::Item::Enum(item) = item { + item + } else { + unreachable!("Checked by error parser") + } + }; + + error_item.variants.insert(0, phantom_variant); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { + "never" + } else { + "always" + }; + + // derive TypeInfo for error metadata + error_item.attrs.push(syn::parse_quote! { + #[derive( + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + #frame_support::PalletError, + )] + }); + error_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#type_use_gen), capture_docs = #capture_docs)] + )); + + if get_doc_literals(&error_item.attrs).is_empty() { + error_item.attrs.push(syn::parse_quote!( + #[doc = "The `Error` enum of this pallet."] + )); + } + + quote::quote_spanned!(error.attr_span => + impl<#type_impl_gen> #frame_support::__private::sp_std::fmt::Debug for #error_ident<#type_use_gen> + #config_where_clause + { + fn fmt(&self, f: &mut #frame_support::__private::sp_std::fmt::Formatter<'_>) + -> #frame_support::__private::sp_std::fmt::Result + { + f.write_str(self.as_str()) + } + } + + impl<#type_impl_gen> #error_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn as_str(&self) -> &'static str { + match &self { + Self::__Ignore(_, _) => unreachable!("`__Ignore` can never be constructed"), + #( #as_str_matches )* + } + } + } + + impl<#type_impl_gen> From<#error_ident<#type_use_gen>> for &'static str + #config_where_clause + { + fn from(err: 
#error_ident<#type_use_gen>) -> &'static str { + err.as_str() + } + } + + impl<#type_impl_gen> From<#error_ident<#type_use_gen>> + for #frame_support::sp_runtime::DispatchError + #config_where_clause + { + fn from(err: #error_ident<#type_use_gen>) -> Self { + use #frame_support::__private::codec::Encode; + let index = < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::index::>() + .expect("Every active module has an index in the runtime; qed") as u8; + let mut encoded = err.encode(); + encoded.resize(#frame_support::MAX_MODULE_ERROR_ENCODED_SIZE, 0); + + #frame_support::sp_runtime::DispatchError::Module(#frame_support::sp_runtime::ModuleError { + index, + error: TryInto::try_into(encoded).expect("encoded error is resized to be equal to the maximum encoded error size; qed"), + message: Some(err.as_str()), + }) + } + } + + #[macro_export] + #[doc(hidden)] + macro_rules! #error_token_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + error = [{ #error_ident }] + } + }; + } + + pub use #error_token_unique_id as tt_error_token; + ) } diff --git a/support/procedural-fork/src/pallet/expand/event.rs b/support/procedural-fork/src/pallet/expand/event.rs index 655fc5507..931dcd95a 100644 --- a/support/procedural-fork/src/pallet/expand/event.rs +++ b/support/procedural-fork/src/pallet/expand/event.rs @@ -16,8 +16,8 @@ // limitations under the License. use crate::{ - pallet::{parse::event::PalletEventDepositAttr, Def}, - COUNTER, + pallet::{parse::event::PalletEventDepositAttr, Def}, + COUNTER, }; use frame_support_procedural_tools::get_doc_literals; use syn::{spanned::Spanned, Ident}; @@ -27,148 +27,159 @@ use syn::{spanned::Spanned, Ident}; /// * Impl various trait on Event including metadata /// * if deposit_event is defined, implement deposit_event on module. 
pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - - let (event, macro_ident) = if let Some(event) = &def.event { - let ident = Ident::new(&format!("__is_event_part_defined_{}", count), event.attr_span); - (event, ident) - } else { - let macro_ident = - Ident::new(&format!("__is_event_part_defined_{}", count), def.item.span()); - - return quote::quote! { - #[doc(hidden)] - pub mod __substrate_event_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::event] defined, perhaps you should \ - remove `Event` from construct_runtime?", - )); - } - } - - #[doc(hidden)] - pub use #macro_ident as is_event_part_defined; - } - } - }; - - let event_where_clause = &event.where_clause; - - // NOTE: actually event where clause must be a subset of config where clause because of - // `type RuntimeEvent: From>`. But we merge either way for potential better error - // message - let completed_where_clause = - super::merge_where_clauses(&[&event.where_clause, &def.config.where_clause]); - - let event_ident = &event.event; - let frame_system = &def.frame_system; - let frame_support = &def.frame_support; - let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); - let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); - - let event_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; - if let syn::Item::Enum(item) = item { - item - } else { - unreachable!("Checked by event parser") - } - }; - - // Phantom data is added for generic event. - if event.gen_kind.is_generic() { - let variant = syn::parse_quote!( - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData<(#event_use_gen)>, - #frame_support::Never, - ) - ); - - // Push ignore variant at the end. 
- event_item.variants.push(variant); - } - - if get_doc_literals(&event_item.attrs).is_empty() { - event_item - .attrs - .push(syn::parse_quote!(#[doc = "The `Event` enum of this pallet"])); - } - - // derive some traits because system event require Clone, FullCodec, Eq, PartialEq and Debug - event_item.attrs.push(syn::parse_quote!( - #[derive( - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::RuntimeDebugNoBound, - #frame_support::__private::codec::Encode, - #frame_support::__private::codec::Decode, - #frame_support::__private::scale_info::TypeInfo, - )] - )); - - let capture_docs = if cfg!(feature = "no-metadata-docs") { "never" } else { "always" }; - - // skip requirement for type params to implement `TypeInfo`, and set docs capture - event_item.attrs.push(syn::parse_quote!( - #[scale_info(skip_type_params(#event_use_gen), capture_docs = #capture_docs)] - )); - - let deposit_event = if let Some(deposit_event) = &event.deposit_event { - let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); - let trait_use_gen = &def.trait_use_generics(event.attr_span); - let type_impl_gen = &def.type_impl_generics(event.attr_span); - let type_use_gen = &def.type_use_generics(event.attr_span); - let pallet_ident = &def.pallet_struct.pallet; - - let PalletEventDepositAttr { fn_vis, fn_span, .. } = deposit_event; - - quote::quote_spanned!(*fn_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #fn_vis fn deposit_event(event: Event<#event_use_gen>) { - let event = < - ::RuntimeEvent as - From> - >::from(event); - - let event = < - ::RuntimeEvent as - Into<::RuntimeEvent> - >::into(event); - - <#frame_system::Pallet>::deposit_event(event) - } - } - ) - } else { - Default::default() - }; - - quote::quote_spanned!(event.attr_span => - #[doc(hidden)] - pub mod __substrate_event_check { - #[macro_export] - #[doc(hidden)] - macro_rules! 
#macro_ident { - ($pallet_name:ident) => {}; - } - - #[doc(hidden)] - pub use #macro_ident as is_event_part_defined; - } - - #deposit_event - - impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { - fn from(_: #event_ident<#event_use_gen>) {} - } - ) + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let (event, macro_ident) = if let Some(event) = &def.event { + let ident = Ident::new( + &format!("__is_event_part_defined_{}", count), + event.attr_span, + ); + (event, ident) + } else { + let macro_ident = Ident::new( + &format!("__is_event_part_defined_{}", count), + def.item.span(), + ); + + return quote::quote! { + #[doc(hidden)] + pub mod __substrate_event_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::event] defined, perhaps you should \ + remove `Event` from construct_runtime?", + )); + } + } + + #[doc(hidden)] + pub use #macro_ident as is_event_part_defined; + } + }; + }; + + let event_where_clause = &event.where_clause; + + // NOTE: actually event where clause must be a subset of config where clause because of + // `type RuntimeEvent: From>`. But we merge either way for potential better error + // message + let completed_where_clause = + super::merge_where_clauses(&[&event.where_clause, &def.config.where_clause]); + + let event_ident = &event.event; + let frame_system = &def.frame_system; + let frame_support = &def.frame_support; + let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); + let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); + + let event_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; + if let syn::Item::Enum(item) = item { + item + } else { + unreachable!("Checked by event parser") + } + }; + + // Phantom data is added for generic event. 
+ if event.gen_kind.is_generic() { + let variant = syn::parse_quote!( + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData<(#event_use_gen)>, + #frame_support::Never, + ) + ); + + // Push ignore variant at the end. + event_item.variants.push(variant); + } + + if get_doc_literals(&event_item.attrs).is_empty() { + event_item + .attrs + .push(syn::parse_quote!(#[doc = "The `Event` enum of this pallet"])); + } + + // derive some traits because system event require Clone, FullCodec, Eq, PartialEq and Debug + event_item.attrs.push(syn::parse_quote!( + #[derive( + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::RuntimeDebugNoBound, + #frame_support::__private::codec::Encode, + #frame_support::__private::codec::Decode, + #frame_support::__private::scale_info::TypeInfo, + )] + )); + + let capture_docs = if cfg!(feature = "no-metadata-docs") { + "never" + } else { + "always" + }; + + // skip requirement for type params to implement `TypeInfo`, and set docs capture + event_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#event_use_gen), capture_docs = #capture_docs)] + )); + + let deposit_event = if let Some(deposit_event) = &event.deposit_event { + let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); + let trait_use_gen = &def.trait_use_generics(event.attr_span); + let type_impl_gen = &def.type_impl_generics(event.attr_span); + let type_use_gen = &def.type_use_generics(event.attr_span); + let pallet_ident = &def.pallet_struct.pallet; + + let PalletEventDepositAttr { + fn_vis, fn_span, .. 
+ } = deposit_event; + + quote::quote_spanned!(*fn_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #fn_vis fn deposit_event(event: Event<#event_use_gen>) { + let event = < + ::RuntimeEvent as + From> + >::from(event); + + let event = < + ::RuntimeEvent as + Into<::RuntimeEvent> + >::into(event); + + <#frame_system::Pallet>::deposit_event(event) + } + } + ) + } else { + Default::default() + }; + + quote::quote_spanned!(event.attr_span => + #[doc(hidden)] + pub mod __substrate_event_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => {}; + } + + #[doc(hidden)] + pub use #macro_ident as is_event_part_defined; + } + + #deposit_event + + impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { + fn from(_: #event_ident<#event_use_gen>) {} + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/genesis_build.rs b/support/procedural-fork/src/pallet/expand/genesis_build.rs index 248e83469..c6089550d 100644 --- a/support/procedural-fork/src/pallet/expand/genesis_build.rs +++ b/support/procedural-fork/src/pallet/expand/genesis_build.rs @@ -20,30 +20,34 @@ use crate::pallet::Def; /// /// * implement the trait `sp_runtime::BuildStorage` pub fn expand_genesis_build(def: &mut Def) -> proc_macro2::TokenStream { - let genesis_config = if let Some(genesis_config) = &def.genesis_config { - genesis_config - } else { - return Default::default() - }; - let genesis_build = def.genesis_build.as_ref().expect("Checked by def parser"); + let genesis_config = if let Some(genesis_config) = &def.genesis_config { + genesis_config + } else { + return Default::default(); + }; + let genesis_build = def.genesis_build.as_ref().expect("Checked by def parser"); - let frame_support = &def.frame_support; - let type_impl_gen = &genesis_config.gen_kind.type_impl_gen(genesis_build.attr_span); - let gen_cfg_ident = &genesis_config.genesis_config; - let gen_cfg_use_gen = 
&genesis_config.gen_kind.type_use_gen(genesis_build.attr_span); + let frame_support = &def.frame_support; + let type_impl_gen = &genesis_config + .gen_kind + .type_impl_gen(genesis_build.attr_span); + let gen_cfg_ident = &genesis_config.genesis_config; + let gen_cfg_use_gen = &genesis_config + .gen_kind + .type_use_gen(genesis_build.attr_span); - let where_clause = &genesis_build.where_clause; + let where_clause = &genesis_build.where_clause; - quote::quote_spanned!(genesis_build.attr_span => - #[cfg(feature = "std")] - impl<#type_impl_gen> #frame_support::sp_runtime::BuildStorage for #gen_cfg_ident<#gen_cfg_use_gen> #where_clause - { - fn assimilate_storage(&self, storage: &mut #frame_support::sp_runtime::Storage) -> std::result::Result<(), std::string::String> { - #frame_support::__private::BasicExternalities::execute_with_storage(storage, || { - self.build(); - Ok(()) - }) - } - } - ) + quote::quote_spanned!(genesis_build.attr_span => + #[cfg(feature = "std")] + impl<#type_impl_gen> #frame_support::sp_runtime::BuildStorage for #gen_cfg_ident<#gen_cfg_use_gen> #where_clause + { + fn assimilate_storage(&self, storage: &mut #frame_support::sp_runtime::Storage) -> std::result::Result<(), std::string::String> { + #frame_support::__private::BasicExternalities::execute_with_storage(storage, || { + self.build(); + Ok(()) + }) + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/genesis_config.rs b/support/procedural-fork/src/pallet/expand/genesis_config.rs index 31d519ef2..e171e2468 100644 --- a/support/procedural-fork/src/pallet/expand/genesis_config.rs +++ b/support/procedural-fork/src/pallet/expand/genesis_config.rs @@ -23,125 +23,130 @@ use syn::{spanned::Spanned, Ident}; /// /// * add various derive trait on GenesisConfig struct. 
pub fn expand_genesis_config(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - - let (genesis_config, def_macro_ident, std_macro_ident) = - if let Some(genesis_config) = &def.genesis_config { - let def_macro_ident = Ident::new( - &format!("__is_genesis_config_defined_{}", count), - genesis_config.genesis_config.span(), - ); - - let std_macro_ident = Ident::new( - &format!("__is_std_macro_defined_for_genesis_{}", count), - genesis_config.genesis_config.span(), - ); - - (genesis_config, def_macro_ident, std_macro_ident) - } else { - let def_macro_ident = - Ident::new(&format!("__is_genesis_config_defined_{}", count), def.item.span()); - - let std_macro_ident = - Ident::new(&format!("__is_std_enabled_for_genesis_{}", count), def.item.span()); - - return quote::quote! { - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #def_macro_ident { - ($pallet_name:ident) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::genesis_config] defined, perhaps you should \ - remove `Config` from construct_runtime?", - )); - } - } - - #[macro_export] - #[doc(hidden)] - macro_rules! #std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => {}; - } - - #[doc(hidden)] - pub use #def_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #std_macro_ident as is_std_enabled_for_genesis; - } - } - }; - - let frame_support = &def.frame_support; - - let genesis_config_item = - &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_config.index]; - - let serde_crate = format!("{}::__private::serde", frame_support.to_token_stream()); - - match genesis_config_item { - syn::Item::Enum(syn::ItemEnum { attrs, .. }) | - syn::Item::Struct(syn::ItemStruct { attrs, .. }) | - syn::Item::Type(syn::ItemType { attrs, .. 
}) => { - if get_doc_literals(attrs).is_empty() { - attrs.push(syn::parse_quote!( - #[doc = r" + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + + let (genesis_config, def_macro_ident, std_macro_ident) = if let Some(genesis_config) = + &def.genesis_config + { + let def_macro_ident = Ident::new( + &format!("__is_genesis_config_defined_{}", count), + genesis_config.genesis_config.span(), + ); + + let std_macro_ident = Ident::new( + &format!("__is_std_macro_defined_for_genesis_{}", count), + genesis_config.genesis_config.span(), + ); + + (genesis_config, def_macro_ident, std_macro_ident) + } else { + let def_macro_ident = Ident::new( + &format!("__is_genesis_config_defined_{}", count), + def.item.span(), + ); + + let std_macro_ident = Ident::new( + &format!("__is_std_enabled_for_genesis_{}", count), + def.item.span(), + ); + + return quote::quote! { + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #def_macro_ident { + ($pallet_name:ident) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::genesis_config] defined, perhaps you should \ + remove `Config` from construct_runtime?", + )); + } + } + + #[macro_export] + #[doc(hidden)] + macro_rules! #std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => {}; + } + + #[doc(hidden)] + pub use #def_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #std_macro_ident as is_std_enabled_for_genesis; + } + }; + }; + + let frame_support = &def.frame_support; + + let genesis_config_item = + &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_config.index]; + + let serde_crate = format!("{}::__private::serde", frame_support.to_token_stream()); + + match genesis_config_item { + syn::Item::Enum(syn::ItemEnum { attrs, .. }) + | syn::Item::Struct(syn::ItemStruct { attrs, .. }) + | syn::Item::Type(syn::ItemType { attrs, .. 
}) => { + if get_doc_literals(attrs).is_empty() { + attrs.push(syn::parse_quote!( + #[doc = r" Can be used to configure the [genesis state](https://docs.substrate.io/build/genesis-configuration/) of this pallet. "] - )); - } - attrs.push(syn::parse_quote!( - #[derive(#frame_support::Serialize, #frame_support::Deserialize)] - )); - attrs.push(syn::parse_quote!( #[serde(rename_all = "camelCase")] )); - attrs.push(syn::parse_quote!( #[serde(deny_unknown_fields)] )); - attrs.push(syn::parse_quote!( #[serde(bound(serialize = ""))] )); - attrs.push(syn::parse_quote!( #[serde(bound(deserialize = ""))] )); - attrs.push(syn::parse_quote!( #[serde(crate = #serde_crate)] )); - }, - _ => unreachable!("Checked by genesis_config parser"), - } - - quote::quote! { - #[doc(hidden)] - pub mod __substrate_genesis_config_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #def_macro_ident { - ($pallet_name:ident) => {}; - } - - #[cfg(not(feature = "std"))] - #[macro_export] - #[doc(hidden)] - macro_rules! #std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have the std feature enabled, this will cause the `", - $pallet_path, - "::GenesisConfig` type to not implement serde traits." - )); - }; - } - - #[cfg(feature = "std")] - #[macro_export] - #[doc(hidden)] - macro_rules! 
#std_macro_ident { - ($pallet_name:ident, $pallet_path:expr) => {}; - } - - #[doc(hidden)] - pub use #def_macro_ident as is_genesis_config_defined; - #[doc(hidden)] - pub use #std_macro_ident as is_std_enabled_for_genesis; - } - } + )); + } + attrs.push(syn::parse_quote!( + #[derive(#frame_support::Serialize, #frame_support::Deserialize)] + )); + attrs.push(syn::parse_quote!( #[serde(rename_all = "camelCase")] )); + attrs.push(syn::parse_quote!( #[serde(deny_unknown_fields)] )); + attrs.push(syn::parse_quote!( #[serde(bound(serialize = ""))] )); + attrs.push(syn::parse_quote!( #[serde(bound(deserialize = ""))] )); + attrs.push(syn::parse_quote!( #[serde(crate = #serde_crate)] )); + } + _ => unreachable!("Checked by genesis_config parser"), + } + + quote::quote! { + #[doc(hidden)] + pub mod __substrate_genesis_config_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #def_macro_ident { + ($pallet_name:ident) => {}; + } + + #[cfg(not(feature = "std"))] + #[macro_export] + #[doc(hidden)] + macro_rules! #std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have the std feature enabled, this will cause the `", + $pallet_path, + "::GenesisConfig` type to not implement serde traits." + )); + }; + } + + #[cfg(feature = "std")] + #[macro_export] + #[doc(hidden)] + macro_rules! 
#std_macro_ident { + ($pallet_name:ident, $pallet_path:expr) => {}; + } + + #[doc(hidden)] + pub use #def_macro_ident as is_genesis_config_defined; + #[doc(hidden)] + pub use #std_macro_ident as is_std_enabled_for_genesis; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/hooks.rs b/support/procedural-fork/src/pallet/expand/hooks.rs index 3623b5952..6967f4c08 100644 --- a/support/procedural-fork/src/pallet/expand/hooks.rs +++ b/support/procedural-fork/src/pallet/expand/hooks.rs @@ -19,322 +19,326 @@ use crate::pallet::Def; /// * implement the individual traits using the Hooks trait pub fn expand_hooks(def: &mut Def) -> proc_macro2::TokenStream { - let (where_clause, span, has_runtime_upgrade) = match def.hooks.as_ref() { - Some(hooks) => { - let where_clause = hooks.where_clause.clone(); - let span = hooks.attr_span; - let has_runtime_upgrade = hooks.has_runtime_upgrade; - (where_clause, span, has_runtime_upgrade) - }, - None => (def.config.where_clause.clone(), def.pallet_struct.attr_span, false), - }; + let (where_clause, span, has_runtime_upgrade) = match def.hooks.as_ref() { + Some(hooks) => { + let where_clause = hooks.where_clause.clone(); + let span = hooks.attr_span; + let has_runtime_upgrade = hooks.has_runtime_upgrade; + (where_clause, span, has_runtime_upgrade) + } + None => ( + def.config.where_clause.clone(), + def.pallet_struct.attr_span, + false, + ), + }; - let frame_support = &def.frame_support; - let type_impl_gen = &def.type_impl_generics(span); - let type_use_gen = &def.type_use_generics(span); - let pallet_ident = &def.pallet_struct.pallet; - let frame_system = &def.frame_system; - let pallet_name = quote::quote! 
{ - < - ::PalletInfo - as - #frame_support::traits::PalletInfo - >::name::().unwrap_or("") - }; + let frame_support = &def.frame_support; + let type_impl_gen = &def.type_impl_generics(span); + let type_use_gen = &def.type_use_generics(span); + let pallet_ident = &def.pallet_struct.pallet; + let frame_system = &def.frame_system; + let pallet_name = quote::quote! { + < + ::PalletInfo + as + #frame_support::traits::PalletInfo + >::name::().unwrap_or("") + }; - let initialize_on_chain_storage_version = if let Some(in_code_version) = - &def.pallet_struct.storage_version - { - quote::quote! { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🐥 New pallet {:?} detected in the runtime. Initializing the on-chain storage version to match the storage version defined in the pallet: {:?}", - #pallet_name, - #in_code_version - ); - #in_code_version.put::(); - } - } else { - quote::quote! { - let default_version = #frame_support::traits::StorageVersion::new(0); - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🐥 New pallet {:?} detected in the runtime. The pallet has no defined storage version, so the on-chain version is being initialized to {:?}.", - #pallet_name, - default_version - ); - default_version.put::(); - } - }; + let initialize_on_chain_storage_version = if let Some(in_code_version) = + &def.pallet_struct.storage_version + { + quote::quote! { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🐥 New pallet {:?} detected in the runtime. Initializing the on-chain storage version to match the storage version defined in the pallet: {:?}", + #pallet_name, + #in_code_version + ); + #in_code_version.put::(); + } + } else { + quote::quote! { + let default_version = #frame_support::traits::StorageVersion::new(0); + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🐥 New pallet {:?} detected in the runtime. 
The pallet has no defined storage version, so the on-chain version is being initialized to {:?}.", + #pallet_name, + default_version + ); + default_version.put::(); + } + }; - let log_runtime_upgrade = if has_runtime_upgrade { - // a migration is defined here. - quote::quote! { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "⚠️ {} declares internal migrations (which *might* execute). \ - On-chain `{:?}` vs in-code storage version `{:?}`", - #pallet_name, - ::on_chain_storage_version(), - ::in_code_storage_version(), - ); - } - } else { - // default. - quote::quote! { - #frame_support::__private::log::debug!( - target: #frame_support::LOG_TARGET, - "✅ no migration for {}", - #pallet_name, - ); - } - }; + let log_runtime_upgrade = if has_runtime_upgrade { + // a migration is defined here. + quote::quote! { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "⚠️ {} declares internal migrations (which *might* execute). \ + On-chain `{:?}` vs in-code storage version `{:?}`", + #pallet_name, + ::on_chain_storage_version(), + ::in_code_storage_version(), + ); + } + } else { + // default. + quote::quote! { + #frame_support::__private::log::debug!( + target: #frame_support::LOG_TARGET, + "✅ no migration for {}", + #pallet_name, + ); + } + }; - let hooks_impl = if def.hooks.is_none() { - let frame_system = &def.frame_system; - quote::quote! { - impl<#type_impl_gen> - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause {} - } - } else { - proc_macro2::TokenStream::new() - }; + let hooks_impl = if def.hooks.is_none() { + let frame_system = &def.frame_system; + quote::quote! 
{ + impl<#type_impl_gen> + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause {} + } + } else { + proc_macro2::TokenStream::new() + }; - // If a storage version is set, we should ensure that the storage version on chain matches the - // in-code storage version. This assumes that `Executive` is running custom migrations before - // the pallets are called. - let post_storage_version_check = if def.pallet_struct.storage_version.is_some() { - quote::quote! { - let on_chain_version = ::on_chain_storage_version(); - let in_code_version = ::in_code_storage_version(); + // If a storage version is set, we should ensure that the storage version on chain matches the + // in-code storage version. This assumes that `Executive` is running custom migrations before + // the pallets are called. + let post_storage_version_check = if def.pallet_struct.storage_version.is_some() { + quote::quote! { + let on_chain_version = ::on_chain_storage_version(); + let in_code_version = ::in_code_storage_version(); - if on_chain_version != in_code_version { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "{}: On chain storage version {:?} doesn't match in-code storage version {:?}.", - #pallet_name, - on_chain_version, - in_code_version, - ); + if on_chain_version != in_code_version { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "{}: On chain storage version {:?} doesn't match in-code storage version {:?}.", + #pallet_name, + on_chain_version, + in_code_version, + ); - return Err("On chain and in-code storage version do not match. Missing runtime upgrade?".into()); - } - } - } else { - quote::quote! { - let on_chain_version = ::on_chain_storage_version(); + return Err("On chain and in-code storage version do not match. Missing runtime upgrade?".into()); + } + } + } else { + quote::quote! 
{ + let on_chain_version = ::on_chain_storage_version(); - if on_chain_version != #frame_support::traits::StorageVersion::new(0) { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "{}: On chain storage version {:?} is set to non zero, \ - while the pallet is missing the `#[pallet::storage_version(VERSION)]` attribute.", - #pallet_name, - on_chain_version, - ); + if on_chain_version != #frame_support::traits::StorageVersion::new(0) { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "{}: On chain storage version {:?} is set to non zero, \ + while the pallet is missing the `#[pallet::storage_version(VERSION)]` attribute.", + #pallet_name, + on_chain_version, + ); - return Err("On chain storage version set, while the pallet doesn't \ - have the `#[pallet::storage_version(VERSION)]` attribute.".into()); - } - } - }; + return Err("On chain storage version set, while the pallet doesn't \ + have the `#[pallet::storage_version(VERSION)]` attribute.".into()); + } + } + }; - quote::quote_spanned!(span => - #hooks_impl + quote::quote_spanned!(span => + #hooks_impl - impl<#type_impl_gen> - #frame_support::traits::OnFinalize<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_finalize(n: #frame_system::pallet_prelude::BlockNumberFor::) { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_finalize") - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_finalize(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnFinalize<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_finalize(n: #frame_system::pallet_prelude::BlockNumberFor::) { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_finalize") + ); + < + Self as 
#frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_finalize(n) + } + } - impl<#type_impl_gen> - #frame_support::traits::OnIdle<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_idle( - n: #frame_system::pallet_prelude::BlockNumberFor::, - remaining_weight: #frame_support::weights::Weight - ) -> #frame_support::weights::Weight { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_idle(n, remaining_weight) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnIdle<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_idle( + n: #frame_system::pallet_prelude::BlockNumberFor::, + remaining_weight: #frame_support::weights::Weight + ) -> #frame_support::weights::Weight { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_idle(n, remaining_weight) + } + } - impl<#type_impl_gen> - #frame_support::traits::OnPoll<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_poll( - n: #frame_system::pallet_prelude::BlockNumberFor::, - weight: &mut #frame_support::weights::WeightMeter - ) { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_poll(n, weight); - } - } + impl<#type_impl_gen> + #frame_support::traits::OnPoll<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_poll( + n: #frame_system::pallet_prelude::BlockNumberFor::, + weight: &mut #frame_support::weights::WeightMeter + ) { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_poll(n, weight); + } + } - impl<#type_impl_gen> - #frame_support::traits::OnInitialize<#frame_system::pallet_prelude::BlockNumberFor::> - for 
#pallet_ident<#type_use_gen> #where_clause - { - fn on_initialize( - n: #frame_system::pallet_prelude::BlockNumberFor:: - ) -> #frame_support::weights::Weight { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_initialize") - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_initialize(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OnInitialize<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_initialize( + n: #frame_system::pallet_prelude::BlockNumberFor:: + ) -> #frame_support::weights::Weight { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_initialize") + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_initialize(n) + } + } - impl<#type_impl_gen> - #frame_support::traits::BeforeAllRuntimeMigrations - for #pallet_ident<#type_use_gen> #where_clause - { - fn before_all_runtime_migrations() -> #frame_support::weights::Weight { - use #frame_support::traits::{Get, PalletInfoAccess}; - use #frame_support::__private::hashing::twox_128; - use #frame_support::storage::unhashed::contains_prefixed_key; - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("before_all") - ); + impl<#type_impl_gen> + #frame_support::traits::BeforeAllRuntimeMigrations + for #pallet_ident<#type_use_gen> #where_clause + { + fn before_all_runtime_migrations() -> #frame_support::weights::Weight { + use #frame_support::traits::{Get, PalletInfoAccess}; + use #frame_support::__private::hashing::twox_128; + use #frame_support::storage::unhashed::contains_prefixed_key; + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("before_all") + ); - // Check if the pallet has any keys set, 
including the storage version. If there are - // no keys set, the pallet was just added to the runtime and needs to have its - // version initialized. - let pallet_hashed_prefix = ::name_hash(); - let exists = contains_prefixed_key(&pallet_hashed_prefix); - if !exists { - #initialize_on_chain_storage_version - ::DbWeight::get().reads_writes(1, 1) - } else { - ::DbWeight::get().reads(1) - } - } - } + // Check if the pallet has any keys set, including the storage version. If there are + // no keys set, the pallet was just added to the runtime and needs to have its + // version initialized. + let pallet_hashed_prefix = ::name_hash(); + let exists = contains_prefixed_key(&pallet_hashed_prefix); + if !exists { + #initialize_on_chain_storage_version + ::DbWeight::get().reads_writes(1, 1) + } else { + ::DbWeight::get().reads(1) + } + } + } - impl<#type_impl_gen> - #frame_support::traits::OnRuntimeUpgrade - for #pallet_ident<#type_use_gen> #where_clause - { - fn on_runtime_upgrade() -> #frame_support::weights::Weight { - #frame_support::__private::sp_tracing::enter_span!( - #frame_support::__private::sp_tracing::trace_span!("on_runtime_update") - ); + impl<#type_impl_gen> + #frame_support::traits::OnRuntimeUpgrade + for #pallet_ident<#type_use_gen> #where_clause + { + fn on_runtime_upgrade() -> #frame_support::weights::Weight { + #frame_support::__private::sp_tracing::enter_span!( + #frame_support::__private::sp_tracing::trace_span!("on_runtime_update") + ); - // log info about the upgrade. - #log_runtime_upgrade + // log info about the upgrade. 
+ #log_runtime_upgrade - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::on_runtime_upgrade() - } + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::on_runtime_upgrade() + } - #[cfg(feature = "try-runtime")] - fn pre_upgrade() -> Result<#frame_support::__private::sp_std::vec::Vec, #frame_support::sp_runtime::TryRuntimeError> { - < - Self - as - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - >::pre_upgrade() - } + #[cfg(feature = "try-runtime")] + fn pre_upgrade() -> Result<#frame_support::__private::sp_std::vec::Vec, #frame_support::sp_runtime::TryRuntimeError> { + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::pre_upgrade() + } - #[cfg(feature = "try-runtime")] - fn post_upgrade(state: #frame_support::__private::sp_std::vec::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { - #post_storage_version_check + #[cfg(feature = "try-runtime")] + fn post_upgrade(state: #frame_support::__private::sp_std::vec::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #post_storage_version_check - < - Self - as - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - >::post_upgrade(state) - } - } + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::post_upgrade(state) + } + } - impl<#type_impl_gen> - #frame_support::traits::OffchainWorker<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn offchain_worker(n: #frame_system::pallet_prelude::BlockNumberFor::) { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::offchain_worker(n) - } - } + impl<#type_impl_gen> + #frame_support::traits::OffchainWorker<#frame_system::pallet_prelude::BlockNumberFor::> + for 
#pallet_ident<#type_use_gen> #where_clause + { + fn offchain_worker(n: #frame_system::pallet_prelude::BlockNumberFor::) { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::offchain_worker(n) + } + } - // Integrity tests are only required for when `std` is enabled. - #frame_support::std_enabled! { - impl<#type_impl_gen> - #frame_support::traits::IntegrityTest - for #pallet_ident<#type_use_gen> #where_clause - { - fn integrity_test() { - #frame_support::__private::sp_io::TestExternalities::default().execute_with(|| { - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::integrity_test() - }); - } - } - } + // Integrity tests are only required for when `std` is enabled. + #frame_support::std_enabled! { + impl<#type_impl_gen> + #frame_support::traits::IntegrityTest + for #pallet_ident<#type_use_gen> #where_clause + { + fn integrity_test() { + #frame_support::__private::sp_io::TestExternalities::default().execute_with(|| { + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::integrity_test() + }); + } + } + } - #[cfg(feature = "try-runtime")] - impl<#type_impl_gen> - #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn try_state( - n: #frame_system::pallet_prelude::BlockNumberFor::, - _s: #frame_support::traits::TryStateSelect - ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🩺 Running {:?} try-state checks", - #pallet_name, - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::try_state(n).map_err(|err| { - #frame_support::__private::log::error!( - target: #frame_support::LOG_TARGET, - "❌ {:?} try_state checks failed: {:?}", - #pallet_name, - err - ); + #[cfg(feature = 
"try-runtime")] + impl<#type_impl_gen> + #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn try_state( + n: #frame_system::pallet_prelude::BlockNumberFor::, + _s: #frame_support::traits::TryStateSelect + ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #frame_support::__private::log::info!( + target: #frame_support::LOG_TARGET, + "🩺 Running {:?} try-state checks", + #pallet_name, + ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::try_state(n).map_err(|err| { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "❌ {:?} try_state checks failed: {:?}", + #pallet_name, + err + ); - err - }) - } - } - ) + err + }) + } + } + ) } diff --git a/support/procedural-fork/src/pallet/expand/inherent.rs b/support/procedural-fork/src/pallet/expand/inherent.rs index 182d79f5b..0a80d672a 100644 --- a/support/procedural-fork/src/pallet/expand/inherent.rs +++ b/support/procedural-fork/src/pallet/expand/inherent.rs @@ -21,35 +21,38 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_inherents(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = Ident::new(&format!("__is_inherent_part_defined_{}", count), def.item.span()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new( + &format!("__is_inherent_part_defined_{}", count), + def.item.span(), + ); - let maybe_compile_error = if def.inherent.is_none() { - quote! { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::inherent] defined, perhaps you should \ - remove `Inherent` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.inherent.is_none() { + quote! 
{ + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::inherent] defined, perhaps you should \ + remove `Inherent` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_inherent_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_inherent_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_inherent_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_inherent_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/instances.rs b/support/procedural-fork/src/pallet/expand/instances.rs index b6dfa7e6d..12423409c 100644 --- a/support/procedural-fork/src/pallet/expand/instances.rs +++ b/support/procedural-fork/src/pallet/expand/instances.rs @@ -22,22 +22,22 @@ use proc_macro2::Span; /// * Provide inherent instance to be used by construct_runtime /// * Provide Instance1 ..= Instance16 for instantiable pallet pub fn expand_instances(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let inherent_ident = syn::Ident::new(crate::INHERENT_INSTANCE_NAME, Span::call_site()); - let instances = if def.config.has_instance { - (1..=NUMBER_OF_INSTANCE) - .map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())) - .collect() - } else { - vec![] - }; + let frame_support = &def.frame_support; + let inherent_ident = syn::Ident::new(crate::INHERENT_INSTANCE_NAME, Span::call_site()); + let instances = if def.config.has_instance { + (1..=NUMBER_OF_INSTANCE) + .map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())) + .collect() + } else { + vec![] + }; - quote::quote!( - /// Hidden instance generated to be internally used when 
module is used without - /// instance. - #[doc(hidden)] - pub type #inherent_ident = (); + quote::quote!( + /// Hidden instance generated to be internally used when module is used without + /// instance. + #[doc(hidden)] + pub type #inherent_ident = (); - #( pub use #frame_support::instances::#instances; )* - ) + #( pub use #frame_support::instances::#instances; )* + ) } diff --git a/support/procedural-fork/src/pallet/expand/mod.rs b/support/procedural-fork/src/pallet/expand/mod.rs index 067839c28..ff4423f85 100644 --- a/support/procedural-fork/src/pallet/expand/mod.rs +++ b/support/procedural-fork/src/pallet/expand/mod.rs @@ -42,12 +42,12 @@ use quote::ToTokens; /// Merge where clause together, `where` token span is taken from the first not none one. pub fn merge_where_clauses(clauses: &[&Option]) -> Option { - let mut clauses = clauses.iter().filter_map(|f| f.as_ref()); - let mut res = clauses.next()?.clone(); - for other in clauses { - res.predicates.extend(other.predicates.iter().cloned()) - } - Some(res) + let mut clauses = clauses.iter().filter_map(|f| f.as_ref()); + let mut res = clauses.next()?.clone(); + for other in clauses { + res.predicates.extend(other.predicates.iter().cloned()) + } + Some(res) } /// Expand definition, in particular: @@ -55,32 +55,32 @@ pub fn merge_where_clauses(clauses: &[&Option]) -> Option proc_macro2::TokenStream { - // Remove the `pallet_doc` attribute first. 
- let metadata_docs = documentation::expand_documentation(&mut def); - let constants = constants::expand_constants(&mut def); - let pallet_struct = pallet_struct::expand_pallet_struct(&mut def); - let config = config::expand_config(&mut def); - let call = call::expand_call(&mut def); - let tasks = tasks::expand_tasks(&mut def); - let error = error::expand_error(&mut def); - let event = event::expand_event(&mut def); - let storages = storage::expand_storages(&mut def); - let inherents = inherent::expand_inherents(&mut def); - let instances = instances::expand_instances(&mut def); - let hooks = hooks::expand_hooks(&mut def); - let genesis_build = genesis_build::expand_genesis_build(&mut def); - let genesis_config = genesis_config::expand_genesis_config(&mut def); - let type_values = type_value::expand_type_values(&mut def); - let origins = origin::expand_origins(&mut def); - let validate_unsigned = validate_unsigned::expand_validate_unsigned(&mut def); - let tt_default_parts = tt_default_parts::expand_tt_default_parts(&mut def); - let doc_only = doc_only::expand_doc_only(&mut def); - let composites = composite::expand_composites(&mut def); + // Remove the `pallet_doc` attribute first. 
+ let metadata_docs = documentation::expand_documentation(&mut def); + let constants = constants::expand_constants(&mut def); + let pallet_struct = pallet_struct::expand_pallet_struct(&mut def); + let config = config::expand_config(&mut def); + let call = call::expand_call(&mut def); + let tasks = tasks::expand_tasks(&mut def); + let error = error::expand_error(&mut def); + let event = event::expand_event(&mut def); + let storages = storage::expand_storages(&mut def); + let inherents = inherent::expand_inherents(&mut def); + let instances = instances::expand_instances(&mut def); + let hooks = hooks::expand_hooks(&mut def); + let genesis_build = genesis_build::expand_genesis_build(&mut def); + let genesis_config = genesis_config::expand_genesis_config(&mut def); + let type_values = type_value::expand_type_values(&mut def); + let origins = origin::expand_origins(&mut def); + let validate_unsigned = validate_unsigned::expand_validate_unsigned(&mut def); + let tt_default_parts = tt_default_parts::expand_tt_default_parts(&mut def); + let doc_only = doc_only::expand_doc_only(&mut def); + let composites = composite::expand_composites(&mut def); - def.item.attrs.insert( - 0, - syn::parse_quote!( - #[doc = r"The `pallet` module in each FRAME pallet hosts the most important items needed + def.item.attrs.insert( + 0, + syn::parse_quote!( + #[doc = r"The `pallet` module in each FRAME pallet hosts the most important items needed to construct this pallet. The main components of this pallet are: @@ -93,38 +93,38 @@ storage item. Otherwise, all storage items are listed among [*Type Definitions*] - [`Config`], which contains the configuration trait of this pallet. - [`Event`] and [`Error`], which are listed among the [*Enums*](#enums). 
"] - ), - ); + ), + ); - let new_items = quote::quote!( - #metadata_docs - #constants - #pallet_struct - #config - #call - #tasks - #error - #event - #storages - #inherents - #instances - #hooks - #genesis_build - #genesis_config - #type_values - #origins - #validate_unsigned - #tt_default_parts - #doc_only - #composites - ); + let new_items = quote::quote!( + #metadata_docs + #constants + #pallet_struct + #config + #call + #tasks + #error + #event + #storages + #inherents + #instances + #hooks + #genesis_build + #genesis_config + #type_values + #origins + #validate_unsigned + #tt_default_parts + #doc_only + #composites + ); - def.item - .content - .as_mut() - .expect("This is checked by parsing") - .1 - .push(syn::Item::Verbatim(new_items)); + def.item + .content + .as_mut() + .expect("This is checked by parsing") + .1 + .push(syn::Item::Verbatim(new_items)); - def.item.into_token_stream() + def.item.into_token_stream() } diff --git a/support/procedural-fork/src/pallet/expand/origin.rs b/support/procedural-fork/src/pallet/expand/origin.rs index 55865b424..167445ad6 100644 --- a/support/procedural-fork/src/pallet/expand/origin.rs +++ b/support/procedural-fork/src/pallet/expand/origin.rs @@ -21,35 +21,38 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_origins(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = Ident::new(&format!("__is_origin_part_defined_{}", count), def.item.span()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new( + &format!("__is_origin_part_defined_{}", count), + def.item.span(), + ); - let maybe_compile_error = if def.origin.is_none() { - quote! 
{ - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::origin] defined, perhaps you should \ - remove `Origin` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.origin.is_none() { + quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::origin] defined, perhaps you should \ + remove `Origin` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_origin_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_origin_check { + #[macro_export] + #[doc(hidden)] + macro_rules! #macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_origin_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_origin_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/pallet_struct.rs b/support/procedural-fork/src/pallet/expand/pallet_struct.rs index 7cdf6bde9..c5def65ed 100644 --- a/support/procedural-fork/src/pallet/expand/pallet_struct.rs +++ b/support/procedural-fork/src/pallet/expand/pallet_struct.rs @@ -28,263 +28,275 @@ use frame_support_procedural_tools::get_doc_literals; /// * implementation of `PalletInfoAccess` information /// * implementation of `StorageInfoTrait` on Pallet pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); - let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); - let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); - let pallet_ident = &def.pallet_struct.pallet; - let config_where_clause = &def.config.where_clause; 
- - let mut storages_where_clauses = vec![&def.config.where_clause]; - storages_where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); - let storages_where_clauses = merge_where_clauses(&storages_where_clauses); - - let pallet_item = { - let pallet_module_items = &mut def.item.content.as_mut().expect("Checked by def").1; - let item = &mut pallet_module_items[def.pallet_struct.index]; - if let syn::Item::Struct(item) = item { - item - } else { - unreachable!("Checked by pallet struct parser") - } - }; - - // If the first field type is `_` then we replace with `PhantomData` - if let Some(field) = pallet_item.fields.iter_mut().next() { - if field.ty == syn::parse_quote!(_) { - field.ty = syn::parse_quote!( - #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)> - ); - } - } - - if get_doc_literals(&pallet_item.attrs).is_empty() { - pallet_item.attrs.push(syn::parse_quote!( - #[doc = r" + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); + let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); + let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); + let pallet_ident = &def.pallet_struct.pallet; + let config_where_clause = &def.config.where_clause; + + let mut storages_where_clauses = vec![&def.config.where_clause]; + storages_where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); + let storages_where_clauses = merge_where_clauses(&storages_where_clauses); + + let pallet_item = { + let pallet_module_items = &mut def.item.content.as_mut().expect("Checked by def").1; + let item = &mut pallet_module_items[def.pallet_struct.index]; + if let syn::Item::Struct(item) = item { + item + } else { + unreachable!("Checked by pallet struct parser") + } + }; + + // If the first field type is `_` then we replace with `PhantomData` + if let Some(field) = 
pallet_item.fields.iter_mut().next() { + if field.ty == syn::parse_quote!(_) { + field.ty = syn::parse_quote!( + #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)> + ); + } + } + + if get_doc_literals(&pallet_item.attrs).is_empty() { + pallet_item.attrs.push(syn::parse_quote!( + #[doc = r" The `Pallet` struct, the main type that implements traits and standalone functions within the pallet. "] - )); - } - - pallet_item.attrs.push(syn::parse_quote!( - #[derive( - #frame_support::CloneNoBound, - #frame_support::EqNoBound, - #frame_support::PartialEqNoBound, - #frame_support::RuntimeDebugNoBound, - )] - )); - - let pallet_error_metadata = if let Some(error_def) = &def.error { - let error_ident = &error_def.error; - quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { - Some(#frame_support::__private::metadata_ir::PalletErrorMetadataIR { - ty: #frame_support::__private::scale_info::meta_type::<#error_ident<#type_use_gen>>() - }) - } - } - ) - } else { - quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { - #[doc(hidden)] - pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { - None - } - } - ) - }; - - let storage_info_span = - def.pallet_struct.without_storage_info.unwrap_or(def.pallet_struct.attr_span); - - let storage_names = &def.storages.iter().map(|storage| &storage.ident).collect::>(); - let storage_cfg_attrs = - &def.storages.iter().map(|storage| &storage.cfg_attrs).collect::>(); - - // Depending on the flag `without_storage_info` and the storage attribute `unbounded`, we use - // partial or full storage info from storage. 
- let storage_info_traits = &def - .storages - .iter() - .map(|storage| { - if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { - quote::quote_spanned!(storage_info_span => PartialStorageInfoTrait) - } else { - quote::quote_spanned!(storage_info_span => StorageInfoTrait) - } - }) - .collect::>(); - - let storage_info_methods = &def - .storages - .iter() - .map(|storage| { - if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { - quote::quote_spanned!(storage_info_span => partial_storage_info) - } else { - quote::quote_spanned!(storage_info_span => storage_info) - } - }) - .collect::>(); - - let storage_info = quote::quote_spanned!(storage_info_span => - impl<#type_impl_gen> #frame_support::traits::StorageInfoTrait - for #pallet_ident<#type_use_gen> - #storages_where_clauses - { - fn storage_info() - -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::StorageInfo> - { - #[allow(unused_mut)] - let mut res = #frame_support::__private::sp_std::vec![]; - - #( - #(#storage_cfg_attrs)* - { - let mut storage_info = < - #storage_names<#type_use_gen> - as #frame_support::traits::#storage_info_traits - >::#storage_info_methods(); - res.append(&mut storage_info); - } - )* - - res - } - } - ); - - let (storage_version, in_code_storage_version_ty) = - if let Some(v) = def.pallet_struct.storage_version.as_ref() { - (quote::quote! { #v }, quote::quote! { #frame_support::traits::StorageVersion }) - } else { - ( - quote::quote! { core::default::Default::default() }, - quote::quote! 
{ #frame_support::traits::NoStorageVersionSet }, - ) - }; - - let whitelisted_storage_idents: Vec = def - .storages - .iter() - .filter_map(|s| s.whitelisted.then_some(s.ident.clone())) - .collect(); - - let whitelisted_storage_keys_impl = quote::quote![ - use #frame_support::traits::{StorageInfoTrait, TrackedStorageKey, WhitelistedStorageKeys}; - impl<#type_impl_gen> WhitelistedStorageKeys for #pallet_ident<#type_use_gen> #storages_where_clauses { - fn whitelisted_storage_keys() -> #frame_support::__private::sp_std::vec::Vec { - use #frame_support::__private::sp_std::vec; - vec![#( - TrackedStorageKey::new(#whitelisted_storage_idents::<#type_use_gen>::hashed_key().to_vec()) - ),*] - } - } - ]; - - quote::quote_spanned!(def.pallet_struct.attr_span => - #pallet_error_metadata - - /// Type alias to `Pallet`, to be used by `construct_runtime`. - /// - /// Generated by `pallet` attribute macro. - #[deprecated(note = "use `Pallet` instead")] - #[allow(dead_code)] - pub type Module<#type_decl_gen> = #pallet_ident<#type_use_gen>; - - // Implement `GetStorageVersion` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::GetStorageVersion - for #pallet_ident<#type_use_gen> - #config_where_clause - { - type InCodeStorageVersion = #in_code_storage_version_ty; - - fn in_code_storage_version() -> Self::InCodeStorageVersion { - #storage_version - } - - fn on_chain_storage_version() -> #frame_support::traits::StorageVersion { - #frame_support::traits::StorageVersion::get::() - } - } - - // Implement `OnGenesis` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::OnGenesis - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn on_genesis() { - let storage_version: #frame_support::traits::StorageVersion = #storage_version; - storage_version.put::(); - } - } - - // Implement `PalletInfoAccess` for `Pallet` - impl<#type_impl_gen> #frame_support::traits::PalletInfoAccess - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn index() -> usize { - < 
- ::PalletInfo as #frame_support::traits::PalletInfo - >::index::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn name() -> &'static str { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::name::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn name_hash() -> [u8; 16] { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::name_hash::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn module_name() -> &'static str { - < - ::PalletInfo as #frame_support::traits::PalletInfo - >::module_name::() - .expect("Pallet is part of the runtime because pallet `Config` trait is \ - implemented by the runtime") - } - - fn crate_version() -> #frame_support::traits::CrateVersion { - #frame_support::crate_to_crate_version!() - } - } - - impl<#type_impl_gen> #frame_support::traits::PalletsInfoAccess - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn count() -> usize { 1 } - fn infos() -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::PalletInfoData> { - use #frame_support::traits::PalletInfoAccess; - let item = #frame_support::traits::PalletInfoData { - index: Self::index(), - name: Self::name(), - module_name: Self::module_name(), - crate_version: Self::crate_version(), - }; - #frame_support::__private::sp_std::vec![item] - } - } - - #storage_info - #whitelisted_storage_keys_impl - ) + )); + } + + pallet_item.attrs.push(syn::parse_quote!( + #[derive( + #frame_support::CloneNoBound, + #frame_support::EqNoBound, + #frame_support::PartialEqNoBound, + #frame_support::RuntimeDebugNoBound, + )] + )); + + let pallet_error_metadata = if let Some(error_def) = &def.error { + let error_ident = &error_def.error; + quote::quote_spanned!(def.pallet_struct.attr_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> 
#config_where_clause { + #[doc(hidden)] + pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { + Some(#frame_support::__private::metadata_ir::PalletErrorMetadataIR { + ty: #frame_support::__private::scale_info::meta_type::<#error_ident<#type_use_gen>>() + }) + } + } + ) + } else { + quote::quote_spanned!(def.pallet_struct.attr_span => + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + #[doc(hidden)] + pub fn error_metadata() -> Option<#frame_support::__private::metadata_ir::PalletErrorMetadataIR> { + None + } + } + ) + }; + + let storage_info_span = def + .pallet_struct + .without_storage_info + .unwrap_or(def.pallet_struct.attr_span); + + let storage_names = &def + .storages + .iter() + .map(|storage| &storage.ident) + .collect::>(); + let storage_cfg_attrs = &def + .storages + .iter() + .map(|storage| &storage.cfg_attrs) + .collect::>(); + + // Depending on the flag `without_storage_info` and the storage attribute `unbounded`, we use + // partial or full storage info from storage. 
+ let storage_info_traits = &def + .storages + .iter() + .map(|storage| { + if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { + quote::quote_spanned!(storage_info_span => PartialStorageInfoTrait) + } else { + quote::quote_spanned!(storage_info_span => StorageInfoTrait) + } + }) + .collect::>(); + + let storage_info_methods = &def + .storages + .iter() + .map(|storage| { + if storage.unbounded || def.pallet_struct.without_storage_info.is_some() { + quote::quote_spanned!(storage_info_span => partial_storage_info) + } else { + quote::quote_spanned!(storage_info_span => storage_info) + } + }) + .collect::>(); + + let storage_info = quote::quote_spanned!(storage_info_span => + impl<#type_impl_gen> #frame_support::traits::StorageInfoTrait + for #pallet_ident<#type_use_gen> + #storages_where_clauses + { + fn storage_info() + -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::StorageInfo> + { + #[allow(unused_mut)] + let mut res = #frame_support::__private::sp_std::vec![]; + + #( + #(#storage_cfg_attrs)* + { + let mut storage_info = < + #storage_names<#type_use_gen> + as #frame_support::traits::#storage_info_traits + >::#storage_info_methods(); + res.append(&mut storage_info); + } + )* + + res + } + } + ); + + let (storage_version, in_code_storage_version_ty) = + if let Some(v) = def.pallet_struct.storage_version.as_ref() { + ( + quote::quote! { #v }, + quote::quote! { #frame_support::traits::StorageVersion }, + ) + } else { + ( + quote::quote! { core::default::Default::default() }, + quote::quote! 
{ #frame_support::traits::NoStorageVersionSet }, + ) + }; + + let whitelisted_storage_idents: Vec = def + .storages + .iter() + .filter_map(|s| s.whitelisted.then_some(s.ident.clone())) + .collect(); + + let whitelisted_storage_keys_impl = quote::quote![ + use #frame_support::traits::{StorageInfoTrait, TrackedStorageKey, WhitelistedStorageKeys}; + impl<#type_impl_gen> WhitelistedStorageKeys for #pallet_ident<#type_use_gen> #storages_where_clauses { + fn whitelisted_storage_keys() -> #frame_support::__private::sp_std::vec::Vec { + use #frame_support::__private::sp_std::vec; + vec![#( + TrackedStorageKey::new(#whitelisted_storage_idents::<#type_use_gen>::hashed_key().to_vec()) + ),*] + } + } + ]; + + quote::quote_spanned!(def.pallet_struct.attr_span => + #pallet_error_metadata + + /// Type alias to `Pallet`, to be used by `construct_runtime`. + /// + /// Generated by `pallet` attribute macro. + #[deprecated(note = "use `Pallet` instead")] + #[allow(dead_code)] + pub type Module<#type_decl_gen> = #pallet_ident<#type_use_gen>; + + // Implement `GetStorageVersion` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::GetStorageVersion + for #pallet_ident<#type_use_gen> + #config_where_clause + { + type InCodeStorageVersion = #in_code_storage_version_ty; + + fn in_code_storage_version() -> Self::InCodeStorageVersion { + #storage_version + } + + fn on_chain_storage_version() -> #frame_support::traits::StorageVersion { + #frame_support::traits::StorageVersion::get::() + } + } + + // Implement `OnGenesis` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::OnGenesis + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn on_genesis() { + let storage_version: #frame_support::traits::StorageVersion = #storage_version; + storage_version.put::(); + } + } + + // Implement `PalletInfoAccess` for `Pallet` + impl<#type_impl_gen> #frame_support::traits::PalletInfoAccess + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn index() -> usize { + < 
+ ::PalletInfo as #frame_support::traits::PalletInfo + >::index::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn name() -> &'static str { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::name::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn name_hash() -> [u8; 16] { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::name_hash::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn module_name() -> &'static str { + < + ::PalletInfo as #frame_support::traits::PalletInfo + >::module_name::() + .expect("Pallet is part of the runtime because pallet `Config` trait is \ + implemented by the runtime") + } + + fn crate_version() -> #frame_support::traits::CrateVersion { + #frame_support::crate_to_crate_version!() + } + } + + impl<#type_impl_gen> #frame_support::traits::PalletsInfoAccess + for #pallet_ident<#type_use_gen> + #config_where_clause + { + fn count() -> usize { 1 } + fn infos() -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::PalletInfoData> { + use #frame_support::traits::PalletInfoAccess; + let item = #frame_support::traits::PalletInfoData { + index: Self::index(), + name: Self::name(), + module_name: Self::module_name(), + crate_version: Self::crate_version(), + }; + #frame_support::__private::sp_std::vec![item] + } + } + + #storage_info + #whitelisted_storage_keys_impl + ) } diff --git a/support/procedural-fork/src/pallet/expand/storage.rs b/support/procedural-fork/src/pallet/expand/storage.rs index 937b068cf..b77e9846b 100644 --- a/support/procedural-fork/src/pallet/expand/storage.rs +++ b/support/procedural-fork/src/pallet/expand/storage.rs @@ -16,14 +16,14 @@ // limitations under the License. 
use crate::{ - counter_prefix, - pallet::{ - parse::{ - helper::two128_str, - storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, - }, - Def, - }, + counter_prefix, + pallet::{ + parse::{ + helper::two128_str, + storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, + }, + Def, + }, }; use quote::ToTokens; use std::{collections::HashMap, ops::IndexMut}; @@ -32,73 +32,76 @@ use syn::spanned::Spanned; /// Generate the prefix_ident related to the storage. /// prefix_ident is used for the prefix struct to be given to storage as first generic param. fn prefix_ident(storage: &StorageDef) -> syn::Ident { - let storage_ident = &storage.ident; - syn::Ident::new(&format!("_GeneratedPrefixForStorage{}", storage_ident), storage_ident.span()) + let storage_ident = &storage.ident; + syn::Ident::new( + &format!("_GeneratedPrefixForStorage{}", storage_ident), + storage_ident.span(), + ) } /// Generate the counter_prefix_ident related to the storage. /// counter_prefix_ident is used for the prefix struct to be given to counted storage map. fn counter_prefix_ident(storage_ident: &syn::Ident) -> syn::Ident { - syn::Ident::new( - &format!("_GeneratedCounterPrefixForStorage{}", storage_ident), - storage_ident.span(), - ) + syn::Ident::new( + &format!("_GeneratedCounterPrefixForStorage{}", storage_ident), + storage_ident.span(), + ) } /// Check for duplicated storage prefixes. This step is necessary since users can specify an /// alternative storage prefix using the #[pallet::storage_prefix] syntax, and we need to ensure /// that the prefix specified by the user is not a duplicate of an existing one. 
fn check_prefix_duplicates( - storage_def: &StorageDef, - // A hashmap of all already used prefix and their associated error if duplication - used_prefixes: &mut HashMap, + storage_def: &StorageDef, + // A hashmap of all already used prefix and their associated error if duplication + used_prefixes: &mut HashMap, ) -> syn::Result<()> { - let prefix = storage_def.prefix(); - let dup_err = syn::Error::new( - storage_def.prefix_span(), - format!("Duplicate storage prefixes found for `{}`", prefix), - ); - - if let Some(other_dup_err) = used_prefixes.insert(prefix.clone(), dup_err.clone()) { - let mut err = dup_err; - err.combine(other_dup_err); - return Err(err) - } - - if let Metadata::CountedMap { .. } = storage_def.metadata { - let counter_prefix = counter_prefix(&prefix); - let counter_dup_err = syn::Error::new( - storage_def.prefix_span(), - format!( - "Duplicate storage prefixes found for `{}`, used for counter associated to \ + let prefix = storage_def.prefix(); + let dup_err = syn::Error::new( + storage_def.prefix_span(), + format!("Duplicate storage prefixes found for `{}`", prefix), + ); + + if let Some(other_dup_err) = used_prefixes.insert(prefix.clone(), dup_err.clone()) { + let mut err = dup_err; + err.combine(other_dup_err); + return Err(err); + } + + if let Metadata::CountedMap { .. 
} = storage_def.metadata { + let counter_prefix = counter_prefix(&prefix); + let counter_dup_err = syn::Error::new( + storage_def.prefix_span(), + format!( + "Duplicate storage prefixes found for `{}`, used for counter associated to \ counted storage map", - counter_prefix, - ), - ); - - if let Some(other_dup_err) = used_prefixes.insert(counter_prefix, counter_dup_err.clone()) { - let mut err = counter_dup_err; - err.combine(other_dup_err); - return Err(err) - } - } - - Ok(()) + counter_prefix, + ), + ); + + if let Some(other_dup_err) = used_prefixes.insert(counter_prefix, counter_dup_err.clone()) { + let mut err = counter_dup_err; + err.combine(other_dup_err); + return Err(err); + } + } + + Ok(()) } pub struct ResultOnEmptyStructMetadata { - /// The Rust ident that is going to be used as the name of the OnEmpty struct. - pub name: syn::Ident, - /// The path to the error type being returned by the ResultQuery. - pub error_path: syn::Path, - /// The visibility of the OnEmpty struct. - pub visibility: syn::Visibility, - /// The type of the storage item. - pub value_ty: syn::Type, - /// The name of the pallet error enum variant that is going to be returned. - pub variant_name: syn::Ident, - /// The span used to report compilation errors about the OnEmpty struct. - pub span: proc_macro2::Span, + /// The Rust ident that is going to be used as the name of the OnEmpty struct. + pub name: syn::Ident, + /// The path to the error type being returned by the ResultQuery. + pub error_path: syn::Path, + /// The visibility of the OnEmpty struct. + pub visibility: syn::Visibility, + /// The type of the storage item. + pub value_ty: syn::Type, + /// The name of the pallet error enum variant that is going to be returned. + pub variant_name: syn::Ident, + /// The span used to report compilation errors about the OnEmpty struct. 
+ pub span: proc_macro2::Span, } /// @@ -106,277 +109,305 @@ pub struct ResultOnEmptyStructMetadata { /// * if generics are named: reorder the generic, remove their name, and add the missing ones. /// * Add `#[allow(type_alias_bounds)]` pub fn process_generics(def: &mut Def) -> syn::Result> { - let frame_support = &def.frame_support; - let mut on_empty_struct_metadata = Vec::new(); - - for storage_def in def.storages.iter_mut() { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage_def.index]; - - let typ_item = match item { - syn::Item::Type(t) => t, - _ => unreachable!("Checked by def"), - }; - - typ_item.attrs.push(syn::parse_quote!(#[allow(type_alias_bounds)])); - - let typ_path = match &mut *typ_item.ty { - syn::Type::Path(p) => p, - _ => unreachable!("Checked by def"), - }; - - let args = match &mut typ_path.path.segments[0].arguments { - syn::PathArguments::AngleBracketed(args) => args, - _ => unreachable!("Checked by def"), - }; - - let prefix_ident = prefix_ident(storage_def); - let type_use_gen = if def.config.has_instance { - quote::quote_spanned!(storage_def.attr_span => T, I) - } else { - quote::quote_spanned!(storage_def.attr_span => T) - }; - - let default_query_kind: syn::Type = - syn::parse_quote!(#frame_support::storage::types::OptionQuery); - let mut default_on_empty = |value_ty: syn::Type| -> syn::Type { - if let Some(QueryKind::ResultQuery(error_path, variant_name)) = - storage_def.query_kind.as_ref() - { - let on_empty_ident = - quote::format_ident!("__Frame_Internal_Get{}Result", storage_def.ident); - on_empty_struct_metadata.push(ResultOnEmptyStructMetadata { - name: on_empty_ident.clone(), - visibility: storage_def.vis.clone(), - value_ty, - error_path: error_path.clone(), - variant_name: variant_name.clone(), - span: storage_def.attr_span, - }); - return syn::parse_quote!(#on_empty_ident) - } - syn::parse_quote!(#frame_support::traits::GetDefault) - }; - let default_max_values: syn::Type = 
syn::parse_quote!(#frame_support::traits::GetDefault); - - let set_result_query_type_parameter = |query_type: &mut syn::Type| -> syn::Result<()> { - if let Some(QueryKind::ResultQuery(error_path, _)) = storage_def.query_kind.as_ref() { - if let syn::Type::Path(syn::TypePath { path: syn::Path { segments, .. }, .. }) = - query_type - { - if let Some(seg) = segments.last_mut() { - if let syn::PathArguments::AngleBracketed( - syn::AngleBracketedGenericArguments { args, .. }, - ) = &mut seg.arguments - { - args.clear(); - args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))); - } - } - } else { - let msg = format!( - "Invalid pallet::storage, unexpected type for query, expected ResultQuery \ + let frame_support = &def.frame_support; + let mut on_empty_struct_metadata = Vec::new(); + + for storage_def in def.storages.iter_mut() { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage_def.index]; + + let typ_item = match item { + syn::Item::Type(t) => t, + _ => unreachable!("Checked by def"), + }; + + typ_item + .attrs + .push(syn::parse_quote!(#[allow(type_alias_bounds)])); + + let typ_path = match &mut *typ_item.ty { + syn::Type::Path(p) => p, + _ => unreachable!("Checked by def"), + }; + + let args = match &mut typ_path.path.segments[0].arguments { + syn::PathArguments::AngleBracketed(args) => args, + _ => unreachable!("Checked by def"), + }; + + let prefix_ident = prefix_ident(storage_def); + let type_use_gen = if def.config.has_instance { + quote::quote_spanned!(storage_def.attr_span => T, I) + } else { + quote::quote_spanned!(storage_def.attr_span => T) + }; + + let default_query_kind: syn::Type = + syn::parse_quote!(#frame_support::storage::types::OptionQuery); + let mut default_on_empty = |value_ty: syn::Type| -> syn::Type { + if let Some(QueryKind::ResultQuery(error_path, variant_name)) = + storage_def.query_kind.as_ref() + { + let on_empty_ident = + quote::format_ident!("__Frame_Internal_Get{}Result", storage_def.ident); + 
on_empty_struct_metadata.push(ResultOnEmptyStructMetadata { + name: on_empty_ident.clone(), + visibility: storage_def.vis.clone(), + value_ty, + error_path: error_path.clone(), + variant_name: variant_name.clone(), + span: storage_def.attr_span, + }); + return syn::parse_quote!(#on_empty_ident); + } + syn::parse_quote!(#frame_support::traits::GetDefault) + }; + let default_max_values: syn::Type = syn::parse_quote!(#frame_support::traits::GetDefault); + + let set_result_query_type_parameter = |query_type: &mut syn::Type| -> syn::Result<()> { + if let Some(QueryKind::ResultQuery(error_path, _)) = storage_def.query_kind.as_ref() { + if let syn::Type::Path(syn::TypePath { + path: syn::Path { segments, .. }, + .. + }) = query_type + { + if let Some(seg) = segments.last_mut() { + if let syn::PathArguments::AngleBracketed( + syn::AngleBracketedGenericArguments { args, .. }, + ) = &mut seg.arguments + { + args.clear(); + args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))); + } + } + } else { + let msg = format!( + "Invalid pallet::storage, unexpected type for query, expected ResultQuery \ with 1 type parameter, found `{}`", - query_type.to_token_stream().to_string() - ); - return Err(syn::Error::new(query_type.span(), msg)) - } - } - Ok(()) - }; - - if let Some(named_generics) = storage_def.named_generics.clone() { - args.args.clear(); - args.args.push(syn::parse_quote!( #prefix_ident<#type_use_gen> )); - match named_generics { - StorageGenerics::Value { value, query_kind, on_empty } => { - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - }, - StorageGenerics::Map { hasher, key, value, query_kind, on_empty, max_values } | - 
StorageGenerics::CountedMap { - hasher, - key, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(hasher)); - args.args.push(syn::GenericArgument::Type(key)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - }, - StorageGenerics::DoubleMap { - hasher1, - key1, - hasher2, - key2, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(hasher1)); - args.args.push(syn::GenericArgument::Type(key1)); - args.args.push(syn::GenericArgument::Type(hasher2)); - args.args.push(syn::GenericArgument::Type(key2)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - }, - StorageGenerics::NMap { keygen, value, query_kind, on_empty, max_values } | - StorageGenerics::CountedNMap { - keygen, - value, - query_kind, - on_empty, - max_values, - } => { - args.args.push(syn::GenericArgument::Type(keygen)); - args.args.push(syn::GenericArgument::Type(value.clone())); - let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); - 
set_result_query_type_parameter(&mut query_kind)?; - args.args.push(syn::GenericArgument::Type(query_kind)); - let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); - args.args.push(syn::GenericArgument::Type(on_empty)); - let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); - args.args.push(syn::GenericArgument::Type(max_values)); - }, - } - } else { - args.args[0] = syn::parse_quote!( #prefix_ident<#type_use_gen> ); - - let (value_idx, query_idx, on_empty_idx) = match storage_def.metadata { - Metadata::Value { .. } => (1, 2, 3), - Metadata::NMap { .. } | Metadata::CountedNMap { .. } => (2, 3, 4), - Metadata::Map { .. } | Metadata::CountedMap { .. } => (3, 4, 5), - Metadata::DoubleMap { .. } => (5, 6, 7), - }; - - if storage_def.use_default_hasher { - let hasher_indices: Vec = match storage_def.metadata { - Metadata::Map { .. } | Metadata::CountedMap { .. } => vec![1], - Metadata::DoubleMap { .. } => vec![1, 3], - _ => vec![], - }; - for hasher_idx in hasher_indices { - args.args[hasher_idx] = syn::GenericArgument::Type( - syn::parse_quote!(#frame_support::Blake2_128Concat), - ); - } - } - - if query_idx < args.args.len() { - if let syn::GenericArgument::Type(query_kind) = args.args.index_mut(query_idx) { - set_result_query_type_parameter(query_kind)?; - } - } else if let Some(QueryKind::ResultQuery(error_path, _)) = - storage_def.query_kind.as_ref() - { - args.args.push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))) - } - - // Here, we only need to check if OnEmpty is *not* specified, and if so, then we have to - // generate a default OnEmpty struct for it. 
- if on_empty_idx >= args.args.len() && - matches!(storage_def.query_kind.as_ref(), Some(QueryKind::ResultQuery(_, _))) - { - let value_ty = match args.args[value_idx].clone() { - syn::GenericArgument::Type(ty) => ty, - _ => unreachable!(), - }; - let on_empty = default_on_empty(value_ty); - args.args.push(syn::GenericArgument::Type(on_empty)); - } - } - } - - Ok(on_empty_struct_metadata) + query_type.to_token_stream().to_string() + ); + return Err(syn::Error::new(query_type.span(), msg)); + } + } + Ok(()) + }; + + if let Some(named_generics) = storage_def.named_generics.clone() { + args.args.clear(); + args.args + .push(syn::parse_quote!( #prefix_ident<#type_use_gen> )); + match named_generics { + StorageGenerics::Value { + value, + query_kind, + on_empty, + } => { + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + } + StorageGenerics::Map { + hasher, + key, + value, + query_kind, + on_empty, + max_values, + } + | StorageGenerics::CountedMap { + hasher, + key, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher)); + args.args.push(syn::GenericArgument::Type(key)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + } + 
StorageGenerics::DoubleMap { + hasher1, + key1, + hasher2, + key2, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher1)); + args.args.push(syn::GenericArgument::Type(key1)); + args.args.push(syn::GenericArgument::Type(hasher2)); + args.args.push(syn::GenericArgument::Type(key2)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + } + StorageGenerics::NMap { + keygen, + value, + query_kind, + on_empty, + max_values, + } + | StorageGenerics::CountedNMap { + keygen, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(keygen)); + args.args.push(syn::GenericArgument::Type(value.clone())); + let mut query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + set_result_query_type_parameter(&mut query_kind)?; + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty(value)); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + } + } + } else { + args.args[0] = syn::parse_quote!( #prefix_ident<#type_use_gen> ); + + let (value_idx, query_idx, on_empty_idx) = match storage_def.metadata { + Metadata::Value { .. } => (1, 2, 3), + Metadata::NMap { .. } | Metadata::CountedNMap { .. } => (2, 3, 4), + Metadata::Map { .. } | Metadata::CountedMap { .. } => (3, 4, 5), + Metadata::DoubleMap { .. 
} => (5, 6, 7), + }; + + if storage_def.use_default_hasher { + let hasher_indices: Vec = match storage_def.metadata { + Metadata::Map { .. } | Metadata::CountedMap { .. } => vec![1], + Metadata::DoubleMap { .. } => vec![1, 3], + _ => vec![], + }; + for hasher_idx in hasher_indices { + args.args[hasher_idx] = syn::GenericArgument::Type( + syn::parse_quote!(#frame_support::Blake2_128Concat), + ); + } + } + + if query_idx < args.args.len() { + if let syn::GenericArgument::Type(query_kind) = args.args.index_mut(query_idx) { + set_result_query_type_parameter(query_kind)?; + } + } else if let Some(QueryKind::ResultQuery(error_path, _)) = + storage_def.query_kind.as_ref() + { + args.args + .push(syn::GenericArgument::Type(syn::parse_quote!(#error_path))) + } + + // Here, we only need to check if OnEmpty is *not* specified, and if so, then we have to + // generate a default OnEmpty struct for it. + if on_empty_idx >= args.args.len() + && matches!( + storage_def.query_kind.as_ref(), + Some(QueryKind::ResultQuery(_, _)) + ) + { + let value_ty = match args.args[value_idx].clone() { + syn::GenericArgument::Type(ty) => ty, + _ => unreachable!(), + }; + let on_empty = default_on_empty(value_ty); + args.args.push(syn::GenericArgument::Type(on_empty)); + } + } + } + + Ok(on_empty_struct_metadata) } fn augment_final_docs(def: &mut Def) { - // expand the docs with a new line showing the storage type (value, map, double map, etc), and - // the key/value type(s). 
- let mut push_string_literal = |doc_line: &str, storage: &mut StorageDef| { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage.index]; - let typ_item = match item { - syn::Item::Type(t) => t, - _ => unreachable!("Checked by def"), - }; - typ_item.attrs.push(syn::parse_quote!(#[doc = ""])); - typ_item.attrs.push(syn::parse_quote!(#[doc = #doc_line])); - }; - def.storages.iter_mut().for_each(|storage| match &storage.metadata { - Metadata::Value { value } => { - let doc_line = format!( - "Storage type is [`StorageValue`] with value type `{}`.", - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - Metadata::Map { key, value } => { - let doc_line = format!( - "Storage type is [`StorageMap`] with key type `{}` and value type `{}`.", - key.to_token_stream(), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - Metadata::DoubleMap { key1, key2, value } => { - let doc_line = format!( + // expand the docs with a new line showing the storage type (value, map, double map, etc), and + // the key/value type(s). 
+ let mut push_string_literal = |doc_line: &str, storage: &mut StorageDef| { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage.index]; + let typ_item = match item { + syn::Item::Type(t) => t, + _ => unreachable!("Checked by def"), + }; + typ_item.attrs.push(syn::parse_quote!(#[doc = ""])); + typ_item.attrs.push(syn::parse_quote!(#[doc = #doc_line])); + }; + def.storages + .iter_mut() + .for_each(|storage| match &storage.metadata { + Metadata::Value { value } => { + let doc_line = format!( + "Storage type is [`StorageValue`] with value type `{}`.", + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + Metadata::Map { key, value } => { + let doc_line = format!( + "Storage type is [`StorageMap`] with key type `{}` and value type `{}`.", + key.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + Metadata::DoubleMap { key1, key2, value } => { + let doc_line = format!( "Storage type is [`StorageDoubleMap`] with key1 type {}, key2 type {} and value type {}.", key1.to_token_stream(), key2.to_token_stream(), value.to_token_stream() ); - push_string_literal(&doc_line, storage); - }, - Metadata::NMap { keys, value, .. } => { - let doc_line = format!( - "Storage type is [`StorageNMap`] with keys type ({}) and value type {}.", - keys.iter() - .map(|k| k.to_token_stream().to_string()) - .collect::>() - .join(", "), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - Metadata::CountedNMap { keys, value, .. 
} => { - let doc_line = format!( - "Storage type is [`CountedStorageNMap`] with keys type ({}) and value type {}.", - keys.iter() - .map(|k| k.to_token_stream().to_string()) - .collect::>() - .join(", "), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - Metadata::CountedMap { key, value } => { - let doc_line = format!( - "Storage type is [`CountedStorageMap`] with key type {} and value type {}.", - key.to_token_stream(), - value.to_token_stream() - ); - push_string_literal(&doc_line, storage); - }, - }); + push_string_literal(&doc_line, storage); + } + Metadata::NMap { keys, value, .. } => { + let doc_line = format!( + "Storage type is [`StorageNMap`] with keys type ({}) and value type {}.", + keys.iter() + .map(|k| k.to_token_stream().to_string()) + .collect::>() + .join(", "), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + Metadata::CountedNMap { keys, value, .. } => { + let doc_line = format!( + "Storage type is [`CountedStorageNMap`] with keys type ({}) and value type {}.", + keys.iter() + .map(|k| k.to_token_stream().to_string()) + .collect::>() + .join(", "), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + Metadata::CountedMap { key, value } => { + let doc_line = format!( + "Storage type is [`CountedStorageMap`] with key type {} and value type {}.", + key.to_token_stream(), + value.to_token_stream() + ); + push_string_literal(&doc_line, storage); + } + }); } /// @@ -387,29 +418,29 @@ fn augment_final_docs(def: &mut Def) { /// * Add `#[allow(type_alias_bounds)]` on storages type alias /// * generate metadatas pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { - let on_empty_struct_metadata = match process_generics(def) { - Ok(idents) => idents, - Err(e) => return e.into_compile_error(), - }; - - augment_final_docs(def); - - // Check for duplicate prefixes - let mut prefix_set = HashMap::new(); - let mut errors = def - .storages - .iter() - 
.filter_map(|storage_def| check_prefix_duplicates(storage_def, &mut prefix_set).err()); - if let Some(mut final_error) = errors.next() { - errors.for_each(|error| final_error.combine(error)); - return final_error.into_compile_error() - } - - let frame_support = &def.frame_support; - let frame_system = &def.frame_system; - let pallet_ident = &def.pallet_struct.pallet; - - let entries_builder = def.storages.iter().map(|storage| { + let on_empty_struct_metadata = match process_generics(def) { + Ok(idents) => idents, + Err(e) => return e.into_compile_error(), + }; + + augment_final_docs(def); + + // Check for duplicate prefixes + let mut prefix_set = HashMap::new(); + let mut errors = def + .storages + .iter() + .filter_map(|storage_def| check_prefix_duplicates(storage_def, &mut prefix_set).err()); + if let Some(mut final_error) = errors.next() { + errors.for_each(|error| final_error.combine(error)); + return final_error.into_compile_error(); + } + + let frame_support = &def.frame_support; + let frame_system = &def.frame_system; + let pallet_ident = &def.pallet_struct.pallet; + + let entries_builder = def.storages.iter().map(|storage| { let no_docs = vec![]; let docs = if cfg!(feature = "no-metadata-docs") { &no_docs } else { &storage.docs }; @@ -432,202 +463,202 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { ) }); - let getters = def.storages.iter().map(|storage| { - if let Some(getter) = &storage.getter { - let completed_where_clause = - super::merge_where_clauses(&[&storage.where_clause, &def.config.where_clause]); - - let ident = &storage.ident; - let gen = &def.type_use_generics(storage.attr_span); - let type_impl_gen = &def.type_impl_generics(storage.attr_span); - let type_use_gen = &def.type_use_generics(storage.attr_span); - let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); - - let cfg_attrs = &storage.cfg_attrs; - - // If the storage item is public, link it and otherwise just mention it. 
- // - // We can not just copy the docs from a non-public type as it may links to internal - // types which makes the compiler very unhappy :( - let getter_doc_line = if matches!(storage.vis, syn::Visibility::Public(_)) { - format!("An auto-generated getter for [`{}`].", storage.ident) - } else { - format!("An auto-generated getter for `{}`.", storage.ident) - }; - - match &storage.metadata { - Metadata::Value { value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter() -> #query { - < - #full_ident as #frame_support::storage::StorageValue<#value> - >::get() - } - } - ) - }, - Metadata::Map { key, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k: KArg) -> #query where - KArg: #frame_support::__private::codec::EncodeLike<#key>, - { - < - #full_ident as #frame_support::storage::StorageMap<#key, #value> - >::get(k) - } - } - ) - }, - Metadata::CountedMap { key, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => 
quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k: KArg) -> #query where - KArg: #frame_support::__private::codec::EncodeLike<#key>, - { - // NOTE: we can't use any trait here because CountedStorageMap - // doesn't implement any. - <#full_ident>::get(k) - } - } - ) - }, - Metadata::DoubleMap { key1, key2, value } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(k1: KArg1, k2: KArg2) -> #query where - KArg1: #frame_support::__private::codec::EncodeLike<#key1>, - KArg2: #frame_support::__private::codec::EncodeLike<#key2>, - { - < - #full_ident as - #frame_support::storage::StorageDoubleMap<#key1, #key2, #value> - >::get(k1, k2) - } - } - ) - }, - Metadata::NMap { keygen, value, .. 
} => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(key: KArg) -> #query - where - KArg: #frame_support::storage::types::EncodeLikeTuple< - <#keygen as #frame_support::storage::types::KeyGenerator>::KArg - > - + #frame_support::storage::types::TupleToEncodedIter, - { - < - #full_ident as - #frame_support::storage::StorageNMap<#keygen, #value> - >::get(key) - } - } - ) - }, - Metadata::CountedNMap { keygen, value, .. } => { - let query = match storage.query_kind.as_ref().expect("Checked by def") { - QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => - Option<#value> - ), - QueryKind::ResultQuery(error_path, _) => { - quote::quote_spanned!(storage.attr_span => - Result<#value, #error_path> - ) - }, - QueryKind::ValueQuery => quote::quote!(#value), - }; - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* - impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { - #[doc = #getter_doc_line] - pub fn #getter(key: KArg) -> #query - where - KArg: #frame_support::storage::types::EncodeLikeTuple< - <#keygen as #frame_support::storage::types::KeyGenerator>::KArg - > - + #frame_support::storage::types::TupleToEncodedIter, - { - // NOTE: we can't use any trait here because CountedStorageNMap - // doesn't implement any. 
- <#full_ident>::get(key) - } - } - ) - }, - } - } else { - Default::default() - } - }); - - let prefix_structs = def.storages.iter().map(|storage_def| { + let getters = def.storages.iter().map(|storage| { + if let Some(getter) = &storage.getter { + let completed_where_clause = + super::merge_where_clauses(&[&storage.where_clause, &def.config.where_clause]); + + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + let type_impl_gen = &def.type_impl_generics(storage.attr_span); + let type_use_gen = &def.type_use_generics(storage.attr_span); + let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); + + let cfg_attrs = &storage.cfg_attrs; + + // If the storage item is public, link it and otherwise just mention it. + // + // We can not just copy the docs from a non-public type as it may links to internal + // types which makes the compiler very unhappy :( + let getter_doc_line = if matches!(storage.vis, syn::Visibility::Public(_)) { + format!("An auto-generated getter for [`{}`].", storage.ident) + } else { + format!("An auto-generated getter for `{}`.", storage.ident) + }; + + match &storage.metadata { + Metadata::Value { value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter() -> #query { + < + #full_ident as #frame_support::storage::StorageValue<#value> + >::get() + } + } + ) + } + Metadata::Map { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => 
quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::__private::codec::EncodeLike<#key>, + { + < + #full_ident as #frame_support::storage::StorageMap<#key, #value> + >::get(k) + } + } + ) + } + Metadata::CountedMap { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::__private::codec::EncodeLike<#key>, + { + // NOTE: we can't use any trait here because CountedStorageMap + // doesn't implement any. 
+ <#full_ident>::get(k) + } + } + ) + } + Metadata::DoubleMap { key1, key2, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(k1: KArg1, k2: KArg2) -> #query where + KArg1: #frame_support::__private::codec::EncodeLike<#key1>, + KArg2: #frame_support::__private::codec::EncodeLike<#key2>, + { + < + #full_ident as + #frame_support::storage::StorageDoubleMap<#key1, #key2, #value> + >::get(k1, k2) + } + } + ) + } + Metadata::NMap { keygen, value, .. } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(key: KArg) -> #query + where + KArg: #frame_support::storage::types::EncodeLikeTuple< + <#keygen as #frame_support::storage::types::KeyGenerator>::KArg + > + + #frame_support::storage::types::TupleToEncodedIter, + { + < + #full_ident as + #frame_support::storage::StorageNMap<#keygen, #value> + >::get(key) + } + } + ) + } + Metadata::CountedNMap { keygen, value, .. 
} => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ResultQuery(error_path, _) => { + quote::quote_spanned!(storage.attr_span => + Result<#value, #error_path> + ) + } + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #[doc = #getter_doc_line] + pub fn #getter(key: KArg) -> #query + where + KArg: #frame_support::storage::types::EncodeLikeTuple< + <#keygen as #frame_support::storage::types::KeyGenerator>::KArg + > + + #frame_support::storage::types::TupleToEncodedIter, + { + // NOTE: we can't use any trait here because CountedStorageNMap + // doesn't implement any. + <#full_ident>::get(key) + } + } + ) + } + } + } else { + Default::default() + } + }); + + let prefix_structs = def.storages.iter().map(|storage_def| { let type_impl_gen = &def.type_impl_generics(storage_def.attr_span); let type_use_gen = &def.type_use_generics(storage_def.attr_span); let prefix_struct_ident = prefix_ident(storage_def); @@ -767,153 +798,159 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { ) }); - let on_empty_structs = on_empty_struct_metadata.into_iter().map(|metadata| { - use crate::pallet::parse::GenericKind; - use syn::{GenericArgument, Path, PathArguments, PathSegment, Type, TypePath}; - - let ResultOnEmptyStructMetadata { - name, - visibility, - value_ty, - error_path, - variant_name, - span, - } = metadata; - - let generic_kind = match error_path.segments.last() { - Some(PathSegment { arguments: PathArguments::AngleBracketed(args), .. }) => { - let (has_config, has_instance) = - args.args.iter().fold((false, false), |(has_config, has_instance), arg| { - match arg { - GenericArgument::Type(Type::Path(TypePath { - path: Path { segments, .. }, - .. 
- })) => { - let maybe_config = - segments.first().map_or(false, |seg| seg.ident == "T"); - let maybe_instance = - segments.first().map_or(false, |seg| seg.ident == "I"); - - (has_config || maybe_config, has_instance || maybe_instance) - }, - _ => (has_config, has_instance), - } - }); - GenericKind::from_gens(has_config, has_instance).unwrap_or(GenericKind::None) - }, - _ => GenericKind::None, - }; - let type_impl_gen = generic_kind.type_impl_gen(proc_macro2::Span::call_site()); - let config_where_clause = &def.config.where_clause; - - quote::quote_spanned!(span => - #[doc(hidden)] - #[allow(non_camel_case_types)] - #visibility struct #name; - - impl<#type_impl_gen> #frame_support::traits::Get> - for #name - #config_where_clause - { - fn get() -> Result<#value_ty, #error_path> { - Err(<#error_path>::#variant_name) - } - } - ) - }); - - // aggregated where clause of all storage types and the whole pallet. - let mut where_clauses = vec![&def.config.where_clause]; - where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); - let completed_where_clause = super::merge_where_clauses(&where_clauses); - let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); - - let try_decode_entire_state = { - let mut storage_names = def - .storages - .iter() - .filter_map(|storage| { - // A little hacky; don't generate for cfg gated storages to not get compile errors - // when building "frame-feature-testing" gated storages in the "frame-support-test" - // crate. 
- if storage.try_decode && storage.cfg_attrs.is_empty() { - let ident = &storage.ident; - let gen = &def.type_use_generics(storage.attr_span); - Some(quote::quote_spanned!(storage.attr_span => #ident<#gen> )) - } else { - None - } - }) - .collect::>(); - storage_names.sort_by_cached_key(|ident| ident.to_string()); - - quote::quote!( - #[cfg(feature = "try-runtime")] - impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage - for #pallet_ident<#type_use_gen> #completed_where_clause - { - fn try_decode_entire_state() -> Result> { - let pallet_name = <::PalletInfo as frame_support::traits::PalletInfo> - ::name::<#pallet_ident<#type_use_gen>>() - .expect("Every active pallet has a name in the runtime; qed"); - - #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); - - // NOTE: for now, we have to exclude storage items that are feature gated. - let mut errors = #frame_support::__private::sp_std::vec::Vec::new(); - let mut decoded = 0usize; - - #( - #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ - {pallet_name}::{}", stringify!(#storage_names)); - - match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { - Ok(count) => { - decoded += count; - }, - Err(err) => { - errors.extend(err); - }, - } - )* - - if errors.is_empty() { - Ok(decoded) - } else { - Err(errors) - } - } - } - ) - }; - - quote::quote!( - impl<#type_impl_gen> #pallet_ident<#type_use_gen> - #completed_where_clause - { - #[doc(hidden)] - pub fn storage_metadata() -> #frame_support::__private::metadata_ir::PalletStorageMetadataIR { - #frame_support::__private::metadata_ir::PalletStorageMetadataIR { - prefix: < - ::PalletInfo as - #frame_support::traits::PalletInfo - >::name::<#pallet_ident<#type_use_gen>>() - .expect("No name found for the pallet in the runtime! 
This usually means that the pallet wasn't added to `construct_runtime!`."), - entries: { - #[allow(unused_mut)] - let mut entries = #frame_support::__private::sp_std::vec![]; - #( #entries_builder )* - entries - }, - } - } - } - - #( #getters )* - #( #prefix_structs )* - #( #on_empty_structs )* - - #try_decode_entire_state - ) + let on_empty_structs = on_empty_struct_metadata.into_iter().map(|metadata| { + use crate::pallet::parse::GenericKind; + use syn::{GenericArgument, Path, PathArguments, PathSegment, Type, TypePath}; + + let ResultOnEmptyStructMetadata { + name, + visibility, + value_ty, + error_path, + variant_name, + span, + } = metadata; + + let generic_kind = match error_path.segments.last() { + Some(PathSegment { + arguments: PathArguments::AngleBracketed(args), + .. + }) => { + let (has_config, has_instance) = + args.args + .iter() + .fold( + (false, false), + |(has_config, has_instance), arg| match arg { + GenericArgument::Type(Type::Path(TypePath { + path: Path { segments, .. }, + .. + })) => { + let maybe_config = + segments.first().map_or(false, |seg| seg.ident == "T"); + let maybe_instance = + segments.first().map_or(false, |seg| seg.ident == "I"); + + (has_config || maybe_config, has_instance || maybe_instance) + } + _ => (has_config, has_instance), + }, + ); + GenericKind::from_gens(has_config, has_instance).unwrap_or(GenericKind::None) + } + _ => GenericKind::None, + }; + let type_impl_gen = generic_kind.type_impl_gen(proc_macro2::Span::call_site()); + let config_where_clause = &def.config.where_clause; + + quote::quote_spanned!(span => + #[doc(hidden)] + #[allow(non_camel_case_types)] + #visibility struct #name; + + impl<#type_impl_gen> #frame_support::traits::Get> + for #name + #config_where_clause + { + fn get() -> Result<#value_ty, #error_path> { + Err(<#error_path>::#variant_name) + } + } + ) + }); + + // aggregated where clause of all storage types and the whole pallet. 
+ let mut where_clauses = vec![&def.config.where_clause]; + where_clauses.extend(def.storages.iter().map(|storage| &storage.where_clause)); + let completed_where_clause = super::merge_where_clauses(&where_clauses); + let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); + let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); + + let try_decode_entire_state = { + let mut storage_names = def + .storages + .iter() + .filter_map(|storage| { + // A little hacky; don't generate for cfg gated storages to not get compile errors + // when building "frame-feature-testing" gated storages in the "frame-support-test" + // crate. + if storage.try_decode && storage.cfg_attrs.is_empty() { + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + Some(quote::quote_spanned!(storage.attr_span => #ident<#gen> )) + } else { + None + } + }) + .collect::>(); + storage_names.sort_by_cached_key(|ident| ident.to_string()); + + quote::quote!( + #[cfg(feature = "try-runtime")] + impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage + for #pallet_ident<#type_use_gen> #completed_where_clause + { + fn try_decode_entire_state() -> Result> { + let pallet_name = <::PalletInfo as frame_support::traits::PalletInfo> + ::name::<#pallet_ident<#type_use_gen>>() + .expect("Every active pallet has a name in the runtime; qed"); + + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); + + // NOTE: for now, we have to exclude storage items that are feature gated. 
+ let mut errors = #frame_support::__private::sp_std::vec::Vec::new(); + let mut decoded = 0usize; + + #( + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ + {pallet_name}::{}", stringify!(#storage_names)); + + match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { + Ok(count) => { + decoded += count; + }, + Err(err) => { + errors.extend(err); + }, + } + )* + + if errors.is_empty() { + Ok(decoded) + } else { + Err(errors) + } + } + } + ) + }; + + quote::quote!( + impl<#type_impl_gen> #pallet_ident<#type_use_gen> + #completed_where_clause + { + #[doc(hidden)] + pub fn storage_metadata() -> #frame_support::__private::metadata_ir::PalletStorageMetadataIR { + #frame_support::__private::metadata_ir::PalletStorageMetadataIR { + prefix: < + ::PalletInfo as + #frame_support::traits::PalletInfo + >::name::<#pallet_ident<#type_use_gen>>() + .expect("No name found for the pallet in the runtime! This usually means that the pallet wasn't added to `construct_runtime!`."), + entries: { + #[allow(unused_mut)] + let mut entries = #frame_support::__private::sp_std::vec![]; + #( #entries_builder )* + entries + }, + } + } + } + + #( #getters )* + #( #prefix_structs )* + #( #on_empty_structs )* + + #try_decode_entire_state + ) } diff --git a/support/procedural-fork/src/pallet/expand/tasks.rs b/support/procedural-fork/src/pallet/expand/tasks.rs index 6697e5c82..8c4dfb54f 100644 --- a/support/procedural-fork/src/pallet/expand/tasks.rs +++ b/support/procedural-fork/src/pallet/expand/tasks.rs @@ -27,141 +27,145 @@ use quote::{format_ident, quote, ToTokens}; use syn::{parse_quote, spanned::Spanned, ItemEnum, ItemImpl}; impl TaskEnumDef { - /// Since we optionally allow users to manually specify a `#[pallet::task_enum]`, in the - /// event they _don't_ specify one (which is actually the most common behavior) we have to - /// generate one based on the existing [`TasksDef`]. 
This method performs that generation. - pub fn generate( - tasks: &TasksDef, - type_decl_bounded_generics: TokenStream2, - type_use_generics: TokenStream2, - ) -> Self { - let variants = if tasks.tasks_attr.is_some() { - tasks - .tasks - .iter() - .map(|task| { - let ident = &task.item.sig.ident; - let ident = - format_ident!("{}", ident.to_string().to_class_case(), span = ident.span()); + /// Since we optionally allow users to manually specify a `#[pallet::task_enum]`, in the + /// event they _don't_ specify one (which is actually the most common behavior) we have to + /// generate one based on the existing [`TasksDef`]. This method performs that generation. + pub fn generate( + tasks: &TasksDef, + type_decl_bounded_generics: TokenStream2, + type_use_generics: TokenStream2, + ) -> Self { + let variants = if tasks.tasks_attr.is_some() { + tasks + .tasks + .iter() + .map(|task| { + let ident = &task.item.sig.ident; + let ident = + format_ident!("{}", ident.to_string().to_class_case(), span = ident.span()); - let args = task.item.sig.inputs.iter().collect::>(); + let args = task.item.sig.inputs.iter().collect::>(); - if args.is_empty() { - quote!(#ident) - } else { - quote!(#ident { - #(#args),* - }) - } - }) - .collect::>() - } else { - Vec::new() - }; - let mut task_enum_def: TaskEnumDef = parse_quote! { - /// Auto-generated enum that encapsulates all tasks defined by this pallet. - /// - /// Conceptually similar to the [`Call`] enum, but for tasks. This is only - /// generated if there are tasks present in this pallet. - #[pallet::task_enum] - pub enum Task<#type_decl_bounded_generics> { - #( - #variants, - )* - } - }; - task_enum_def.type_use_generics = type_use_generics; - task_enum_def - } + if args.is_empty() { + quote!(#ident) + } else { + quote!(#ident { + #(#args),* + }) + } + }) + .collect::>() + } else { + Vec::new() + }; + let mut task_enum_def: TaskEnumDef = parse_quote! { + /// Auto-generated enum that encapsulates all tasks defined by this pallet. 
+ /// + /// Conceptually similar to the [`Call`] enum, but for tasks. This is only + /// generated if there are tasks present in this pallet. + #[pallet::task_enum] + pub enum Task<#type_decl_bounded_generics> { + #( + #variants, + )* + } + }; + task_enum_def.type_use_generics = type_use_generics; + task_enum_def + } } impl ToTokens for TaskEnumDef { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let item_enum = &self.item_enum; - let ident = &item_enum.ident; - let vis = &item_enum.vis; - let attrs = &item_enum.attrs; - let generics = &item_enum.generics; - let variants = &item_enum.variants; - let scrate = &self.scrate; - let type_use_generics = &self.type_use_generics; - if self.attr.is_some() { - // `item_enum` is short-hand / generated enum - tokens.extend(quote! { - #(#attrs)* - #[derive( - #scrate::CloneNoBound, - #scrate::EqNoBound, - #scrate::PartialEqNoBound, - #scrate::pallet_prelude::Encode, - #scrate::pallet_prelude::Decode, - #scrate::pallet_prelude::TypeInfo, - )] - #[codec(encode_bound())] - #[codec(decode_bound())] - #[scale_info(skip_type_params(#type_use_generics))] - #vis enum #ident #generics { - #variants - #[doc(hidden)] - #[codec(skip)] - __Ignore(core::marker::PhantomData, #scrate::Never), - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let item_enum = &self.item_enum; + let ident = &item_enum.ident; + let vis = &item_enum.vis; + let attrs = &item_enum.attrs; + let generics = &item_enum.generics; + let variants = &item_enum.variants; + let scrate = &self.scrate; + let type_use_generics = &self.type_use_generics; + if self.attr.is_some() { + // `item_enum` is short-hand / generated enum + tokens.extend(quote! 
{ + #(#attrs)* + #[derive( + #scrate::CloneNoBound, + #scrate::EqNoBound, + #scrate::PartialEqNoBound, + #scrate::pallet_prelude::Encode, + #scrate::pallet_prelude::Decode, + #scrate::pallet_prelude::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_generics))] + #vis enum #ident #generics { + #variants + #[doc(hidden)] + #[codec(skip)] + __Ignore(core::marker::PhantomData, #scrate::Never), + } - impl core::fmt::Debug for #ident<#type_use_generics> { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct(stringify!(#ident)).field("value", self).finish() - } - } - }); - } else { - // `item_enum` is a manually specified enum (no attribute) - tokens.extend(item_enum.to_token_stream()); - } - } + impl core::fmt::Debug for #ident<#type_use_generics> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct(stringify!(#ident)).field("value", self).finish() + } + } + }); + } else { + // `item_enum` is a manually specified enum (no attribute) + tokens.extend(item_enum.to_token_stream()); + } + } } /// Represents an already-expanded [`TasksDef`]. 
#[derive(Parse)] pub struct ExpandedTasksDef { - pub task_item_impl: ItemImpl, - pub task_trait_impl: ItemImpl, + pub task_item_impl: ItemImpl, + pub task_trait_impl: ItemImpl, } impl ToTokens for TasksDef { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let scrate = &self.scrate; - let enum_ident = syn::Ident::new("Task", self.enum_ident.span()); - let enum_arguments = &self.enum_arguments; - let enum_use = quote!(#enum_ident #enum_arguments); + fn to_tokens(&self, tokens: &mut TokenStream2) { + let scrate = &self.scrate; + let enum_ident = syn::Ident::new("Task", self.enum_ident.span()); + let enum_arguments = &self.enum_arguments; + let enum_use = quote!(#enum_ident #enum_arguments); - let task_fn_idents = self - .tasks - .iter() - .map(|task| { - format_ident!( - "{}", - &task.item.sig.ident.to_string().to_class_case(), - span = task.item.sig.ident.span() - ) - }) - .collect::>(); - let task_indices = self.tasks.iter().map(|task| &task.index_attr.meta.index); - let task_conditions = self.tasks.iter().map(|task| &task.condition_attr.meta.expr); - let task_weights = self.tasks.iter().map(|task| &task.weight_attr.meta.expr); - let task_iters = self.tasks.iter().map(|task| &task.list_attr.meta.expr); + let task_fn_idents = self + .tasks + .iter() + .map(|task| { + format_ident!( + "{}", + &task.item.sig.ident.to_string().to_class_case(), + span = task.item.sig.ident.span() + ) + }) + .collect::>(); + let task_indices = self.tasks.iter().map(|task| &task.index_attr.meta.index); + let task_conditions = self.tasks.iter().map(|task| &task.condition_attr.meta.expr); + let task_weights = self.tasks.iter().map(|task| &task.weight_attr.meta.expr); + let task_iters = self.tasks.iter().map(|task| &task.list_attr.meta.expr); - let task_fn_impls = self.tasks.iter().map(|task| { - let mut task_fn_impl = task.item.clone(); - task_fn_impl.attrs = vec![]; - task_fn_impl - }); + let task_fn_impls = self.tasks.iter().map(|task| { + let mut task_fn_impl = task.item.clone(); + 
task_fn_impl.attrs = vec![]; + task_fn_impl + }); - let task_fn_names = self.tasks.iter().map(|task| &task.item.sig.ident); - let task_arg_names = self.tasks.iter().map(|task| &task.arg_names).collect::>(); + let task_fn_names = self.tasks.iter().map(|task| &task.item.sig.ident); + let task_arg_names = self + .tasks + .iter() + .map(|task| &task.arg_names) + .collect::>(); - let sp_std = quote!(#scrate::__private::sp_std); - let impl_generics = &self.item_impl.generics; - tokens.extend(quote! { + let sp_std = quote!(#scrate::__private::sp_std); + let impl_generics = &self.item_impl.generics; + tokens.extend(quote! { impl #impl_generics #enum_use { #(#task_fn_impls)* @@ -212,56 +216,66 @@ impl ToTokens for TasksDef { } } }); - } + } } /// Expands the [`TasksDef`] in the enclosing [`Def`], if present, and returns its tokens. /// /// This modifies the underlying [`Def`] in addition to returning any tokens that were added. pub fn expand_tasks_impl(def: &mut Def) -> TokenStream2 { - let Some(tasks) = &mut def.tasks else { return quote!() }; - let ExpandedTasksDef { task_item_impl, task_trait_impl } = parse_quote!(#tasks); - quote! { - #task_item_impl - #task_trait_impl - } + let Some(tasks) = &mut def.tasks else { + return quote!(); + }; + let ExpandedTasksDef { + task_item_impl, + task_trait_impl, + } = parse_quote!(#tasks); + quote! { + #task_item_impl + #task_trait_impl + } } /// Represents a fully-expanded [`TaskEnumDef`]. #[derive(Parse)] pub struct ExpandedTaskEnum { - pub item_enum: ItemEnum, - pub debug_impl: ItemImpl, + pub item_enum: ItemEnum, + pub debug_impl: ItemImpl, } /// Modifies a [`Def`] to expand the underlying [`TaskEnumDef`] if present, and also returns /// its tokens. A blank [`TokenStream2`] is returned if no [`TaskEnumDef`] has been generated /// or defined. 
pub fn expand_task_enum(def: &mut Def) -> TokenStream2 { - let Some(task_enum) = &mut def.task_enum else { return quote!() }; - let ExpandedTaskEnum { item_enum, debug_impl } = parse_quote!(#task_enum); - quote! { - #item_enum - #debug_impl - } + let Some(task_enum) = &mut def.task_enum else { + return quote!(); + }; + let ExpandedTaskEnum { + item_enum, + debug_impl, + } = parse_quote!(#task_enum); + quote! { + #item_enum + #debug_impl + } } /// Modifies a [`Def`] to expand the underlying [`TasksDef`] and also generate a /// [`TaskEnumDef`] if applicable. The tokens for these items are returned if they are created. pub fn expand_tasks(def: &mut Def) -> TokenStream2 { - if let Some(tasks_def) = &def.tasks { - if def.task_enum.is_none() { - def.task_enum = Some(TaskEnumDef::generate( - &tasks_def, - def.type_decl_bounded_generics(tasks_def.item_impl.span()), - def.type_use_generics(tasks_def.item_impl.span()), - )); - } - } - let tasks_extra_output = expand_tasks_impl(def); - let task_enum_extra_output = expand_task_enum(def); - quote! { - #tasks_extra_output - #task_enum_extra_output - } + if let Some(tasks_def) = &def.tasks { + if def.task_enum.is_none() { + def.task_enum = Some(TaskEnumDef::generate( + &tasks_def, + def.type_decl_bounded_generics(tasks_def.item_impl.span()), + def.type_use_generics(tasks_def.item_impl.span()), + )); + } + } + let tasks_extra_output = expand_tasks_impl(def); + let task_enum_extra_output = expand_task_enum(def); + quote! { + #tasks_extra_output + #task_enum_extra_output + } } diff --git a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs index 99364aaa9..57b78339a 100644 --- a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs +++ b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs @@ -16,201 +16,211 @@ // limitations under the License. 
use crate::{ - pallet::{CompositeKeyword, Def}, - COUNTER, + pallet::{CompositeKeyword, Def}, + COUNTER, }; use syn::spanned::Spanned; /// Generate the `tt_default_parts` macro. pub fn expand_tt_default_parts(def: &mut Def) -> proc_macro2::TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let default_parts_unique_id = - syn::Ident::new(&format!("__tt_default_parts_{}", count), def.item.span()); - let extra_parts_unique_id = - syn::Ident::new(&format!("__tt_extra_parts_{}", count), def.item.span()); - let default_parts_unique_id_v2 = - syn::Ident::new(&format!("__tt_default_parts_v2_{}", count), def.item.span()); - - let call_part = def.call.as_ref().map(|_| quote::quote!(Call,)); - - let task_part = def.task_enum.as_ref().map(|_| quote::quote!(Task,)); - - let storage_part = (!def.storages.is_empty()).then(|| quote::quote!(Storage,)); - - let event_part = def.event.as_ref().map(|event| { - let gen = event.gen_kind.is_generic().then(|| quote::quote!( )); - quote::quote!( Event #gen , ) - }); - - let error_part = def.error.as_ref().map(|_| quote::quote!(Error,)); - - let origin_part = def.origin.as_ref().map(|origin| { - let gen = origin.is_generic.then(|| quote::quote!( )); - quote::quote!( Origin #gen , ) - }); - - let config_part = def.genesis_config.as_ref().map(|genesis_config| { - let gen = genesis_config.gen_kind.is_generic().then(|| quote::quote!( )); - quote::quote!( Config #gen , ) - }); - - let inherent_part = def.inherent.as_ref().map(|_| quote::quote!(Inherent,)); - - let validate_unsigned_part = - def.validate_unsigned.as_ref().map(|_| quote::quote!(ValidateUnsigned,)); - - let freeze_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) - .then_some(quote::quote!(FreezeReason,)); - - let hold_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) - .then_some(quote::quote!(HoldReason,)); - - let 
lock_id_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) - .then_some(quote::quote!(LockId,)); - - let slash_reason_part = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) - .then_some(quote::quote!(SlashReason,)); - - let call_part_v2 = def.call.as_ref().map(|_| quote::quote!(+ Call)); - - let task_part_v2 = def.task_enum.as_ref().map(|_| quote::quote!(+ Task)); - - let storage_part_v2 = (!def.storages.is_empty()).then(|| quote::quote!(+ Storage)); - - let event_part_v2 = def.event.as_ref().map(|event| { - let gen = event.gen_kind.is_generic().then(|| quote::quote!()); - quote::quote!(+ Event #gen) - }); - - let error_part_v2 = def.error.as_ref().map(|_| quote::quote!(+ Error)); - - let origin_part_v2 = def.origin.as_ref().map(|origin| { - let gen = origin.is_generic.then(|| quote::quote!()); - quote::quote!(+ Origin #gen) - }); - - let config_part_v2 = def.genesis_config.as_ref().map(|genesis_config| { - let gen = genesis_config.gen_kind.is_generic().then(|| quote::quote!()); - quote::quote!(+ Config #gen) - }); - - let inherent_part_v2 = def.inherent.as_ref().map(|_| quote::quote!(+ Inherent)); - - let validate_unsigned_part_v2 = - def.validate_unsigned.as_ref().map(|_| quote::quote!(+ ValidateUnsigned)); - - let freeze_reason_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) - .then_some(quote::quote!(+ FreezeReason)); - - let hold_reason_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) - .then_some(quote::quote!(+ HoldReason)); - - let lock_id_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) - .then_some(quote::quote!(+ LockId)); - - let slash_reason_part_v2 = def - .composites - .iter() - .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) - 
.then_some(quote::quote!(+ SlashReason)); - - quote::quote!( - // This macro follows the conventions as laid out by the `tt-call` crate. It does not - // accept any arguments and simply returns the pallet parts, separated by commas, then - // wrapped inside of braces and finally prepended with double colons, to the caller inside - // of a key named `tokens`. - // - // We need to accept a path argument here, because this macro gets expanded on the - // crate that called the `construct_runtime!` macro, and the actual path is unknown. - #[macro_export] - #[doc(hidden)] - macro_rules! #default_parts_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - tokens = [{ - expanded::{ - Pallet, #call_part #storage_part #event_part #error_part #origin_part #config_part - #inherent_part #validate_unsigned_part #freeze_reason_part #task_part - #hold_reason_part #lock_id_part #slash_reason_part - } - }] - } - }; - } - - pub use #default_parts_unique_id as tt_default_parts; - - - // This macro is similar to the `tt_default_parts!`. It expands the pallets that are declared - // explicitly (`System: frame_system::{Pallet, Call}`) with extra parts. - // - // For example, after expansion an explicit pallet would look like: - // `System: expanded::{Error} ::{Pallet, Call}`. - // - // The `expanded` keyword is a marker of the final state of the `construct_runtime!`. - #[macro_export] - #[doc(hidden)] - macro_rules! #extra_parts_unique_id { - { - $caller:tt - your_tt_return = [{ $my_tt_return:path }] - } => { - $my_tt_return! { - $caller - tokens = [{ - expanded::{ - #error_part - } - }] - } - }; - } - - pub use #extra_parts_unique_id as tt_extra_parts; - - #[macro_export] - #[doc(hidden)] - macro_rules! #default_parts_unique_id_v2 { - { - $caller:tt - frame_support = [{ $($frame_support:ident)::* }] - } => { - $($frame_support)*::__private::tt_return! 
{ - $caller - tokens = [{ - + Pallet #call_part_v2 #storage_part_v2 #event_part_v2 #error_part_v2 #origin_part_v2 #config_part_v2 - #inherent_part_v2 #validate_unsigned_part_v2 #freeze_reason_part_v2 #task_part_v2 - #hold_reason_part_v2 #lock_id_part_v2 #slash_reason_part_v2 - }] - } - }; - } - - pub use #default_parts_unique_id_v2 as tt_default_parts_v2; - ) + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let default_parts_unique_id = + syn::Ident::new(&format!("__tt_default_parts_{}", count), def.item.span()); + let extra_parts_unique_id = + syn::Ident::new(&format!("__tt_extra_parts_{}", count), def.item.span()); + let default_parts_unique_id_v2 = + syn::Ident::new(&format!("__tt_default_parts_v2_{}", count), def.item.span()); + + let call_part = def.call.as_ref().map(|_| quote::quote!(Call,)); + + let task_part = def.task_enum.as_ref().map(|_| quote::quote!(Task,)); + + let storage_part = (!def.storages.is_empty()).then(|| quote::quote!(Storage,)); + + let event_part = def.event.as_ref().map(|event| { + let gen = event.gen_kind.is_generic().then(|| quote::quote!( )); + quote::quote!( Event #gen , ) + }); + + let error_part = def.error.as_ref().map(|_| quote::quote!(Error,)); + + let origin_part = def.origin.as_ref().map(|origin| { + let gen = origin.is_generic.then(|| quote::quote!( )); + quote::quote!( Origin #gen , ) + }); + + let config_part = def.genesis_config.as_ref().map(|genesis_config| { + let gen = genesis_config + .gen_kind + .is_generic() + .then(|| quote::quote!( )); + quote::quote!( Config #gen , ) + }); + + let inherent_part = def.inherent.as_ref().map(|_| quote::quote!(Inherent,)); + + let validate_unsigned_part = def + .validate_unsigned + .as_ref() + .map(|_| quote::quote!(ValidateUnsigned,)); + + let freeze_reason_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) + .then_some(quote::quote!(FreezeReason,)); + + let hold_reason_part = def + .composites + .iter() + 
.any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) + .then_some(quote::quote!(HoldReason,)); + + let lock_id_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) + .then_some(quote::quote!(LockId,)); + + let slash_reason_part = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) + .then_some(quote::quote!(SlashReason,)); + + let call_part_v2 = def.call.as_ref().map(|_| quote::quote!(+ Call)); + + let task_part_v2 = def.task_enum.as_ref().map(|_| quote::quote!(+ Task)); + + let storage_part_v2 = (!def.storages.is_empty()).then(|| quote::quote!(+ Storage)); + + let event_part_v2 = def.event.as_ref().map(|event| { + let gen = event.gen_kind.is_generic().then(|| quote::quote!()); + quote::quote!(+ Event #gen) + }); + + let error_part_v2 = def.error.as_ref().map(|_| quote::quote!(+ Error)); + + let origin_part_v2 = def.origin.as_ref().map(|origin| { + let gen = origin.is_generic.then(|| quote::quote!()); + quote::quote!(+ Origin #gen) + }); + + let config_part_v2 = def.genesis_config.as_ref().map(|genesis_config| { + let gen = genesis_config + .gen_kind + .is_generic() + .then(|| quote::quote!()); + quote::quote!(+ Config #gen) + }); + + let inherent_part_v2 = def.inherent.as_ref().map(|_| quote::quote!(+ Inherent)); + + let validate_unsigned_part_v2 = def + .validate_unsigned + .as_ref() + .map(|_| quote::quote!(+ ValidateUnsigned)); + + let freeze_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::FreezeReason(_))) + .then_some(quote::quote!(+ FreezeReason)); + + let hold_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::HoldReason(_))) + .then_some(quote::quote!(+ HoldReason)); + + let lock_id_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::LockId(_))) + .then_some(quote::quote!(+ 
LockId)); + + let slash_reason_part_v2 = def + .composites + .iter() + .any(|c| matches!(c.composite_keyword, CompositeKeyword::SlashReason(_))) + .then_some(quote::quote!(+ SlashReason)); + + quote::quote!( + // This macro follows the conventions as laid out by the `tt-call` crate. It does not + // accept any arguments and simply returns the pallet parts, separated by commas, then + // wrapped inside of braces and finally prepended with double colons, to the caller inside + // of a key named `tokens`. + // + // We need to accept a path argument here, because this macro gets expanded on the + // crate that called the `construct_runtime!` macro, and the actual path is unknown. + #[macro_export] + #[doc(hidden)] + macro_rules! #default_parts_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + tokens = [{ + expanded::{ + Pallet, #call_part #storage_part #event_part #error_part #origin_part #config_part + #inherent_part #validate_unsigned_part #freeze_reason_part #task_part + #hold_reason_part #lock_id_part #slash_reason_part + } + }] + } + }; + } + + pub use #default_parts_unique_id as tt_default_parts; + + + // This macro is similar to the `tt_default_parts!`. It expands the pallets that are declared + // explicitly (`System: frame_system::{Pallet, Call}`) with extra parts. + // + // For example, after expansion an explicit pallet would look like: + // `System: expanded::{Error} ::{Pallet, Call}`. + // + // The `expanded` keyword is a marker of the final state of the `construct_runtime!`. + #[macro_export] + #[doc(hidden)] + macro_rules! #extra_parts_unique_id { + { + $caller:tt + your_tt_return = [{ $my_tt_return:path }] + } => { + $my_tt_return! { + $caller + tokens = [{ + expanded::{ + #error_part + } + }] + } + }; + } + + pub use #extra_parts_unique_id as tt_extra_parts; + + #[macro_export] + #[doc(hidden)] + macro_rules! 
#default_parts_unique_id_v2 { + { + $caller:tt + frame_support = [{ $($frame_support:ident)::* }] + } => { + $($frame_support)*::__private::tt_return! { + $caller + tokens = [{ + + Pallet #call_part_v2 #storage_part_v2 #event_part_v2 #error_part_v2 #origin_part_v2 #config_part_v2 + #inherent_part_v2 #validate_unsigned_part_v2 #freeze_reason_part_v2 #task_part_v2 + #hold_reason_part_v2 #lock_id_part_v2 #slash_reason_part_v2 + }] + } + }; + } + + pub use #default_parts_unique_id_v2 as tt_default_parts_v2; + ) } diff --git a/support/procedural-fork/src/pallet/expand/type_value.rs b/support/procedural-fork/src/pallet/expand/type_value.rs index 5dc6309c0..84db3e431 100644 --- a/support/procedural-fork/src/pallet/expand/type_value.rs +++ b/support/procedural-fork/src/pallet/expand/type_value.rs @@ -22,56 +22,56 @@ use crate::pallet::Def; /// * implement the `Get<..>` on it /// * Rename the name of the function to internal name pub fn expand_type_values(def: &mut Def) -> proc_macro2::TokenStream { - let mut expand = quote::quote!(); - let frame_support = &def.frame_support; + let mut expand = quote::quote!(); + let frame_support = &def.frame_support; - for type_value in &def.type_values { - let fn_name_str = &type_value.ident.to_string(); - let fn_name_snakecase = inflector::cases::snakecase::to_snake_case(fn_name_str); - let fn_ident_renamed = syn::Ident::new( - &format!("__type_value_for_{}", fn_name_snakecase), - type_value.ident.span(), - ); + for type_value in &def.type_values { + let fn_name_str = &type_value.ident.to_string(); + let fn_name_snakecase = inflector::cases::snakecase::to_snake_case(fn_name_str); + let fn_ident_renamed = syn::Ident::new( + &format!("__type_value_for_{}", fn_name_snakecase), + type_value.ident.span(), + ); - let type_value_item = { - let item = &mut def.item.content.as_mut().expect("Checked by def").1[type_value.index]; - if let syn::Item::Fn(item) = item { - item - } else { - unreachable!("Checked by error parser") - } - }; + let 
type_value_item = { + let item = &mut def.item.content.as_mut().expect("Checked by def").1[type_value.index]; + if let syn::Item::Fn(item) = item { + item + } else { + unreachable!("Checked by error parser") + } + }; - // Rename the type_value function name - type_value_item.sig.ident = fn_ident_renamed.clone(); + // Rename the type_value function name + type_value_item.sig.ident = fn_ident_renamed.clone(); - let vis = &type_value.vis; - let ident = &type_value.ident; - let type_ = &type_value.type_; - let where_clause = &type_value.where_clause; + let vis = &type_value.vis; + let ident = &type_value.ident; + let type_ = &type_value.type_; + let where_clause = &type_value.where_clause; - let (struct_impl_gen, struct_use_gen) = if type_value.is_generic { - ( - def.type_impl_generics(type_value.attr_span), - def.type_use_generics(type_value.attr_span), - ) - } else { - (Default::default(), Default::default()) - }; + let (struct_impl_gen, struct_use_gen) = if type_value.is_generic { + ( + def.type_impl_generics(type_value.attr_span), + def.type_use_generics(type_value.attr_span), + ) + } else { + (Default::default(), Default::default()) + }; - let docs = &type_value.docs; + let docs = &type_value.docs; - expand.extend(quote::quote_spanned!(type_value.attr_span => - #( #[doc = #docs] )* - #vis struct #ident<#struct_use_gen>(core::marker::PhantomData<((), #struct_use_gen)>); - impl<#struct_impl_gen> #frame_support::traits::Get<#type_> for #ident<#struct_use_gen> - #where_clause - { - fn get() -> #type_ { - #fn_ident_renamed::<#struct_use_gen>() - } - } - )); - } - expand + expand.extend(quote::quote_spanned!(type_value.attr_span => + #( #[doc = #docs] )* + #vis struct #ident<#struct_use_gen>(core::marker::PhantomData<((), #struct_use_gen)>); + impl<#struct_impl_gen> #frame_support::traits::Get<#type_> for #ident<#struct_use_gen> + #where_clause + { + fn get() -> #type_ { + #fn_ident_renamed::<#struct_use_gen>() + } + } + )); + } + expand } diff --git 
a/support/procedural-fork/src/pallet/expand/validate_unsigned.rs b/support/procedural-fork/src/pallet/expand/validate_unsigned.rs index 876995585..28c78a1c6 100644 --- a/support/procedural-fork/src/pallet/expand/validate_unsigned.rs +++ b/support/procedural-fork/src/pallet/expand/validate_unsigned.rs @@ -21,36 +21,38 @@ use quote::quote; use syn::{spanned::Spanned, Ident}; pub fn expand_validate_unsigned(def: &mut Def) -> TokenStream { - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = - Ident::new(&format!("__is_validate_unsigned_part_defined_{}", count), def.item.span()); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let macro_ident = Ident::new( + &format!("__is_validate_unsigned_part_defined_{}", count), + def.item.span(), + ); - let maybe_compile_error = if def.validate_unsigned.is_none() { - quote! { - compile_error!(concat!( - "`", - stringify!($pallet_name), - "` does not have #[pallet::validate_unsigned] defined, perhaps you should \ - remove `ValidateUnsigned` from construct_runtime?", - )); - } - } else { - TokenStream::new() - }; + let maybe_compile_error = if def.validate_unsigned.is_none() { + quote! { + compile_error!(concat!( + "`", + stringify!($pallet_name), + "` does not have #[pallet::validate_unsigned] defined, perhaps you should \ + remove `ValidateUnsigned` from construct_runtime?", + )); + } + } else { + TokenStream::new() + }; - quote! { - #[doc(hidden)] - pub mod __substrate_validate_unsigned_check { - #[macro_export] - #[doc(hidden)] - macro_rules! #macro_ident { - ($pallet_name:ident) => { - #maybe_compile_error - } - } + quote! { + #[doc(hidden)] + pub mod __substrate_validate_unsigned_check { + #[macro_export] + #[doc(hidden)] + macro_rules! 
#macro_ident { + ($pallet_name:ident) => { + #maybe_compile_error + } + } - #[doc(hidden)] - pub use #macro_ident as is_validate_unsigned_part_defined; - } - } + #[doc(hidden)] + pub use #macro_ident as is_validate_unsigned_part_defined; + } + } } diff --git a/support/procedural-fork/src/pallet/expand/warnings.rs b/support/procedural-fork/src/pallet/expand/warnings.rs index 030e3ddaf..3d71b83af 100644 --- a/support/procedural-fork/src/pallet/expand/warnings.rs +++ b/support/procedural-fork/src/pallet/expand/warnings.rs @@ -20,79 +20,84 @@ use crate::pallet::parse::call::{CallVariantDef, CallWeightDef}; use proc_macro_warning::Warning; use syn::{ - spanned::Spanned, - visit::{self, Visit}, + spanned::Spanned, + visit::{self, Visit}, }; /// Warn if any of the call arguments starts with a underscore and is used in a weight formula. pub(crate) fn weight_witness_warning( - method: &CallVariantDef, - dev_mode: bool, - warnings: &mut Vec, + method: &CallVariantDef, + dev_mode: bool, + warnings: &mut Vec, ) { - if dev_mode { - return - } - let CallWeightDef::Immediate(w) = &method.weight else { return }; + if dev_mode { + return; + } + let CallWeightDef::Immediate(w) = &method.weight else { + return; + }; - let partial_warning = Warning::new_deprecated("UncheckedWeightWitness") - .old("not check weight witness data") - .new("ensure that all witness data for weight calculation is checked before usage") - .help_link("https://github.com/paritytech/polkadot-sdk/pull/1818"); + let partial_warning = Warning::new_deprecated("UncheckedWeightWitness") + .old("not check weight witness data") + .new("ensure that all witness data for weight calculation is checked before usage") + .help_link("https://github.com/paritytech/polkadot-sdk/pull/1818"); - for (_, arg_ident, _) in method.args.iter() { - if !arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), &arg_ident) { - continue - } + for (_, arg_ident, _) in method.args.iter() { + if 
!arg_ident.to_string().starts_with('_') || !contains_ident(w.clone(), &arg_ident) { + continue; + } - let warning = partial_warning - .clone() - .index(warnings.len()) - .span(arg_ident.span()) - .build_or_panic(); + let warning = partial_warning + .clone() + .index(warnings.len()) + .span(arg_ident.span()) + .build_or_panic(); - warnings.push(warning); - } + warnings.push(warning); + } } /// Warn if the weight is a constant and the pallet not in `dev_mode`. pub(crate) fn weight_constant_warning( - weight: &syn::Expr, - dev_mode: bool, - warnings: &mut Vec, + weight: &syn::Expr, + dev_mode: bool, + warnings: &mut Vec, ) { - if dev_mode { - return - } - let syn::Expr::Lit(lit) = weight else { return }; + if dev_mode { + return; + } + let syn::Expr::Lit(lit) = weight else { return }; - let warning = Warning::new_deprecated("ConstantWeight") - .index(warnings.len()) - .old("use hard-coded constant as call weight") - .new("benchmark all calls or put the pallet into `dev` mode") - .help_link("https://github.com/paritytech/substrate/pull/13798") - .span(lit.span()) - .build_or_panic(); + let warning = Warning::new_deprecated("ConstantWeight") + .index(warnings.len()) + .old("use hard-coded constant as call weight") + .new("benchmark all calls or put the pallet into `dev` mode") + .help_link("https://github.com/paritytech/substrate/pull/13798") + .span(lit.span()) + .build_or_panic(); - warnings.push(warning); + warnings.push(warning); } /// Returns whether `expr` contains `ident`. 
fn contains_ident(mut expr: syn::Expr, ident: &syn::Ident) -> bool { - struct ContainsIdent { - ident: syn::Ident, - found: bool, - } + struct ContainsIdent { + ident: syn::Ident, + found: bool, + } - impl<'a> Visit<'a> for ContainsIdent { - fn visit_ident(&mut self, i: &syn::Ident) { - if *i == self.ident { - self.found = true; - } - } - } + impl<'a> Visit<'a> for ContainsIdent { + fn visit_ident(&mut self, i: &syn::Ident) { + if *i == self.ident { + self.found = true; + } + } + } - let mut visitor = ContainsIdent { ident: ident.clone(), found: false }; - visit::visit_expr(&mut visitor, &mut expr); - visitor.found + let mut visitor = ContainsIdent { + ident: ident.clone(), + found: false, + }; + visit::visit_expr(&mut visitor, &mut expr); + visitor.found } diff --git a/support/procedural-fork/src/pallet/mod.rs b/support/procedural-fork/src/pallet/mod.rs index 42d8272fb..d3796662f 100644 --- a/support/procedural-fork/src/pallet/mod.rs +++ b/support/procedural-fork/src/pallet/mod.rs @@ -32,30 +32,30 @@ pub use parse::{composite::keyword::CompositeKeyword, Def}; use syn::spanned::Spanned; mod keyword { - syn::custom_keyword!(dev_mode); + syn::custom_keyword!(dev_mode); } pub fn pallet( - attr: proc_macro::TokenStream, - item: proc_macro::TokenStream, + attr: proc_macro::TokenStream, + item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { - let mut dev_mode = false; - if !attr.is_empty() { - if let Ok(_) = syn::parse::(attr.clone()) { - dev_mode = true; - } else { - let msg = "Invalid pallet macro call: unexpected attribute. Macro call must be \ + let mut dev_mode = false; + if !attr.is_empty() { + if let Ok(_) = syn::parse::(attr.clone()) { + dev_mode = true; + } else { + let msg = "Invalid pallet macro call: unexpected attribute. 
Macro call must be \ bare, such as `#[frame_support::pallet]` or `#[pallet]`, or must specify the \ `dev_mode` attribute, such as `#[frame_support::pallet(dev_mode)]` or \ #[pallet(dev_mode)]."; - let span = proc_macro2::TokenStream::from(attr).span(); - return syn::Error::new(span, msg).to_compile_error().into() - } - } - - let item = syn::parse_macro_input!(item as syn::ItemMod); - match parse::Def::try_from(item, dev_mode) { - Ok(def) => expand::expand(def).into(), - Err(e) => e.to_compile_error().into(), - } + let span = proc_macro2::TokenStream::from(attr).span(); + return syn::Error::new(span, msg).to_compile_error().into(); + } + } + + let item = syn::parse_macro_input!(item as syn::ItemMod); + match parse::Def::try_from(item, dev_mode) { + Ok(def) => expand::expand(def).into(), + Err(e) => e.to_compile_error().into(), + } } diff --git a/support/procedural-fork/src/pallet/parse/call.rs b/support/procedural-fork/src/pallet/parse/call.rs index 4e09b86fd..865c63473 100644 --- a/support/procedural-fork/src/pallet/parse/call.rs +++ b/support/procedural-fork/src/pallet/parse/call.rs @@ -24,124 +24,124 @@ use syn::{spanned::Spanned, ExprClosure}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Call); - syn::custom_keyword!(OriginFor); - syn::custom_keyword!(RuntimeOrigin); - syn::custom_keyword!(weight); - syn::custom_keyword!(call_index); - syn::custom_keyword!(compact); - syn::custom_keyword!(T); - syn::custom_keyword!(pallet); - syn::custom_keyword!(feeless_if); + syn::custom_keyword!(Call); + syn::custom_keyword!(OriginFor); + syn::custom_keyword!(RuntimeOrigin); + syn::custom_keyword!(weight); + syn::custom_keyword!(call_index); + syn::custom_keyword!(compact); + syn::custom_keyword!(T); + syn::custom_keyword!(pallet); + syn::custom_keyword!(feeless_if); } /// Definition of dispatchables typically `impl Pallet { ... }` pub struct CallDef { - /// The where_clause used. 
- pub where_clause: Option, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The index of call item in pallet module. - pub index: usize, - /// Information on methods (used for expansion). - pub methods: Vec, - /// The span of the pallet::call attribute. - pub attr_span: proc_macro2::Span, - /// Docs, specified on the impl Block. - pub docs: Vec, - /// The optional `weight` attribute on the `pallet::call`. - pub inherited_call_weight: Option, + /// The where_clause used. + pub where_clause: Option, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The index of call item in pallet module. + pub index: usize, + /// Information on methods (used for expansion). + pub methods: Vec, + /// The span of the pallet::call attribute. + pub attr_span: proc_macro2::Span, + /// Docs, specified on the impl Block. + pub docs: Vec, + /// The optional `weight` attribute on the `pallet::call`. + pub inherited_call_weight: Option, } /// The weight of a call. #[derive(Clone)] pub enum CallWeightDef { - /// Explicitly set on the call itself with `#[pallet::weight(…)]`. This value is used. - Immediate(syn::Expr), + /// Explicitly set on the call itself with `#[pallet::weight(…)]`. This value is used. + Immediate(syn::Expr), - /// The default value that should be set for dev-mode pallets. Usually zero. - DevModeDefault, + /// The default value that should be set for dev-mode pallets. Usually zero. + DevModeDefault, - /// Inherits whatever value is configured on the pallet level. - /// - /// The concrete value is not known at this point. - Inherited, + /// Inherits whatever value is configured on the pallet level. + /// + /// The concrete value is not known at this point. + Inherited, } /// Definition of dispatchable typically: `#[weight...] fn foo(origin .., param1: ...) -> ..` #[derive(Clone)] pub struct CallVariantDef { - /// Function name. 
- pub name: syn::Ident, - /// Information on args: `(is_compact, name, type)` - pub args: Vec<(bool, syn::Ident, Box)>, - /// Weight for the call. - pub weight: CallWeightDef, - /// Call index of the dispatchable. - pub call_index: u8, - /// Whether an explicit call index was specified. - pub explicit_call_index: bool, - /// Docs, used for metadata. - pub docs: Vec, - /// Attributes annotated at the top of the dispatchable function. - pub attrs: Vec, - /// The `cfg` attributes. - pub cfg_attrs: Vec, - /// The optional `feeless_if` attribute on the `pallet::call`. - pub feeless_check: Option, + /// Function name. + pub name: syn::Ident, + /// Information on args: `(is_compact, name, type)` + pub args: Vec<(bool, syn::Ident, Box)>, + /// Weight for the call. + pub weight: CallWeightDef, + /// Call index of the dispatchable. + pub call_index: u8, + /// Whether an explicit call index was specified. + pub explicit_call_index: bool, + /// Docs, used for metadata. + pub docs: Vec, + /// Attributes annotated at the top of the dispatchable function. + pub attrs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, + /// The optional `feeless_if` attribute on the `pallet::call`. + pub feeless_check: Option, } /// Attributes for functions in call impl block. 
pub enum FunctionAttr { - /// Parse for `#[pallet::call_index(expr)]` - CallIndex(u8), - /// Parse for `#[pallet::weight(expr)]` - Weight(syn::Expr), - /// Parse for `#[pallet::feeless_if(expr)]` - FeelessIf(Span, syn::ExprClosure), + /// Parse for `#[pallet::call_index(expr)]` + CallIndex(u8), + /// Parse for `#[pallet::weight(expr)]` + Weight(syn::Expr), + /// Parse for `#[pallet::feeless_if(expr)]` + FeelessIf(Span, syn::ExprClosure), } impl syn::parse::Parse for FunctionAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::weight) { - content.parse::()?; - let weight_content; - syn::parenthesized!(weight_content in content); - Ok(FunctionAttr::Weight(weight_content.parse::()?)) - } else if lookahead.peek(keyword::call_index) { - content.parse::()?; - let call_index_content; - syn::parenthesized!(call_index_content in content); - let index = call_index_content.parse::()?; - if !index.suffix().is_empty() { - let msg = "Number literal must not have a suffix"; - return Err(syn::Error::new(index.span(), msg)) - } - Ok(FunctionAttr::CallIndex(index.base10_parse()?)) - } else if lookahead.peek(keyword::feeless_if) { - content.parse::()?; - let closure_content; - syn::parenthesized!(closure_content in content); - Ok(FunctionAttr::FeelessIf( - closure_content.span(), - closure_content.parse::().map_err(|e| { - let msg = "Invalid feeless_if attribute: expected a closure"; - let mut err = syn::Error::new(closure_content.span(), msg); - err.combine(e); - err - })?, - )) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::weight) { + 
content.parse::()?; + let weight_content; + syn::parenthesized!(weight_content in content); + Ok(FunctionAttr::Weight(weight_content.parse::()?)) + } else if lookahead.peek(keyword::call_index) { + content.parse::()?; + let call_index_content; + syn::parenthesized!(call_index_content in content); + let index = call_index_content.parse::()?; + if !index.suffix().is_empty() { + let msg = "Number literal must not have a suffix"; + return Err(syn::Error::new(index.span(), msg)); + } + Ok(FunctionAttr::CallIndex(index.base10_parse()?)) + } else if lookahead.peek(keyword::feeless_if) { + content.parse::()?; + let closure_content; + syn::parenthesized!(closure_content in content); + Ok(FunctionAttr::FeelessIf( + closure_content.span(), + closure_content.parse::().map_err(|e| { + let msg = "Invalid feeless_if attribute: expected a closure"; + let mut err = syn::Error::new(closure_content.span(), msg); + err.combine(e); + err + })?, + )) + } else { + Err(lookahead.error()) + } + } } /// Attribute for arguments in function in call impl block. @@ -149,319 +149,324 @@ impl syn::parse::Parse for FunctionAttr { pub struct ArgAttrIsCompact; impl syn::parse::Parse for ArgAttrIsCompact { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - content.parse::()?; - Ok(ArgAttrIsCompact) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + content.parse::()?; + Ok(ArgAttrIsCompact) + } } /// Check the syntax is `OriginFor`, `&OriginFor` or `T::RuntimeOrigin`. 
pub fn check_dispatchable_first_arg_type(ty: &syn::Type, is_ref: bool) -> syn::Result<()> { - pub struct CheckOriginFor(bool); - impl syn::parse::Parse for CheckOriginFor { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let is_ref = input.parse::().is_ok(); - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - - Ok(Self(is_ref)) - } - } - - pub struct CheckRuntimeOrigin; - impl syn::parse::Parse for CheckRuntimeOrigin { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self) - } - } - - let result_origin_for = syn::parse2::(ty.to_token_stream()); - let result_runtime_origin = syn::parse2::(ty.to_token_stream()); - return match (result_origin_for, result_runtime_origin) { - (Ok(CheckOriginFor(has_ref)), _) if is_ref == has_ref => Ok(()), - (_, Ok(_)) => Ok(()), - (_, _) => { - let msg = if is_ref { - "Invalid type: expected `&OriginFor`" - } else { - "Invalid type: expected `OriginFor` or `T::RuntimeOrigin`" - }; - return Err(syn::Error::new(ty.span(), msg)) - }, - } + pub struct CheckOriginFor(bool); + impl syn::parse::Parse for CheckOriginFor { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let is_ref = input.parse::().is_ok(); + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(is_ref)) + } + } + + pub struct CheckRuntimeOrigin; + impl syn::parse::Parse for CheckRuntimeOrigin { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self) + } + } + + let result_origin_for = syn::parse2::(ty.to_token_stream()); + let result_runtime_origin = syn::parse2::(ty.to_token_stream()); + return match (result_origin_for, result_runtime_origin) { + (Ok(CheckOriginFor(has_ref)), _) if is_ref == has_ref => Ok(()), + (_, Ok(_)) => Ok(()), + (_, _) => { + let msg = if is_ref { + "Invalid type: expected `&OriginFor`" + } else { + 
"Invalid type: expected `OriginFor` or `T::RuntimeOrigin`" + }; + return Err(syn::Error::new(ty.span(), msg)); + } + }; } impl CallDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - dev_mode: bool, - inherited_call_weight: Option, - ) -> syn::Result { - let item_impl = if let syn::Item::Impl(item) = item { - item - } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::call, expected item impl")) - }; - - let instances = vec![ - helper::check_impl_gen(&item_impl.generics, item_impl.impl_token.span())?, - helper::check_pallet_struct_usage(&item_impl.self_ty)?, - ]; - - if let Some((_, _, for_)) = item_impl.trait_ { - let msg = "Invalid pallet::call, expected no trait ident as in \ + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + dev_mode: bool, + inherited_call_weight: Option, + ) -> syn::Result { + let item_impl = if let syn::Item::Impl(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::call, expected item impl", + )); + }; + + let instances = vec![ + helper::check_impl_gen(&item_impl.generics, item_impl.impl_token.span())?, + helper::check_pallet_struct_usage(&item_impl.self_ty)?, + ]; + + if let Some((_, _, for_)) = item_impl.trait_ { + let msg = "Invalid pallet::call, expected no trait ident as in \ `impl<..> Pallet<..> { .. 
}`"; - return Err(syn::Error::new(for_.span(), msg)) - } - - let mut methods = vec![]; - let mut indices = HashMap::new(); - let mut last_index: Option = None; - for item in &mut item_impl.items { - if let syn::ImplItem::Fn(method) = item { - if !matches!(method.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::call, dispatchable function must be public: \ + return Err(syn::Error::new(for_.span(), msg)); + } + + let mut methods = vec![]; + let mut indices = HashMap::new(); + let mut last_index: Option = None; + for item in &mut item_impl.items { + if let syn::ImplItem::Fn(method) = item { + if !matches!(method.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::call, dispatchable function must be public: \ `pub fn`"; - let span = match method.vis { - syn::Visibility::Inherited => method.sig.span(), - _ => method.vis.span(), - }; - - return Err(syn::Error::new(span, msg)) - } - - match method.sig.inputs.first() { - None => { - let msg = "Invalid pallet::call, must have at least origin arg"; - return Err(syn::Error::new(method.sig.span(), msg)) - }, - Some(syn::FnArg::Receiver(_)) => { - let msg = "Invalid pallet::call, first argument must be a typed argument, \ + let span = match method.vis { + syn::Visibility::Inherited => method.sig.span(), + _ => method.vis.span(), + }; + + return Err(syn::Error::new(span, msg)); + } + + match method.sig.inputs.first() { + None => { + let msg = "Invalid pallet::call, must have at least origin arg"; + return Err(syn::Error::new(method.sig.span(), msg)); + } + Some(syn::FnArg::Receiver(_)) => { + let msg = "Invalid pallet::call, first argument must be a typed argument, \ e.g. 
`origin: OriginFor`"; - return Err(syn::Error::new(method.sig.span(), msg)) - }, - Some(syn::FnArg::Typed(arg)) => { - check_dispatchable_first_arg_type(&arg.ty, false)?; - }, - } - - if let syn::ReturnType::Type(_, type_) = &method.sig.output { - helper::check_pallet_call_return_type(type_)?; - } else { - let msg = "Invalid pallet::call, require return type \ + return Err(syn::Error::new(method.sig.span(), msg)); + } + Some(syn::FnArg::Typed(arg)) => { + check_dispatchable_first_arg_type(&arg.ty, false)?; + } + } + + if let syn::ReturnType::Type(_, type_) = &method.sig.output { + helper::check_pallet_call_return_type(type_)?; + } else { + let msg = "Invalid pallet::call, require return type \ DispatchResultWithPostInfo"; - return Err(syn::Error::new(method.sig.span(), msg)) - } - - let cfg_attrs: Vec = helper::get_item_cfg_attrs(&method.attrs); - let mut call_idx_attrs = vec![]; - let mut weight_attrs = vec![]; - let mut feeless_attrs = vec![]; - for attr in helper::take_item_pallet_attrs(&mut method.attrs)?.into_iter() { - match attr { - FunctionAttr::CallIndex(_) => { - call_idx_attrs.push(attr); - }, - FunctionAttr::Weight(_) => { - weight_attrs.push(attr); - }, - FunctionAttr::FeelessIf(span, _) => { - feeless_attrs.push((span, attr)); - }, - } - } - - if weight_attrs.is_empty() && dev_mode { - // inject a default O(1) weight when dev mode is enabled and no weight has - // been specified on the call - let empty_weight: syn::Expr = syn::parse_quote!(0); - weight_attrs.push(FunctionAttr::Weight(empty_weight)); - } - - let weight = match weight_attrs.len() { - 0 if inherited_call_weight.is_some() => CallWeightDef::Inherited, - 0 if dev_mode => CallWeightDef::DevModeDefault, - 0 => return Err(syn::Error::new( - method.sig.span(), - "A pallet::call requires either a concrete `#[pallet::weight($expr)]` or an + return Err(syn::Error::new(method.sig.span(), msg)); + } + + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&method.attrs); + let mut call_idx_attrs = 
vec![]; + let mut weight_attrs = vec![]; + let mut feeless_attrs = vec![]; + for attr in helper::take_item_pallet_attrs(&mut method.attrs)?.into_iter() { + match attr { + FunctionAttr::CallIndex(_) => { + call_idx_attrs.push(attr); + } + FunctionAttr::Weight(_) => { + weight_attrs.push(attr); + } + FunctionAttr::FeelessIf(span, _) => { + feeless_attrs.push((span, attr)); + } + } + } + + if weight_attrs.is_empty() && dev_mode { + // inject a default O(1) weight when dev mode is enabled and no weight has + // been specified on the call + let empty_weight: syn::Expr = syn::parse_quote!(0); + weight_attrs.push(FunctionAttr::Weight(empty_weight)); + } + + let weight = match weight_attrs.len() { + 0 if inherited_call_weight.is_some() => CallWeightDef::Inherited, + 0 if dev_mode => CallWeightDef::DevModeDefault, + 0 => return Err(syn::Error::new( + method.sig.span(), + "A pallet::call requires either a concrete `#[pallet::weight($expr)]` or an inherited weight from the `#[pallet:call(weight($type))]` attribute, but none were given.", - )), - 1 => match weight_attrs.pop().unwrap() { - FunctionAttr::Weight(w) => CallWeightDef::Immediate(w), - _ => unreachable!("checked during creation of the let binding"), - }, - _ => { - let msg = "Invalid pallet::call, too many weight attributes given"; - return Err(syn::Error::new(method.sig.span(), msg)) - }, - }; - - if call_idx_attrs.len() > 1 { - let msg = "Invalid pallet::call, too many call_index attributes given"; - return Err(syn::Error::new(method.sig.span(), msg)) - } - let call_index = call_idx_attrs.pop().map(|attr| match attr { - FunctionAttr::CallIndex(idx) => idx, - _ => unreachable!("checked during creation of the let binding"), - }); - let explicit_call_index = call_index.is_some(); - - let final_index = match call_index { - Some(i) => i, - None => - last_index.map_or(Some(0), |idx| idx.checked_add(1)).ok_or_else(|| { - let msg = "Call index doesn't fit into u8, index is 256"; - syn::Error::new(method.sig.span(), msg) - 
})?, - }; - last_index = Some(final_index); - - if let Some(used_fn) = indices.insert(final_index, method.sig.ident.clone()) { - let msg = format!( - "Call indices are conflicting: Both functions {} and {} are at index {}", - used_fn, method.sig.ident, final_index, - ); - let mut err = syn::Error::new(used_fn.span(), &msg); - err.combine(syn::Error::new(method.sig.ident.span(), msg)); - return Err(err) - } - - let mut args = vec![]; - for arg in method.sig.inputs.iter_mut().skip(1) { - let arg = if let syn::FnArg::Typed(arg) = arg { - arg - } else { - unreachable!("Only first argument can be receiver"); - }; - - let arg_attrs: Vec = - helper::take_item_pallet_attrs(&mut arg.attrs)?; - - if arg_attrs.len() > 1 { - let msg = "Invalid pallet::call, argument has too many attributes"; - return Err(syn::Error::new(arg.span(), msg)) - } - - let arg_ident = if let syn::Pat::Ident(pat) = &*arg.pat { - pat.ident.clone() - } else { - let msg = "Invalid pallet::call, argument must be ident"; - return Err(syn::Error::new(arg.pat.span(), msg)) - }; - - args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); - } - - let docs = get_doc_literals(&method.attrs); - - if feeless_attrs.len() > 1 { - let msg = "Invalid pallet::call, there can only be one feeless_if attribute"; - return Err(syn::Error::new(feeless_attrs[1].0, msg)) - } - let feeless_check: Option = - feeless_attrs.pop().map(|(_, attr)| match attr { - FunctionAttr::FeelessIf(_, closure) => closure, - _ => unreachable!("checked during creation of the let binding"), - }); - - if let Some(ref feeless_check) = feeless_check { - if feeless_check.inputs.len() != args.len() + 1 { - let msg = "Invalid pallet::call, feeless_if closure must have same \ + )), + 1 => match weight_attrs.pop().unwrap() { + FunctionAttr::Weight(w) => CallWeightDef::Immediate(w), + _ => unreachable!("checked during creation of the let binding"), + }, + _ => { + let msg = "Invalid pallet::call, too many weight attributes given"; + return 
Err(syn::Error::new(method.sig.span(), msg)); + } + }; + + if call_idx_attrs.len() > 1 { + let msg = "Invalid pallet::call, too many call_index attributes given"; + return Err(syn::Error::new(method.sig.span(), msg)); + } + let call_index = call_idx_attrs.pop().map(|attr| match attr { + FunctionAttr::CallIndex(idx) => idx, + _ => unreachable!("checked during creation of the let binding"), + }); + let explicit_call_index = call_index.is_some(); + + let final_index = match call_index { + Some(i) => i, + None => last_index + .map_or(Some(0), |idx| idx.checked_add(1)) + .ok_or_else(|| { + let msg = "Call index doesn't fit into u8, index is 256"; + syn::Error::new(method.sig.span(), msg) + })?, + }; + last_index = Some(final_index); + + if let Some(used_fn) = indices.insert(final_index, method.sig.ident.clone()) { + let msg = format!( + "Call indices are conflicting: Both functions {} and {} are at index {}", + used_fn, method.sig.ident, final_index, + ); + let mut err = syn::Error::new(used_fn.span(), &msg); + err.combine(syn::Error::new(method.sig.ident.span(), msg)); + return Err(err); + } + + let mut args = vec![]; + for arg in method.sig.inputs.iter_mut().skip(1) { + let arg = if let syn::FnArg::Typed(arg) = arg { + arg + } else { + unreachable!("Only first argument can be receiver"); + }; + + let arg_attrs: Vec = + helper::take_item_pallet_attrs(&mut arg.attrs)?; + + if arg_attrs.len() > 1 { + let msg = "Invalid pallet::call, argument has too many attributes"; + return Err(syn::Error::new(arg.span(), msg)); + } + + let arg_ident = if let syn::Pat::Ident(pat) = &*arg.pat { + pat.ident.clone() + } else { + let msg = "Invalid pallet::call, argument must be ident"; + return Err(syn::Error::new(arg.pat.span(), msg)); + }; + + args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); + } + + let docs = get_doc_literals(&method.attrs); + + if feeless_attrs.len() > 1 { + let msg = "Invalid pallet::call, there can only be one feeless_if attribute"; + return 
Err(syn::Error::new(feeless_attrs[1].0, msg)); + } + let feeless_check: Option = + feeless_attrs.pop().map(|(_, attr)| match attr { + FunctionAttr::FeelessIf(_, closure) => closure, + _ => unreachable!("checked during creation of the let binding"), + }); + + if let Some(ref feeless_check) = feeless_check { + if feeless_check.inputs.len() != args.len() + 1 { + let msg = "Invalid pallet::call, feeless_if closure must have same \ number of arguments as the dispatchable function"; - return Err(syn::Error::new(feeless_check.span(), msg)) - } - - match feeless_check.inputs.first() { - None => { - let msg = "Invalid pallet::call, feeless_if closure must have at least origin arg"; - return Err(syn::Error::new(feeless_check.span(), msg)) - }, - Some(syn::Pat::Type(arg)) => { - check_dispatchable_first_arg_type(&arg.ty, true)?; - }, - _ => { - let msg = "Invalid pallet::call, feeless_if closure first argument must be a typed argument, \ + return Err(syn::Error::new(feeless_check.span(), msg)); + } + + match feeless_check.inputs.first() { + None => { + let msg = "Invalid pallet::call, feeless_if closure must have at least origin arg"; + return Err(syn::Error::new(feeless_check.span(), msg)); + } + Some(syn::Pat::Type(arg)) => { + check_dispatchable_first_arg_type(&arg.ty, true)?; + } + _ => { + let msg = "Invalid pallet::call, feeless_if closure first argument must be a typed argument, \ e.g. `origin: OriginFor`"; - return Err(syn::Error::new(feeless_check.span(), msg)) - }, - } - - for (feeless_arg, arg) in feeless_check.inputs.iter().skip(1).zip(args.iter()) { - let feeless_arg_type = - if let syn::Pat::Type(syn::PatType { ty, .. 
}) = feeless_arg.clone() { - if let syn::Type::Reference(pat) = *ty { - pat.elem.clone() - } else { - let msg = "Invalid pallet::call, feeless_if closure argument must be a reference"; - return Err(syn::Error::new(ty.span(), msg)) - } - } else { - let msg = "Invalid pallet::call, feeless_if closure argument must be a type ascription pattern"; - return Err(syn::Error::new(feeless_arg.span(), msg)) - }; - - if feeless_arg_type != arg.2 { - let msg = - "Invalid pallet::call, feeless_if closure argument must have \ + return Err(syn::Error::new(feeless_check.span(), msg)); + } + } + + for (feeless_arg, arg) in feeless_check.inputs.iter().skip(1).zip(args.iter()) { + let feeless_arg_type = if let syn::Pat::Type(syn::PatType { ty, .. }) = + feeless_arg.clone() + { + if let syn::Type::Reference(pat) = *ty { + pat.elem.clone() + } else { + let msg = "Invalid pallet::call, feeless_if closure argument must be a reference"; + return Err(syn::Error::new(ty.span(), msg)); + } + } else { + let msg = "Invalid pallet::call, feeless_if closure argument must be a type ascription pattern"; + return Err(syn::Error::new(feeless_arg.span(), msg)); + }; + + if feeless_arg_type != arg.2 { + let msg = + "Invalid pallet::call, feeless_if closure argument must have \ a reference to the same type as the dispatchable function argument"; - return Err(syn::Error::new(feeless_arg.span(), msg)) - } - } - - let valid_return = match &feeless_check.output { - syn::ReturnType::Type(_, type_) => match *(type_.clone()) { - syn::Type::Path(syn::TypePath { path, .. 
}) => path.is_ident("bool"), - _ => false, - }, - _ => false, - }; - if !valid_return { - let msg = "Invalid pallet::call, feeless_if closure must return `bool`"; - return Err(syn::Error::new(feeless_check.output.span(), msg)) - } - } - - methods.push(CallVariantDef { - name: method.sig.ident.clone(), - weight, - call_index: final_index, - explicit_call_index, - args, - docs, - attrs: method.attrs.clone(), - cfg_attrs, - feeless_check, - }); - } else { - let msg = "Invalid pallet::call, only method accepted"; - return Err(syn::Error::new(item.span(), msg)) - } - } - - Ok(Self { - index, - attr_span, - instances, - methods, - where_clause: item_impl.generics.where_clause.clone(), - docs: get_doc_literals(&item_impl.attrs), - inherited_call_weight, - }) - } + return Err(syn::Error::new(feeless_arg.span(), msg)); + } + } + + let valid_return = match &feeless_check.output { + syn::ReturnType::Type(_, type_) => match *(type_.clone()) { + syn::Type::Path(syn::TypePath { path, .. }) => path.is_ident("bool"), + _ => false, + }, + _ => false, + }; + if !valid_return { + let msg = "Invalid pallet::call, feeless_if closure must return `bool`"; + return Err(syn::Error::new(feeless_check.output.span(), msg)); + } + } + + methods.push(CallVariantDef { + name: method.sig.ident.clone(), + weight, + call_index: final_index, + explicit_call_index, + args, + docs, + attrs: method.attrs.clone(), + cfg_attrs, + feeless_check, + }); + } else { + let msg = "Invalid pallet::call, only method accepted"; + return Err(syn::Error::new(item.span(), msg)); + } + } + + Ok(Self { + index, + attr_span, + instances, + methods, + where_clause: item_impl.generics.where_clause.clone(), + docs: get_doc_literals(&item_impl.attrs), + inherited_call_weight, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/composite.rs b/support/procedural-fork/src/pallet/parse/composite.rs index c3ac74846..38da1f205 100644 --- a/support/procedural-fork/src/pallet/parse/composite.rs +++ 
b/support/procedural-fork/src/pallet/parse/composite.rs @@ -20,172 +20,178 @@ use quote::ToTokens; use syn::spanned::Spanned; pub mod keyword { - use super::*; - - syn::custom_keyword!(FreezeReason); - syn::custom_keyword!(HoldReason); - syn::custom_keyword!(LockId); - syn::custom_keyword!(SlashReason); - syn::custom_keyword!(Task); - - pub enum CompositeKeyword { - FreezeReason(FreezeReason), - HoldReason(HoldReason), - LockId(LockId), - SlashReason(SlashReason), - Task(Task), - } - - impl ToTokens for CompositeKeyword { - fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { - use CompositeKeyword::*; - match self { - FreezeReason(inner) => inner.to_tokens(tokens), - HoldReason(inner) => inner.to_tokens(tokens), - LockId(inner) => inner.to_tokens(tokens), - SlashReason(inner) => inner.to_tokens(tokens), - Task(inner) => inner.to_tokens(tokens), - } - } - } - - impl syn::parse::Parse for CompositeKeyword { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lookahead = input.lookahead1(); - if lookahead.peek(FreezeReason) { - Ok(Self::FreezeReason(input.parse()?)) - } else if lookahead.peek(HoldReason) { - Ok(Self::HoldReason(input.parse()?)) - } else if lookahead.peek(LockId) { - Ok(Self::LockId(input.parse()?)) - } else if lookahead.peek(SlashReason) { - Ok(Self::SlashReason(input.parse()?)) - } else if lookahead.peek(Task) { - Ok(Self::Task(input.parse()?)) - } else { - Err(lookahead.error()) - } - } - } - - impl std::fmt::Display for CompositeKeyword { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - use CompositeKeyword::*; - write!( - f, - "{}", - match self { - FreezeReason(_) => "FreezeReason", - HoldReason(_) => "HoldReason", - Task(_) => "Task", - LockId(_) => "LockId", - SlashReason(_) => "SlashReason", - } - ) - } - } + use super::*; + + syn::custom_keyword!(FreezeReason); + syn::custom_keyword!(HoldReason); + syn::custom_keyword!(LockId); + syn::custom_keyword!(SlashReason); + syn::custom_keyword!(Task); + 
+ pub enum CompositeKeyword { + FreezeReason(FreezeReason), + HoldReason(HoldReason), + LockId(LockId), + SlashReason(SlashReason), + Task(Task), + } + + impl ToTokens for CompositeKeyword { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + use CompositeKeyword::*; + match self { + FreezeReason(inner) => inner.to_tokens(tokens), + HoldReason(inner) => inner.to_tokens(tokens), + LockId(inner) => inner.to_tokens(tokens), + SlashReason(inner) => inner.to_tokens(tokens), + Task(inner) => inner.to_tokens(tokens), + } + } + } + + impl syn::parse::Parse for CompositeKeyword { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + if lookahead.peek(FreezeReason) { + Ok(Self::FreezeReason(input.parse()?)) + } else if lookahead.peek(HoldReason) { + Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(LockId) { + Ok(Self::LockId(input.parse()?)) + } else if lookahead.peek(SlashReason) { + Ok(Self::SlashReason(input.parse()?)) + } else if lookahead.peek(Task) { + Ok(Self::Task(input.parse()?)) + } else { + Err(lookahead.error()) + } + } + } + + impl std::fmt::Display for CompositeKeyword { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use CompositeKeyword::*; + write!( + f, + "{}", + match self { + FreezeReason(_) => "FreezeReason", + HoldReason(_) => "HoldReason", + Task(_) => "Task", + LockId(_) => "LockId", + SlashReason(_) => "SlashReason", + } + ) + } + } } pub struct CompositeDef { - /// The index of the CompositeDef item in the pallet module. - pub index: usize, - /// The composite keyword used (contains span). - pub composite_keyword: keyword::CompositeKeyword, - /// Name of the associated type. - pub ident: syn::Ident, - /// Type parameters and where clause attached to a declaration of the pallet::composite_enum. - pub generics: syn::Generics, - /// The span of the pallet::composite_enum attribute. 
- pub attr_span: proc_macro2::Span, - /// Variant count of the pallet::composite_enum. - pub variant_count: u32, + /// The index of the CompositeDef item in the pallet module. + pub index: usize, + /// The composite keyword used (contains span). + pub composite_keyword: keyword::CompositeKeyword, + /// Name of the associated type. + pub ident: syn::Ident, + /// Type parameters and where clause attached to a declaration of the pallet::composite_enum. + pub generics: syn::Generics, + /// The span of the pallet::composite_enum attribute. + pub attr_span: proc_macro2::Span, + /// Variant count of the pallet::composite_enum. + pub variant_count: u32, } impl CompositeDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - scrate: &syn::Path, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - // check variants: composite enums support only field-less enum variants. This is - // because fields can introduce too many possibilities, making it challenging to compute - // a fixed variant count. - for variant in &item.variants { - match variant.fields { - syn::Fields::Named(_) | syn::Fields::Unnamed(_) => - return Err(syn::Error::new( - variant.ident.span(), - "The composite enum does not support variants with fields!", - )), - syn::Fields::Unit => (), - } - } - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::composite_enum, expected enum item", - )) - }; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = format!("Invalid pallet::composite_enum, `{}` must be public", item.ident); - return Err(syn::Error::new(item.span(), msg)) - } - - let has_instance = if item.generics.params.first().is_some() { - helper::check_config_def_gen(&item.generics, item.ident.span())?; - true - } else { - false - }; - - let has_derive_attr = item.attrs.iter().any(|attr| { - if let syn::Meta::List(syn::MetaList { path, .. 
}) = &attr.meta { - path.get_ident().map(|ident| ident == "derive").unwrap_or(false) - } else { - false - } - }); - - if !has_derive_attr { - let derive_attr: syn::Attribute = syn::parse_quote! { - #[derive( - Copy, Clone, Eq, PartialEq, - #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, - #scrate::__private::scale_info::TypeInfo, - #scrate::__private::RuntimeDebug, - )] - }; - item.attrs.push(derive_attr); - } - - if has_instance { - item.attrs.push(syn::parse_quote! { - #[scale_info(skip_type_params(I))] - }); - - item.variants.push(syn::parse_quote! { - #[doc(hidden)] - #[codec(skip)] - __Ignore( - ::core::marker::PhantomData, - ) - }); - } - - let composite_keyword = - syn::parse2::(item.ident.to_token_stream())?; - - Ok(CompositeDef { - index, - composite_keyword, - attr_span, - generics: item.generics.clone(), - variant_count: item.variants.len() as u32, - ident: item.ident.clone(), - }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + scrate: &syn::Path, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + // check variants: composite enums support only field-less enum variants. This is + // because fields can introduce too many possibilities, making it challenging to compute + // a fixed variant count. 
+ for variant in &item.variants { + match variant.fields { + syn::Fields::Named(_) | syn::Fields::Unnamed(_) => { + return Err(syn::Error::new( + variant.ident.span(), + "The composite enum does not support variants with fields!", + )) + } + syn::Fields::Unit => (), + } + } + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::composite_enum, expected enum item", + )); + }; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = format!( + "Invalid pallet::composite_enum, `{}` must be public", + item.ident + ); + return Err(syn::Error::new(item.span(), msg)); + } + + let has_instance = if item.generics.params.first().is_some() { + helper::check_config_def_gen(&item.generics, item.ident.span())?; + true + } else { + false + }; + + let has_derive_attr = item.attrs.iter().any(|attr| { + if let syn::Meta::List(syn::MetaList { path, .. }) = &attr.meta { + path.get_ident() + .map(|ident| ident == "derive") + .unwrap_or(false) + } else { + false + } + }); + + if !has_derive_attr { + let derive_attr: syn::Attribute = syn::parse_quote! { + #[derive( + Copy, Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, #scrate::__private::codec::Decode, #scrate::__private::codec::MaxEncodedLen, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + }; + item.attrs.push(derive_attr); + } + + if has_instance { + item.attrs.push(syn::parse_quote! { + #[scale_info(skip_type_params(I))] + }); + + item.variants.push(syn::parse_quote! 
{ + #[doc(hidden)] + #[codec(skip)] + __Ignore( + ::core::marker::PhantomData, + ) + }); + } + + let composite_keyword = + syn::parse2::(item.ident.to_token_stream())?; + + Ok(CompositeDef { + index, + composite_keyword, + attr_span, + generics: item.generics.clone(), + variant_count: item.variants.len() as u32, + ident: item.ident.clone(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/config.rs b/support/procedural-fork/src/pallet/parse/config.rs index fbab92db1..cde565245 100644 --- a/support/procedural-fork/src/pallet/parse/config.rs +++ b/support/procedural-fork/src/pallet/parse/config.rs @@ -22,569 +22,592 @@ use syn::{spanned::Spanned, token, Token}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Config); - syn::custom_keyword!(From); - syn::custom_keyword!(T); - syn::custom_keyword!(I); - syn::custom_keyword!(config); - syn::custom_keyword!(pallet); - syn::custom_keyword!(IsType); - syn::custom_keyword!(RuntimeEvent); - syn::custom_keyword!(Event); - syn::custom_keyword!(frame_system); - syn::custom_keyword!(disable_frame_system_supertrait_check); - syn::custom_keyword!(no_default); - syn::custom_keyword!(no_default_bounds); - syn::custom_keyword!(constant); + syn::custom_keyword!(Config); + syn::custom_keyword!(From); + syn::custom_keyword!(T); + syn::custom_keyword!(I); + syn::custom_keyword!(config); + syn::custom_keyword!(pallet); + syn::custom_keyword!(IsType); + syn::custom_keyword!(RuntimeEvent); + syn::custom_keyword!(Event); + syn::custom_keyword!(frame_system); + syn::custom_keyword!(disable_frame_system_supertrait_check); + syn::custom_keyword!(no_default); + syn::custom_keyword!(no_default_bounds); + syn::custom_keyword!(constant); } #[derive(Default)] pub struct DefaultTrait { - /// A bool for each sub-trait item indicates whether the item has - /// `#[pallet::no_default_bounds]` attached to it. If true, the item will not have any bounds - /// in the generated default sub-trait. 
- pub items: Vec<(syn::TraitItem, bool)>, - pub has_system: bool, + /// A bool for each sub-trait item indicates whether the item has + /// `#[pallet::no_default_bounds]` attached to it. If true, the item will not have any bounds + /// in the generated default sub-trait. + pub items: Vec<(syn::TraitItem, bool)>, + pub has_system: bool, } /// Input definition for the pallet config. pub struct ConfigDef { - /// The index of item in pallet module. - pub index: usize, - /// Whether the trait has instance (i.e. define with `Config`) - pub has_instance: bool, - /// Const associated type. - pub consts_metadata: Vec, - /// Whether the trait has the associated type `Event`, note that those bounds are - /// checked: - /// * `IsType::RuntimeEvent` - /// * `From` or `From>` or `From>` - pub has_event_type: bool, - /// The where clause on trait definition but modified so `Self` is `T`. - pub where_clause: Option, - /// The span of the pallet::config attribute. - pub attr_span: proc_macro2::Span, - /// Whether a default sub-trait should be generated. - /// - /// Contains default sub-trait items (instantiated by `#[pallet::config(with_default)]`). - /// Vec will be empty if `#[pallet::config(with_default)]` is not specified or if there are - /// no trait items. - pub default_sub_trait: Option, + /// The index of item in pallet module. + pub index: usize, + /// Whether the trait has instance (i.e. define with `Config`) + pub has_instance: bool, + /// Const associated type. + pub consts_metadata: Vec, + /// Whether the trait has the associated type `Event`, note that those bounds are + /// checked: + /// * `IsType::RuntimeEvent` + /// * `From` or `From>` or `From>` + pub has_event_type: bool, + /// The where clause on trait definition but modified so `Self` is `T`. + pub where_clause: Option, + /// The span of the pallet::config attribute. + pub attr_span: proc_macro2::Span, + /// Whether a default sub-trait should be generated. 
+ /// + /// Contains default sub-trait items (instantiated by `#[pallet::config(with_default)]`). + /// Vec will be empty if `#[pallet::config(with_default)]` is not specified or if there are + /// no trait items. + pub default_sub_trait: Option, } /// Input definition for a constant in pallet config. pub struct ConstMetadataDef { - /// Name of the associated type. - pub ident: syn::Ident, - /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, + /// Name of the associated type. + pub ident: syn::Ident, + /// The type in Get, e.g. `u32` in `type Foo: Get;`, but `Self` is replaced by `T` + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, } impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { - type Error = syn::Error; - - fn try_from(trait_ty: &syn::TraitItemType) -> Result { - let err = |span, msg| { - syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)) - }; - let doc = get_doc_literals(&trait_ty.attrs); - let ident = trait_ty.ident.clone(); - let bound = trait_ty - .bounds - .iter() - .find_map(|b| { - if let syn::TypeParamBound::Trait(tb) = b { - tb.path - .segments - .last() - .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) - } else { - None - } - }) - .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; - let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { - if ab.args.len() == 1 { - if let syn::GenericArgument::Type(ref ty) = ab.args[0] { - Ok(ty) - } else { - Err(err(ab.args[0].span(), "Expected a type argument")) - } - } else { - Err(err(bound.span(), "Expected a single type argument")) - } - } else { - Err(err(bound.span(), "Expected trait generic args")) - }?; - let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) - .expect("Internal error: replacing `Self` by `T` should result in valid type"); - - Ok(Self { ident, type_, doc }) - } + 
type Error = syn::Error; + + fn try_from(trait_ty: &syn::TraitItemType) -> Result { + let err = |span, msg| { + syn::Error::new( + span, + format!("Invalid usage of `#[pallet::constant]`: {}", msg), + ) + }; + let doc = get_doc_literals(&trait_ty.attrs); + let ident = trait_ty.ident.clone(); + let bound = trait_ty + .bounds + .iter() + .find_map(|b| { + if let syn::TypeParamBound::Trait(tb) = b { + tb.path + .segments + .last() + .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) + } else { + None + } + }) + .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; + let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { + if ab.args.len() == 1 { + if let syn::GenericArgument::Type(ref ty) = ab.args[0] { + Ok(ty) + } else { + Err(err(ab.args[0].span(), "Expected a type argument")) + } + } else { + Err(err(bound.span(), "Expected a single type argument")) + } + } else { + Err(err(bound.span(), "Expected trait generic args")) + }?; + let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) + .expect("Internal error: replacing `Self` by `T` should result in valid type"); + + Ok(Self { ident, type_, doc }) + } } /// Parse for `#[pallet::disable_frame_system_supertrait_check]` pub struct DisableFrameSystemSupertraitCheck; impl syn::parse::Parse for DisableFrameSystemSupertraitCheck { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - content.parse::()?; - Ok(Self) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + content.parse::()?; + Ok(Self) + } } /// Parsing for the `typ` portion of `PalletAttr` #[derive(derive_syn_parse::Parse, PartialEq, Eq)] pub enum PalletAttrType { - #[peek(keyword::no_default, name = "no_default")] - 
NoDefault(keyword::no_default), - #[peek(keyword::no_default_bounds, name = "no_default_bounds")] - NoBounds(keyword::no_default_bounds), - #[peek(keyword::constant, name = "constant")] - Constant(keyword::constant), + #[peek(keyword::no_default, name = "no_default")] + NoDefault(keyword::no_default), + #[peek(keyword::no_default_bounds, name = "no_default_bounds")] + NoBounds(keyword::no_default_bounds), + #[peek(keyword::constant, name = "constant")] + Constant(keyword::constant), } /// Parsing for `#[pallet::X]` #[derive(derive_syn_parse::Parse)] pub struct PalletAttr { - _pound: Token![#], - #[bracket] - _bracket: token::Bracket, - #[inside(_bracket)] - _pallet: keyword::pallet, - #[prefix(Token![::] in _bracket)] - #[inside(_bracket)] - typ: PalletAttrType, + _pound: Token![#], + #[bracket] + _bracket: token::Bracket, + #[inside(_bracket)] + _pallet: keyword::pallet, + #[prefix(Token![::] in _bracket)] + #[inside(_bracket)] + typ: PalletAttrType, } /// Parse for `IsType<::RuntimeEvent>` and retrieve `$path` pub struct IsTypeBoundEventParse(syn::Path); impl syn::parse::Parse for IsTypeBoundEventParse { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - let config_path = input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - - Ok(Self(config_path)) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + let config_path = input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + + Ok(Self(config_path)) + } } /// Parse for `From` or `From>` or `From>` pub struct FromEventParse { - is_generic: bool, - has_instance: bool, + is_generic: bool, + has_instance: bool, } impl syn::parse::Parse for FromEventParse { - fn parse(input: syn::parse::ParseStream) -> 
syn::Result { - let mut is_generic = false; - let mut has_instance = false; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![<]) { - is_generic = true; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - input.parse::()?; - input.parse::()?; - has_instance = true; - } - input.parse::]>()?; - } - input.parse::]>()?; - - Ok(Self { is_generic, has_instance }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut is_generic = false; + let mut has_instance = false; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![<]) { + is_generic = true; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + input.parse::()?; + input.parse::()?; + has_instance = true; + } + input.parse::]>()?; + } + input.parse::]>()?; + + Ok(Self { + is_generic, + has_instance, + }) + } } /// Check if trait_item is `type RuntimeEvent`, if so checks its bounds are those expected. 
/// (Event type is reserved type) fn check_event_type( - frame_system: &syn::Path, - trait_item: &syn::TraitItem, - trait_has_instance: bool, + frame_system: &syn::Path, + trait_item: &syn::TraitItem, + trait_has_instance: bool, ) -> syn::Result { - if let syn::TraitItem::Type(type_) = trait_item { - if type_.ident == "RuntimeEvent" { - // Check event has no generics - if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ + if let syn::TraitItem::Type(type_) = trait_item { + if type_.ident == "RuntimeEvent" { + // Check event has no generics + if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ no generics nor where_clause"; - return Err(syn::Error::new(trait_item.span(), msg)) - } + return Err(syn::Error::new(trait_item.span(), msg)); + } - // Check bound contains IsType and From - let has_is_type_bound = type_.bounds.iter().any(|s| { - syn::parse2::(s.to_token_stream()) - .map_or(false, |b| has_expected_system_config(b.0, frame_system)) - }); + // Check bound contains IsType and From + let has_is_type_bound = type_.bounds.iter().any(|s| { + syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b.0, frame_system)) + }); - if !has_is_type_bound { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ + if !has_is_type_bound { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ bound: `IsType<::RuntimeEvent>`".to_string(); - return Err(syn::Error::new(type_.span(), msg)) - } - - let from_event_bound = type_ - .bounds - .iter() - .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); - - let from_event_bound = if let Some(b) = from_event_bound { - b - } else { - let msg = "Invalid `type RuntimeEvent`, 
associated type `RuntimeEvent` is reserved and must \ + return Err(syn::Error::new(type_.span(), msg)); + } + + let from_event_bound = type_ + .bounds + .iter() + .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); + + let from_event_bound = if let Some(b) = from_event_bound { + b + } else { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ bound: `From` or `From>` or `From>`"; - return Err(syn::Error::new(type_.span(), msg)) - }; + return Err(syn::Error::new(type_.span(), msg)); + }; - if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) - { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ + if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) + { + let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ `From`. Config and generic Event must be both with instance or \ without instance"; - return Err(syn::Error::new(type_.span(), msg)) - } - - Ok(true) - } else { - Ok(false) - } - } else { - Ok(false) - } + return Err(syn::Error::new(type_.span(), msg)); + } + + Ok(true) + } else { + Ok(false) + } + } else { + Ok(false) + } } /// Check that the path to `frame_system::Config` is valid, this is that the path is just /// `frame_system::Config` or when using the `frame` crate it is `frame::xyz::frame_system::Config`. fn has_expected_system_config(path: syn::Path, frame_system: &syn::Path) -> bool { - // Check if `frame_system` is actually 'frame_system'. - if path.segments.iter().all(|s| s.ident != "frame_system") { - return false - } - - let mut expected_system_config = - match (is_using_frame_crate(&path), is_using_frame_crate(&frame_system)) { - (true, false) => - // We can't use the path to `frame_system` from `frame` if `frame_system` is not being - // in scope through `frame`. 
- return false, - (false, true) => - // We know that the only valid frame_system path is one that is `frame_system`, as - // `frame` re-exports it as such. - syn::parse2::(quote::quote!(frame_system)).expect("is a valid path; qed"), - (_, _) => - // They are either both `frame_system` or both `frame::xyz::frame_system`. - frame_system.clone(), - }; - - expected_system_config - .segments - .push(syn::PathSegment::from(syn::Ident::new("Config", path.span()))); - - // the parse path might be something like `frame_system::Config<...>`, so we - // only compare the idents along the path. - expected_system_config - .segments - .into_iter() - .map(|ps| ps.ident) - .collect::>() == - path.segments.into_iter().map(|ps| ps.ident).collect::>() + // Check if `frame_system` is actually 'frame_system'. + if path.segments.iter().all(|s| s.ident != "frame_system") { + return false; + } + + let mut expected_system_config = match ( + is_using_frame_crate(&path), + is_using_frame_crate(&frame_system), + ) { + (true, false) => + // We can't use the path to `frame_system` from `frame` if `frame_system` is not being + // in scope through `frame`. + { + return false + } + (false, true) => + // We know that the only valid frame_system path is one that is `frame_system`, as + // `frame` re-exports it as such. + { + syn::parse2::(quote::quote!(frame_system)).expect("is a valid path; qed") + } + (_, _) => + // They are either both `frame_system` or both `frame::xyz::frame_system`. + { + frame_system.clone() + } + }; + + expected_system_config + .segments + .push(syn::PathSegment::from(syn::Ident::new( + "Config", + path.span(), + ))); + + // the parse path might be something like `frame_system::Config<...>`, so we + // only compare the idents along the path. 
+ expected_system_config + .segments + .into_iter() + .map(|ps| ps.ident) + .collect::>() + == path + .segments + .into_iter() + .map(|ps| ps.ident) + .collect::>() } /// Replace ident `Self` by `T` pub fn replace_self_by_t(input: proc_macro2::TokenStream) -> proc_macro2::TokenStream { - input - .into_iter() - .map(|token_tree| match token_tree { - proc_macro2::TokenTree::Group(group) => - proc_macro2::Group::new(group.delimiter(), replace_self_by_t(group.stream())).into(), - proc_macro2::TokenTree::Ident(ident) if ident == "Self" => - proc_macro2::Ident::new("T", ident.span()).into(), - other => other, - }) - .collect() + input + .into_iter() + .map(|token_tree| match token_tree { + proc_macro2::TokenTree::Group(group) => { + proc_macro2::Group::new(group.delimiter(), replace_self_by_t(group.stream())).into() + } + proc_macro2::TokenTree::Ident(ident) if ident == "Self" => { + proc_macro2::Ident::new("T", ident.span()).into() + } + other => other, + }) + .collect() } impl ConfigDef { - pub fn try_from( - frame_system: &syn::Path, - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - enable_default: bool, - ) -> syn::Result { - let item = if let syn::Item::Trait(item) = item { - item - } else { - let msg = "Invalid pallet::config, expected trait definition"; - return Err(syn::Error::new(item.span(), msg)) - }; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::config, trait must be public"; - return Err(syn::Error::new(item.span(), msg)) - } - - syn::parse2::(item.ident.to_token_stream())?; - - let where_clause = { - let stream = replace_self_by_t(item.generics.where_clause.to_token_stream()); - syn::parse2::>(stream).expect( - "Internal error: replacing `Self` by `T` should result in valid where + pub fn try_from( + frame_system: &syn::Path, + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + enable_default: bool, + ) -> syn::Result { + let item = if let syn::Item::Trait(item) = item { + 
item + } else { + let msg = "Invalid pallet::config, expected trait definition"; + return Err(syn::Error::new(item.span(), msg)); + }; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::config, trait must be public"; + return Err(syn::Error::new(item.span(), msg)); + } + + syn::parse2::(item.ident.to_token_stream())?; + + let where_clause = { + let stream = replace_self_by_t(item.generics.where_clause.to_token_stream()); + syn::parse2::>(stream).expect( + "Internal error: replacing `Self` by `T` should result in valid where clause", - ) - }; - - if item.generics.params.len() > 1 { - let msg = "Invalid pallet::config, expected no more than one generic"; - return Err(syn::Error::new(item.generics.params[2].span(), msg)) - } - - let has_instance = if item.generics.params.first().is_some() { - helper::check_config_def_gen(&item.generics, item.ident.span())?; - true - } else { - false - }; - - let has_frame_system_supertrait = item.supertraits.iter().any(|s| { - syn::parse2::(s.to_token_stream()) - .map_or(false, |b| has_expected_system_config(b, frame_system)) - }); - - let mut has_event_type = false; - let mut consts_metadata = vec![]; - let mut default_sub_trait = if enable_default { - Some(DefaultTrait { - items: Default::default(), - has_system: has_frame_system_supertrait, - }) - } else { - None - }; - for trait_item in &mut item.items { - let is_event = check_event_type(frame_system, trait_item, has_instance)?; - has_event_type = has_event_type || is_event; - - let mut already_no_default = false; - let mut already_constant = false; - let mut already_no_default_bounds = false; - - while let Ok(Some(pallet_attr)) = - helper::take_first_item_pallet_attr::(trait_item) - { - match (pallet_attr.typ, &trait_item) { - (PalletAttrType::Constant(_), syn::TraitItem::Type(ref typ)) => { - if already_constant { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::constant] attribute not allowed.", - )) - } 
- already_constant = true; - consts_metadata.push(ConstMetadataDef::try_from(typ)?); - }, - (PalletAttrType::Constant(_), _) => - return Err(syn::Error::new( - trait_item.span(), - "Invalid #[pallet::constant] in #[pallet::config], expected type item", - )), - (PalletAttrType::NoDefault(_), _) => { - if !enable_default { - return Err(syn::Error::new( + ) + }; + + if item.generics.params.len() > 1 { + let msg = "Invalid pallet::config, expected no more than one generic"; + return Err(syn::Error::new(item.generics.params[2].span(), msg)); + } + + let has_instance = if item.generics.params.first().is_some() { + helper::check_config_def_gen(&item.generics, item.ident.span())?; + true + } else { + false + }; + + let has_frame_system_supertrait = item.supertraits.iter().any(|s| { + syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b, frame_system)) + }); + + let mut has_event_type = false; + let mut consts_metadata = vec![]; + let mut default_sub_trait = if enable_default { + Some(DefaultTrait { + items: Default::default(), + has_system: has_frame_system_supertrait, + }) + } else { + None + }; + for trait_item in &mut item.items { + let is_event = check_event_type(frame_system, trait_item, has_instance)?; + has_event_type = has_event_type || is_event; + + let mut already_no_default = false; + let mut already_constant = false; + let mut already_no_default_bounds = false; + + while let Ok(Some(pallet_attr)) = + helper::take_first_item_pallet_attr::(trait_item) + { + match (pallet_attr.typ, &trait_item) { + (PalletAttrType::Constant(_), syn::TraitItem::Type(ref typ)) => { + if already_constant { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::constant] attribute not allowed.", + )); + } + already_constant = true; + consts_metadata.push(ConstMetadataDef::try_from(typ)?); + } + (PalletAttrType::Constant(_), _) => { + return Err(syn::Error::new( + trait_item.span(), + "Invalid #[pallet::constant] in 
#[pallet::config], expected type item", + )) + } + (PalletAttrType::NoDefault(_), _) => { + if !enable_default { + return Err(syn::Error::new( pallet_attr._bracket.span.join(), "`#[pallet:no_default]` can only be used if `#[pallet::config(with_default)]` \ has been specified" - )) - } - if already_no_default { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::no_default] attribute not allowed.", - )) - } - - already_no_default = true; - }, - (PalletAttrType::NoBounds(_), _) => { - if !enable_default { - return Err(syn::Error::new( + )); + } + if already_no_default { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::no_default] attribute not allowed.", + )); + } + + already_no_default = true; + } + (PalletAttrType::NoBounds(_), _) => { + if !enable_default { + return Err(syn::Error::new( pallet_attr._bracket.span.join(), "`#[pallet:no_default_bounds]` can only be used if `#[pallet::config(with_default)]` \ has been specified" - )) - } - if already_no_default_bounds { - return Err(syn::Error::new( - pallet_attr._bracket.span.join(), - "Duplicate #[pallet::no_default_bounds] attribute not allowed.", - )) - } - already_no_default_bounds = true; - }, - } - } - - if !already_no_default && enable_default { - default_sub_trait - .as_mut() - .expect("is 'Some(_)' if 'enable_default'; qed") - .items - .push((trait_item.clone(), already_no_default_bounds)); - } - } - - let attr: Option = - helper::take_first_item_pallet_attr(&mut item.attrs)?; - let disable_system_supertrait_check = attr.is_some(); - - if !has_frame_system_supertrait && !disable_system_supertrait_check { - let found = if item.supertraits.is_empty() { - "none".to_string() - } else { - let mut found = item - .supertraits - .iter() - .fold(String::new(), |acc, s| format!("{}`{}`, ", acc, quote::quote!(#s))); - found.pop(); - found.pop(); - found - }; - - let msg = format!( - "Invalid pallet::trait, expected explicit `{}::Config` as 
supertrait, \ + )); + } + if already_no_default_bounds { + return Err(syn::Error::new( + pallet_attr._bracket.span.join(), + "Duplicate #[pallet::no_default_bounds] attribute not allowed.", + )); + } + already_no_default_bounds = true; + } + } + } + + if !already_no_default && enable_default { + default_sub_trait + .as_mut() + .expect("is 'Some(_)' if 'enable_default'; qed") + .items + .push((trait_item.clone(), already_no_default_bounds)); + } + } + + let attr: Option = + helper::take_first_item_pallet_attr(&mut item.attrs)?; + let disable_system_supertrait_check = attr.is_some(); + + if !has_frame_system_supertrait && !disable_system_supertrait_check { + let found = if item.supertraits.is_empty() { + "none".to_string() + } else { + let mut found = item.supertraits.iter().fold(String::new(), |acc, s| { + format!("{}`{}`, ", acc, quote::quote!(#s)) + }); + found.pop(); + found.pop(); + found + }; + + let msg = format!( + "Invalid pallet::trait, expected explicit `{}::Config` as supertrait, \ found {}. \ (try `pub trait Config: frame_system::Config {{ ...` or \ `pub trait Config: frame_system::Config {{ ...`). 
\ To disable this check, use `#[pallet::disable_frame_system_supertrait_check]`", - frame_system.to_token_stream(), - found, - ); - return Err(syn::Error::new(item.span(), msg)) - } - - Ok(Self { - index, - has_instance, - consts_metadata, - has_event_type, - where_clause, - attr_span, - default_sub_trait, - }) - } + frame_system.to_token_stream(), + found, + ); + return Err(syn::Error::new(item.span(), msg)); + } + + Ok(Self { + index, + has_instance, + consts_metadata, + has_event_type, + where_clause, + attr_span, + default_sub_trait, + }) + } } #[cfg(test)] mod tests { - use super::*; - #[test] - fn has_expected_system_config_works() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_assoc_type() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame_system::Config)) - .unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_frame() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_works_with_other_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); - let path = - 
syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); - assert!(has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_works_with_mixed_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_works_with_other_mixed_frame_full_path() { - let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_does_not_work_with_frame_full_path_if_not_frame_crate() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_unexpected_frame_system() { - let frame_system = - syn::parse2::(quote::quote!(framez::deps::frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_unexpected_path() { - let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::ConfigSystem)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } - - #[test] - fn has_expected_system_config_not_frame_system() { - let frame_system = syn::parse2::(quote::quote!(something)).unwrap(); - let path = syn::parse2::(quote::quote!(something::Config)).unwrap(); - assert!(!has_expected_system_config(path, &frame_system)); - } + use 
super::*; + #[test] + fn has_expected_system_config_works() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_assoc_type() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame_system::Config)) + .unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_frame() { + let frame_system = + syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_works_with_other_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_works_with_mixed_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_works_with_other_mixed_frame_full_path() { + let frame_system = + 
syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_does_not_work_with_frame_full_path_if_not_frame_crate() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_unexpected_frame_system() { + let frame_system = + syn::parse2::(quote::quote!(framez::deps::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_unexpected_path() { + let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!(frame_system::ConfigSystem)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } + + #[test] + fn has_expected_system_config_not_frame_system() { + let frame_system = syn::parse2::(quote::quote!(something)).unwrap(); + let path = syn::parse2::(quote::quote!(something::Config)).unwrap(); + assert!(!has_expected_system_config(path, &frame_system)); + } } diff --git a/support/procedural-fork/src/pallet/parse/error.rs b/support/procedural-fork/src/pallet/parse/error.rs index 362df8d73..e93e2113f 100644 --- a/support/procedural-fork/src/pallet/parse/error.rs +++ b/support/procedural-fork/src/pallet/parse/error.rs @@ -22,94 +22,108 @@ use syn::{spanned::Spanned, Fields}; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(Error); + syn::custom_keyword!(Error); } /// Records information about the error enum variant field. pub struct VariantField { - /// Whether or not the field is named, i.e. 
whether it is a tuple variant or struct variant. - pub is_named: bool, + /// Whether or not the field is named, i.e. whether it is a tuple variant or struct variant. + pub is_named: bool, } /// Records information about the error enum variants. pub struct VariantDef { - /// The variant ident. - pub ident: syn::Ident, - /// The variant field, if any. - pub field: Option, - /// The variant doc literals. - pub docs: Vec, - /// The `cfg` attributes. - pub cfg_attrs: Vec, + /// The variant ident. + pub ident: syn::Ident, + /// The variant field, if any. + pub field: Option, + /// The variant doc literals. + pub docs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, } /// This checks error declaration as a enum declaration with only variants without fields nor /// discriminant. pub struct ErrorDef { - /// The index of error item in pallet module. - pub index: usize, - /// Variant definitions. - pub variants: Vec, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The keyword error used (contains span). - pub error: keyword::Error, - /// The span of the pallet::error attribute. - pub attr_span: proc_macro2::Span, + /// The index of error item in pallet module. + pub index: usize, + /// Variant definitions. + pub variants: Vec, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The keyword error used (contains span). + pub error: keyword::Error, + /// The span of the pallet::error attribute. 
+ pub attr_span: proc_macro2::Span, } impl ErrorDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - item - } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::error, expected item enum")) - }; - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::error, `Error` must be public"; - return Err(syn::Error::new(item.span(), msg)) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::error, expected item enum", + )); + }; + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::error, `Error` must be public"; + return Err(syn::Error::new(item.span(), msg)); + } - let instances = - vec![helper::check_type_def_gen_no_bounds(&item.generics, item.ident.span())?]; + let instances = vec![helper::check_type_def_gen_no_bounds( + &item.generics, + item.ident.span(), + )?]; - if item.generics.where_clause.is_some() { - let msg = "Invalid pallet::error, where clause is not allowed on pallet error item"; - return Err(syn::Error::new(item.generics.where_clause.as_ref().unwrap().span(), msg)) - } + if item.generics.where_clause.is_some() { + let msg = "Invalid pallet::error, where clause is not allowed on pallet error item"; + return Err(syn::Error::new( + item.generics.where_clause.as_ref().unwrap().span(), + msg, + )); + } - let error = syn::parse2::(item.ident.to_token_stream())?; + let error = syn::parse2::(item.ident.to_token_stream())?; - let variants = item - .variants - .iter() - .map(|variant| { - let field_ty = match &variant.fields { - Fields::Unit => None, - Fields::Named(_) => Some(VariantField { is_named: true }), - Fields::Unnamed(_) => Some(VariantField { is_named: false }), - }; - if 
variant.discriminant.is_some() { - let msg = "Invalid pallet::error, unexpected discriminant, discriminants \ + let variants = item + .variants + .iter() + .map(|variant| { + let field_ty = match &variant.fields { + Fields::Unit => None, + Fields::Named(_) => Some(VariantField { is_named: true }), + Fields::Unnamed(_) => Some(VariantField { is_named: false }), + }; + if variant.discriminant.is_some() { + let msg = "Invalid pallet::error, unexpected discriminant, discriminants \ are not supported"; - let span = variant.discriminant.as_ref().unwrap().0.span(); - return Err(syn::Error::new(span, msg)) - } - let cfg_attrs: Vec = helper::get_item_cfg_attrs(&variant.attrs); + let span = variant.discriminant.as_ref().unwrap().0.span(); + return Err(syn::Error::new(span, msg)); + } + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&variant.attrs); - Ok(VariantDef { - ident: variant.ident.clone(), - field: field_ty, - docs: get_doc_literals(&variant.attrs), - cfg_attrs, - }) - }) - .collect::>()?; + Ok(VariantDef { + ident: variant.ident.clone(), + field: field_ty, + docs: get_doc_literals(&variant.attrs), + cfg_attrs, + }) + }) + .collect::>()?; - Ok(ErrorDef { attr_span, index, variants, instances, error }) - } + Ok(ErrorDef { + attr_span, + index, + variants, + instances, + error, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/event.rs b/support/procedural-fork/src/pallet/parse/event.rs index 0fb8ee4f5..6102dd31f 100644 --- a/support/procedural-fork/src/pallet/parse/event.rs +++ b/support/procedural-fork/src/pallet/parse/event.rs @@ -21,28 +21,28 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(Event); - syn::custom_keyword!(pallet); - syn::custom_keyword!(generate_deposit); - syn::custom_keyword!(deposit_event); + syn::custom_keyword!(Event); + syn::custom_keyword!(pallet); + syn::custom_keyword!(generate_deposit); + syn::custom_keyword!(deposit_event); } /// Definition for pallet event enum. pub struct EventDef { - /// The index of event item in pallet module. - pub index: usize, - /// The keyword Event used (contains span). - pub event: keyword::Event, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The kind of generic the type `Event` has. - pub gen_kind: super::GenericKind, - /// Whether the function `deposit_event` must be generated. - pub deposit_event: Option, - /// Where clause used in event definition. - pub where_clause: Option, - /// The span of the pallet::event attribute. - pub attr_span: proc_macro2::Span, + /// The index of event item in pallet module. + pub index: usize, + /// The keyword Event used (contains span). + pub event: keyword::Event, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The kind of generic the type `Event` has. + pub gen_kind: super::GenericKind, + /// Whether the function `deposit_event` must be generated. + pub deposit_event: Option, + /// Where clause used in event definition. + pub where_clause: Option, + /// The span of the pallet::event attribute. + pub attr_span: proc_macro2::Span, } /// Attribute for a pallet's Event. 
@@ -50,92 +50,110 @@ pub struct EventDef { /// Syntax is: /// * `#[pallet::generate_deposit($vis fn deposit_event)]` pub struct PalletEventDepositAttr { - pub fn_vis: syn::Visibility, - // Span for the keyword deposit_event - pub fn_span: proc_macro2::Span, - // Span of the attribute - pub span: proc_macro2::Span, + pub fn_vis: syn::Visibility, + // Span for the keyword deposit_event + pub fn_span: proc_macro2::Span, + // Span of the attribute + pub span: proc_macro2::Span, } impl syn::parse::Parse for PalletEventDepositAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let span = content.parse::()?.span(); - let generate_content; - syn::parenthesized!(generate_content in content); - let fn_vis = generate_content.parse::()?; - generate_content.parse::()?; - let fn_span = generate_content.parse::()?.span(); - - Ok(PalletEventDepositAttr { fn_vis, span, fn_span }) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let span = content.parse::()?.span(); + let generate_content; + syn::parenthesized!(generate_content in content); + let fn_vis = generate_content.parse::()?; + generate_content.parse::()?; + let fn_span = generate_content.parse::()?.span(); + + Ok(PalletEventDepositAttr { + fn_vis, + span, + fn_span, + }) + } } struct PalletEventAttrInfo { - deposit_event: Option, + deposit_event: Option, } impl PalletEventAttrInfo { - fn from_attrs(attrs: Vec) -> syn::Result { - let mut deposit_event = None; - for attr in attrs { - if deposit_event.is_none() { - deposit_event = Some(attr) - } else { - return Err(syn::Error::new(attr.span, "Duplicate attribute")) - } - } - - Ok(PalletEventAttrInfo { deposit_event }) - } + fn from_attrs(attrs: Vec) -> syn::Result { + let mut deposit_event = None; + for attr in 
attrs { + if deposit_event.is_none() { + deposit_event = Some(attr) + } else { + return Err(syn::Error::new(attr.span, "Duplicate attribute")); + } + } + + Ok(PalletEventAttrInfo { deposit_event }) + } } impl EventDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Enum(item) = item { - item - } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::event, expected enum item")) - }; - - let event_attrs: Vec = - helper::take_item_pallet_attrs(&mut item.attrs)?; - let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; - let deposit_event = attr_info.deposit_event; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::event, `Event` must be public"; - return Err(syn::Error::new(item.span(), msg)) - } - - let where_clause = item.generics.where_clause.clone(); - - let mut instances = vec![]; - // NOTE: Event is not allowed to be only generic on I because it is not supported - // by construct_runtime. - if let Some(u) = helper::check_type_def_optional_gen(&item.generics, item.ident.span())? { - instances.push(u); - } else { - // construct_runtime only allow non generic event for non instantiable pallet. 
- instances.push(helper::InstanceUsage { has_instance: false, span: item.ident.span() }) - } - - let has_instance = item.generics.type_params().any(|t| t.ident == "I"); - let has_config = item.generics.type_params().any(|t| t.ident == "T"); - let gen_kind = super::GenericKind::from_gens(has_config, has_instance) - .expect("Checked by `helper::check_type_def_optional_gen` above"); - - let event = syn::parse2::(item.ident.to_token_stream())?; - - Ok(EventDef { attr_span, index, instances, deposit_event, event, gen_kind, where_clause }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Enum(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::event, expected enum item", + )); + }; + + let event_attrs: Vec = + helper::take_item_pallet_attrs(&mut item.attrs)?; + let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; + let deposit_event = attr_info.deposit_event; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::event, `Event` must be public"; + return Err(syn::Error::new(item.span(), msg)); + } + + let where_clause = item.generics.where_clause.clone(); + + let mut instances = vec![]; + // NOTE: Event is not allowed to be only generic on I because it is not supported + // by construct_runtime. + if let Some(u) = helper::check_type_def_optional_gen(&item.generics, item.ident.span())? { + instances.push(u); + } else { + // construct_runtime only allow non generic event for non instantiable pallet. 
+ instances.push(helper::InstanceUsage { + has_instance: false, + span: item.ident.span(), + }) + } + + let has_instance = item.generics.type_params().any(|t| t.ident == "I"); + let has_config = item.generics.type_params().any(|t| t.ident == "T"); + let gen_kind = super::GenericKind::from_gens(has_config, has_instance) + .expect("Checked by `helper::check_type_def_optional_gen` above"); + + let event = syn::parse2::(item.ident.to_token_stream())?; + + Ok(EventDef { + attr_span, + index, + instances, + deposit_event, + event, + gen_kind, + where_clause, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/extra_constants.rs b/support/procedural-fork/src/pallet/parse/extra_constants.rs index 2ba6c44b7..38acea21a 100644 --- a/support/procedural-fork/src/pallet/parse/extra_constants.rs +++ b/support/procedural-fork/src/pallet/parse/extra_constants.rs @@ -21,140 +21,148 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(DispatchResultWithPostInfo); - syn::custom_keyword!(Call); - syn::custom_keyword!(OriginFor); - syn::custom_keyword!(weight); - syn::custom_keyword!(compact); - syn::custom_keyword!(T); - syn::custom_keyword!(pallet); - syn::custom_keyword!(constant_name); + syn::custom_keyword!(DispatchResultWithPostInfo); + syn::custom_keyword!(Call); + syn::custom_keyword!(OriginFor); + syn::custom_keyword!(weight); + syn::custom_keyword!(compact); + syn::custom_keyword!(T); + syn::custom_keyword!(pallet); + syn::custom_keyword!(constant_name); } /// Definition of extra constants typically `impl Pallet { ... }` pub struct ExtraConstantsDef { - /// The where_clause used. - pub where_clause: Option, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The index of call item in pallet module. - pub index: usize, - /// The extra constant defined. - pub extra_constants: Vec, + /// The where_clause used. 
+ pub where_clause: Option, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The index of call item in pallet module. + pub index: usize, + /// The extra constant defined. + pub extra_constants: Vec, } /// Input definition for an constant in pallet. pub struct ExtraConstantDef { - /// Name of the function - pub ident: syn::Ident, - /// The type returned by the function - pub type_: syn::Type, - /// The doc associated - pub doc: Vec, - /// Optional MetaData Name - pub metadata_name: Option, + /// Name of the function + pub ident: syn::Ident, + /// The type returned by the function + pub type_: syn::Type, + /// The doc associated + pub doc: Vec, + /// Optional MetaData Name + pub metadata_name: Option, } /// Attributes for functions in extra_constants impl block. /// Parse for `#[pallet::constant_name(ConstantName)]` pub struct ExtraConstAttr { - metadata_name: syn::Ident, + metadata_name: syn::Ident, } impl syn::parse::Parse for ExtraConstAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - content.parse::()?; - - let metadata_name; - syn::parenthesized!(metadata_name in content); - Ok(ExtraConstAttr { metadata_name: metadata_name.parse::()? 
}) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + content.parse::()?; + + let metadata_name; + syn::parenthesized!(metadata_name in content); + Ok(ExtraConstAttr { + metadata_name: metadata_name.parse::()?, + }) + } } impl ExtraConstantsDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - return Err(syn::Error::new( - item.span(), - "Invalid pallet::extra_constants, expected item impl", - )) - }; - - let instances = vec![ - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - helper::check_pallet_struct_usage(&item.self_ty)?, - ]; - - if let Some((_, _, for_)) = item.trait_ { - let msg = "Invalid pallet::call, expected no trait ident as in \ + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::extra_constants, expected item impl", + )); + }; + + let instances = vec![ + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + helper::check_pallet_struct_usage(&item.self_ty)?, + ]; + + if let Some((_, _, for_)) = item.trait_ { + let msg = "Invalid pallet::call, expected no trait ident as in \ `impl<..> Pallet<..> { .. 
}`"; - return Err(syn::Error::new(for_.span(), msg)) - } - - let mut extra_constants = vec![]; - for impl_item in &mut item.items { - let method = if let syn::ImplItem::Fn(method) = impl_item { - method - } else { - let msg = "Invalid pallet::call, only method accepted"; - return Err(syn::Error::new(impl_item.span(), msg)) - }; - - if !method.sig.inputs.is_empty() { - let msg = "Invalid pallet::extra_constants, method must have 0 args"; - return Err(syn::Error::new(method.sig.span(), msg)) - } - - if !method.sig.generics.params.is_empty() { - let msg = "Invalid pallet::extra_constants, method must have 0 generics"; - return Err(syn::Error::new(method.sig.generics.params[0].span(), msg)) - } - - if method.sig.generics.where_clause.is_some() { - let msg = "Invalid pallet::extra_constants, method must have no where clause"; - return Err(syn::Error::new(method.sig.generics.where_clause.span(), msg)) - } - - let type_ = match &method.sig.output { - syn::ReturnType::Default => { - let msg = "Invalid pallet::extra_constants, method must have a return type"; - return Err(syn::Error::new(method.span(), msg)) - }, - syn::ReturnType::Type(_, type_) => *type_.clone(), - }; - - // parse metadata_name - let mut extra_constant_attrs: Vec = - helper::take_item_pallet_attrs(method)?; - - if extra_constant_attrs.len() > 1 { - let msg = - "Invalid attribute in pallet::constant_name, only one attribute is expected"; - return Err(syn::Error::new(extra_constant_attrs[1].metadata_name.span(), msg)) - } - - let metadata_name = extra_constant_attrs.pop().map(|attr| attr.metadata_name); - - extra_constants.push(ExtraConstantDef { - ident: method.sig.ident.clone(), - type_, - doc: get_doc_literals(&method.attrs), - metadata_name, - }); - } - - Ok(Self { - index, - instances, - where_clause: item.generics.where_clause.clone(), - extra_constants, - }) - } + return Err(syn::Error::new(for_.span(), msg)); + } + + let mut extra_constants = vec![]; + for impl_item in &mut item.items { + let method 
= if let syn::ImplItem::Fn(method) = impl_item { + method + } else { + let msg = "Invalid pallet::call, only method accepted"; + return Err(syn::Error::new(impl_item.span(), msg)); + }; + + if !method.sig.inputs.is_empty() { + let msg = "Invalid pallet::extra_constants, method must have 0 args"; + return Err(syn::Error::new(method.sig.span(), msg)); + } + + if !method.sig.generics.params.is_empty() { + let msg = "Invalid pallet::extra_constants, method must have 0 generics"; + return Err(syn::Error::new(method.sig.generics.params[0].span(), msg)); + } + + if method.sig.generics.where_clause.is_some() { + let msg = "Invalid pallet::extra_constants, method must have no where clause"; + return Err(syn::Error::new( + method.sig.generics.where_clause.span(), + msg, + )); + } + + let type_ = match &method.sig.output { + syn::ReturnType::Default => { + let msg = "Invalid pallet::extra_constants, method must have a return type"; + return Err(syn::Error::new(method.span(), msg)); + } + syn::ReturnType::Type(_, type_) => *type_.clone(), + }; + + // parse metadata_name + let mut extra_constant_attrs: Vec = + helper::take_item_pallet_attrs(method)?; + + if extra_constant_attrs.len() > 1 { + let msg = + "Invalid attribute in pallet::constant_name, only one attribute is expected"; + return Err(syn::Error::new( + extra_constant_attrs[1].metadata_name.span(), + msg, + )); + } + + let metadata_name = extra_constant_attrs.pop().map(|attr| attr.metadata_name); + + extra_constants.push(ExtraConstantDef { + ident: method.sig.ident.clone(), + type_, + doc: get_doc_literals(&method.attrs), + metadata_name, + }); + } + + Ok(Self { + index, + instances, + where_clause: item.generics.where_clause.clone(), + extra_constants, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/genesis_build.rs b/support/procedural-fork/src/pallet/parse/genesis_build.rs index d0e1d9ec9..670d4d5ef 100644 --- a/support/procedural-fork/src/pallet/parse/genesis_build.rs +++ 
b/support/procedural-fork/src/pallet/parse/genesis_build.rs @@ -20,42 +20,47 @@ use syn::spanned::Spanned; /// Definition for pallet genesis build implementation. pub struct GenesisBuildDef { - /// The index of item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Option>, - /// The where_clause used. - pub where_clause: Option, - /// The span of the pallet::genesis_build attribute. - pub attr_span: proc_macro2::Span, + /// The index of item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Option>, + /// The where_clause used. + pub where_clause: Option, + /// The span of the pallet::genesis_build attribute. + pub attr_span: proc_macro2::Span, } impl GenesisBuildDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::genesis_build, expected item impl"; - return Err(syn::Error::new(item.span(), msg)) - }; - - let item_trait = &item - .trait_ - .as_ref() - .ok_or_else(|| { - let msg = "Invalid pallet::genesis_build, expected impl<..> GenesisBuild<..> \ + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::genesis_build, expected item impl"; + return Err(syn::Error::new(item.span(), msg)); + }; + + let item_trait = &item + .trait_ + .as_ref() + .ok_or_else(|| { + let msg = "Invalid pallet::genesis_build, expected impl<..> GenesisBuild<..> \ for GenesisConfig<..>"; - syn::Error::new(item.span(), msg) - })? - .1; + syn::Error::new(item.span(), msg) + })? 
+ .1; - let instances = - helper::check_genesis_builder_usage(item_trait)?.map(|instances| vec![instances]); + let instances = + helper::check_genesis_builder_usage(item_trait)?.map(|instances| vec![instances]); - Ok(Self { attr_span, index, instances, where_clause: item.generics.where_clause.clone() }) - } + Ok(Self { + attr_span, + index, + instances, + where_clause: item.generics.where_clause.clone(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/genesis_config.rs b/support/procedural-fork/src/pallet/parse/genesis_config.rs index 62da6ba13..1c52345eb 100644 --- a/support/procedural-fork/src/pallet/parse/genesis_config.rs +++ b/support/procedural-fork/src/pallet/parse/genesis_config.rs @@ -24,50 +24,55 @@ use syn::spanned::Spanned; /// * `struct GenesisConfig` /// * `enum GenesisConfig` pub struct GenesisConfigDef { - /// The index of item in pallet module. - pub index: usize, - /// The kind of generic the type `GenesisConfig` has. - pub gen_kind: super::GenericKind, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The ident of genesis_config, can be used for span. - pub genesis_config: syn::Ident, + /// The index of item in pallet module. + pub index: usize, + /// The kind of generic the type `GenesisConfig` has. + pub gen_kind: super::GenericKind, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The ident of genesis_config, can be used for span. 
+ pub genesis_config: syn::Ident, } impl GenesisConfigDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item_span = item.span(); - let (vis, ident, generics) = match &item { - syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), - _ => { - let msg = "Invalid pallet::genesis_config, expected enum or struct"; - return Err(syn::Error::new(item.span(), msg)) - }, - }; + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item_span = item.span(); + let (vis, ident, generics) = match &item { + syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), + _ => { + let msg = "Invalid pallet::genesis_config, expected enum or struct"; + return Err(syn::Error::new(item.span(), msg)); + } + }; - let mut instances = vec![]; - // NOTE: GenesisConfig is not allowed to be only generic on I because it is not supported - // by construct_runtime. - if let Some(u) = helper::check_type_def_optional_gen(generics, ident.span())? { - instances.push(u); - } + let mut instances = vec![]; + // NOTE: GenesisConfig is not allowed to be only generic on I because it is not supported + // by construct_runtime. + if let Some(u) = helper::check_type_def_optional_gen(generics, ident.span())? 
{ + instances.push(u); + } - let has_instance = generics.type_params().any(|t| t.ident == "I"); - let has_config = generics.type_params().any(|t| t.ident == "T"); - let gen_kind = super::GenericKind::from_gens(has_config, has_instance) - .expect("Checked by `helper::check_type_def_optional_gen` above"); + let has_instance = generics.type_params().any(|t| t.ident == "I"); + let has_config = generics.type_params().any(|t| t.ident == "T"); + let gen_kind = super::GenericKind::from_gens(has_config, has_instance) + .expect("Checked by `helper::check_type_def_optional_gen` above"); - if !matches!(vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::genesis_config, GenesisConfig must be public"; - return Err(syn::Error::new(item_span, msg)) - } + if !matches!(vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::genesis_config, GenesisConfig must be public"; + return Err(syn::Error::new(item_span, msg)); + } - if ident != "GenesisConfig" { - let msg = "Invalid pallet::genesis_config, ident must `GenesisConfig`"; - return Err(syn::Error::new(ident.span(), msg)) - } + if ident != "GenesisConfig" { + let msg = "Invalid pallet::genesis_config, ident must `GenesisConfig`"; + return Err(syn::Error::new(ident.span(), msg)); + } - Ok(GenesisConfigDef { index, genesis_config: ident.clone(), instances, gen_kind }) - } + Ok(GenesisConfigDef { + index, + genesis_config: ident.clone(), + instances, + gen_kind, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/helper.rs b/support/procedural-fork/src/pallet/parse/helper.rs index 3187c9139..f58c8d81c 100644 --- a/support/procedural-fork/src/pallet/parse/helper.rs +++ b/support/procedural-fork/src/pallet/parse/helper.rs @@ -21,164 +21,176 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(I); - syn::custom_keyword!(compact); - syn::custom_keyword!(GenesisBuild); - syn::custom_keyword!(BuildGenesisConfig); - syn::custom_keyword!(Config); - syn::custom_keyword!(T); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(origin); - syn::custom_keyword!(DispatchResult); - syn::custom_keyword!(DispatchResultWithPostInfo); + syn::custom_keyword!(I); + syn::custom_keyword!(compact); + syn::custom_keyword!(GenesisBuild); + syn::custom_keyword!(BuildGenesisConfig); + syn::custom_keyword!(Config); + syn::custom_keyword!(T); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(origin); + syn::custom_keyword!(DispatchResult); + syn::custom_keyword!(DispatchResultWithPostInfo); } /// A usage of instance, either the trait `Config` has been used with instance or without instance. /// Used to check for consistency. #[derive(Clone)] pub struct InstanceUsage { - pub has_instance: bool, - pub span: proc_macro2::Span, + pub has_instance: bool, + pub span: proc_macro2::Span, } /// Trait implemented for syn items to get mutable references on their attributes. /// /// NOTE: verbatim variants are not supported. pub trait MutItemAttrs { - fn mut_item_attrs(&mut self) -> Option<&mut Vec>; + fn mut_item_attrs(&mut self) -> Option<&mut Vec>; } /// Take the first pallet attribute (e.g. 
attribute like `#[pallet..]`) and decode it to `Attr` pub(crate) fn take_first_item_pallet_attr( - item: &mut impl MutItemAttrs, + item: &mut impl MutItemAttrs, ) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; - - if let Some(index) = attrs.iter().position(|attr| { - attr.path().segments.first().map_or(false, |segment| segment.ident == "pallet") - }) { - let pallet_attr = attrs.remove(index); - Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) - } else { - Ok(None) - } + let attrs = if let Some(attrs) = item.mut_item_attrs() { + attrs + } else { + return Ok(None); + }; + + if let Some(index) = attrs.iter().position(|attr| { + attr.path() + .segments + .first() + .map_or(false, |segment| segment.ident == "pallet") + }) { + let pallet_attr = attrs.remove(index); + Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) + } else { + Ok(None) + } } /// Take all the pallet attributes (e.g. attribute like `#[pallet..]`) and decode them to `Attr` pub(crate) fn take_item_pallet_attrs(item: &mut impl MutItemAttrs) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let mut pallet_attrs = Vec::new(); + let mut pallet_attrs = Vec::new(); - while let Some(attr) = take_first_item_pallet_attr(item)? { - pallet_attrs.push(attr) - } + while let Some(attr) = take_first_item_pallet_attr(item)? { + pallet_attrs.push(attr) + } - Ok(pallet_attrs) + Ok(pallet_attrs) } /// Get all the cfg attributes (e.g. 
attribute like `#[cfg..]`) and decode them to `Attr` pub fn get_item_cfg_attrs(attrs: &[syn::Attribute]) -> Vec { - attrs - .iter() - .filter_map(|attr| { - if attr.path().segments.first().map_or(false, |segment| segment.ident == "cfg") { - Some(attr.clone()) - } else { - None - } - }) - .collect::>() + attrs + .iter() + .filter_map(|attr| { + if attr + .path() + .segments + .first() + .map_or(false, |segment| segment.ident == "cfg") + { + Some(attr.clone()) + } else { + None + } + }) + .collect::>() } impl MutItemAttrs for syn::Item { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - match self { - Self::Const(item) => Some(item.attrs.as_mut()), - Self::Enum(item) => Some(item.attrs.as_mut()), - Self::ExternCrate(item) => Some(item.attrs.as_mut()), - Self::Fn(item) => Some(item.attrs.as_mut()), - Self::ForeignMod(item) => Some(item.attrs.as_mut()), - Self::Impl(item) => Some(item.attrs.as_mut()), - Self::Macro(item) => Some(item.attrs.as_mut()), - Self::Mod(item) => Some(item.attrs.as_mut()), - Self::Static(item) => Some(item.attrs.as_mut()), - Self::Struct(item) => Some(item.attrs.as_mut()), - Self::Trait(item) => Some(item.attrs.as_mut()), - Self::TraitAlias(item) => Some(item.attrs.as_mut()), - Self::Type(item) => Some(item.attrs.as_mut()), - Self::Union(item) => Some(item.attrs.as_mut()), - Self::Use(item) => Some(item.attrs.as_mut()), - _ => None, - } - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + match self { + Self::Const(item) => Some(item.attrs.as_mut()), + Self::Enum(item) => Some(item.attrs.as_mut()), + Self::ExternCrate(item) => Some(item.attrs.as_mut()), + Self::Fn(item) => Some(item.attrs.as_mut()), + Self::ForeignMod(item) => Some(item.attrs.as_mut()), + Self::Impl(item) => Some(item.attrs.as_mut()), + Self::Macro(item) => Some(item.attrs.as_mut()), + Self::Mod(item) => Some(item.attrs.as_mut()), + Self::Static(item) => Some(item.attrs.as_mut()), + Self::Struct(item) => Some(item.attrs.as_mut()), + Self::Trait(item) => 
Some(item.attrs.as_mut()), + Self::TraitAlias(item) => Some(item.attrs.as_mut()), + Self::Type(item) => Some(item.attrs.as_mut()), + Self::Union(item) => Some(item.attrs.as_mut()), + Self::Use(item) => Some(item.attrs.as_mut()), + _ => None, + } + } } impl MutItemAttrs for syn::TraitItem { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - match self { - Self::Const(item) => Some(item.attrs.as_mut()), - Self::Fn(item) => Some(item.attrs.as_mut()), - Self::Type(item) => Some(item.attrs.as_mut()), - Self::Macro(item) => Some(item.attrs.as_mut()), - _ => None, - } - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + match self { + Self::Const(item) => Some(item.attrs.as_mut()), + Self::Fn(item) => Some(item.attrs.as_mut()), + Self::Type(item) => Some(item.attrs.as_mut()), + Self::Macro(item) => Some(item.attrs.as_mut()), + _ => None, + } + } } impl MutItemAttrs for Vec { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(self) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(self) + } } impl MutItemAttrs for syn::ItemMod { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } impl MutItemAttrs for syn::ImplItemFn { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } impl MutItemAttrs for syn::ItemType { - fn mut_item_attrs(&mut self) -> Option<&mut Vec> { - Some(&mut self.attrs) - } + fn mut_item_attrs(&mut self) -> Option<&mut Vec> { + Some(&mut self.attrs) + } } /// Parse for `()` struct Unit; impl syn::parse::Parse for Unit { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let content; - syn::parenthesized!(content in input); - if !content.is_empty() { - let msg = "unexpected tokens, expected nothing inside parenthesis as `()`"; - return Err(syn::Error::new(content.span(), msg)) - } - Ok(Self) - } + 
fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + syn::parenthesized!(content in input); + if !content.is_empty() { + let msg = "unexpected tokens, expected nothing inside parenthesis as `()`"; + return Err(syn::Error::new(content.span(), msg)); + } + Ok(Self) + } } /// Parse for `'static` struct StaticLifetime; impl syn::parse::Parse for StaticLifetime { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lifetime = input.parse::()?; - if lifetime.ident != "static" { - let msg = "unexpected tokens, expected `static`"; - return Err(syn::Error::new(lifetime.ident.span(), msg)) - } - Ok(Self) - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lifetime = input.parse::()?; + if lifetime.ident != "static" { + let msg = "unexpected tokens, expected `static`"; + return Err(syn::Error::new(lifetime.ident.span(), msg)); + } + Ok(Self) + } } /// Check the syntax: `I: 'static = ()` @@ -187,28 +199,28 @@ impl syn::parse::Parse for StaticLifetime { /// /// return the instance if found. 
pub fn check_config_def_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Result<()> { - let expected = "expected `I: 'static = ()`"; - pub struct CheckTraitDefGenerics; - impl syn::parse::Parse for CheckTraitDefGenerics { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self) - } - } - - syn::parse2::(gen.params.to_token_stream()).map_err(|e| { - let msg = format!("Invalid generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })?; - - Ok(()) + let expected = "expected `I: 'static = ()`"; + pub struct CheckTraitDefGenerics; + impl syn::parse::Parse for CheckTraitDefGenerics { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self) + } + } + + syn::parse2::(gen.params.to_token_stream()).map_err(|e| { + let msg = format!("Invalid generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })?; + + Ok(()) } /// Check the syntax: @@ -219,38 +231,41 @@ pub fn check_config_def_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn /// /// return the instance if found. 
pub fn check_type_def_gen_no_bounds( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result { - let expected = "expected `T` or `T, I = ()`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { has_instance: false, span: input.span() }; - - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - } - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `T` or `T, I = ()`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + has_instance: false, + span: input.span(), + }; + + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + } + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -264,76 +279,79 @@ pub fn check_type_def_gen_no_bounds( /// /// return some instance usage if there is some generic, or none otherwise. 
pub fn check_type_def_optional_gen( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result> { - let expected = "expected `` or `T` or `T: Config` or `T, I = ()` or \ + let expected = "expected `` or `T` or `T: Config` or `T, I = ()` or \ `T: Config, I: 'static = ()`"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - if input.is_empty() { - return Ok(Self(None)) - } - - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - input.parse::()?; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))) - } - - let lookahead = input.lookahead1(); - if lookahead.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } else if lookahead.peek(syn::Token![:]) { - input.parse::()?; - input.parse::()?; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))) - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } else { - Err(lookahead.error()) - } - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0 - // Span can be call_site if generic is empty. Thus we replace it. 
- .map(|mut i| { - i.span = span; - i - }); - - Ok(i) + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(Self(None)); + } + + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + input.parse::()?; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))); + } + + let lookahead = input.lookahead1(); + if lookahead.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } else if lookahead.peek(syn::Token![:]) { + input.parse::()?; + input.parse::()?; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))); + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } else { + Err(lookahead.error()) + } + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0 + // Span can be call_site if generic is empty. Thus we replace it. + .map(|mut i| { + i.span = span; + i + }); + + Ok(i) } /// Check the syntax: @@ -342,36 +360,39 @@ pub fn check_type_def_optional_gen( /// /// return the instance if found. 
pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result { - let expected = "expected `Pallet` or `Pallet`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - } - input.parse::]>()?; - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(type_.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid pallet struct: {}", expected); - let mut err = syn::Error::new(type_.span(), msg); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `Pallet` or `Pallet`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + } + input.parse::]>()?; + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(type_.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid pallet struct: {}", expected); + let mut err = syn::Error::new(type_.span(), msg); + err.combine(e); + err + })? 
+ .0; + + Ok(i) } /// Check the generic is: @@ -382,39 +403,42 @@ pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result syn::Result { - let expected = "expected `impl` or `impl, I: 'static>`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![<]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - } - - Ok(Self(instance_usage)) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let mut err = syn::Error::new(span, format!("Invalid generics: {}", expected)); - err.combine(e); - err - })? - .0; - - Ok(i) + let expected = "expected `impl` or `impl, I: 'static>`"; + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![<]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + } + + Ok(Self(instance_usage)) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let mut err = syn::Error::new(span, format!("Invalid generics: {}", expected)); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -427,70 +451,73 @@ pub fn check_impl_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Resu /// /// return the instance if found. 
pub fn check_type_def_gen( - gen: &syn::Generics, - span: proc_macro2::Span, + gen: &syn::Generics, + span: proc_macro2::Span, ) -> syn::Result { - let expected = "expected `T` or `T: Config` or `T, I = ()` or \ + let expected = "expected `T` or `T: Config` or `T, I = ()` or \ `T: Config, I: 'static = ()`"; - pub struct Checker(InstanceUsage); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - input.parse::()?; - - if input.is_empty() { - return Ok(Self(instance_usage)) - } - - let lookahead = input.lookahead1(); - if lookahead.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(instance_usage)) - } else if lookahead.peek(syn::Token![:]) { - input.parse::()?; - input.parse::()?; - - if input.is_empty() { - return Ok(Self(instance_usage)) - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(instance_usage)) - } else { - Err(lookahead.error()) - } - } - } - - let mut i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? - .0; - - // Span can be call_site if generic is empty. Thus we replace it. 
- i.span = span; - - Ok(i) + pub struct Checker(InstanceUsage); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + input.parse::()?; + + if input.is_empty() { + return Ok(Self(instance_usage)); + } + + let lookahead = input.lookahead1(); + if lookahead.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(instance_usage)) + } else if lookahead.peek(syn::Token![:]) { + input.parse::()?; + input.parse::()?; + + if input.is_empty() { + return Ok(Self(instance_usage)); + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(instance_usage)) + } else { + Err(lookahead.error()) + } + } + } + + let mut i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0; + + // Span can be call_site if generic is empty. Thus we replace it. 
+ i.span = span; + + Ok(i) } /// Check the syntax: @@ -501,40 +528,43 @@ pub fn check_type_def_gen( /// return the instance if found for `GenesisBuild` /// return None for BuildGenesisConfig pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result> { - let expected = "expected `BuildGenesisConfig` (or the deprecated `GenesisBuild` or `GenesisBuild`)"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - if input.peek(keyword::GenesisBuild) { - input.parse::()?; - input.parse::()?; - input.parse::()?; - if input.peek(syn::Token![,]) { - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - } - input.parse::]>()?; - return Ok(Self(Some(instance_usage))) - } else { - input.parse::()?; - return Ok(Self(None)) - } - } - } - - let i = syn::parse2::(type_.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid genesis builder: {}", expected); - let mut err = syn::Error::new(type_.span(), msg); - err.combine(e); - err - })? 
- .0; - - Ok(i) + let expected = "expected `BuildGenesisConfig` (or the deprecated `GenesisBuild` or `GenesisBuild`)"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + if input.peek(keyword::GenesisBuild) { + input.parse::()?; + input.parse::()?; + input.parse::()?; + if input.peek(syn::Token![,]) { + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + } + input.parse::]>()?; + return Ok(Self(Some(instance_usage))); + } else { + input.parse::()?; + return Ok(Self(None)); + } + } + } + + let i = syn::parse2::(type_.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid genesis builder: {}", expected); + let mut err = syn::Error::new(type_.span(), msg); + err.combine(e); + err + })? + .0; + + Ok(i) } /// Check the syntax: @@ -546,87 +576,90 @@ pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result syn::Result> { - let expected = "expected `` or `T: Config` or `T: Config, I: 'static`"; - pub struct Checker(Option); - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - if input.is_empty() { - return Ok(Self(None)) - } - - input.parse::()?; - input.parse::()?; - input.parse::()?; - - let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; - - if input.is_empty() { - return Ok(Self(Some(instance_usage))) - } - - instance_usage.has_instance = true; - input.parse::()?; - input.parse::()?; - input.parse::]>()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - - Ok(Self(Some(instance_usage))) - } - } - - let i = syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid type def generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })? 
- .0 - // Span can be call_site if generic is empty. Thus we replace it. - .map(|mut i| { - i.span = span; - i - }); - - Ok(i) + let expected = "expected `` or `T: Config` or `T: Config, I: 'static`"; + pub struct Checker(Option); + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(Self(None)); + } + + input.parse::()?; + input.parse::()?; + input.parse::()?; + + let mut instance_usage = InstanceUsage { + span: input.span(), + has_instance: false, + }; + + if input.is_empty() { + return Ok(Self(Some(instance_usage))); + } + + instance_usage.has_instance = true; + input.parse::()?; + input.parse::()?; + input.parse::]>()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + input.parse::()?; + + Ok(Self(Some(instance_usage))) + } + } + + let i = syn::parse2::(gen.params.to_token_stream()) + .map_err(|e| { + let msg = format!("Invalid type def generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })? + .0 + // Span can be call_site if generic is empty. Thus we replace it. + .map(|mut i| { + i.span = span; + i + }); + + Ok(i) } /// Check the keyword `DispatchResultWithPostInfo` or `DispatchResult`. 
pub fn check_pallet_call_return_type(type_: &syn::Type) -> syn::Result<()> { - pub struct Checker; - impl syn::parse::Parse for Checker { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let lookahead = input.lookahead1(); - if lookahead.peek(keyword::DispatchResultWithPostInfo) { - input.parse::()?; - Ok(Self) - } else if lookahead.peek(keyword::DispatchResult) { - input.parse::()?; - Ok(Self) - } else { - Err(lookahead.error()) - } - } - } - - syn::parse2::(type_.to_token_stream()).map(|_| ()) + pub struct Checker; + impl syn::parse::Parse for Checker { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keyword::DispatchResultWithPostInfo) { + input.parse::()?; + Ok(Self) + } else if lookahead.peek(keyword::DispatchResult) { + input.parse::()?; + Ok(Self) + } else { + Err(lookahead.error()) + } + } + } + + syn::parse2::(type_.to_token_stream()).map(|_| ()) } pub(crate) fn two128_str(s: &str) -> TokenStream { - bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes()).into_iter()) + bytes_to_array(sp_crypto_hashing::twox_128(s.as_bytes()).into_iter()) } pub(crate) fn bytes_to_array(bytes: impl IntoIterator) -> TokenStream { - let bytes = bytes.into_iter(); + let bytes = bytes.into_iter(); - quote!( - [ #( #bytes ),* ] - ) - .into() + quote!( + [ #( #bytes ),* ] + ) + .into() } diff --git a/support/procedural-fork/src/pallet/parse/hooks.rs b/support/procedural-fork/src/pallet/parse/hooks.rs index 37d7d22f4..1cf5c72cc 100644 --- a/support/procedural-fork/src/pallet/parse/hooks.rs +++ b/support/procedural-fork/src/pallet/parse/hooks.rs @@ -20,67 +20,67 @@ use syn::spanned::Spanned; /// Implementation of the pallet hooks. pub struct HooksDef { - /// The index of item in pallet. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, - /// The where_clause used. 
- pub where_clause: Option, - /// The span of the pallet::hooks attribute. - pub attr_span: proc_macro2::Span, - /// Boolean flag, set to true if the `on_runtime_upgrade` method of hooks was implemented. - pub has_runtime_upgrade: bool, + /// The index of item in pallet. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. + pub instances: Vec, + /// The where_clause used. + pub where_clause: Option, + /// The span of the pallet::hooks attribute. + pub attr_span: proc_macro2::Span, + /// Boolean flag, set to true if the `on_runtime_upgrade` method of hooks was implemented. + pub has_runtime_upgrade: bool, } impl HooksDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::hooks, expected item impl"; - return Err(syn::Error::new(item.span(), msg)) - }; + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::hooks, expected item impl"; + return Err(syn::Error::new(item.span(), msg)); + }; - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; - let item_trait = &item - .trait_ - .as_ref() - .ok_or_else(|| { - let msg = "Invalid pallet::hooks, expected impl<..> Hooks \ + let item_trait = &item + .trait_ + .as_ref() + .ok_or_else(|| { + let msg = "Invalid pallet::hooks, expected impl<..> Hooks \ for Pallet<..>"; - syn::Error::new(item.span(), msg) - })? - .1; + syn::Error::new(item.span(), msg) + })? 
+ .1; - if item_trait.segments.len() != 1 || item_trait.segments[0].ident != "Hooks" { - let msg = format!( - "Invalid pallet::hooks, expected trait to be `Hooks` found `{}`\ + if item_trait.segments.len() != 1 || item_trait.segments[0].ident != "Hooks" { + let msg = format!( + "Invalid pallet::hooks, expected trait to be `Hooks` found `{}`\ , you can import from `frame_support::pallet_prelude`", - quote::quote!(#item_trait) - ); + quote::quote!(#item_trait) + ); - return Err(syn::Error::new(item_trait.span(), msg)) - } + return Err(syn::Error::new(item_trait.span(), msg)); + } - let has_runtime_upgrade = item.items.iter().any(|i| match i { - syn::ImplItem::Fn(method) => method.sig.ident == "on_runtime_upgrade", - _ => false, - }); + let has_runtime_upgrade = item.items.iter().any(|i| match i { + syn::ImplItem::Fn(method) => method.sig.ident == "on_runtime_upgrade", + _ => false, + }); - Ok(Self { - attr_span, - index, - instances, - has_runtime_upgrade, - where_clause: item.generics.where_clause.clone(), - }) - } + Ok(Self { + attr_span, + index, + instances, + has_runtime_upgrade, + where_clause: item.generics.where_clause.clone(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/inherent.rs b/support/procedural-fork/src/pallet/parse/inherent.rs index d8641691a..4eb04e914 100644 --- a/support/procedural-fork/src/pallet/parse/inherent.rs +++ b/support/procedural-fork/src/pallet/parse/inherent.rs @@ -20,41 +20,41 @@ use syn::spanned::Spanned; /// The definition of the pallet inherent implementation. pub struct InherentDef { - /// The index of inherent item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, + /// The index of inherent item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with trait. 
+ pub instances: Vec, } impl InherentDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::inherent, expected item impl"; - return Err(syn::Error::new(item.span(), msg)) - }; - - if item.trait_.is_none() { - let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)) - } - - if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { - if last.ident != "ProvideInherent" { - let msg = "Invalid pallet::inherent, expected trait ProvideInherent"; - return Err(syn::Error::new(last.span(), msg)) - } - } else { - let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)) - } - - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; - - Ok(InherentDef { index, instances }) - } + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::inherent, expected item impl"; + return Err(syn::Error::new(item.span(), msg)); + }; + + if item.trait_.is_none() { + let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)); + } + + if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { + if last.ident != "ProvideInherent" { + let msg = "Invalid pallet::inherent, expected trait ProvideInherent"; + return Err(syn::Error::new(last.span(), msg)); + } + } else { + let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; + return Err(syn::Error::new(item.span(), msg)); + } + + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + helper::check_impl_gen(&item.generics, 
item.impl_token.span())?, + ]; + + Ok(InherentDef { index, instances }) + } } diff --git a/support/procedural-fork/src/pallet/parse/mod.rs b/support/procedural-fork/src/pallet/parse/mod.rs index 6e1277461..57c252473 100644 --- a/support/procedural-fork/src/pallet/parse/mod.rs +++ b/support/procedural-fork/src/pallet/parse/mod.rs @@ -47,68 +47,68 @@ use syn::spanned::Spanned; /// Parsed definition of a pallet. pub struct Def { - /// The module items. - /// (their order must not be modified because they are registered in individual definitions). - pub item: syn::ItemMod, - pub config: config::ConfigDef, - pub pallet_struct: pallet_struct::PalletStructDef, - pub hooks: Option, - pub call: Option, - pub tasks: Option, - pub task_enum: Option, - pub storages: Vec, - pub error: Option, - pub event: Option, - pub origin: Option, - pub inherent: Option, - pub genesis_config: Option, - pub genesis_build: Option, - pub validate_unsigned: Option, - pub extra_constants: Option, - pub composites: Vec, - pub type_values: Vec, - pub frame_system: syn::Path, - pub frame_support: syn::Path, - pub dev_mode: bool, + /// The module items. + /// (their order must not be modified because they are registered in individual definitions). 
+ pub item: syn::ItemMod, + pub config: config::ConfigDef, + pub pallet_struct: pallet_struct::PalletStructDef, + pub hooks: Option, + pub call: Option, + pub tasks: Option, + pub task_enum: Option, + pub storages: Vec, + pub error: Option, + pub event: Option, + pub origin: Option, + pub inherent: Option, + pub genesis_config: Option, + pub genesis_build: Option, + pub validate_unsigned: Option, + pub extra_constants: Option, + pub composites: Vec, + pub type_values: Vec, + pub frame_system: syn::Path, + pub frame_support: syn::Path, + pub dev_mode: bool, } impl Def { - pub fn try_from(mut item: syn::ItemMod, dev_mode: bool) -> syn::Result { - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - - let item_span = item.span(); - let items = &mut item - .content - .as_mut() - .ok_or_else(|| { - let msg = "Invalid pallet definition, expected mod to be inlined."; - syn::Error::new(item_span, msg) - })? 
- .1; - - let mut config = None; - let mut pallet_struct = None; - let mut hooks = None; - let mut call = None; - let mut tasks = None; - let mut task_enum = None; - let mut error = None; - let mut event = None; - let mut origin = None; - let mut inherent = None; - let mut genesis_config = None; - let mut genesis_build = None; - let mut validate_unsigned = None; - let mut extra_constants = None; - let mut storages = vec![]; - let mut type_values = vec![]; - let mut composites: Vec = vec![]; - - for (index, item) in items.iter_mut().enumerate() { - let pallet_attr: Option = helper::take_first_item_pallet_attr(item)?; - - match pallet_attr { + pub fn try_from(mut item: syn::ItemMod, dev_mode: bool) -> syn::Result { + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + + let item_span = item.span(); + let items = &mut item + .content + .as_mut() + .ok_or_else(|| { + let msg = "Invalid pallet definition, expected mod to be inlined."; + syn::Error::new(item_span, msg) + })? 
+ .1; + + let mut config = None; + let mut pallet_struct = None; + let mut hooks = None; + let mut call = None; + let mut tasks = None; + let mut task_enum = None; + let mut error = None; + let mut event = None; + let mut origin = None; + let mut inherent = None; + let mut genesis_config = None; + let mut genesis_build = None; + let mut validate_unsigned = None; + let mut extra_constants = None; + let mut storages = vec![]; + let mut type_values = vec![]; + let mut composites: Vec = vec![]; + + for (index, item) in items.iter_mut().enumerate() { + let pallet_attr: Option = helper::take_first_item_pallet_attr(item)?; + + match pallet_attr { Some(PalletAttr::Config(span, with_default)) if config.is_none() => config = Some(config::ConfigDef::try_from( &frame_system, @@ -212,538 +212,596 @@ impl Def { }, None => (), } - } + } - if genesis_config.is_some() != genesis_build.is_some() { - let msg = format!( - "`#[pallet::genesis_config]` and `#[pallet::genesis_build]` attributes must be \ + if genesis_config.is_some() != genesis_build.is_some() { + let msg = format!( + "`#[pallet::genesis_config]` and `#[pallet::genesis_build]` attributes must be \ either both used or both not used, instead genesis_config is {} and genesis_build \ is {}", - genesis_config.as_ref().map_or("unused", |_| "used"), - genesis_build.as_ref().map_or("unused", |_| "used"), - ); - return Err(syn::Error::new(item_span, msg)) - } - - Self::resolve_tasks(&item_span, &mut tasks, &mut task_enum, items)?; - - let def = Def { - item, - config: config - .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, - pallet_struct: pallet_struct - .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, - hooks, - call, - tasks, - task_enum, - extra_constants, - genesis_config, - genesis_build, - validate_unsigned, - error, - event, - origin, - inherent, - storages, - composites, - type_values, - frame_system, - frame_support, - dev_mode, - }; - - 
def.check_instance_usage()?; - def.check_event_usage()?; - - Ok(def) - } - - /// Performs extra logic checks necessary for the `#[pallet::tasks_experimental]` feature. - fn resolve_tasks( - item_span: &proc_macro2::Span, - tasks: &mut Option, - task_enum: &mut Option, - items: &mut Vec, - ) -> syn::Result<()> { - // fallback for manual (without macros) definition of tasks impl - Self::resolve_manual_tasks_impl(tasks, task_enum, items)?; - - // fallback for manual (without macros) definition of task enum - Self::resolve_manual_task_enum(tasks, task_enum, items)?; - - // ensure that if `task_enum` is specified, `tasks` is also specified - match (&task_enum, &tasks) { - (Some(_), None) => - return Err(syn::Error::new( - *item_span, - "Missing `#[pallet::tasks_experimental]` impl", - )), - (None, Some(tasks)) => - if tasks.tasks_attr.is_none() { - return Err(syn::Error::new( + genesis_config.as_ref().map_or("unused", |_| "used"), + genesis_build.as_ref().map_or("unused", |_| "used"), + ); + return Err(syn::Error::new(item_span, msg)); + } + + Self::resolve_tasks(&item_span, &mut tasks, &mut task_enum, items)?; + + let def = Def { + item, + config: config + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, + pallet_struct: pallet_struct + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, + hooks, + call, + tasks, + task_enum, + extra_constants, + genesis_config, + genesis_build, + validate_unsigned, + error, + event, + origin, + inherent, + storages, + composites, + type_values, + frame_system, + frame_support, + dev_mode, + }; + + def.check_instance_usage()?; + def.check_event_usage()?; + + Ok(def) + } + + /// Performs extra logic checks necessary for the `#[pallet::tasks_experimental]` feature. 
+ fn resolve_tasks( + item_span: &proc_macro2::Span, + tasks: &mut Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + // fallback for manual (without macros) definition of tasks impl + Self::resolve_manual_tasks_impl(tasks, task_enum, items)?; + + // fallback for manual (without macros) definition of task enum + Self::resolve_manual_task_enum(tasks, task_enum, items)?; + + // ensure that if `task_enum` is specified, `tasks` is also specified + match (&task_enum, &tasks) { + (Some(_), None) => { + return Err(syn::Error::new( + *item_span, + "Missing `#[pallet::tasks_experimental]` impl", + )) + } + (None, Some(tasks)) => { + if tasks.tasks_attr.is_none() { + return Err(syn::Error::new( tasks.item_impl.impl_token.span(), "A `#[pallet::tasks_experimental]` attribute must be attached to your `Task` impl if the \ task enum has been omitted", - )) - } else { - }, - _ => (), - } - - Ok(()) - } - - /// Tries to locate task enum based on the tasks impl target if attribute is not specified - /// but impl is present. If one is found, `task_enum` is set appropriately. - fn resolve_manual_task_enum( - tasks: &Option, - task_enum: &mut Option, - items: &mut Vec, - ) -> syn::Result<()> { - let (None, Some(tasks)) = (&task_enum, &tasks) else { return Ok(()) }; - let syn::Type::Path(type_path) = &*tasks.item_impl.self_ty else { return Ok(()) }; - let type_path = type_path.path.segments.iter().collect::>(); - let (Some(seg), None) = (type_path.get(0), type_path.get(1)) else { return Ok(()) }; - let mut result = None; - for item in items { - let syn::Item::Enum(item_enum) = item else { continue }; - if item_enum.ident == seg.ident { - result = Some(syn::parse2::(item_enum.to_token_stream())?); - // replace item with a no-op because it will be handled by the expansion of - // `task_enum`. 
We use a no-op instead of simply removing it from the vec - // so that any indices collected by `Def::try_from` remain accurate - *item = syn::Item::Verbatim(quote::quote!()); - break - } - } - *task_enum = result; - Ok(()) - } - - /// Tries to locate a manual tasks impl (an impl implementing a trait whose last path segment is - /// `Task`) in the event that one has not been found already via the attribute macro - pub fn resolve_manual_tasks_impl( - tasks: &mut Option, - task_enum: &Option, - items: &Vec, - ) -> syn::Result<()> { - let None = tasks else { return Ok(()) }; - let mut result = None; - for item in items { - let syn::Item::Impl(item_impl) = item else { continue }; - let Some((_, path, _)) = &item_impl.trait_ else { continue }; - let Some(trait_last_seg) = path.segments.last() else { continue }; - let syn::Type::Path(target_path) = &*item_impl.self_ty else { continue }; - let target_path = target_path.path.segments.iter().collect::>(); - let (Some(target_ident), None) = (target_path.get(0), target_path.get(1)) else { - continue - }; - let matches_task_enum = match task_enum { - Some(task_enum) => task_enum.item_enum.ident == target_ident.ident, - None => true, - }; - if trait_last_seg.ident == "Task" && matches_task_enum { - result = Some(syn::parse2::(item_impl.to_token_stream())?); - break - } - } - *tasks = result; - Ok(()) - } - - /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared - /// and trait defines type RuntimeEvent, or not declared and no trait associated type. - fn check_event_usage(&self) -> syn::Result<()> { - match (self.config.has_event_type, self.event.is_some()) { - (true, false) => { - let msg = "Invalid usage of RuntimeEvent, `Config` contains associated type `RuntimeEvent`, \ + )); + } else { + } + } + _ => (), + } + + Ok(()) + } + + /// Tries to locate task enum based on the tasks impl target if attribute is not specified + /// but impl is present. 
If one is found, `task_enum` is set appropriately. + fn resolve_manual_task_enum( + tasks: &Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + let (None, Some(tasks)) = (&task_enum, &tasks) else { + return Ok(()); + }; + let syn::Type::Path(type_path) = &*tasks.item_impl.self_ty else { + return Ok(()); + }; + let type_path = type_path.path.segments.iter().collect::>(); + let (Some(seg), None) = (type_path.get(0), type_path.get(1)) else { + return Ok(()); + }; + let mut result = None; + for item in items { + let syn::Item::Enum(item_enum) = item else { + continue; + }; + if item_enum.ident == seg.ident { + result = Some(syn::parse2::( + item_enum.to_token_stream(), + )?); + // replace item with a no-op because it will be handled by the expansion of + // `task_enum`. We use a no-op instead of simply removing it from the vec + // so that any indices collected by `Def::try_from` remain accurate + *item = syn::Item::Verbatim(quote::quote!()); + break; + } + } + *task_enum = result; + Ok(()) + } + + /// Tries to locate a manual tasks impl (an impl implementing a trait whose last path segment is + /// `Task`) in the event that one has not been found already via the attribute macro + pub fn resolve_manual_tasks_impl( + tasks: &mut Option, + task_enum: &Option, + items: &Vec, + ) -> syn::Result<()> { + let None = tasks else { return Ok(()) }; + let mut result = None; + for item in items { + let syn::Item::Impl(item_impl) = item else { + continue; + }; + let Some((_, path, _)) = &item_impl.trait_ else { + continue; + }; + let Some(trait_last_seg) = path.segments.last() else { + continue; + }; + let syn::Type::Path(target_path) = &*item_impl.self_ty else { + continue; + }; + let target_path = target_path.path.segments.iter().collect::>(); + let (Some(target_ident), None) = (target_path.get(0), target_path.get(1)) else { + continue; + }; + let matches_task_enum = match task_enum { + Some(task_enum) => task_enum.item_enum.ident == 
target_ident.ident, + None => true, + }; + if trait_last_seg.ident == "Task" && matches_task_enum { + result = Some(syn::parse2::(item_impl.to_token_stream())?); + break; + } + } + *tasks = result; + Ok(()) + } + + /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared + /// and trait defines type RuntimeEvent, or not declared and no trait associated type. + fn check_event_usage(&self) -> syn::Result<()> { + match (self.config.has_event_type, self.event.is_some()) { + (true, false) => { + let msg = "Invalid usage of RuntimeEvent, `Config` contains associated type `RuntimeEvent`, \ but enum `Event` is not declared (i.e. no use of `#[pallet::event]`). \ Note that type `RuntimeEvent` in trait is reserved to work alongside pallet event."; - Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) - }, - (false, true) => { - let msg = "Invalid usage of RuntimeEvent, `Config` contains no associated type \ + Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) + } + (false, true) => { + let msg = "Invalid usage of RuntimeEvent, `Config` contains no associated type \ `RuntimeEvent`, but enum `Event` is declared (in use of `#[pallet::event]`). \ An RuntimeEvent associated type must be declare on trait `Config`."; - Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) - }, - _ => Ok(()), - } - } - - /// Check that usage of trait `Config` is consistent with the definition, i.e. it is used with - /// instance iff it is defined with instance. 
- fn check_instance_usage(&self) -> syn::Result<()> { - let mut instances = vec![]; - instances.extend_from_slice(&self.pallet_struct.instances[..]); - instances.extend(&mut self.storages.iter().flat_map(|s| s.instances.clone())); - if let Some(call) = &self.call { - instances.extend_from_slice(&call.instances[..]); - } - if let Some(hooks) = &self.hooks { - instances.extend_from_slice(&hooks.instances[..]); - } - if let Some(event) = &self.event { - instances.extend_from_slice(&event.instances[..]); - } - if let Some(error) = &self.error { - instances.extend_from_slice(&error.instances[..]); - } - if let Some(inherent) = &self.inherent { - instances.extend_from_slice(&inherent.instances[..]); - } - if let Some(origin) = &self.origin { - instances.extend_from_slice(&origin.instances[..]); - } - if let Some(genesis_config) = &self.genesis_config { - instances.extend_from_slice(&genesis_config.instances[..]); - } - if let Some(genesis_build) = &self.genesis_build { - genesis_build.instances.as_ref().map(|i| instances.extend_from_slice(&i)); - } - if let Some(extra_constants) = &self.extra_constants { - instances.extend_from_slice(&extra_constants.instances[..]); - } - - let mut errors = instances.into_iter().filter_map(|instances| { - if instances.has_instance == self.config.has_instance { - return None - } - let msg = if self.config.has_instance { - "Invalid generic declaration, trait is defined with instance but generic use none" - } else { - "Invalid generic declaration, trait is defined without instance but generic use \ + Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) + } + _ => Ok(()), + } + } + + /// Check that usage of trait `Config` is consistent with the definition, i.e. it is used with + /// instance iff it is defined with instance. 
+ fn check_instance_usage(&self) -> syn::Result<()> { + let mut instances = vec![]; + instances.extend_from_slice(&self.pallet_struct.instances[..]); + instances.extend(&mut self.storages.iter().flat_map(|s| s.instances.clone())); + if let Some(call) = &self.call { + instances.extend_from_slice(&call.instances[..]); + } + if let Some(hooks) = &self.hooks { + instances.extend_from_slice(&hooks.instances[..]); + } + if let Some(event) = &self.event { + instances.extend_from_slice(&event.instances[..]); + } + if let Some(error) = &self.error { + instances.extend_from_slice(&error.instances[..]); + } + if let Some(inherent) = &self.inherent { + instances.extend_from_slice(&inherent.instances[..]); + } + if let Some(origin) = &self.origin { + instances.extend_from_slice(&origin.instances[..]); + } + if let Some(genesis_config) = &self.genesis_config { + instances.extend_from_slice(&genesis_config.instances[..]); + } + if let Some(genesis_build) = &self.genesis_build { + genesis_build + .instances + .as_ref() + .map(|i| instances.extend_from_slice(&i)); + } + if let Some(extra_constants) = &self.extra_constants { + instances.extend_from_slice(&extra_constants.instances[..]); + } + + let mut errors = instances.into_iter().filter_map(|instances| { + if instances.has_instance == self.config.has_instance { + return None; + } + let msg = if self.config.has_instance { + "Invalid generic declaration, trait is defined with instance but generic use none" + } else { + "Invalid generic declaration, trait is defined without instance but generic use \ some" - }; - Some(syn::Error::new(instances.span, msg)) - }); - - if let Some(mut first_error) = errors.next() { - for error in errors { - first_error.combine(error) - } - Err(first_error) - } else { - Ok(()) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T: Config` - /// * or `T: Config, I: 'static` - pub fn type_impl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if 
self.config.has_instance { - quote::quote_spanned!(span => T: Config, I: 'static) - } else { - quote::quote_spanned!(span => T: Config) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T: Config` - /// * or `T: Config, I: 'static = ()` - pub fn type_decl_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T: Config, I: 'static = ()) - } else { - quote::quote_spanned!(span => T: Config) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T` - /// * or `T, I = ()` - pub fn type_decl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T, I = ()) - } else { - quote::quote_spanned!(span => T) - } - } - - /// Depending on if pallet is instantiable: - /// * either `` - /// * or `` - /// to be used when using pallet trait `Config` - pub fn trait_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => ) - } else { - quote::quote_spanned!(span => ) - } - } - - /// Depending on if pallet is instantiable: - /// * either `T` - /// * or `T, I` - pub fn type_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - if self.config.has_instance { - quote::quote_spanned!(span => T, I) - } else { - quote::quote_spanned!(span => T) - } - } + }; + Some(syn::Error::new(instances.span, msg)) + }); + + if let Some(mut first_error) = errors.next() { + for error in errors { + first_error.combine(error) + } + Err(first_error) + } else { + Ok(()) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T: Config` + /// * or `T: Config, I: 'static` + pub fn type_impl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T: Config, I: 'static) + } else { + quote::quote_spanned!(span => 
T: Config) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T: Config` + /// * or `T: Config, I: 'static = ()` + pub fn type_decl_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T: Config, I: 'static = ()) + } else { + quote::quote_spanned!(span => T: Config) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T` + /// * or `T, I = ()` + pub fn type_decl_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T, I = ()) + } else { + quote::quote_spanned!(span => T) + } + } + + /// Depending on if pallet is instantiable: + /// * either `` + /// * or `` + /// to be used when using pallet trait `Config` + pub fn trait_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => ) + } else { + quote::quote_spanned!(span => ) + } + } + + /// Depending on if pallet is instantiable: + /// * either `T` + /// * or `T, I` + pub fn type_use_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + if self.config.has_instance { + quote::quote_spanned!(span => T, I) + } else { + quote::quote_spanned!(span => T) + } + } } /// Some generic kind for type which can be not generic, or generic over config, /// or generic over config and instance, but not generic only over instance. pub enum GenericKind { - None, - Config, - ConfigAndInstance, + None, + Config, + ConfigAndInstance, } impl GenericKind { - /// Return Err if it is only generics over instance but not over config. 
- pub fn from_gens(has_config: bool, has_instance: bool) -> Result { - match (has_config, has_instance) { - (false, false) => Ok(GenericKind::None), - (true, false) => Ok(GenericKind::Config), - (true, true) => Ok(GenericKind::ConfigAndInstance), - (false, true) => Err(()), - } - } - - /// Return the generic to be used when using the type. - /// - /// Depending on its definition it can be: ``, `T` or `T, I` - pub fn type_use_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - match self { - GenericKind::None => quote::quote!(), - GenericKind::Config => quote::quote_spanned!(span => T), - GenericKind::ConfigAndInstance => quote::quote_spanned!(span => T, I), - } - } - - /// Return the generic to be used in `impl<..>` when implementing on the type. - pub fn type_impl_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - match self { - GenericKind::None => quote::quote!(), - GenericKind::Config => quote::quote_spanned!(span => T: Config), - GenericKind::ConfigAndInstance => { - quote::quote_spanned!(span => T: Config, I: 'static) - }, - } - } - - /// Return whereas the type has some generic. - pub fn is_generic(&self) -> bool { - match self { - GenericKind::None => false, - GenericKind::Config | GenericKind::ConfigAndInstance => true, - } - } + /// Return Err if it is only generics over instance but not over config. + pub fn from_gens(has_config: bool, has_instance: bool) -> Result { + match (has_config, has_instance) { + (false, false) => Ok(GenericKind::None), + (true, false) => Ok(GenericKind::Config), + (true, true) => Ok(GenericKind::ConfigAndInstance), + (false, true) => Err(()), + } + } + + /// Return the generic to be used when using the type. 
+ /// + /// Depending on its definition it can be: ``, `T` or `T, I` + pub fn type_use_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + match self { + GenericKind::None => quote::quote!(), + GenericKind::Config => quote::quote_spanned!(span => T), + GenericKind::ConfigAndInstance => quote::quote_spanned!(span => T, I), + } + } + + /// Return the generic to be used in `impl<..>` when implementing on the type. + pub fn type_impl_gen(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + match self { + GenericKind::None => quote::quote!(), + GenericKind::Config => quote::quote_spanned!(span => T: Config), + GenericKind::ConfigAndInstance => { + quote::quote_spanned!(span => T: Config, I: 'static) + } + } + } + + /// Return whereas the type has some generic. + pub fn is_generic(&self) -> bool { + match self { + GenericKind::None => false, + GenericKind::Config | GenericKind::ConfigAndInstance => true, + } + } } /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(origin); - syn::custom_keyword!(call); - syn::custom_keyword!(tasks_experimental); - syn::custom_keyword!(task_enum); - syn::custom_keyword!(task_list); - syn::custom_keyword!(task_condition); - syn::custom_keyword!(task_index); - syn::custom_keyword!(weight); - syn::custom_keyword!(event); - syn::custom_keyword!(config); - syn::custom_keyword!(with_default); - syn::custom_keyword!(hooks); - syn::custom_keyword!(inherent); - syn::custom_keyword!(error); - syn::custom_keyword!(storage); - syn::custom_keyword!(genesis_build); - syn::custom_keyword!(genesis_config); - syn::custom_keyword!(validate_unsigned); - syn::custom_keyword!(type_value); - syn::custom_keyword!(pallet); - syn::custom_keyword!(extra_constants); - syn::custom_keyword!(composite_enum); + syn::custom_keyword!(origin); + syn::custom_keyword!(call); + syn::custom_keyword!(tasks_experimental); + syn::custom_keyword!(task_enum); + syn::custom_keyword!(task_list); + syn::custom_keyword!(task_condition); + syn::custom_keyword!(task_index); + syn::custom_keyword!(weight); + syn::custom_keyword!(event); + syn::custom_keyword!(config); + syn::custom_keyword!(with_default); + syn::custom_keyword!(hooks); + syn::custom_keyword!(inherent); + syn::custom_keyword!(error); + syn::custom_keyword!(storage); + syn::custom_keyword!(genesis_build); + syn::custom_keyword!(genesis_config); + syn::custom_keyword!(validate_unsigned); + syn::custom_keyword!(type_value); + syn::custom_keyword!(pallet); + syn::custom_keyword!(extra_constants); + syn::custom_keyword!(composite_enum); } /// Parse attributes for item in pallet module /// syntax must be `pallet::` (e.g. `#[pallet::config]`) enum PalletAttr { - Config(proc_macro2::Span, bool), - Pallet(proc_macro2::Span), - Hooks(proc_macro2::Span), - /// A `#[pallet::call]` with optional attributes to specialize the behaviour. 
- /// - /// # Attributes - /// - /// Each attribute `attr` can take the form of `#[pallet::call(attr = …)]` or - /// `#[pallet::call(attr(…))]`. The possible attributes are: - /// - /// ## `weight` - /// - /// Can be used to reduce the repetitive weight annotation in the trivial case. It accepts one - /// argument that is expected to be an implementation of the `WeightInfo` or something that - /// behaves syntactically equivalent. This allows to annotate a `WeightInfo` for all the calls. - /// Now each call does not need to specify its own `#[pallet::weight]` but can instead use the - /// one from the `#[pallet::call]` definition. So instead of having to write it on each call: - /// - /// ```ignore - /// #[pallet::call] - /// impl Pallet { - /// #[pallet::weight(T::WeightInfo::create())] - /// pub fn create( - /// ``` - /// you can now omit it on the call itself, if the name of the weigh function matches the call: - /// - /// ```ignore - /// #[pallet::call(weight = ::WeightInfo)] - /// impl Pallet { - /// pub fn create( - /// ``` - /// - /// It is possible to use this syntax together with instantiated pallets by using `Config` - /// instead. - /// - /// ### Dev Mode - /// - /// Normally the `dev_mode` sets all weights of calls without a `#[pallet::weight]` annotation - /// to zero. Now when there is a `weight` attribute on the `#[pallet::call]`, then that is used - /// instead of the zero weight. So to say: it works together with `dev_mode`. 
- RuntimeCall(Option, proc_macro2::Span), - Error(proc_macro2::Span), - Tasks(proc_macro2::Span), - TaskList(proc_macro2::Span), - TaskCondition(proc_macro2::Span), - TaskIndex(proc_macro2::Span), - RuntimeTask(proc_macro2::Span), - RuntimeEvent(proc_macro2::Span), - RuntimeOrigin(proc_macro2::Span), - Inherent(proc_macro2::Span), - Storage(proc_macro2::Span), - GenesisConfig(proc_macro2::Span), - GenesisBuild(proc_macro2::Span), - ValidateUnsigned(proc_macro2::Span), - TypeValue(proc_macro2::Span), - ExtraConstants(proc_macro2::Span), - Composite(proc_macro2::Span), + Config(proc_macro2::Span, bool), + Pallet(proc_macro2::Span), + Hooks(proc_macro2::Span), + /// A `#[pallet::call]` with optional attributes to specialize the behaviour. + /// + /// # Attributes + /// + /// Each attribute `attr` can take the form of `#[pallet::call(attr = …)]` or + /// `#[pallet::call(attr(…))]`. The possible attributes are: + /// + /// ## `weight` + /// + /// Can be used to reduce the repetitive weight annotation in the trivial case. It accepts one + /// argument that is expected to be an implementation of the `WeightInfo` or something that + /// behaves syntactically equivalent. This allows to annotate a `WeightInfo` for all the calls. + /// Now each call does not need to specify its own `#[pallet::weight]` but can instead use the + /// one from the `#[pallet::call]` definition. So instead of having to write it on each call: + /// + /// ```ignore + /// #[pallet::call] + /// impl Pallet { + /// #[pallet::weight(T::WeightInfo::create())] + /// pub fn create( + /// ``` + /// you can now omit it on the call itself, if the name of the weigh function matches the call: + /// + /// ```ignore + /// #[pallet::call(weight = ::WeightInfo)] + /// impl Pallet { + /// pub fn create( + /// ``` + /// + /// It is possible to use this syntax together with instantiated pallets by using `Config` + /// instead. 
+ /// + /// ### Dev Mode + /// + /// Normally the `dev_mode` sets all weights of calls without a `#[pallet::weight]` annotation + /// to zero. Now when there is a `weight` attribute on the `#[pallet::call]`, then that is used + /// instead of the zero weight. So to say: it works together with `dev_mode`. + RuntimeCall(Option, proc_macro2::Span), + Error(proc_macro2::Span), + Tasks(proc_macro2::Span), + TaskList(proc_macro2::Span), + TaskCondition(proc_macro2::Span), + TaskIndex(proc_macro2::Span), + RuntimeTask(proc_macro2::Span), + RuntimeEvent(proc_macro2::Span), + RuntimeOrigin(proc_macro2::Span), + Inherent(proc_macro2::Span), + Storage(proc_macro2::Span), + GenesisConfig(proc_macro2::Span), + GenesisBuild(proc_macro2::Span), + ValidateUnsigned(proc_macro2::Span), + TypeValue(proc_macro2::Span), + ExtraConstants(proc_macro2::Span), + Composite(proc_macro2::Span), } impl PalletAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::Config(span, _) => *span, - Self::Pallet(span) => *span, - Self::Hooks(span) => *span, - Self::Tasks(span) => *span, - Self::TaskCondition(span) => *span, - Self::TaskIndex(span) => *span, - Self::TaskList(span) => *span, - Self::Error(span) => *span, - Self::RuntimeTask(span) => *span, - Self::RuntimeCall(_, span) => *span, - Self::RuntimeEvent(span) => *span, - Self::RuntimeOrigin(span) => *span, - Self::Inherent(span) => *span, - Self::Storage(span) => *span, - Self::GenesisConfig(span) => *span, - Self::GenesisBuild(span) => *span, - Self::ValidateUnsigned(span) => *span, - Self::TypeValue(span) => *span, - Self::ExtraConstants(span) => *span, - Self::Composite(span) => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::Config(span, _) => *span, + Self::Pallet(span) => *span, + Self::Hooks(span) => *span, + Self::Tasks(span) => *span, + Self::TaskCondition(span) => *span, + Self::TaskIndex(span) => *span, + Self::TaskList(span) => *span, + Self::Error(span) => *span, + 
Self::RuntimeTask(span) => *span, + Self::RuntimeCall(_, span) => *span, + Self::RuntimeEvent(span) => *span, + Self::RuntimeOrigin(span) => *span, + Self::Inherent(span) => *span, + Self::Storage(span) => *span, + Self::GenesisConfig(span) => *span, + Self::GenesisBuild(span) => *span, + Self::ValidateUnsigned(span) => *span, + Self::TypeValue(span) => *span, + Self::ExtraConstants(span) => *span, + Self::Composite(span) => *span, + } + } } impl syn::parse::Parse for PalletAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::config) { - let span = content.parse::()?.span(); - let with_default = content.peek(syn::token::Paren); - if with_default { - let inside_config; - let _paren = syn::parenthesized!(inside_config in content); - inside_config.parse::()?; - } - Ok(PalletAttr::Config(span, with_default)) - } else if lookahead.peek(keyword::pallet) { - Ok(PalletAttr::Pallet(content.parse::()?.span())) - } else if lookahead.peek(keyword::hooks) { - Ok(PalletAttr::Hooks(content.parse::()?.span())) - } else if lookahead.peek(keyword::call) { - let span = content.parse::().expect("peeked").span(); - let attr = match content.is_empty() { - true => None, - false => Some(InheritedCallWeightAttr::parse(&content)?), - }; - Ok(PalletAttr::RuntimeCall(attr, span)) - } else if lookahead.peek(keyword::tasks_experimental) { - Ok(PalletAttr::Tasks(content.parse::()?.span())) - } else if lookahead.peek(keyword::task_enum) { - Ok(PalletAttr::RuntimeTask(content.parse::()?.span())) - } else if lookahead.peek(keyword::task_condition) { - Ok(PalletAttr::TaskCondition(content.parse::()?.span())) - } else if lookahead.peek(keyword::task_index) { - Ok(PalletAttr::TaskIndex(content.parse::()?.span())) - } else if lookahead.peek(keyword::task_list) { - 
Ok(PalletAttr::TaskList(content.parse::()?.span())) - } else if lookahead.peek(keyword::error) { - Ok(PalletAttr::Error(content.parse::()?.span())) - } else if lookahead.peek(keyword::event) { - Ok(PalletAttr::RuntimeEvent(content.parse::()?.span())) - } else if lookahead.peek(keyword::origin) { - Ok(PalletAttr::RuntimeOrigin(content.parse::()?.span())) - } else if lookahead.peek(keyword::inherent) { - Ok(PalletAttr::Inherent(content.parse::()?.span())) - } else if lookahead.peek(keyword::storage) { - Ok(PalletAttr::Storage(content.parse::()?.span())) - } else if lookahead.peek(keyword::genesis_config) { - Ok(PalletAttr::GenesisConfig(content.parse::()?.span())) - } else if lookahead.peek(keyword::genesis_build) { - Ok(PalletAttr::GenesisBuild(content.parse::()?.span())) - } else if lookahead.peek(keyword::validate_unsigned) { - Ok(PalletAttr::ValidateUnsigned(content.parse::()?.span())) - } else if lookahead.peek(keyword::type_value) { - Ok(PalletAttr::TypeValue(content.parse::()?.span())) - } else if lookahead.peek(keyword::extra_constants) { - Ok(PalletAttr::ExtraConstants(content.parse::()?.span())) - } else if lookahead.peek(keyword::composite_enum) { - Ok(PalletAttr::Composite(content.parse::()?.span())) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::config) { + let span = content.parse::()?.span(); + let with_default = content.peek(syn::token::Paren); + if with_default { + let inside_config; + let _paren = syn::parenthesized!(inside_config in content); + inside_config.parse::()?; + } + Ok(PalletAttr::Config(span, with_default)) + } else if lookahead.peek(keyword::pallet) { + Ok(PalletAttr::Pallet( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::hooks) { + 
Ok(PalletAttr::Hooks(content.parse::()?.span())) + } else if lookahead.peek(keyword::call) { + let span = content.parse::().expect("peeked").span(); + let attr = match content.is_empty() { + true => None, + false => Some(InheritedCallWeightAttr::parse(&content)?), + }; + Ok(PalletAttr::RuntimeCall(attr, span)) + } else if lookahead.peek(keyword::tasks_experimental) { + Ok(PalletAttr::Tasks( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::task_enum) { + Ok(PalletAttr::RuntimeTask( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::task_condition) { + Ok(PalletAttr::TaskCondition( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::task_index) { + Ok(PalletAttr::TaskIndex( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::task_list) { + Ok(PalletAttr::TaskList( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::error) { + Ok(PalletAttr::Error(content.parse::()?.span())) + } else if lookahead.peek(keyword::event) { + Ok(PalletAttr::RuntimeEvent( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::origin) { + Ok(PalletAttr::RuntimeOrigin( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::inherent) { + Ok(PalletAttr::Inherent( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::storage) { + Ok(PalletAttr::Storage( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::genesis_config) { + Ok(PalletAttr::GenesisConfig( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::genesis_build) { + Ok(PalletAttr::GenesisBuild( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::validate_unsigned) { + Ok(PalletAttr::ValidateUnsigned( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::type_value) { + Ok(PalletAttr::TypeValue( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::extra_constants) { + Ok(PalletAttr::ExtraConstants( + 
content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::composite_enum) { + Ok(PalletAttr::Composite( + content.parse::()?.span(), + )) + } else { + Err(lookahead.error()) + } + } } /// The optional weight annotation on a `#[pallet::call]` like `#[pallet::call(weight($type))]`. #[derive(Clone)] pub struct InheritedCallWeightAttr { - pub typename: syn::Type, - pub span: proc_macro2::Span, + pub typename: syn::Type, + pub span: proc_macro2::Span, } impl syn::parse::Parse for InheritedCallWeightAttr { - // Parses `(weight($type))` or `(weight = $type)`. - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let content; - syn::parenthesized!(content in input); - content.parse::()?; - let lookahead = content.lookahead1(); - - let buffer = if lookahead.peek(syn::token::Paren) { - let inner; - syn::parenthesized!(inner in content); - inner - } else if lookahead.peek(syn::Token![=]) { - content.parse::().expect("peeked"); - content - } else { - return Err(lookahead.error()) - }; - - Ok(Self { typename: buffer.parse()?, span: input.span() }) - } + // Parses `(weight($type))` or `(weight = $type)`. 
+ fn parse(input: syn::parse::ParseStream) -> syn::Result { + let content; + syn::parenthesized!(content in input); + content.parse::()?; + let lookahead = content.lookahead1(); + + let buffer = if lookahead.peek(syn::token::Paren) { + let inner; + syn::parenthesized!(inner in content); + inner + } else if lookahead.peek(syn::Token![=]) { + content.parse::().expect("peeked"); + content + } else { + return Err(lookahead.error()); + }; + + Ok(Self { + typename: buffer.parse()?, + span: input.span(), + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/origin.rs b/support/procedural-fork/src/pallet/parse/origin.rs index 76e2a8841..2dd84c40d 100644 --- a/support/procedural-fork/src/pallet/parse/origin.rs +++ b/support/procedural-fork/src/pallet/parse/origin.rs @@ -25,48 +25,56 @@ use syn::spanned::Spanned; /// * `struct Origin` /// * `enum Origin` pub struct OriginDef { - /// The index of item in pallet module. - pub index: usize, - pub has_instance: bool, - pub is_generic: bool, - /// A set of usage of instance, must be check for consistency with trait. - pub instances: Vec, + /// The index of item in pallet module. + pub index: usize, + pub has_instance: bool, + pub is_generic: bool, + /// A set of usage of instance, must be check for consistency with trait. 
+ pub instances: Vec, } impl OriginDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item_span = item.span(); - let (vis, ident, generics) = match &item { - syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), - syn::Item::Type(item) => (&item.vis, &item.ident, &item.generics), - _ => { - let msg = "Invalid pallet::origin, expected enum or struct or type"; - return Err(syn::Error::new(item.span(), msg)) - }, - }; + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item_span = item.span(); + let (vis, ident, generics) = match &item { + syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), + syn::Item::Type(item) => (&item.vis, &item.ident, &item.generics), + _ => { + let msg = "Invalid pallet::origin, expected enum or struct or type"; + return Err(syn::Error::new(item.span(), msg)); + } + }; - let has_instance = generics.params.len() == 2; - let is_generic = !generics.params.is_empty(); + let has_instance = generics.params.len() == 2; + let is_generic = !generics.params.is_empty(); - let mut instances = vec![]; - if let Some(u) = helper::check_type_def_optional_gen(generics, item.span())? { - instances.push(u); - } else { - // construct_runtime only allow generic event for instantiable pallet. - instances.push(helper::InstanceUsage { has_instance: false, span: ident.span() }) - } + let mut instances = vec![]; + if let Some(u) = helper::check_type_def_optional_gen(generics, item.span())? { + instances.push(u); + } else { + // construct_runtime only allow generic event for instantiable pallet. 
+ instances.push(helper::InstanceUsage { + has_instance: false, + span: ident.span(), + }) + } - if !matches!(vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::origin, Origin must be public"; - return Err(syn::Error::new(item_span, msg)) - } + if !matches!(vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::origin, Origin must be public"; + return Err(syn::Error::new(item_span, msg)); + } - if ident != "Origin" { - let msg = "Invalid pallet::origin, ident must `Origin`"; - return Err(syn::Error::new(ident.span(), msg)) - } + if ident != "Origin" { + let msg = "Invalid pallet::origin, ident must `Origin`"; + return Err(syn::Error::new(ident.span(), msg)); + } - Ok(OriginDef { index, has_instance, is_generic, instances }) - } + Ok(OriginDef { + index, + has_instance, + is_generic, + instances, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/pallet_struct.rs b/support/procedural-fork/src/pallet/parse/pallet_struct.rs index b64576099..320cf01fa 100644 --- a/support/procedural-fork/src/pallet/parse/pallet_struct.rs +++ b/support/procedural-fork/src/pallet/parse/pallet_struct.rs @@ -21,129 +21,137 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(pallet); - syn::custom_keyword!(Pallet); - syn::custom_keyword!(without_storage_info); - syn::custom_keyword!(storage_version); + syn::custom_keyword!(pallet); + syn::custom_keyword!(Pallet); + syn::custom_keyword!(without_storage_info); + syn::custom_keyword!(storage_version); } /// Definition of the pallet pallet. pub struct PalletStructDef { - /// The index of item in pallet pallet. - pub index: usize, - /// A set of usage of instance, must be check for consistency with config trait. - pub instances: Vec, - /// The keyword Pallet used (contains span). - pub pallet: keyword::Pallet, - /// The span of the pallet::pallet attribute. 
- pub attr_span: proc_macro2::Span, - /// Whether to specify the storages max encoded len when implementing `StorageInfoTrait`. - /// Contains the span of the attribute. - pub without_storage_info: Option, - /// The in-code storage version of the pallet. - pub storage_version: Option, + /// The index of item in pallet pallet. + pub index: usize, + /// A set of usage of instance, must be check for consistency with config trait. + pub instances: Vec, + /// The keyword Pallet used (contains span). + pub pallet: keyword::Pallet, + /// The span of the pallet::pallet attribute. + pub attr_span: proc_macro2::Span, + /// Whether to specify the storages max encoded len when implementing `StorageInfoTrait`. + /// Contains the span of the attribute. + pub without_storage_info: Option, + /// The in-code storage version of the pallet. + pub storage_version: Option, } /// Parse for one variant of: /// * `#[pallet::without_storage_info]` /// * `#[pallet::storage_version(STORAGE_VERSION)]` pub enum PalletStructAttr { - WithoutStorageInfoTrait(proc_macro2::Span), - StorageVersion { storage_version: syn::Path, span: proc_macro2::Span }, + WithoutStorageInfoTrait(proc_macro2::Span), + StorageVersion { + storage_version: syn::Path, + span: proc_macro2::Span, + }, } impl PalletStructAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::WithoutStorageInfoTrait(span) | Self::StorageVersion { span, .. } => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::WithoutStorageInfoTrait(span) | Self::StorageVersion { span, .. 
} => *span, + } + } } impl syn::parse::Parse for PalletStructAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::without_storage_info) { - let span = content.parse::()?.span(); - Ok(Self::WithoutStorageInfoTrait(span)) - } else if lookahead.peek(keyword::storage_version) { - let span = content.parse::()?.span(); - - let version_content; - syn::parenthesized!(version_content in content); - let storage_version = version_content.parse::()?; - - Ok(Self::StorageVersion { storage_version, span }) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::without_storage_info) { + let span = content.parse::()?.span(); + Ok(Self::WithoutStorageInfoTrait(span)) + } else if lookahead.peek(keyword::storage_version) { + let span = content.parse::()?.span(); + + let version_content; + syn::parenthesized!(version_content in content); + let storage_version = version_content.parse::()?; + + Ok(Self::StorageVersion { + storage_version, + span, + }) + } else { + Err(lookahead.error()) + } + } } impl PalletStructDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Struct(item) = item { - item - } else { - let msg = "Invalid pallet::pallet, expected struct definition"; - return Err(syn::Error::new(item.span(), msg)) - }; - - let mut without_storage_info = None; - let mut storage_version_found = None; - - let struct_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; - for attr in struct_attrs { - match attr { - 
PalletStructAttr::WithoutStorageInfoTrait(span) - if without_storage_info.is_none() => - { - without_storage_info = Some(span); - }, - PalletStructAttr::StorageVersion { storage_version, .. } - if storage_version_found.is_none() => - { - storage_version_found = Some(storage_version); - }, - attr => { - let msg = "Unexpected duplicated attribute"; - return Err(syn::Error::new(attr.span(), msg)) - }, - } - } - - let pallet = syn::parse2::(item.ident.to_token_stream())?; - - if !matches!(item.vis, syn::Visibility::Public(_)) { - let msg = "Invalid pallet::pallet, Pallet must be public"; - return Err(syn::Error::new(item.span(), msg)) - } - - if item.generics.where_clause.is_some() { - let msg = "Invalid pallet::pallet, where clause not supported on Pallet declaration"; - return Err(syn::Error::new(item.generics.where_clause.span(), msg)) - } - - let instances = - vec![helper::check_type_def_gen_no_bounds(&item.generics, item.ident.span())?]; - - Ok(Self { - index, - instances, - pallet, - attr_span, - without_storage_info, - storage_version: storage_version_found, - }) - } + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Struct(item) = item { + item + } else { + let msg = "Invalid pallet::pallet, expected struct definition"; + return Err(syn::Error::new(item.span(), msg)); + }; + + let mut without_storage_info = None; + let mut storage_version_found = None; + + let struct_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + for attr in struct_attrs { + match attr { + PalletStructAttr::WithoutStorageInfoTrait(span) + if without_storage_info.is_none() => + { + without_storage_info = Some(span); + } + PalletStructAttr::StorageVersion { + storage_version, .. 
+ } if storage_version_found.is_none() => { + storage_version_found = Some(storage_version); + } + attr => { + let msg = "Unexpected duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)); + } + } + } + + let pallet = syn::parse2::(item.ident.to_token_stream())?; + + if !matches!(item.vis, syn::Visibility::Public(_)) { + let msg = "Invalid pallet::pallet, Pallet must be public"; + return Err(syn::Error::new(item.span(), msg)); + } + + if item.generics.where_clause.is_some() { + let msg = "Invalid pallet::pallet, where clause not supported on Pallet declaration"; + return Err(syn::Error::new(item.generics.where_clause.span(), msg)); + } + + let instances = vec![helper::check_type_def_gen_no_bounds( + &item.generics, + item.ident.span(), + )?]; + + Ok(Self { + index, + instances, + pallet, + attr_span, + without_storage_info, + storage_version: storage_version_found, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/storage.rs b/support/procedural-fork/src/pallet/parse/storage.rs index 9d96a18b5..64a5e685b 100644 --- a/support/procedural-fork/src/pallet/parse/storage.rs +++ b/support/procedural-fork/src/pallet/parse/storage.rs @@ -23,16 +23,16 @@ use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { - syn::custom_keyword!(Error); - syn::custom_keyword!(pallet); - syn::custom_keyword!(getter); - syn::custom_keyword!(storage_prefix); - syn::custom_keyword!(unbounded); - syn::custom_keyword!(whitelist_storage); - syn::custom_keyword!(disable_try_decode_storage); - syn::custom_keyword!(OptionQuery); - syn::custom_keyword!(ResultQuery); - syn::custom_keyword!(ValueQuery); + syn::custom_keyword!(Error); + syn::custom_keyword!(pallet); + syn::custom_keyword!(getter); + syn::custom_keyword!(storage_prefix); + syn::custom_keyword!(unbounded); + syn::custom_keyword!(whitelist_storage); + syn::custom_keyword!(disable_try_decode_storage); + syn::custom_keyword!(OptionQuery); + syn::custom_keyword!(ResultQuery); + syn::custom_keyword!(ValueQuery); } /// Parse for one of the following: @@ -42,906 +42,1003 @@ mod keyword { /// * `#[pallet::whitelist_storage] /// * `#[pallet::disable_try_decode_storage]` pub enum PalletStorageAttr { - Getter(syn::Ident, proc_macro2::Span), - StorageName(syn::LitStr, proc_macro2::Span), - Unbounded(proc_macro2::Span), - WhitelistStorage(proc_macro2::Span), - DisableTryDecodeStorage(proc_macro2::Span), + Getter(syn::Ident, proc_macro2::Span), + StorageName(syn::LitStr, proc_macro2::Span), + Unbounded(proc_macro2::Span), + WhitelistStorage(proc_macro2::Span), + DisableTryDecodeStorage(proc_macro2::Span), } impl PalletStorageAttr { - fn attr_span(&self) -> proc_macro2::Span { - match self { - Self::Getter(_, span) | - Self::StorageName(_, span) | - Self::Unbounded(span) | - Self::WhitelistStorage(span) => *span, - Self::DisableTryDecodeStorage(span) => *span, - } - } + fn attr_span(&self) -> proc_macro2::Span { + match self { + Self::Getter(_, span) + | Self::StorageName(_, span) + | Self::Unbounded(span) + | Self::WhitelistStorage(span) => *span, + Self::DisableTryDecodeStorage(span) => *span, + } + } } impl syn::parse::Parse for PalletStorageAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - 
let attr_span = input.span(); - let content; - syn::bracketed!(content in input); - content.parse::()?; - content.parse::()?; - - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::getter) { - content.parse::()?; - - let generate_content; - syn::parenthesized!(generate_content in content); - generate_content.parse::()?; - Ok(Self::Getter(generate_content.parse::()?, attr_span)) - } else if lookahead.peek(keyword::storage_prefix) { - content.parse::()?; - content.parse::()?; - - let renamed_prefix = content.parse::()?; - // Ensure the renamed prefix is a proper Rust identifier - syn::parse_str::(&renamed_prefix.value()).map_err(|_| { - let msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); - syn::Error::new(renamed_prefix.span(), msg) - })?; - - Ok(Self::StorageName(renamed_prefix, attr_span)) - } else if lookahead.peek(keyword::unbounded) { - content.parse::()?; - - Ok(Self::Unbounded(attr_span)) - } else if lookahead.peek(keyword::whitelist_storage) { - content.parse::()?; - Ok(Self::WhitelistStorage(attr_span)) - } else if lookahead.peek(keyword::disable_try_decode_storage) { - content.parse::()?; - Ok(Self::DisableTryDecodeStorage(attr_span)) - } else { - Err(lookahead.error()) - } - } + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let attr_span = input.span(); + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; + + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::getter) { + content.parse::()?; + + let generate_content; + syn::parenthesized!(generate_content in content); + generate_content.parse::()?; + Ok(Self::Getter( + generate_content.parse::()?, + attr_span, + )) + } else if lookahead.peek(keyword::storage_prefix) { + content.parse::()?; + content.parse::()?; + + let renamed_prefix = content.parse::()?; + // Ensure the renamed prefix is a proper Rust identifier + syn::parse_str::(&renamed_prefix.value()).map_err(|_| { + let 
msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); + syn::Error::new(renamed_prefix.span(), msg) + })?; + + Ok(Self::StorageName(renamed_prefix, attr_span)) + } else if lookahead.peek(keyword::unbounded) { + content.parse::()?; + + Ok(Self::Unbounded(attr_span)) + } else if lookahead.peek(keyword::whitelist_storage) { + content.parse::()?; + Ok(Self::WhitelistStorage(attr_span)) + } else if lookahead.peek(keyword::disable_try_decode_storage) { + content.parse::()?; + Ok(Self::DisableTryDecodeStorage(attr_span)) + } else { + Err(lookahead.error()) + } + } } struct PalletStorageAttrInfo { - getter: Option, - rename_as: Option, - unbounded: bool, - whitelisted: bool, - try_decode: bool, + getter: Option, + rename_as: Option, + unbounded: bool, + whitelisted: bool, + try_decode: bool, } impl PalletStorageAttrInfo { - fn from_attrs(attrs: Vec) -> syn::Result { - let mut getter = None; - let mut rename_as = None; - let mut unbounded = false; - let mut whitelisted = false; - let mut disable_try_decode_storage = false; - for attr in attrs { - match attr { - PalletStorageAttr::Getter(ident, ..) if getter.is_none() => getter = Some(ident), - PalletStorageAttr::StorageName(name, ..) if rename_as.is_none() => - rename_as = Some(name), - PalletStorageAttr::Unbounded(..) if !unbounded => unbounded = true, - PalletStorageAttr::WhitelistStorage(..) if !whitelisted => whitelisted = true, - PalletStorageAttr::DisableTryDecodeStorage(..) 
if !disable_try_decode_storage => - disable_try_decode_storage = true, - attr => - return Err(syn::Error::new( - attr.attr_span(), - "Invalid attribute: Duplicate attribute", - )), - } - } - - Ok(PalletStorageAttrInfo { - getter, - rename_as, - unbounded, - whitelisted, - try_decode: !disable_try_decode_storage, - }) - } + fn from_attrs(attrs: Vec) -> syn::Result { + let mut getter = None; + let mut rename_as = None; + let mut unbounded = false; + let mut whitelisted = false; + let mut disable_try_decode_storage = false; + for attr in attrs { + match attr { + PalletStorageAttr::Getter(ident, ..) if getter.is_none() => getter = Some(ident), + PalletStorageAttr::StorageName(name, ..) if rename_as.is_none() => { + rename_as = Some(name) + } + PalletStorageAttr::Unbounded(..) if !unbounded => unbounded = true, + PalletStorageAttr::WhitelistStorage(..) if !whitelisted => whitelisted = true, + PalletStorageAttr::DisableTryDecodeStorage(..) if !disable_try_decode_storage => { + disable_try_decode_storage = true + } + attr => { + return Err(syn::Error::new( + attr.attr_span(), + "Invalid attribute: Duplicate attribute", + )) + } + } + } + + Ok(PalletStorageAttrInfo { + getter, + rename_as, + unbounded, + whitelisted, + try_decode: !disable_try_decode_storage, + }) + } } /// The value and key types used by storages. Needed to expand metadata. 
pub enum Metadata { - Value { value: syn::Type }, - Map { value: syn::Type, key: syn::Type }, - CountedMap { value: syn::Type, key: syn::Type }, - DoubleMap { value: syn::Type, key1: syn::Type, key2: syn::Type }, - NMap { keys: Vec, keygen: syn::Type, value: syn::Type }, - CountedNMap { keys: Vec, keygen: syn::Type, value: syn::Type }, + Value { + value: syn::Type, + }, + Map { + value: syn::Type, + key: syn::Type, + }, + CountedMap { + value: syn::Type, + key: syn::Type, + }, + DoubleMap { + value: syn::Type, + key1: syn::Type, + key2: syn::Type, + }, + NMap { + keys: Vec, + keygen: syn::Type, + value: syn::Type, + }, + CountedNMap { + keys: Vec, + keygen: syn::Type, + value: syn::Type, + }, } pub enum QueryKind { - OptionQuery, - ResultQuery(syn::Path, syn::Ident), - ValueQuery, + OptionQuery, + ResultQuery(syn::Path, syn::Ident), + ValueQuery, } /// Definition of a storage, storage is a storage type like /// `type MyStorage = StorageValue` /// The keys and values types are parsed in order to get metadata pub struct StorageDef { - /// The index of storage item in pallet module. - pub index: usize, - /// Visibility of the storage type. - pub vis: syn::Visibility, - /// The type ident, to generate the StoragePrefix for. - pub ident: syn::Ident, - /// The keys and value metadata of the storage. - pub metadata: Metadata, - /// The doc associated to the storage. - pub docs: Vec, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, - /// Optional getter to generate. If some then query_kind is ensured to be some as well. - pub getter: Option, - /// Optional expression that evaluates to a type that can be used as StoragePrefix instead of - /// ident. - pub rename_as: Option, - /// Whereas the querytype of the storage is OptionQuery, ResultQuery or ValueQuery. - /// Note that this is best effort as it can't be determined when QueryKind is generic, and - /// result can be false if user do some unexpected type alias. 
- pub query_kind: Option, - /// Where clause of type definition. - pub where_clause: Option, - /// The span of the pallet::storage attribute. - pub attr_span: proc_macro2::Span, - /// The `cfg` attributes. - pub cfg_attrs: Vec, - /// If generics are named (e.g. `StorageValue`) then this contains all the - /// generics of the storage. - /// If generics are not named, this is none. - pub named_generics: Option, - /// If the value stored in this storage is unbounded. - pub unbounded: bool, - /// Whether or not reads to this storage key will be ignored by benchmarking - pub whitelisted: bool, - /// Whether or not to try to decode the storage key when running try-runtime checks. - pub try_decode: bool, - /// Whether or not a default hasher is allowed to replace `_` - pub use_default_hasher: bool, + /// The index of storage item in pallet module. + pub index: usize, + /// Visibility of the storage type. + pub vis: syn::Visibility, + /// The type ident, to generate the StoragePrefix for. + pub ident: syn::Ident, + /// The keys and value metadata of the storage. + pub metadata: Metadata, + /// The doc associated to the storage. + pub docs: Vec, + /// A set of usage of instance, must be check for consistency with config. + pub instances: Vec, + /// Optional getter to generate. If some then query_kind is ensured to be some as well. + pub getter: Option, + /// Optional expression that evaluates to a type that can be used as StoragePrefix instead of + /// ident. + pub rename_as: Option, + /// Whereas the querytype of the storage is OptionQuery, ResultQuery or ValueQuery. + /// Note that this is best effort as it can't be determined when QueryKind is generic, and + /// result can be false if user do some unexpected type alias. + pub query_kind: Option, + /// Where clause of type definition. + pub where_clause: Option, + /// The span of the pallet::storage attribute. + pub attr_span: proc_macro2::Span, + /// The `cfg` attributes. 
+ pub cfg_attrs: Vec, + /// If generics are named (e.g. `StorageValue`) then this contains all the + /// generics of the storage. + /// If generics are not named, this is none. + pub named_generics: Option, + /// If the value stored in this storage is unbounded. + pub unbounded: bool, + /// Whether or not reads to this storage key will be ignored by benchmarking + pub whitelisted: bool, + /// Whether or not to try to decode the storage key when running try-runtime checks. + pub try_decode: bool, + /// Whether or not a default hasher is allowed to replace `_` + pub use_default_hasher: bool, } /// The parsed generic from the #[derive(Clone)] pub enum StorageGenerics { - DoubleMap { - hasher1: syn::Type, - key1: syn::Type, - hasher2: syn::Type, - key2: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - Map { - hasher: syn::Type, - key: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - CountedMap { - hasher: syn::Type, - key: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - Value { - value: syn::Type, - query_kind: Option, - on_empty: Option, - }, - NMap { - keygen: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, - CountedNMap { - keygen: syn::Type, - value: syn::Type, - query_kind: Option, - on_empty: Option, - max_values: Option, - }, + DoubleMap { + hasher1: syn::Type, + key1: syn::Type, + hasher2: syn::Type, + key2: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + Map { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + CountedMap { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + Value { + value: syn::Type, + query_kind: Option, + on_empty: Option, + 
}, + NMap { + keygen: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, + CountedNMap { + keygen: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, } impl StorageGenerics { - /// Return the metadata from the defined generics - fn metadata(&self) -> syn::Result { - let res = match self.clone() { - Self::DoubleMap { value, key1, key2, .. } => Metadata::DoubleMap { value, key1, key2 }, - Self::Map { value, key, .. } => Metadata::Map { value, key }, - Self::CountedMap { value, key, .. } => Metadata::CountedMap { value, key }, - Self::Value { value, .. } => Metadata::Value { value }, - Self::NMap { keygen, value, .. } => - Metadata::NMap { keys: collect_keys(&keygen)?, keygen, value }, - Self::CountedNMap { keygen, value, .. } => - Metadata::CountedNMap { keys: collect_keys(&keygen)?, keygen, value }, - }; - - Ok(res) - } - - /// Return the query kind from the defined generics - fn query_kind(&self) -> Option { - match &self { - Self::DoubleMap { query_kind, .. } | - Self::Map { query_kind, .. } | - Self::CountedMap { query_kind, .. } | - Self::Value { query_kind, .. } | - Self::NMap { query_kind, .. } | - Self::CountedNMap { query_kind, .. } => query_kind.clone(), - } - } + /// Return the metadata from the defined generics + fn metadata(&self) -> syn::Result { + let res = match self.clone() { + Self::DoubleMap { + value, key1, key2, .. + } => Metadata::DoubleMap { value, key1, key2 }, + Self::Map { value, key, .. } => Metadata::Map { value, key }, + Self::CountedMap { value, key, .. } => Metadata::CountedMap { value, key }, + Self::Value { value, .. } => Metadata::Value { value }, + Self::NMap { keygen, value, .. } => Metadata::NMap { + keys: collect_keys(&keygen)?, + keygen, + value, + }, + Self::CountedNMap { keygen, value, .. 
} => Metadata::CountedNMap { + keys: collect_keys(&keygen)?, + keygen, + value, + }, + }; + + Ok(res) + } + + /// Return the query kind from the defined generics + fn query_kind(&self) -> Option { + match &self { + Self::DoubleMap { query_kind, .. } + | Self::Map { query_kind, .. } + | Self::CountedMap { query_kind, .. } + | Self::Value { query_kind, .. } + | Self::NMap { query_kind, .. } + | Self::CountedNMap { query_kind, .. } => query_kind.clone(), + } + } } enum StorageKind { - Value, - Map, - CountedMap, - DoubleMap, - NMap, - CountedNMap, + Value, + Map, + CountedMap, + DoubleMap, + NMap, + CountedNMap, } /// Check the generics in the `map` contains the generics in `gen` may contains generics in /// `optional_gen`, and doesn't contains any other. fn check_generics( - map: &HashMap, - mandatory_generics: &[&str], - optional_generics: &[&str], - storage_type_name: &str, - args_span: proc_macro2::Span, + map: &HashMap, + mandatory_generics: &[&str], + optional_generics: &[&str], + storage_type_name: &str, + args_span: proc_macro2::Span, ) -> syn::Result<()> { - let mut errors = vec![]; - - let expectation = { - let mut e = format!( - "`{}` expect generics {}and optional generics {}", - storage_type_name, - mandatory_generics - .iter() - .map(|name| format!("`{}`, ", name)) - .collect::(), - &optional_generics.iter().map(|name| format!("`{}`, ", name)).collect::(), - ); - e.pop(); - e.pop(); - e.push('.'); - e - }; - - for (gen_name, gen_binding) in map { - if !mandatory_generics.contains(&gen_name.as_str()) && - !optional_generics.contains(&gen_name.as_str()) - { - let msg = format!( - "Invalid pallet::storage, Unexpected generic `{}` for `{}`. 
{}", - gen_name, storage_type_name, expectation, - ); - errors.push(syn::Error::new(gen_binding.span(), msg)); - } - } - - for mandatory_generic in mandatory_generics { - if !map.contains_key(&mandatory_generic.to_string()) { - let msg = format!( - "Invalid pallet::storage, cannot find `{}` generic, required for `{}`.", - mandatory_generic, storage_type_name - ); - errors.push(syn::Error::new(args_span, msg)); - } - } - - let mut errors = errors.drain(..); - if let Some(mut error) = errors.next() { - for other_error in errors { - error.combine(other_error); - } - Err(error) - } else { - Ok(()) - } + let mut errors = vec![]; + + let expectation = { + let mut e = format!( + "`{}` expect generics {}and optional generics {}", + storage_type_name, + mandatory_generics + .iter() + .map(|name| format!("`{}`, ", name)) + .collect::(), + &optional_generics + .iter() + .map(|name| format!("`{}`, ", name)) + .collect::(), + ); + e.pop(); + e.pop(); + e.push('.'); + e + }; + + for (gen_name, gen_binding) in map { + if !mandatory_generics.contains(&gen_name.as_str()) + && !optional_generics.contains(&gen_name.as_str()) + { + let msg = format!( + "Invalid pallet::storage, Unexpected generic `{}` for `{}`. 
{}", + gen_name, storage_type_name, expectation, + ); + errors.push(syn::Error::new(gen_binding.span(), msg)); + } + } + + for mandatory_generic in mandatory_generics { + if !map.contains_key(&mandatory_generic.to_string()) { + let msg = format!( + "Invalid pallet::storage, cannot find `{}` generic, required for `{}`.", + mandatory_generic, storage_type_name + ); + errors.push(syn::Error::new(args_span, msg)); + } + } + + let mut errors = errors.drain(..); + if let Some(mut error) = errors.next() { + for other_error in errors { + error.combine(other_error); + } + Err(error) + } else { + Ok(()) + } } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_named_generics( - storage: &StorageKind, - args_span: proc_macro2::Span, - args: &[syn::AssocType], - dev_mode: bool, + storage: &StorageKind, + args_span: proc_macro2::Span, + args: &[syn::AssocType], + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let mut parsed = HashMap::::new(); - - // Ensure no duplicate. 
- for arg in args { - if let Some(other) = parsed.get(&arg.ident.to_string()) { - let msg = "Invalid pallet::storage, Duplicated named generic"; - let mut err = syn::Error::new(arg.ident.span(), msg); - err.combine(syn::Error::new(other.ident.span(), msg)); - return Err(err) - } - parsed.insert(arg.ident.to_string(), arg.clone()); - } - - let mut map_mandatory_generics = vec!["Key", "Value"]; - let mut map_optional_generics = vec!["QueryKind", "OnEmpty", "MaxValues"]; - if dev_mode { - map_optional_generics.push("Hasher"); - } else { - map_mandatory_generics.push("Hasher"); - } - - let generics = match storage { - StorageKind::Value => { - check_generics( - &parsed, - &["Value"], - &["QueryKind", "OnEmpty"], - "StorageValue", - args_span, - )?; - - StorageGenerics::Value { - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - } - }, - StorageKind::Map => { - check_generics( - &parsed, - &map_mandatory_generics, - &map_optional_generics, - "StorageMap", - args_span, - )?; - - StorageGenerics::Map { - hasher: parsed - .remove("Hasher") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - StorageKind::CountedMap => { - check_generics( - &parsed, - &map_mandatory_generics, - &map_optional_generics, - "CountedStorageMap", - args_span, - )?; - - StorageGenerics::CountedMap { - hasher: parsed - .remove("Hasher") - .map(|binding| 
binding.ty) - .unwrap_or(syn::Type::Verbatim(quote::quote! { Blake2_128Concat })), - key: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - StorageKind::DoubleMap => { - let mut double_map_mandatory_generics = vec!["Key1", "Key2", "Value"]; - if dev_mode { - map_optional_generics.extend(["Hasher1", "Hasher2"]); - } else { - double_map_mandatory_generics.extend(["Hasher1", "Hasher2"]); - } - - check_generics( - &parsed, - &double_map_mandatory_generics, - &map_optional_generics, - "StorageDoubleMap", - args_span, - )?; - - StorageGenerics::DoubleMap { - hasher1: parsed - .remove("Hasher1") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key1: parsed - .remove("Key1") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - hasher2: parsed - .remove("Hasher2") - .map(|binding| binding.ty) - .unwrap_or(syn::parse_quote!(Blake2_128Concat)), - key2: parsed - .remove("Key2") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - StorageKind::NMap => { - check_generics( - &parsed, - &["Key", "Value"], - &["QueryKind", "OnEmpty", "MaxValues"], - "StorageNMap", - args_span, - )?; - - StorageGenerics::NMap { - keygen: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory 
generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - StorageKind::CountedNMap => { - check_generics( - &parsed, - &["Key", "Value"], - &["QueryKind", "OnEmpty", "MaxValues"], - "CountedStorageNMap", - args_span, - )?; - - StorageGenerics::CountedNMap { - keygen: parsed - .remove("Key") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - value: parsed - .remove("Value") - .map(|binding| binding.ty) - .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), - max_values: parsed.remove("MaxValues").map(|binding| binding.ty), - } - }, - }; - - let metadata = generics.metadata()?; - let query_kind = generics.query_kind(); - - Ok((Some(generics), metadata, query_kind, false)) + let mut parsed = HashMap::::new(); + + // Ensure no duplicate. 
+ for arg in args { + if let Some(other) = parsed.get(&arg.ident.to_string()) { + let msg = "Invalid pallet::storage, Duplicated named generic"; + let mut err = syn::Error::new(arg.ident.span(), msg); + err.combine(syn::Error::new(other.ident.span(), msg)); + return Err(err); + } + parsed.insert(arg.ident.to_string(), arg.clone()); + } + + let mut map_mandatory_generics = vec!["Key", "Value"]; + let mut map_optional_generics = vec!["QueryKind", "OnEmpty", "MaxValues"]; + if dev_mode { + map_optional_generics.push("Hasher"); + } else { + map_mandatory_generics.push("Hasher"); + } + + let generics = match storage { + StorageKind::Value => { + check_generics( + &parsed, + &["Value"], + &["QueryKind", "OnEmpty"], + "StorageValue", + args_span, + )?; + + StorageGenerics::Value { + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + } + } + StorageKind::Map => { + check_generics( + &parsed, + &map_mandatory_generics, + &map_optional_generics, + "StorageMap", + args_span, + )?; + + StorageGenerics::Map { + hasher: parsed + .remove("Hasher") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + StorageKind::CountedMap => { + check_generics( + &parsed, + &map_mandatory_generics, + &map_optional_generics, + "CountedStorageMap", + args_span, + )?; + + StorageGenerics::CountedMap { + hasher: parsed + .remove("Hasher") + .map(|binding| 
binding.ty) + .unwrap_or(syn::Type::Verbatim(quote::quote! { Blake2_128Concat })), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + StorageKind::DoubleMap => { + let mut double_map_mandatory_generics = vec!["Key1", "Key2", "Value"]; + if dev_mode { + map_optional_generics.extend(["Hasher1", "Hasher2"]); + } else { + double_map_mandatory_generics.extend(["Hasher1", "Hasher2"]); + } + + check_generics( + &parsed, + &double_map_mandatory_generics, + &map_optional_generics, + "StorageDoubleMap", + args_span, + )?; + + StorageGenerics::DoubleMap { + hasher1: parsed + .remove("Hasher1") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key1: parsed + .remove("Key1") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + hasher2: parsed + .remove("Hasher2") + .map(|binding| binding.ty) + .unwrap_or(syn::parse_quote!(Blake2_128Concat)), + key2: parsed + .remove("Key2") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + StorageKind::NMap => { + check_generics( + &parsed, + &["Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "StorageNMap", + args_span, + )?; + + StorageGenerics::NMap { + keygen: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory 
generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + StorageKind::CountedNMap => { + check_generics( + &parsed, + &["Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "CountedStorageNMap", + args_span, + )?; + + StorageGenerics::CountedNMap { + keygen: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + } + }; + + let metadata = generics.metadata()?; + let query_kind = generics.query_kind(); + + Ok((Some(generics), metadata, query_kind, false)) } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_unnamed_generics( - storage: &StorageKind, - args_span: proc_macro2::Span, - args: &[syn::Type], - dev_mode: bool, + storage: &StorageKind, + args_span: proc_macro2::Span, + args: &[syn::Type], + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let retrieve_arg = |arg_pos| { - args.get(arg_pos).cloned().ok_or_else(|| { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic argument, \ + let retrieve_arg = |arg_pos| { + args.get(arg_pos).cloned().ok_or_else(|| { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic argument, \ expect at least {} args, found {}.", - arg_pos + 1, - args.len(), - ); - syn::Error::new(args_span, msg) - }) - }; - - let prefix_arg = retrieve_arg(0)?; - 
syn::parse2::(prefix_arg.to_token_stream()).map_err(|e| { - let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ + arg_pos + 1, + args.len(), + ); + syn::Error::new(args_span, msg) + }) + }; + + let prefix_arg = retrieve_arg(0)?; + syn::parse2::(prefix_arg.to_token_stream()).map_err(|e| { + let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ first generic argument must be `_`, the argument is then replaced by macro."; - let mut err = syn::Error::new(prefix_arg.span(), msg); - err.combine(e); - err - })?; - - let use_default_hasher = |arg_pos| { - let arg = retrieve_arg(arg_pos)?; - if syn::parse2::(arg.to_token_stream()).is_ok() { - if dev_mode { - Ok(true) - } else { - let msg = "`_` can only be used in dev_mode. Please specify an appropriate hasher."; - Err(syn::Error::new(arg.span(), msg)) - } - } else { - Ok(false) - } - }; - - let res = match storage { - StorageKind::Value => - (None, Metadata::Value { value: retrieve_arg(1)? }, retrieve_arg(2).ok(), false), - StorageKind::Map => ( - None, - Metadata::Map { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, - retrieve_arg(4).ok(), - use_default_hasher(1)?, - ), - StorageKind::CountedMap => ( - None, - Metadata::CountedMap { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, - retrieve_arg(4).ok(), - use_default_hasher(1)?, - ), - StorageKind::DoubleMap => ( - None, - Metadata::DoubleMap { - key1: retrieve_arg(2)?, - key2: retrieve_arg(4)?, - value: retrieve_arg(5)?, - }, - retrieve_arg(6).ok(), - use_default_hasher(1)? && use_default_hasher(3)?, - ), - StorageKind::NMap => { - let keygen = retrieve_arg(1)?; - let keys = collect_keys(&keygen)?; - ( - None, - Metadata::NMap { keys, keygen, value: retrieve_arg(2)? }, - retrieve_arg(3).ok(), - false, - ) - }, - StorageKind::CountedNMap => { - let keygen = retrieve_arg(1)?; - let keys = collect_keys(&keygen)?; - ( - None, - Metadata::CountedNMap { keys, keygen, value: retrieve_arg(2)? 
}, - retrieve_arg(3).ok(), - false, - ) - }, - }; - - Ok(res) + let mut err = syn::Error::new(prefix_arg.span(), msg); + err.combine(e); + err + })?; + + let use_default_hasher = |arg_pos| { + let arg = retrieve_arg(arg_pos)?; + if syn::parse2::(arg.to_token_stream()).is_ok() { + if dev_mode { + Ok(true) + } else { + let msg = "`_` can only be used in dev_mode. Please specify an appropriate hasher."; + Err(syn::Error::new(arg.span(), msg)) + } + } else { + Ok(false) + } + }; + + let res = match storage { + StorageKind::Value => ( + None, + Metadata::Value { + value: retrieve_arg(1)?, + }, + retrieve_arg(2).ok(), + false, + ), + StorageKind::Map => ( + None, + Metadata::Map { + key: retrieve_arg(2)?, + value: retrieve_arg(3)?, + }, + retrieve_arg(4).ok(), + use_default_hasher(1)?, + ), + StorageKind::CountedMap => ( + None, + Metadata::CountedMap { + key: retrieve_arg(2)?, + value: retrieve_arg(3)?, + }, + retrieve_arg(4).ok(), + use_default_hasher(1)?, + ), + StorageKind::DoubleMap => ( + None, + Metadata::DoubleMap { + key1: retrieve_arg(2)?, + key2: retrieve_arg(4)?, + value: retrieve_arg(5)?, + }, + retrieve_arg(6).ok(), + use_default_hasher(1)? 
&& use_default_hasher(3)?, + ), + StorageKind::NMap => { + let keygen = retrieve_arg(1)?; + let keys = collect_keys(&keygen)?; + ( + None, + Metadata::NMap { + keys, + keygen, + value: retrieve_arg(2)?, + }, + retrieve_arg(3).ok(), + false, + ) + } + StorageKind::CountedNMap => { + let keygen = retrieve_arg(1)?; + let keys = collect_keys(&keygen)?; + ( + None, + Metadata::CountedNMap { + keys, + keygen, + value: retrieve_arg(2)?, + }, + retrieve_arg(3).ok(), + false, + ) + } + }; + + Ok(res) } /// Returns `(named generics, metadata, query kind, use_default_hasher)` fn process_generics( - segment: &syn::PathSegment, - dev_mode: bool, + segment: &syn::PathSegment, + dev_mode: bool, ) -> syn::Result<(Option, Metadata, Option, bool)> { - let storage_kind = match &*segment.ident.to_string() { - "StorageValue" => StorageKind::Value, - "StorageMap" => StorageKind::Map, - "CountedStorageMap" => StorageKind::CountedMap, - "StorageDoubleMap" => StorageKind::DoubleMap, - "StorageNMap" => StorageKind::NMap, - "CountedStorageNMap" => StorageKind::CountedNMap, - found => { - let msg = format!( - "Invalid pallet::storage, expected ident: `StorageValue` or \ + let storage_kind = match &*segment.ident.to_string() { + "StorageValue" => StorageKind::Value, + "StorageMap" => StorageKind::Map, + "CountedStorageMap" => StorageKind::CountedMap, + "StorageDoubleMap" => StorageKind::DoubleMap, + "StorageNMap" => StorageKind::NMap, + "CountedStorageNMap" => StorageKind::CountedNMap, + found => { + let msg = format!( + "Invalid pallet::storage, expected ident: `StorageValue` or \ `StorageMap` or `CountedStorageMap` or `StorageDoubleMap` or `StorageNMap` or `CountedStorageNMap` \ in order to expand metadata, found `{}`.", - found, - ); - return Err(syn::Error::new(segment.ident.span(), msg)) - }, - }; - - let args_span = segment.arguments.span(); - - let args = match &segment.arguments { - syn::PathArguments::AngleBracketed(args) if !args.args.is_empty() => args, - _ => { - let msg = "Invalid 
pallet::storage, invalid number of generic generic arguments, \ + found, + ); + return Err(syn::Error::new(segment.ident.span(), msg)); + } + }; + + let args_span = segment.arguments.span(); + + let args = match &segment.arguments { + syn::PathArguments::AngleBracketed(args) if !args.args.is_empty() => args, + _ => { + let msg = "Invalid pallet::storage, invalid number of generic generic arguments, \ expect more that 0 generic arguments."; - return Err(syn::Error::new(segment.span(), msg)) - }, - }; - - if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::Type(_))) { - let args = args - .args - .iter() - .map(|gen| match gen { - syn::GenericArgument::Type(gen) => gen.clone(), - _ => unreachable!("It is asserted above that all generics are types"), - }) - .collect::>(); - process_unnamed_generics(&storage_kind, args_span, &args, dev_mode) - } else if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::AssocType(_))) { - let args = args - .args - .iter() - .map(|gen| match gen { - syn::GenericArgument::AssocType(gen) => gen.clone(), - _ => unreachable!("It is asserted above that all generics are bindings"), - }) - .collect::>(); - process_named_generics(&storage_kind, args_span, &args, dev_mode) - } else { - let msg = "Invalid pallet::storage, invalid generic declaration for storage. 
Expect only \ + return Err(syn::Error::new(segment.span(), msg)); + } + }; + + if args + .args + .iter() + .all(|gen| matches!(gen, syn::GenericArgument::Type(_))) + { + let args = args + .args + .iter() + .map(|gen| match gen { + syn::GenericArgument::Type(gen) => gen.clone(), + _ => unreachable!("It is asserted above that all generics are types"), + }) + .collect::>(); + process_unnamed_generics(&storage_kind, args_span, &args, dev_mode) + } else if args + .args + .iter() + .all(|gen| matches!(gen, syn::GenericArgument::AssocType(_))) + { + let args = args + .args + .iter() + .map(|gen| match gen { + syn::GenericArgument::AssocType(gen) => gen.clone(), + _ => unreachable!("It is asserted above that all generics are bindings"), + }) + .collect::>(); + process_named_generics(&storage_kind, args_span, &args, dev_mode) + } else { + let msg = "Invalid pallet::storage, invalid generic declaration for storage. Expect only \ type generics or binding generics, e.g. `` or \ ``."; - Err(syn::Error::new(segment.span(), msg)) - } + Err(syn::Error::new(segment.span(), msg)) + } } /// Parse the 2nd type argument to `StorageNMap` and return its keys. fn collect_keys(keygen: &syn::Type) -> syn::Result> { - if let syn::Type::Tuple(tup) = keygen { - tup.elems.iter().map(extract_key).collect::>>() - } else { - Ok(vec![extract_key(keygen)?]) - } + if let syn::Type::Tuple(tup) = keygen { + tup.elems + .iter() + .map(extract_key) + .collect::>>() + } else { + Ok(vec![extract_key(keygen)?]) + } } /// In `Key`, extract K and return it. 
fn extract_key(ty: &syn::Type) -> syn::Result { - let typ = if let syn::Type::Path(typ) = ty { - typ - } else { - let msg = "Invalid pallet::storage, expected type path"; - return Err(syn::Error::new(ty.span(), msg)) - }; - - let key_struct = typ.path.segments.last().ok_or_else(|| { - let msg = "Invalid pallet::storage, expected type path with at least one segment"; - syn::Error::new(typ.path.span(), msg) - })?; - if key_struct.ident != "Key" && key_struct.ident != "NMapKey" { - let msg = "Invalid pallet::storage, expected Key or NMapKey struct"; - return Err(syn::Error::new(key_struct.ident.span(), msg)) - } - - let ty_params = if let syn::PathArguments::AngleBracketed(args) = &key_struct.arguments { - args - } else { - let msg = "Invalid pallet::storage, expected angle bracketed arguments"; - return Err(syn::Error::new(key_struct.arguments.span(), msg)) - }; - - if ty_params.args.len() != 2 { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic arguments \ + let typ = if let syn::Type::Path(typ) = ty { + typ + } else { + let msg = "Invalid pallet::storage, expected type path"; + return Err(syn::Error::new(ty.span(), msg)); + }; + + let key_struct = typ.path.segments.last().ok_or_else(|| { + let msg = "Invalid pallet::storage, expected type path with at least one segment"; + syn::Error::new(typ.path.span(), msg) + })?; + if key_struct.ident != "Key" && key_struct.ident != "NMapKey" { + let msg = "Invalid pallet::storage, expected Key or NMapKey struct"; + return Err(syn::Error::new(key_struct.ident.span(), msg)); + } + + let ty_params = if let syn::PathArguments::AngleBracketed(args) = &key_struct.arguments { + args + } else { + let msg = "Invalid pallet::storage, expected angle bracketed arguments"; + return Err(syn::Error::new(key_struct.arguments.span(), msg)); + }; + + if ty_params.args.len() != 2 { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ for Key struct, expected 2 args, found {}", - 
ty_params.args.len() - ); - return Err(syn::Error::new(ty_params.span(), msg)) - } - - let key = match &ty_params.args[1] { - syn::GenericArgument::Type(key_ty) => key_ty.clone(), - _ => { - let msg = "Invalid pallet::storage, expected type"; - return Err(syn::Error::new(ty_params.args[1].span(), msg)) - }, - }; - - Ok(key) + ty_params.args.len() + ); + return Err(syn::Error::new(ty_params.span(), msg)); + } + + let key = match &ty_params.args[1] { + syn::GenericArgument::Type(key_ty) => key_ty.clone(), + _ => { + let msg = "Invalid pallet::storage, expected type"; + return Err(syn::Error::new(ty_params.args[1].span(), msg)); + } + }; + + Ok(key) } impl StorageDef { - /// Return the storage prefix for this storage item - pub fn prefix(&self) -> String { - self.rename_as - .as_ref() - .map(syn::LitStr::value) - .unwrap_or_else(|| self.ident.to_string()) - } - - /// Return either the span of the ident or the span of the literal in the - /// #[storage_prefix] attribute - pub fn prefix_span(&self) -> proc_macro2::Span { - self.rename_as - .as_ref() - .map(syn::LitStr::span) - .unwrap_or_else(|| self.ident.span()) - } - - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - dev_mode: bool, - ) -> syn::Result { - let item = if let syn::Item::Type(item) = item { - item - } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::storage, expect item type.")) - }; - - let attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; - let PalletStorageAttrInfo { getter, rename_as, mut unbounded, whitelisted, try_decode } = - PalletStorageAttrInfo::from_attrs(attrs)?; - - // set all storages to be unbounded if dev_mode is enabled - unbounded |= dev_mode; - let cfg_attrs = helper::get_item_cfg_attrs(&item.attrs); - - let instances = vec![helper::check_type_def_gen(&item.generics, item.ident.span())?]; - - let where_clause = item.generics.where_clause.clone(); - let docs = get_doc_literals(&item.attrs); - - let typ = if let 
syn::Type::Path(typ) = &*item.ty { - typ - } else { - let msg = "Invalid pallet::storage, expected type path"; - return Err(syn::Error::new(item.ty.span(), msg)) - }; - - if typ.path.segments.len() != 1 { - let msg = "Invalid pallet::storage, expected type path with one segment"; - return Err(syn::Error::new(item.ty.span(), msg)) - } - - let (named_generics, metadata, query_kind, use_default_hasher) = - process_generics(&typ.path.segments[0], dev_mode)?; - - let query_kind = query_kind - .map(|query_kind| { - use syn::{ - AngleBracketedGenericArguments, GenericArgument, Path, PathArguments, Type, - TypePath, - }; - - let result_query = match query_kind { - Type::Path(path) - if path - .path - .segments - .last() - .map_or(false, |s| s.ident == "OptionQuery") => - return Ok(Some(QueryKind::OptionQuery)), - Type::Path(TypePath { path: Path { segments, .. }, .. }) - if segments.last().map_or(false, |s| s.ident == "ResultQuery") => - segments - .last() - .expect("segments is checked to have the last value; qed") - .clone(), - Type::Path(path) - if path.path.segments.last().map_or(false, |s| s.ident == "ValueQuery") => - return Ok(Some(QueryKind::ValueQuery)), - _ => return Ok(None), - }; - - let error_type = match result_query.arguments { - PathArguments::AngleBracketed(AngleBracketedGenericArguments { - args, .. 
- }) => { - if args.len() != 1 { - let msg = format!( - "Invalid pallet::storage, unexpected number of generic arguments \ + /// Return the storage prefix for this storage item + pub fn prefix(&self) -> String { + self.rename_as + .as_ref() + .map(syn::LitStr::value) + .unwrap_or_else(|| self.ident.to_string()) + } + + /// Return either the span of the ident or the span of the literal in the + /// #[storage_prefix] attribute + pub fn prefix_span(&self) -> proc_macro2::Span { + self.rename_as + .as_ref() + .map(syn::LitStr::span) + .unwrap_or_else(|| self.ident.span()) + } + + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + dev_mode: bool, + ) -> syn::Result { + let item = if let syn::Item::Type(item) = item { + item + } else { + return Err(syn::Error::new( + item.span(), + "Invalid pallet::storage, expect item type.", + )); + }; + + let attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + let PalletStorageAttrInfo { + getter, + rename_as, + mut unbounded, + whitelisted, + try_decode, + } = PalletStorageAttrInfo::from_attrs(attrs)?; + + // set all storages to be unbounded if dev_mode is enabled + unbounded |= dev_mode; + let cfg_attrs = helper::get_item_cfg_attrs(&item.attrs); + + let instances = vec![helper::check_type_def_gen( + &item.generics, + item.ident.span(), + )?]; + + let where_clause = item.generics.where_clause.clone(); + let docs = get_doc_literals(&item.attrs); + + let typ = if let syn::Type::Path(typ) = &*item.ty { + typ + } else { + let msg = "Invalid pallet::storage, expected type path"; + return Err(syn::Error::new(item.ty.span(), msg)); + }; + + if typ.path.segments.len() != 1 { + let msg = "Invalid pallet::storage, expected type path with one segment"; + return Err(syn::Error::new(item.ty.span(), msg)); + } + + let (named_generics, metadata, query_kind, use_default_hasher) = + process_generics(&typ.path.segments[0], dev_mode)?; + + let query_kind = query_kind + .map(|query_kind| { + use 
syn::{ + AngleBracketedGenericArguments, GenericArgument, Path, PathArguments, Type, + TypePath, + }; + + let result_query = match query_kind { + Type::Path(path) + if path + .path + .segments + .last() + .map_or(false, |s| s.ident == "OptionQuery") => + { + return Ok(Some(QueryKind::OptionQuery)) + } + Type::Path(TypePath { + path: Path { segments, .. }, + .. + }) if segments.last().map_or(false, |s| s.ident == "ResultQuery") => segments + .last() + .expect("segments is checked to have the last value; qed") + .clone(), + Type::Path(path) + if path + .path + .segments + .last() + .map_or(false, |s| s.ident == "ValueQuery") => + { + return Ok(Some(QueryKind::ValueQuery)) + } + _ => return Ok(None), + }; + + let error_type = match result_query.arguments { + PathArguments::AngleBracketed(AngleBracketedGenericArguments { + args, .. + }) => { + if args.len() != 1 { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ for ResultQuery, expected 1 type argument, found {}", - args.len(), - ); - return Err(syn::Error::new(args.span(), msg)) - } - - args[0].clone() - }, - args => { - let msg = format!( - "Invalid pallet::storage, unexpected generic args for ResultQuery, \ + args.len(), + ); + return Err(syn::Error::new(args.span(), msg)); + } + + args[0].clone() + } + args => { + let msg = format!( + "Invalid pallet::storage, unexpected generic args for ResultQuery, \ expected angle-bracketed arguments, found `{}`", - args.to_token_stream().to_string() - ); - return Err(syn::Error::new(args.span(), msg)) - }, - }; - - match error_type { - GenericArgument::Type(Type::Path(TypePath { - path: Path { segments: err_variant, leading_colon }, - .. 
- })) => { - if err_variant.len() < 2 { - let msg = format!( - "Invalid pallet::storage, unexpected number of path segments for \ + args.to_token_stream().to_string() + ); + return Err(syn::Error::new(args.span(), msg)); + } + }; + + match error_type { + GenericArgument::Type(Type::Path(TypePath { + path: + Path { + segments: err_variant, + leading_colon, + }, + .. + })) => { + if err_variant.len() < 2 { + let msg = format!( + "Invalid pallet::storage, unexpected number of path segments for \ the generics in ResultQuery, expected a path with at least 2 \ segments, found {}", - err_variant.len(), - ); - return Err(syn::Error::new(err_variant.span(), msg)) - } - let mut error = err_variant.clone(); - let err_variant = error - .pop() - .expect("Checked to have at least 2; qed") - .into_value() - .ident; - - // Necessary here to eliminate the last double colon - let last = - error.pop().expect("Checked to have at least 2; qed").into_value(); - error.push_value(last); - - Ok(Some(QueryKind::ResultQuery( - syn::Path { leading_colon, segments: error }, - err_variant, - ))) - }, - gen_arg => { - let msg = format!( + err_variant.len(), + ); + return Err(syn::Error::new(err_variant.span(), msg)); + } + let mut error = err_variant.clone(); + let err_variant = error + .pop() + .expect("Checked to have at least 2; qed") + .into_value() + .ident; + + // Necessary here to eliminate the last double colon + let last = error + .pop() + .expect("Checked to have at least 2; qed") + .into_value(); + error.push_value(last); + + Ok(Some(QueryKind::ResultQuery( + syn::Path { + leading_colon, + segments: error, + }, + err_variant, + ))) + } + gen_arg => { + let msg = format!( "Invalid pallet::storage, unexpected generic argument kind, expected a \ type path to a `PalletError` enum variant, found `{}`", gen_arg.to_token_stream().to_string(), ); - Err(syn::Error::new(gen_arg.span(), msg)) - }, - } - }) - .transpose()? 
- .unwrap_or(Some(QueryKind::OptionQuery)); - - if let (None, Some(getter)) = (query_kind.as_ref(), getter.as_ref()) { - let msg = "Invalid pallet::storage, cannot generate getter because QueryKind is not \ + Err(syn::Error::new(gen_arg.span(), msg)) + } + } + }) + .transpose()? + .unwrap_or(Some(QueryKind::OptionQuery)); + + if let (None, Some(getter)) = (query_kind.as_ref(), getter.as_ref()) { + let msg = "Invalid pallet::storage, cannot generate getter because QueryKind is not \ identifiable. QueryKind must be `OptionQuery`, `ResultQuery`, `ValueQuery`, or default \ one to be identifiable."; - return Err(syn::Error::new(getter.span(), msg)) - } - - Ok(StorageDef { - attr_span, - index, - vis: item.vis.clone(), - ident: item.ident.clone(), - instances, - metadata, - docs, - getter, - rename_as, - query_kind, - where_clause, - cfg_attrs, - named_generics, - unbounded, - whitelisted, - try_decode, - use_default_hasher, - }) - } + return Err(syn::Error::new(getter.span(), msg)); + } + + Ok(StorageDef { + attr_span, + index, + vis: item.vis.clone(), + ident: item.ident.clone(), + instances, + metadata, + docs, + getter, + rename_as, + query_kind, + where_clause, + cfg_attrs, + named_generics, + unbounded, + whitelisted, + try_decode, + use_default_hasher, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/tasks.rs b/support/procedural-fork/src/pallet/parse/tasks.rs index 6405bb415..50633fbd0 100644 --- a/support/procedural-fork/src/pallet/parse/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tasks.rs @@ -30,96 +30,103 @@ use frame_support_procedural_tools::generate_access_from_frame_or_crate; use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens}; use syn::{ - parse::ParseStream, - parse2, - spanned::Spanned, - token::{Bracket, Paren, PathSep, Pound}, - Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, - PathArguments, Result, TypePath, + parse::ParseStream, + parse2, + spanned::Spanned, + 
token::{Bracket, Paren, PathSep, Pound}, + Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, + PathArguments, Result, TypePath, }; pub mod keywords { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(tasks_experimental); - custom_keyword!(task_enum); - custom_keyword!(task_list); - custom_keyword!(task_condition); - custom_keyword!(task_index); - custom_keyword!(task_weight); - custom_keyword!(pallet); + custom_keyword!(tasks_experimental); + custom_keyword!(task_enum); + custom_keyword!(task_list); + custom_keyword!(task_condition); + custom_keyword!(task_index); + custom_keyword!(task_weight); + custom_keyword!(pallet); } /// Represents the `#[pallet::tasks_experimental]` attribute and its attached item. Also includes /// metadata about the linked [`TaskEnumDef`] if applicable. #[derive(Clone, Debug)] pub struct TasksDef { - pub tasks_attr: Option, - pub tasks: Vec, - pub item_impl: ItemImpl, - /// Path to `frame_support` - pub scrate: Path, - pub enum_ident: Ident, - pub enum_arguments: PathArguments, + pub tasks_attr: Option, + pub tasks: Vec, + pub item_impl: ItemImpl, + /// Path to `frame_support` + pub scrate: Path, + pub enum_ident: Ident, + pub enum_arguments: PathArguments, } impl syn::parse::Parse for TasksDef { - fn parse(input: ParseStream) -> Result { - let item_impl: ItemImpl = input.parse()?; - let (tasks_attrs, normal_attrs) = partition_tasks_attrs(&item_impl); - let tasks_attr = match tasks_attrs.first() { - Some(attr) => Some(parse2::(attr.to_token_stream())?), - None => None, - }; - if let Some(extra_tasks_attr) = tasks_attrs.get(1) { - return Err(Error::new( - extra_tasks_attr.span(), - "unexpected extra `#[pallet::tasks_experimental]` attribute", - )) - } - let tasks: Vec = if tasks_attr.is_some() { - item_impl - .items - .clone() - .into_iter() - .filter(|impl_item| matches!(impl_item, ImplItem::Fn(_))) - .map(|item| parse2::(item.to_token_stream())) - .collect::>()? 
- } else { - Vec::new() - }; - let mut task_indices = HashSet::::new(); - for task in tasks.iter() { - let task_index = &task.index_attr.meta.index; - if !task_indices.insert(task_index.clone()) { - return Err(Error::new( - task_index.span(), - format!("duplicate task index `{}`", task_index), - )) - } - } - let mut item_impl = item_impl; - item_impl.attrs = normal_attrs; - - // we require the path on the impl to be a TypePath - let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; - let segments = enum_path.path.segments.iter().collect::>(); - let (Some(last_seg), None) = (segments.get(0), segments.get(1)) else { - return Err(Error::new( - enum_path.span(), - "if specified manually, the task enum must be defined locally in this \ + fn parse(input: ParseStream) -> Result { + let item_impl: ItemImpl = input.parse()?; + let (tasks_attrs, normal_attrs) = partition_tasks_attrs(&item_impl); + let tasks_attr = match tasks_attrs.first() { + Some(attr) => Some(parse2::(attr.to_token_stream())?), + None => None, + }; + if let Some(extra_tasks_attr) = tasks_attrs.get(1) { + return Err(Error::new( + extra_tasks_attr.span(), + "unexpected extra `#[pallet::tasks_experimental]` attribute", + )); + } + let tasks: Vec = if tasks_attr.is_some() { + item_impl + .items + .clone() + .into_iter() + .filter(|impl_item| matches!(impl_item, ImplItem::Fn(_))) + .map(|item| parse2::(item.to_token_stream())) + .collect::>()? 
+ } else { + Vec::new() + }; + let mut task_indices = HashSet::::new(); + for task in tasks.iter() { + let task_index = &task.index_attr.meta.index; + if !task_indices.insert(task_index.clone()) { + return Err(Error::new( + task_index.span(), + format!("duplicate task index `{}`", task_index), + )); + } + } + let mut item_impl = item_impl; + item_impl.attrs = normal_attrs; + + // we require the path on the impl to be a TypePath + let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; + let segments = enum_path.path.segments.iter().collect::>(); + let (Some(last_seg), None) = (segments.get(0), segments.get(1)) else { + return Err(Error::new( + enum_path.span(), + "if specified manually, the task enum must be defined locally in this \ pallet and cannot be a re-export", - )) - }; - let enum_ident = last_seg.ident.clone(); - let enum_arguments = last_seg.arguments.clone(); - - // We do this here because it would be improper to do something fallible like this at - // the expansion phase. Fallible stuff should happen during parsing. - let scrate = generate_access_from_frame_or_crate("frame-support")?; - - Ok(TasksDef { tasks_attr, item_impl, tasks, scrate, enum_ident, enum_arguments }) - } + )); + }; + let enum_ident = last_seg.ident.clone(); + let enum_arguments = last_seg.arguments.clone(); + + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; + + Ok(TasksDef { + tasks_attr, + item_impl, + tasks, + scrate, + enum_ident, + enum_arguments, + }) + } } /// Parsing for a `#[pallet::tasks_experimental]` attr. @@ -148,821 +155,851 @@ pub type PalletTaskEnumAttr = PalletTaskAttr; /// attached `#[pallet::task_enum]` attribute. 
#[derive(Clone, Debug)] pub struct TaskEnumDef { - pub attr: Option, - pub item_enum: ItemEnum, - pub scrate: Path, - pub type_use_generics: TokenStream2, + pub attr: Option, + pub item_enum: ItemEnum, + pub scrate: Path, + pub type_use_generics: TokenStream2, } impl syn::parse::Parse for TaskEnumDef { - fn parse(input: ParseStream) -> Result { - let mut item_enum = input.parse::()?; - let attr = extract_pallet_attr(&mut item_enum)?; - let attr = match attr { - Some(attr) => Some(parse2(attr)?), - None => None, - }; + fn parse(input: ParseStream) -> Result { + let mut item_enum = input.parse::()?; + let attr = extract_pallet_attr(&mut item_enum)?; + let attr = match attr { + Some(attr) => Some(parse2(attr)?), + None => None, + }; - // We do this here because it would be improper to do something fallible like this at - // the expansion phase. Fallible stuff should happen during parsing. - let scrate = generate_access_from_frame_or_crate("frame-support")?; + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; - let type_use_generics = quote!(T); + let type_use_generics = quote!(T); - Ok(TaskEnumDef { attr, item_enum, scrate, type_use_generics }) - } + Ok(TaskEnumDef { + attr, + item_enum, + scrate, + type_use_generics, + }) + } } /// Represents an individual tasks within a [`TasksDef`]. 
#[derive(Debug, Clone)] pub struct TaskDef { - pub index_attr: TaskIndexAttr, - pub condition_attr: TaskConditionAttr, - pub list_attr: TaskListAttr, - pub weight_attr: TaskWeightAttr, - pub normal_attrs: Vec, - pub item: ImplItemFn, - pub arg_names: Vec, + pub index_attr: TaskIndexAttr, + pub condition_attr: TaskConditionAttr, + pub list_attr: TaskListAttr, + pub weight_attr: TaskWeightAttr, + pub normal_attrs: Vec, + pub item: ImplItemFn, + pub arg_names: Vec, } impl syn::parse::Parse for TaskDef { - fn parse(input: ParseStream) -> Result { - let item = input.parse::()?; - // we only want to activate TaskAttrType parsing errors for tasks-related attributes, - // so we filter them here - let (task_attrs, normal_attrs) = partition_task_attrs(&item); - - let task_attrs: Vec = task_attrs - .into_iter() - .map(|attr| parse2(attr.to_token_stream())) - .collect::>()?; - - let Some(index_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_index(..)]` attribute", - )) - }; - - let Some(condition_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_condition(..)]` attribute", - )) - }; - - let Some(list_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_list(..)]` attribute", - )) - }; - - let Some(weight_attr) = task_attrs - .iter() - .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskWeight(_))) - .cloned() - else { - return Err(Error::new( - item.sig.ident.span(), - "missing `#[pallet::task_weight(..)]` attribute", - )) - }; - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) - .collect::>() 
- .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_condition(..)]` attribute", - )) - } - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) - .collect::>() - .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_list(..)]` attribute", - )) - } - - if let Some(duplicate) = task_attrs - .iter() - .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) - .collect::>() - .get(1) - { - return Err(Error::new( - duplicate.span(), - "unexpected extra `#[pallet::task_index(..)]` attribute", - )) - } - - let mut arg_names = vec![]; - for input in item.sig.inputs.iter() { - match input { - syn::FnArg::Typed(pat_type) => match &*pat_type.pat { - syn::Pat::Ident(ident) => arg_names.push(ident.ident.clone()), - _ => return Err(Error::new(input.span(), "unexpected pattern type")), - }, - _ => return Err(Error::new(input.span(), "unexpected function argument type")), - } - } - - let index_attr = index_attr.try_into().expect("we check the type above; QED"); - let condition_attr = condition_attr.try_into().expect("we check the type above; QED"); - let list_attr = list_attr.try_into().expect("we check the type above; QED"); - let weight_attr = weight_attr.try_into().expect("we check the type above; QED"); - - Ok(TaskDef { - index_attr, - condition_attr, - list_attr, - weight_attr, - normal_attrs, - item, - arg_names, - }) - } + fn parse(input: ParseStream) -> Result { + let item = input.parse::()?; + // we only want to activate TaskAttrType parsing errors for tasks-related attributes, + // so we filter them here + let (task_attrs, normal_attrs) = partition_task_attrs(&item); + + let task_attrs: Vec = task_attrs + .into_iter() + .map(|attr| parse2(attr.to_token_stream())) + .collect::>()?; + + let Some(index_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .cloned() + else { + return 
Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_index(..)]` attribute", + )); + }; + + let Some(condition_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_condition(..)]` attribute", + )); + }; + + let Some(list_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_list(..)]` attribute", + )); + }; + + let Some(weight_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskWeight(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_weight(..)]` attribute", + )); + }; + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_condition(..)]` attribute", + )); + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_list(..)]` attribute", + )); + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_index(..)]` attribute", + )); + } + + let mut arg_names = vec![]; + for input in item.sig.inputs.iter() { + match input { + syn::FnArg::Typed(pat_type) => match &*pat_type.pat { + syn::Pat::Ident(ident) => arg_names.push(ident.ident.clone()), + _ => return Err(Error::new(input.span(), "unexpected pattern type")), + }, + _ => { + return Err(Error::new( + input.span(), + "unexpected function 
argument type", + )) + } + } + } + + let index_attr = index_attr.try_into().expect("we check the type above; QED"); + let condition_attr = condition_attr + .try_into() + .expect("we check the type above; QED"); + let list_attr = list_attr.try_into().expect("we check the type above; QED"); + let weight_attr = weight_attr + .try_into() + .expect("we check the type above; QED"); + + Ok(TaskDef { + index_attr, + condition_attr, + list_attr, + weight_attr, + normal_attrs, + item, + arg_names, + }) + } } /// The contents of a [`TasksDef`]-related attribute. #[derive(Parse, Debug, Clone)] pub enum TaskAttrMeta { - #[peek(keywords::task_list, name = "#[pallet::task_list(..)]")] - TaskList(TaskListAttrMeta), - #[peek(keywords::task_index, name = "#[pallet::task_index(..)")] - TaskIndex(TaskIndexAttrMeta), - #[peek(keywords::task_condition, name = "#[pallet::task_condition(..)")] - TaskCondition(TaskConditionAttrMeta), - #[peek(keywords::task_weight, name = "#[pallet::task_weight(..)")] - TaskWeight(TaskWeightAttrMeta), + #[peek(keywords::task_list, name = "#[pallet::task_list(..)]")] + TaskList(TaskListAttrMeta), + #[peek(keywords::task_index, name = "#[pallet::task_index(..)")] + TaskIndex(TaskIndexAttrMeta), + #[peek(keywords::task_condition, name = "#[pallet::task_condition(..)")] + TaskCondition(TaskConditionAttrMeta), + #[peek(keywords::task_weight, name = "#[pallet::task_weight(..)")] + TaskWeight(TaskWeightAttrMeta), } /// The contents of a `#[pallet::task_list]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskListAttrMeta { - pub task_list: keywords::task_list, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_list: keywords::task_list, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task_index]` attribute. 
#[derive(Parse, Debug, Clone)] pub struct TaskIndexAttrMeta { - pub task_index: keywords::task_index, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub index: LitInt, + pub task_index: keywords::task_index, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub index: LitInt, } /// The contents of a `#[pallet::task_condition]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskConditionAttrMeta { - pub task_condition: keywords::task_condition, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_condition: keywords::task_condition, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task_weight]` attribute. #[derive(Parse, Debug, Clone)] pub struct TaskWeightAttrMeta { - pub task_weight: keywords::task_weight, - #[paren] - _paren: Paren, - #[inside(_paren)] - pub expr: Expr, + pub task_weight: keywords::task_weight, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, } /// The contents of a `#[pallet::task]` attribute. 
#[derive(Parse, Debug, Clone)] pub struct PalletTaskAttr { - pub pound: Pound, - #[bracket] - _bracket: Bracket, - #[inside(_bracket)] - pub pallet: keywords::pallet, - #[inside(_bracket)] - pub colons: PathSep, - #[inside(_bracket)] - pub meta: T, + pub pound: Pound, + #[bracket] + _bracket: Bracket, + #[inside(_bracket)] + pub pallet: keywords::pallet, + #[inside(_bracket)] + pub colons: PathSep, + #[inside(_bracket)] + pub meta: T, } impl ToTokens for TaskListAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_list = self.task_list; - let expr = &self.expr; - tokens.extend(quote!(#task_list(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_list = self.task_list; + let expr = &self.expr; + tokens.extend(quote!(#task_list(#expr))); + } } impl ToTokens for TaskConditionAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_condition = self.task_condition; - let expr = &self.expr; - tokens.extend(quote!(#task_condition(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_condition = self.task_condition; + let expr = &self.expr; + tokens.extend(quote!(#task_condition(#expr))); + } } impl ToTokens for TaskWeightAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_weight = self.task_weight; - let expr = &self.expr; - tokens.extend(quote!(#task_weight(#expr))); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_weight = self.task_weight; + let expr = &self.expr; + tokens.extend(quote!(#task_weight(#expr))); + } } impl ToTokens for TaskIndexAttrMeta { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let task_index = self.task_index; - let index = &self.index; - tokens.extend(quote!(#task_index(#index))) - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_index = self.task_index; + let index = &self.index; + tokens.extend(quote!(#task_index(#index))) + } } impl ToTokens for TaskAttrMeta { - fn to_tokens(&self, tokens: &mut 
TokenStream2) { - match self { - TaskAttrMeta::TaskList(list) => tokens.extend(list.to_token_stream()), - TaskAttrMeta::TaskIndex(index) => tokens.extend(index.to_token_stream()), - TaskAttrMeta::TaskCondition(condition) => tokens.extend(condition.to_token_stream()), - TaskAttrMeta::TaskWeight(weight) => tokens.extend(weight.to_token_stream()), - } - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + TaskAttrMeta::TaskList(list) => tokens.extend(list.to_token_stream()), + TaskAttrMeta::TaskIndex(index) => tokens.extend(index.to_token_stream()), + TaskAttrMeta::TaskCondition(condition) => tokens.extend(condition.to_token_stream()), + TaskAttrMeta::TaskWeight(weight) => tokens.extend(weight.to_token_stream()), + } + } } impl ToTokens for PalletTaskAttr { - fn to_tokens(&self, tokens: &mut TokenStream2) { - let pound = self.pound; - let pallet = self.pallet; - let colons = self.colons; - let meta = &self.meta; - tokens.extend(quote!(#pound[#pallet #colons #meta])); - } + fn to_tokens(&self, tokens: &mut TokenStream2) { + let pound = self.pound; + let pallet = self.pallet; + let colons = self.colons; + let meta = &self.meta; + tokens.extend(quote!(#pound[#pallet #colons #meta])); + } } impl TryFrom> for TaskIndexAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => - return Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskIndexAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => { + return Err(Error::new( + value.span(), + format!( + "`{:?}` cannot 
be converted to a `TaskIndexAttr`", + value.meta + ), + )) + } + } + } } impl TryFrom> for TaskConditionAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => - return Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskConditionAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => { + return Err(Error::new( + value.span(), + format!( + "`{:?}` cannot be converted to a `TaskConditionAttr`", + value.meta + ), + )) + } + } + } } impl TryFrom> for TaskWeightAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; - match value.meta { - TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => - return Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskWeightAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => { + return Err(Error::new( + value.span(), + format!( + "`{:?}` cannot be converted to a `TaskWeightAttr`", + value.meta + ), + )) + } + } + } } impl TryFrom> for TaskListAttr { - type Error = syn::Error; - - fn try_from(value: PalletTaskAttr) -> Result { - let pound = value.pound; - let pallet = value.pallet; - let colons = value.colons; 
- match value.meta { - TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), - _ => - return Err(Error::new( - value.span(), - format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), - )), - } - } + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => { + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), + )) + } + } + } } fn extract_pallet_attr(item_enum: &mut ItemEnum) -> Result> { - let mut duplicate = None; - let mut attr = None; - item_enum.attrs = item_enum - .attrs - .iter() - .filter(|found_attr| { - let segs = found_attr - .path() - .segments - .iter() - .map(|seg| seg.ident.clone()) - .collect::>(); - let (Some(seg1), Some(_), None) = (segs.get(0), segs.get(1), segs.get(2)) else { - return true - }; - if seg1 != "pallet" { - return true - } - if attr.is_some() { - duplicate = Some(found_attr.span()); - } - attr = Some(found_attr.to_token_stream()); - false - }) - .cloned() - .collect(); - if let Some(span) = duplicate { - return Err(Error::new(span, "only one `#[pallet::_]` attribute is supported on this item")) - } - Ok(attr) + let mut duplicate = None; + let mut attr = None; + item_enum.attrs = item_enum + .attrs + .iter() + .filter(|found_attr| { + let segs = found_attr + .path() + .segments + .iter() + .map(|seg| seg.ident.clone()) + .collect::>(); + let (Some(seg1), Some(_), None) = (segs.get(0), segs.get(1), segs.get(2)) else { + return true; + }; + if seg1 != "pallet" { + return true; + } + if attr.is_some() { + duplicate = Some(found_attr.span()); + } + attr = Some(found_attr.to_token_stream()); + false + }) + .cloned() + .collect(); + if let Some(span) = duplicate { + return Err(Error::new( + span, + "only one `#[pallet::_]` 
attribute is supported on this item", + )); + } + Ok(attr) } fn partition_tasks_attrs(item_impl: &ItemImpl) -> (Vec, Vec) { - item_impl.attrs.clone().into_iter().partition(|attr| { - let mut path_segs = attr.path().segments.iter(); - let (Some(prefix), Some(suffix), None) = - (path_segs.next(), path_segs.next(), path_segs.next()) - else { - return false - }; - prefix.ident == "pallet" && suffix.ident == "tasks_experimental" - }) + item_impl.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix), None) = + (path_segs.next(), path_segs.next(), path_segs.next()) + else { + return false; + }; + prefix.ident == "pallet" && suffix.ident == "tasks_experimental" + }) } fn partition_task_attrs(item: &ImplItemFn) -> (Vec, Vec) { - item.attrs.clone().into_iter().partition(|attr| { - let mut path_segs = attr.path().segments.iter(); - let (Some(prefix), Some(suffix)) = (path_segs.next(), path_segs.next()) else { - return false - }; - // N.B: the `PartialEq` impl between `Ident` and `&str` is more efficient than - // parsing and makes no stack or heap allocations - prefix.ident == "pallet" && - (suffix.ident == "tasks_experimental" || - suffix.ident == "task_list" || - suffix.ident == "task_condition" || - suffix.ident == "task_weight" || - suffix.ident == "task_index") - }) + item.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix)) = (path_segs.next(), path_segs.next()) else { + return false; + }; + // N.B: the `PartialEq` impl between `Ident` and `&str` is more efficient than + // parsing and makes no stack or heap allocations + prefix.ident == "pallet" + && (suffix.ident == "tasks_experimental" + || suffix.ident == "task_list" + || suffix.ident == "task_condition" + || suffix.ident == "task_weight" + || suffix.ident == "task_index") + }) } #[test] fn test_parse_task_list_() { - 
parse2::(quote!(#[pallet::task_list(Something::iter())])).unwrap(); - parse2::(quote!(#[pallet::task_list(Numbers::::iter_keys())])).unwrap(); - parse2::(quote!(#[pallet::task_list(iter())])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_list()])), - "expected an expression" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_list])), - "expected parentheses" - ); + parse2::(quote!(#[pallet::task_list(Something::iter())])).unwrap(); + parse2::(quote!(#[pallet::task_list(Numbers::::iter_keys())])).unwrap(); + parse2::(quote!(#[pallet::task_list(iter())])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list()])), + "expected an expression" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list])), + "expected parentheses" + ); } #[test] fn test_parse_task_index() { - parse2::(quote!(#[pallet::task_index(3)])).unwrap(); - parse2::(quote!(#[pallet::task_index(0)])).unwrap(); - parse2::(quote!(#[pallet::task_index(17)])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index])), - "expected parentheses" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index("hey")])), - "expected integer literal" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::task_index(0.3)])), - "expected integer literal" - ); + parse2::(quote!(#[pallet::task_index(3)])).unwrap(); + parse2::(quote!(#[pallet::task_index(0)])).unwrap(); + parse2::(quote!(#[pallet::task_index(17)])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index])), + "expected parentheses" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index("hey")])), + "expected integer literal" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index(0.3)])), + "expected integer literal" + ); } #[test] fn test_parse_task_condition() { - parse2::(quote!(#[pallet::task_condition(|x| x.is_some())])).unwrap(); - 
parse2::(quote!(#[pallet::task_condition(|_x| some_expr())])).unwrap(); - parse2::(quote!(#[pallet::task_condition(|| some_expr())])).unwrap(); - parse2::(quote!(#[pallet::task_condition(some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|x| x.is_some())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|_x| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(some_expr())])).unwrap(); } #[test] fn test_parse_tasks_attr() { - parse2::(quote!(#[pallet::tasks_experimental])).unwrap(); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::taskss])), - "expected `tasks_experimental`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::tasks_])), - "expected `tasks_experimental`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pal::tasks])), - "expected `pallet`" - ); - assert_parse_error_matches!( - parse2::(quote!(#[pallet::tasks_experimental()])), - "unexpected token" - ); + parse2::(quote!(#[pallet::tasks_experimental])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::taskss])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pal::tasks])), + "expected `pallet`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_experimental()])), + "unexpected token" + ); } #[test] fn test_parse_tasks_def_basic() { - simulate_manifest_dir("../../examples/basic", || { - let parsed = parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - /// Add a pair of numbers into the totals and remove them. 
- #[pallet::task_list(Numbers::::iter_keys())] - #[pallet::task_condition(|i| Numbers::::contains_key(i))] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn add_number_into_total(i: u32) -> DispatchResult { - let v = Numbers::::take(i).ok_or(Error::::NotFound)?; - Total::::mutate(|(total_keys, total_values)| { - *total_keys += i; - *total_values += v; - }); - Ok(()) - } - } - }) - .unwrap(); - assert_eq!(parsed.tasks.len(), 1); - }); + simulate_manifest_dir("../../examples/basic", || { + let parsed = parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Add a pair of numbers into the totals and remove them. + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn add_number_into_total(i: u32) -> DispatchResult { + let v = Numbers::::take(i).ok_or(Error::::NotFound)?; + Total::::mutate(|(total_keys, total_values)| { + *total_keys += i; + *total_values += v; + }); + Ok(()) + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 1); + }); } #[test] fn test_parse_tasks_def_basic_increment_decrement() { - simulate_manifest_dir("../../examples/basic", || { - let parsed = parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - /// Get the value and check if it can be incremented - #[pallet::task_index(0)] - #[pallet::task_condition(|| { - let value = Value::::get().unwrap(); - value < 255 - })] - #[pallet::task_list(Vec::>::new())] - #[pallet::task_weight(0)] - fn increment() -> DispatchResult { - let value = Value::::get().unwrap_or_default(); - if value >= 255 { - Err(Error::::ValueOverflow.into()) - } else { - let new_val = value.checked_add(1).ok_or(Error::::ValueOverflow)?; - Value::::put(new_val); - Pallet::::deposit_event(Event::Incremented { new_val }); - Ok(()) - } - } - - // Get the value and check if it can be decremented - #[pallet::task_index(1)] - #[pallet::task_condition(|| { - let value = Value::::get().unwrap(); - value > 0 - })] - #[pallet::task_list(Vec::>::new())] - #[pallet::task_weight(0)] - fn decrement() -> DispatchResult { - let value = Value::::get().unwrap_or_default(); - if value == 0 { - Err(Error::::ValueUnderflow.into()) - } else { - let new_val = value.checked_sub(1).ok_or(Error::::ValueUnderflow)?; - Value::::put(new_val); - Pallet::::deposit_event(Event::Decremented { new_val }); - Ok(()) - } - } - } - }) - .unwrap(); - assert_eq!(parsed.tasks.len(), 2); - }); + simulate_manifest_dir("../../examples/basic", || { + let parsed = parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Get the value and check if it can be incremented + #[pallet::task_index(0)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value < 255 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn increment() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value >= 255 { + Err(Error::::ValueOverflow.into()) + } else { + let new_val = value.checked_add(1).ok_or(Error::::ValueOverflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Incremented { new_val }); + Ok(()) + } + } + + // Get the value and check if it can be decremented + #[pallet::task_index(1)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value > 0 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn decrement() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value == 0 { + Err(Error::::ValueUnderflow.into()) + } else { + let new_val = value.checked_sub(1).ok_or(Error::::ValueUnderflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Decremented { new_val }); + Ok(()) + } + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 2); + }); } #[test] fn test_parse_tasks_def_duplicate_index() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_list(Something::iter())] - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - - #[pallet::task_list(Numbers::::iter_keys())] - #[pallet::task_condition(|i| Numbers::::contains_key(i))] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - pub fn bar(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - "duplicate task index `0`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn bar(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + "duplicate task index `0`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_list() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_list\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_list\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_condition() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_list(Something::iter())] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_condition\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_index() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_list(Something::iter())] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_index\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_index\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_missing_task_weight() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_index(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"missing `#\[pallet::task_weight\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_weight\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_list_attr() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_weight(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_list(SomethingElse::iter())] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_list\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_list(SomethingElse::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_list\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_condition_attr() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! 
{ - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_condition(|i| i % 4 == 0)] - #[pallet::task_index(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_condition\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_condition(|i| i % 4 == 0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_unexpected_extra_task_index_attr() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - impl, I: 'static> Pallet { - #[pallet::task_condition(|i| i % 2 == 0)] - #[pallet::task_index(0)] - #[pallet::task_index(0)] - #[pallet::task_list(Something::iter())] - #[pallet::task_weight(0)] - pub fn foo(i: u32) -> DispatchResult { - Ok(()) - } - } - }), - r"unexpected extra `#\[pallet::task_index\(\.\.\)\]`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_index\(\.\.\)\]`" + ); + }); } #[test] fn test_parse_tasks_def_extra_tasks_attribute() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::tasks_experimental] - #[pallet::tasks_experimental] - impl, I: 'static> Pallet {} - }), - r"unexpected extra `#\[pallet::tasks_experimental\]` attribute" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + #[pallet::tasks_experimental] + impl, I: 'static> Pallet {} + }), + r"unexpected extra `#\[pallet::tasks_experimental\]` attribute" + ); + }); } #[test] fn test_parse_task_enum_def_basic() { - simulate_manifest_dir("../../examples/basic", || { - parse2::(quote! { - #[pallet::task_enum] - pub enum Task { - Increment, - Decrement, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_non_task_name() { - simulate_manifest_dir("../../examples/basic", || { - parse2::(quote! { - #[pallet::task_enum] - pub enum Something { - Foo - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Something { + Foo + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_missing_attr_allowed() { - simulate_manifest_dir("../../examples/basic", || { - parse2::(quote! 
{ - pub enum Task { - Increment, - Decrement, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_missing_attr_alternate_name_allowed() { - simulate_manifest_dir("../../examples/basic", || { - parse2::(quote! { - pub enum Foo { - Red, - } - }) - .unwrap(); - }); + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + pub enum Foo { + Red, + } + }) + .unwrap(); + }); } #[test] fn test_parse_task_enum_def_wrong_attr() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::something] - pub enum Task { - Increment, - Decrement, - } - }), - "expected `task_enum`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::something] + pub enum Task { + Increment, + Decrement, + } + }), + "expected `task_enum`" + ); + }); } #[test] fn test_parse_task_enum_def_wrong_item() { - simulate_manifest_dir("../../examples/basic", || { - assert_parse_error_matches!( - parse2::(quote! { - #[pallet::task_enum] - pub struct Something; - }), - "expected `enum`" - ); - }); + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::task_enum] + pub struct Something; + }), + "expected `enum`" + ); + }); } diff --git a/support/procedural-fork/src/pallet/parse/tests/mod.rs b/support/procedural-fork/src/pallet/parse/tests/mod.rs index a3661f307..fd7dc2dbe 100644 --- a/support/procedural-fork/src/pallet/parse/tests/mod.rs +++ b/support/procedural-fork/src/pallet/parse/tests/mod.rs @@ -20,7 +20,7 @@ use syn::parse_quote; #[doc(hidden)] pub mod __private { - pub use regex; + pub use regex; } /// Allows you to assert that the input expression resolves to an error whose string @@ -63,22 +63,22 @@ pub mod __private { /// enough that it will work with any error with a reasonable [`core::fmt::Display`] impl. #[macro_export] macro_rules! assert_parse_error_matches { - ($expr:expr, $reg:literal) => { - match $expr { - Ok(_) => panic!("Expected an `Error(..)`, but got Ok(..)"), - Err(e) => { - let error_message = e.to_string(); - let re = $crate::pallet::parse::tests::__private::regex::Regex::new($reg) - .expect("Invalid regex pattern"); - assert!( - re.is_match(&error_message), - "Error message \"{}\" does not match the pattern \"{}\"", - error_message, - $reg - ); - }, - } - }; + ($expr:expr, $reg:literal) => { + match $expr { + Ok(_) => panic!("Expected an `Error(..)`, but got Ok(..)"), + Err(e) => { + let error_message = e.to_string(); + let re = $crate::pallet::parse::tests::__private::regex::Regex::new($reg) + .expect("Invalid regex pattern"); + assert!( + re.is_match(&error_message), + "Error message \"{}\" does not match the pattern \"{}\"", + error_message, + $reg + ); + } + } + }; } /// Allows you to assert that an entire pallet parses successfully. A custom syntax is used for @@ -183,82 +183,82 @@ macro_rules! assert_pallet_parse_error { /// This function uses a [`Mutex`] to avoid a race condition created when multiple tests try to /// modify and then restore the `CARGO_MANIFEST_DIR` ENV var in an overlapping way. 
pub fn simulate_manifest_dir, F: FnOnce() + std::panic::UnwindSafe>( - path: P, - closure: F, + path: P, + closure: F, ) { - use std::{env::*, path::*}; + use std::{env::*, path::*}; - /// Ensures that only one thread can modify/restore the `CARGO_MANIFEST_DIR` ENV var at a time, - /// avoiding a race condition because `cargo test` runs tests in parallel. - /// - /// Although this forces all tests that use [`simulate_manifest_dir`] to run sequentially with - /// respect to each other, this is still several orders of magnitude faster than using UI - /// tests, even if they are run in parallel. - static MANIFEST_DIR_LOCK: Mutex<()> = Mutex::new(()); + /// Ensures that only one thread can modify/restore the `CARGO_MANIFEST_DIR` ENV var at a time, + /// avoiding a race condition because `cargo test` runs tests in parallel. + /// + /// Although this forces all tests that use [`simulate_manifest_dir`] to run sequentially with + /// respect to each other, this is still several orders of magnitude faster than using UI + /// tests, even if they are run in parallel. 
+ static MANIFEST_DIR_LOCK: Mutex<()> = Mutex::new(()); - // avoid race condition when swapping out `CARGO_MANIFEST_DIR` - let guard = MANIFEST_DIR_LOCK.lock().unwrap(); + // avoid race condition when swapping out `CARGO_MANIFEST_DIR` + let guard = MANIFEST_DIR_LOCK.lock().unwrap(); - // obtain the current/original `CARGO_MANIFEST_DIR` - let orig = PathBuf::from( - var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`"), - ); + // obtain the current/original `CARGO_MANIFEST_DIR` + let orig = PathBuf::from( + var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`"), + ); - // set `CARGO_MANIFEST_DIR` to the provided path, relative to current working dir - set_var("CARGO_MANIFEST_DIR", orig.join(path.as_ref())); + // set `CARGO_MANIFEST_DIR` to the provided path, relative to current working dir + set_var("CARGO_MANIFEST_DIR", orig.join(path.as_ref())); - // safely run closure catching any panics - let result = panic::catch_unwind(closure); + // safely run closure catching any panics + let result = panic::catch_unwind(closure); - // restore original `CARGO_MANIFEST_DIR` before unwinding - set_var("CARGO_MANIFEST_DIR", &orig); + // restore original `CARGO_MANIFEST_DIR` before unwinding + set_var("CARGO_MANIFEST_DIR", &orig); - // unlock the mutex so we don't poison it if there is a panic - drop(guard); + // unlock the mutex so we don't poison it if there is a panic + drop(guard); - // unwind any panics originally encountered when running closure - result.unwrap(); + // unwind any panics originally encountered when running closure + result.unwrap(); } mod tasks; #[test] fn test_parse_minimal_pallet() { - assert_pallet_parses! { - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::config] - pub trait Config: frame_system::Config {} + assert_pallet_parses! 
{ + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} - #[pallet::pallet] - pub struct Pallet(_); - } - }; + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_missing_pallet() { - assert_pallet_parse_error! { - #[manifest_dir("../../examples/basic")] - #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::config] - pub trait Config: frame_system::Config {} - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} + } + } } #[test] fn test_parse_pallet_missing_config() { - assert_pallet_parse_error! { - #[manifest_dir("../../examples/basic")] - #[error_regex("Missing `\\#\\[pallet::config\\]`")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::config\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::pallet] + pub struct Pallet(_); + } + } } diff --git a/support/procedural-fork/src/pallet/parse/tests/tasks.rs b/support/procedural-fork/src/pallet/parse/tests/tasks.rs index 9f1436284..6cd4d13bb 100644 --- a/support/procedural-fork/src/pallet/parse/tests/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tests/tasks.rs @@ -19,222 +19,222 @@ use syn::parse_quote; #[test] fn test_parse_pallet_with_task_enum_missing_impl() { - assert_pallet_parse_error! 
{ - #[manifest_dir("../../examples/basic")] - #[error_regex("Missing `\\#\\[pallet::tasks_experimental\\]` impl")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::task_enum] - pub enum Task { - Something, - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::tasks_experimental\\]` impl")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum Task { + Something, + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_with_task_enum_wrong_attribute() { - assert_pallet_parse_error! { - #[manifest_dir("../../examples/basic")] - #[error_regex("expected one of")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::wrong_attribute] - pub enum Task { - Something, - } - - #[pallet::task_list] - impl frame_support::traits::Task for Task - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("expected one of")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::wrong_attribute] + pub enum Task { + Something, + } + + #[pallet::task_list] + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_missing_task_enum() { - assert_pallet_parses! 
{ - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::tasks_experimental] - #[cfg(test)] // aha, this means it's being eaten - impl frame_support::traits::Task for Task - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::tasks_experimental] + #[cfg(test)] // aha, this means it's being eaten + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_task_list_in_wrong_place() { - assert_pallet_parse_error! { - #[manifest_dir("../../examples/basic")] - #[error_regex("can only be used on items within an `impl` statement.")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - #[pallet::task_list] - pub fn something() { - println!("hey"); - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("can only be used on items within an `impl` statement.")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::task_list] + pub fn something() { + println!("hey"); + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_manual_tasks_impl_without_manual_tasks_enum() { - assert_pallet_parse_error! 
{ - #[manifest_dir("../../examples/basic")] - #[error_regex(".*attribute must be attached to your.*")] - #[frame_support::pallet] - pub mod pallet { - - impl frame_support::traits::Task for Task - where - T: TypeInfo, - { - type Enumeration = sp_std::vec::IntoIter>; - - fn iter() -> Self::Enumeration { - sp_std::vec![Task::increment, Task::decrement].into_iter() - } - } - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - } + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex(".*attribute must be attached to your.*")] + #[frame_support::pallet] + pub mod pallet { + + impl frame_support::traits::Task for Task + where + T: TypeInfo, + { + type Enumeration = sp_std::vec::IntoIter>; + + fn iter() -> Self::Enumeration { + sp_std::vec![Task::increment, Task::decrement].into_iter() + } + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } } #[test] fn test_parse_pallet_manual_task_enum_non_manual_impl() { - assert_pallet_parses! { - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - #[pallet::tasks_experimental] - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_non_manual_task_enum_manual_impl() { - assert_pallet_parses! 
{ - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - #[pallet::task_enum] - pub enum MyCustomTaskEnum { - Something, - } - - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_manual_task_enum_manual_impl() { - assert_pallet_parses! { - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - pub enum MyCustomTaskEnum { - Something, - } - - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } #[test] fn test_parse_pallet_manual_task_enum_mismatch_ident() { - assert_pallet_parses! 
{ - #[manifest_dir("../../examples/basic")] - #[frame_support::pallet] - pub mod pallet { - pub enum WrongIdent { - Something, - } - - #[pallet::tasks_experimental] - impl frame_support::traits::Task for MyCustomTaskEnum - where - T: TypeInfo, - {} - - #[pallet::config] - pub trait Config: frame_system::Config {} - - #[pallet::pallet] - pub struct Pallet(_); - } - }; + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum WrongIdent { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; } diff --git a/support/procedural-fork/src/pallet/parse/type_value.rs b/support/procedural-fork/src/pallet/parse/type_value.rs index 4d9db30b3..d5c85248f 100644 --- a/support/procedural-fork/src/pallet/parse/type_value.rs +++ b/support/procedural-fork/src/pallet/parse/type_value.rs @@ -20,104 +20,104 @@ use syn::spanned::Spanned; /// Definition of type value. Just a function which is expanded to a struct implementing `Get`. pub struct TypeValueDef { - /// The index of error item in pallet module. - pub index: usize, - /// Visibility of the struct to generate. - pub vis: syn::Visibility, - /// Ident of the struct to generate. - pub ident: syn::Ident, - /// The type return by Get. - pub type_: Box, - /// The block returning the value to get - pub block: Box, - /// If type value is generic over `T` (or `T` and `I` for instantiable pallet) - pub is_generic: bool, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, - /// The where clause of the function. - pub where_clause: Option, - /// The span of the pallet::type_value attribute. - pub attr_span: proc_macro2::Span, - /// Docs on the item. - pub docs: Vec, + /// The index of error item in pallet module. 
+ pub index: usize, + /// Visibility of the struct to generate. + pub vis: syn::Visibility, + /// Ident of the struct to generate. + pub ident: syn::Ident, + /// The type return by Get. + pub type_: Box, + /// The block returning the value to get + pub block: Box, + /// If type value is generic over `T` (or `T` and `I` for instantiable pallet) + pub is_generic: bool, + /// A set of usage of instance, must be check for consistency with config. + pub instances: Vec, + /// The where clause of the function. + pub where_clause: Option, + /// The span of the pallet::type_value attribute. + pub attr_span: proc_macro2::Span, + /// Docs on the item. + pub docs: Vec, } impl TypeValueDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { - let item = if let syn::Item::Fn(item) = item { - item - } else { - let msg = "Invalid pallet::type_value, expected item fn"; - return Err(syn::Error::new(item.span(), msg)) - }; + pub fn try_from( + attr_span: proc_macro2::Span, + index: usize, + item: &mut syn::Item, + ) -> syn::Result { + let item = if let syn::Item::Fn(item) = item { + item + } else { + let msg = "Invalid pallet::type_value, expected item fn"; + return Err(syn::Error::new(item.span(), msg)); + }; - let mut docs = vec![]; - for attr in &item.attrs { - if let syn::Meta::NameValue(meta) = &attr.meta { - if meta.path.get_ident().map_or(false, |ident| ident == "doc") { - docs.push(meta.value.clone()); - continue - } - } + let mut docs = vec![]; + for attr in &item.attrs { + if let syn::Meta::NameValue(meta) = &attr.meta { + if meta.path.get_ident().map_or(false, |ident| ident == "doc") { + docs.push(meta.value.clone()); + continue; + } + } - let msg = "Invalid pallet::type_value, unexpected attribute, only doc attribute are \ + let msg = "Invalid pallet::type_value, unexpected attribute, only doc attribute are \ allowed"; - return Err(syn::Error::new(attr.span(), msg)) - } + return Err(syn::Error::new(attr.span(), 
msg)); + } - if let Some(span) = item - .sig - .constness - .as_ref() - .map(|t| t.span()) - .or_else(|| item.sig.asyncness.as_ref().map(|t| t.span())) - .or_else(|| item.sig.unsafety.as_ref().map(|t| t.span())) - .or_else(|| item.sig.abi.as_ref().map(|t| t.span())) - .or_else(|| item.sig.variadic.as_ref().map(|t| t.span())) - { - let msg = "Invalid pallet::type_value, unexpected token"; - return Err(syn::Error::new(span, msg)) - } + if let Some(span) = item + .sig + .constness + .as_ref() + .map(|t| t.span()) + .or_else(|| item.sig.asyncness.as_ref().map(|t| t.span())) + .or_else(|| item.sig.unsafety.as_ref().map(|t| t.span())) + .or_else(|| item.sig.abi.as_ref().map(|t| t.span())) + .or_else(|| item.sig.variadic.as_ref().map(|t| t.span())) + { + let msg = "Invalid pallet::type_value, unexpected token"; + return Err(syn::Error::new(span, msg)); + } - if !item.sig.inputs.is_empty() { - let msg = "Invalid pallet::type_value, unexpected argument"; - return Err(syn::Error::new(item.sig.inputs[0].span(), msg)) - } + if !item.sig.inputs.is_empty() { + let msg = "Invalid pallet::type_value, unexpected argument"; + return Err(syn::Error::new(item.sig.inputs[0].span(), msg)); + } - let vis = item.vis.clone(); - let ident = item.sig.ident.clone(); - let block = item.block.clone(); - let type_ = match item.sig.output.clone() { - syn::ReturnType::Type(_, type_) => type_, - syn::ReturnType::Default => { - let msg = "Invalid pallet::type_value, expected return type"; - return Err(syn::Error::new(item.sig.span(), msg)) - }, - }; + let vis = item.vis.clone(); + let ident = item.sig.ident.clone(); + let block = item.block.clone(); + let type_ = match item.sig.output.clone() { + syn::ReturnType::Type(_, type_) => type_, + syn::ReturnType::Default => { + let msg = "Invalid pallet::type_value, expected return type"; + return Err(syn::Error::new(item.sig.span(), msg)); + } + }; - let mut instances = vec![]; - if let Some(usage) = helper::check_type_value_gen(&item.sig.generics, 
item.sig.span())? { - instances.push(usage); - } + let mut instances = vec![]; + if let Some(usage) = helper::check_type_value_gen(&item.sig.generics, item.sig.span())? { + instances.push(usage); + } - let is_generic = item.sig.generics.type_params().count() > 0; - let where_clause = item.sig.generics.where_clause.clone(); + let is_generic = item.sig.generics.type_params().count() > 0; + let where_clause = item.sig.generics.where_clause.clone(); - Ok(TypeValueDef { - attr_span, - index, - is_generic, - vis, - ident, - block, - type_, - instances, - where_clause, - docs, - }) - } + Ok(TypeValueDef { + attr_span, + index, + is_generic, + vis, + ident, + block, + type_, + instances, + where_clause, + docs, + }) + } } diff --git a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs index 2bf0a1b6c..6e5109a74 100644 --- a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs +++ b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs @@ -20,43 +20,43 @@ use syn::spanned::Spanned; /// The definition of the pallet validate unsigned implementation. pub struct ValidateUnsignedDef { - /// The index of validate unsigned item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, + /// The index of validate unsigned item in pallet module. + pub index: usize, + /// A set of usage of instance, must be check for consistency with config. 
+ pub instances: Vec, } impl ValidateUnsignedDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Impl(item) = item { - item - } else { - let msg = "Invalid pallet::validate_unsigned, expected item impl"; - return Err(syn::Error::new(item.span(), msg)) - }; - - if item.trait_.is_none() { - let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Impl(item) = item { + item + } else { + let msg = "Invalid pallet::validate_unsigned, expected item impl"; + return Err(syn::Error::new(item.span(), msg)); + }; + + if item.trait_.is_none() { + let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)) - } - - if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { - if last.ident != "ValidateUnsigned" { - let msg = "Invalid pallet::validate_unsigned, expected trait ValidateUnsigned"; - return Err(syn::Error::new(last.span(), msg)) - } - } else { - let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ + return Err(syn::Error::new(item.span(), msg)); + } + + if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { + if last.ident != "ValidateUnsigned" { + let msg = "Invalid pallet::validate_unsigned, expected trait ValidateUnsigned"; + return Err(syn::Error::new(last.span(), msg)); + } + } else { + let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)) - } + return Err(syn::Error::new(item.span(), msg)); + } - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; + let instances = vec![ + helper::check_pallet_struct_usage(&item.self_ty)?, + 
helper::check_impl_gen(&item.generics, item.impl_token.span())?, + ]; - Ok(ValidateUnsignedDef { index, instances }) - } + Ok(ValidateUnsignedDef { index, instances }) + } } diff --git a/support/procedural-fork/src/pallet_error.rs b/support/procedural-fork/src/pallet_error.rs index 693a1e982..bdf8330cd 100644 --- a/support/procedural-fork/src/pallet_error.rs +++ b/support/procedural-fork/src/pallet_error.rs @@ -20,159 +20,172 @@ use quote::ToTokens; // Derive `PalletError` pub fn derive_pallet_error(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let syn::DeriveInput { ident: name, generics, data, .. } = match syn::parse(input) { - Ok(input) => input, - Err(e) => return e.to_compile_error().into(), - }; - - let frame_support = match generate_access_from_frame_or_crate("frame-support") { - Ok(c) => c, - Err(e) => return e.into_compile_error().into(), - }; - let frame_support = &frame_support; - let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - - let max_encoded_size = match data { - syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { - syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) | - syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }) => { - let maybe_field_tys = fields - .iter() - .map(|f| generate_field_types(f, &frame_support)) - .collect::>>(); - let field_tys = match maybe_field_tys { - Ok(tys) => tys.into_iter().flatten(), - Err(e) => return e.into_compile_error().into(), - }; - quote::quote! { - 0_usize - #( - .saturating_add(< - #field_tys as #frame_support::traits::PalletError - >::MAX_ENCODED_SIZE) - )* - } - }, - syn::Fields::Unit => quote::quote!(0), - }, - syn::Data::Enum(syn::DataEnum { variants, .. 
}) => { - let field_tys = variants - .iter() - .map(|variant| generate_variant_field_types(variant, &frame_support)) - .collect::>>, syn::Error>>(); - - let field_tys = match field_tys { - Ok(tys) => tys.into_iter().flatten().collect::>(), - Err(e) => return e.to_compile_error().into(), - }; - - // We start with `1`, because the discriminant of an enum is stored as u8 - if field_tys.is_empty() { - quote::quote!(1) - } else { - let variant_sizes = field_tys.into_iter().map(|variant_field_tys| { - quote::quote! { - 1_usize - #(.saturating_add(< - #variant_field_tys as #frame_support::traits::PalletError - >::MAX_ENCODED_SIZE))* - } - }); - - quote::quote! {{ - let mut size = 1_usize; - let mut tmp = 0_usize; - #( - tmp = #variant_sizes; - size = if tmp > size { tmp } else { size }; - tmp = 0_usize; - )* - size - }} - } - }, - syn::Data::Union(syn::DataUnion { union_token, .. }) => { - let msg = "Cannot derive `PalletError` for union; please implement it directly"; - return syn::Error::new(union_token.span, msg).into_compile_error().into() - }, - }; - - quote::quote!( - const _: () = { - impl #impl_generics #frame_support::traits::PalletError - for #name #ty_generics #where_clause - { - const MAX_ENCODED_SIZE: usize = #max_encoded_size; - } - }; - ) - .into() + let syn::DeriveInput { + ident: name, + generics, + data, + .. + } = match syn::parse(input) { + Ok(input) => input, + Err(e) => return e.to_compile_error().into(), + }; + + let frame_support = match generate_access_from_frame_or_crate("frame-support") { + Ok(c) => c, + Err(e) => return e.into_compile_error().into(), + }; + let frame_support = &frame_support; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + let max_encoded_size = match data { + syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields { + syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) + | syn::Fields::Unnamed(syn::FieldsUnnamed { + unnamed: fields, .. 
+ }) => { + let maybe_field_tys = fields + .iter() + .map(|f| generate_field_types(f, &frame_support)) + .collect::>>(); + let field_tys = match maybe_field_tys { + Ok(tys) => tys.into_iter().flatten(), + Err(e) => return e.into_compile_error().into(), + }; + quote::quote! { + 0_usize + #( + .saturating_add(< + #field_tys as #frame_support::traits::PalletError + >::MAX_ENCODED_SIZE) + )* + } + } + syn::Fields::Unit => quote::quote!(0), + }, + syn::Data::Enum(syn::DataEnum { variants, .. }) => { + let field_tys = variants + .iter() + .map(|variant| generate_variant_field_types(variant, &frame_support)) + .collect::>>, syn::Error>>(); + + let field_tys = match field_tys { + Ok(tys) => tys.into_iter().flatten().collect::>(), + Err(e) => return e.to_compile_error().into(), + }; + + // We start with `1`, because the discriminant of an enum is stored as u8 + if field_tys.is_empty() { + quote::quote!(1) + } else { + let variant_sizes = field_tys.into_iter().map(|variant_field_tys| { + quote::quote! { + 1_usize + #(.saturating_add(< + #variant_field_tys as #frame_support::traits::PalletError + >::MAX_ENCODED_SIZE))* + } + }); + + quote::quote! {{ + let mut size = 1_usize; + let mut tmp = 0_usize; + #( + tmp = #variant_sizes; + size = if tmp > size { tmp } else { size }; + tmp = 0_usize; + )* + size + }} + } + } + syn::Data::Union(syn::DataUnion { union_token, .. 
}) => { + let msg = "Cannot derive `PalletError` for union; please implement it directly"; + return syn::Error::new(union_token.span, msg) + .into_compile_error() + .into(); + } + }; + + quote::quote!( + const _: () = { + impl #impl_generics #frame_support::traits::PalletError + for #name #ty_generics #where_clause + { + const MAX_ENCODED_SIZE: usize = #max_encoded_size; + } + }; + ) + .into() } fn generate_field_types( - field: &syn::Field, - scrate: &syn::Path, + field: &syn::Field, + scrate: &syn::Path, ) -> syn::Result> { - let attrs = &field.attrs; - - for attr in attrs { - if attr.path().is_ident("codec") { - let mut res = None; - - attr.parse_nested_meta(|meta| { - if meta.path.is_ident("skip") { - res = Some(None); - } else if meta.path.is_ident("compact") { - let field_ty = &field.ty; - res = Some(Some(quote::quote!(#scrate::__private::codec::Compact<#field_ty>))); - } else if meta.path.is_ident("compact") { - res = Some(Some(meta.value()?.parse()?)); - } - - Ok(()) - })?; - - if let Some(v) = res { - return Ok(v) - } - } - } - - Ok(Some(field.ty.to_token_stream())) + let attrs = &field.attrs; + + for attr in attrs { + if attr.path().is_ident("codec") { + let mut res = None; + + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("skip") { + res = Some(None); + } else if meta.path.is_ident("compact") { + let field_ty = &field.ty; + res = Some(Some( + quote::quote!(#scrate::__private::codec::Compact<#field_ty>), + )); + } else if meta.path.is_ident("compact") { + res = Some(Some(meta.value()?.parse()?)); + } + + Ok(()) + })?; + + if let Some(v) = res { + return Ok(v); + } + } + } + + Ok(Some(field.ty.to_token_stream())) } fn generate_variant_field_types( - variant: &syn::Variant, - scrate: &syn::Path, + variant: &syn::Variant, + scrate: &syn::Path, ) -> syn::Result>> { - let attrs = &variant.attrs; - - for attr in attrs { - if attr.path().is_ident("codec") { - let mut skip = false; - - // We ignore the error intentionally as this isn't `codec(skip)` 
when - // `parse_nested_meta` fails. - let _ = attr.parse_nested_meta(|meta| { - skip = meta.path.is_ident("skip"); - Ok(()) - }); - - if skip { - return Ok(None) - } - } - } - - match &variant.fields { - syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) | - syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }) => { - let field_tys = fields - .iter() - .map(|field| generate_field_types(field, scrate)) - .collect::>>()?; - Ok(Some(field_tys.into_iter().flatten().collect())) - }, - syn::Fields::Unit => Ok(None), - } + let attrs = &variant.attrs; + + for attr in attrs { + if attr.path().is_ident("codec") { + let mut skip = false; + + // We ignore the error intentionally as this isn't `codec(skip)` when + // `parse_nested_meta` fails. + let _ = attr.parse_nested_meta(|meta| { + skip = meta.path.is_ident("skip"); + Ok(()) + }); + + if skip { + return Ok(None); + } + } + } + + match &variant.fields { + syn::Fields::Named(syn::FieldsNamed { named: fields, .. }) + | syn::Fields::Unnamed(syn::FieldsUnnamed { + unnamed: fields, .. 
+ }) => { + let field_tys = fields + .iter() + .map(|field| generate_field_types(field, scrate)) + .collect::>>()?; + Ok(Some(field_tys.into_iter().flatten().collect())) + } + syn::Fields::Unit => Ok(None), + } } diff --git a/support/procedural-fork/src/runtime/expand/mod.rs b/support/procedural-fork/src/runtime/expand/mod.rs index 93c88fce9..c26cbccb7 100644 --- a/support/procedural-fork/src/runtime/expand/mod.rs +++ b/support/procedural-fork/src/runtime/expand/mod.rs @@ -17,20 +17,20 @@ use super::parse::runtime_types::RuntimeType; use crate::{ - construct_runtime::{ - check_pallet_number, decl_all_pallets, decl_integrity_test, decl_pallet_runtime_setup, - decl_static_assertions, expand, - }, - runtime::{ - parse::{ - AllPalletsDeclaration, ExplicitAllPalletsDeclaration, ImplicitAllPalletsDeclaration, - }, - Def, - }, + construct_runtime::{ + check_pallet_number, decl_all_pallets, decl_integrity_test, decl_pallet_runtime_setup, + decl_static_assertions, expand, + }, + runtime::{ + parse::{ + AllPalletsDeclaration, ExplicitAllPalletsDeclaration, ImplicitAllPalletsDeclaration, + }, + Def, + }, }; use cfg_expr::Predicate; use frame_support_procedural_tools::{ - generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, + generate_access_from_frame_or_crate, generate_crate_access, generate_hidden_includes, }; use proc_macro2::TokenStream as TokenStream2; use quote::quote; @@ -41,280 +41,300 @@ use syn::{Ident, Result}; const SYSTEM_PALLET_NAME: &str = "System"; pub fn expand(def: Def, legacy_ordering: bool) -> TokenStream2 { - let input = def.input; - - let (check_pallet_number_res, res) = match def.pallets { - AllPalletsDeclaration::Implicit(ref decl) => ( - check_pallet_number(input.clone(), decl.pallet_count), - construct_runtime_implicit_to_explicit(input.into(), decl.clone(), legacy_ordering), - ), - AllPalletsDeclaration::Explicit(ref decl) => ( - check_pallet_number(input, decl.pallets.len()), - construct_runtime_final_expansion( - 
def.runtime_struct.ident.clone(), - decl.clone(), - def.runtime_types.clone(), - legacy_ordering, - ), - ), - }; - - let res = res.unwrap_or_else(|e| e.to_compile_error()); - - // We want to provide better error messages to the user and thus, handle the error here - // separately. If there is an error, we print the error and still generate all of the code to - // get in overall less errors for the user. - let res = if let Err(error) = check_pallet_number_res { - let error = error.to_compile_error(); - - quote! { - #error - - #res - } - } else { - res - }; - - let res = expander::Expander::new("construct_runtime") - .dry(std::env::var("FRAME_EXPAND").is_err()) - .verbose(true) - .write_to_out_dir(res) - .expect("Does not fail because of IO in OUT_DIR; qed"); - - res.into() + let input = def.input; + + let (check_pallet_number_res, res) = match def.pallets { + AllPalletsDeclaration::Implicit(ref decl) => ( + check_pallet_number(input.clone(), decl.pallet_count), + construct_runtime_implicit_to_explicit(input.into(), decl.clone(), legacy_ordering), + ), + AllPalletsDeclaration::Explicit(ref decl) => ( + check_pallet_number(input, decl.pallets.len()), + construct_runtime_final_expansion( + def.runtime_struct.ident.clone(), + decl.clone(), + def.runtime_types.clone(), + legacy_ordering, + ), + ), + }; + + let res = res.unwrap_or_else(|e| e.to_compile_error()); + + // We want to provide better error messages to the user and thus, handle the error here + // separately. If there is an error, we print the error and still generate all of the code to + // get in overall less errors for the user. + let res = if let Err(error) = check_pallet_number_res { + let error = error.to_compile_error(); + + quote! 
{ + #error + + #res + } + } else { + res + }; + + let res = expander::Expander::new("construct_runtime") + .dry(std::env::var("FRAME_EXPAND").is_err()) + .verbose(true) + .write_to_out_dir(res) + .expect("Does not fail because of IO in OUT_DIR; qed"); + + res.into() } fn construct_runtime_implicit_to_explicit( - input: TokenStream2, - definition: ImplicitAllPalletsDeclaration, - legacy_ordering: bool, + input: TokenStream2, + definition: ImplicitAllPalletsDeclaration, + legacy_ordering: bool, ) -> Result { - let frame_support = generate_access_from_frame_or_crate("frame-support")?; - let attr = if legacy_ordering { quote!((legacy_ordering)) } else { quote!() }; - let mut expansion = quote::quote!( - #[frame_support::runtime #attr] - #input - ); - for pallet in definition.pallet_decls.iter() { - let pallet_path = &pallet.path; - let pallet_name = &pallet.name; - let pallet_instance = pallet.instance.as_ref().map(|instance| quote::quote!(<#instance>)); - expansion = quote::quote!( - #frame_support::__private::tt_call! { - macro = [{ #pallet_path::tt_default_parts_v2 }] - frame_support = [{ #frame_support }] - ~~> #frame_support::match_and_insert! { - target = [{ #expansion }] - pattern = [{ #pallet_name = #pallet_path #pallet_instance }] - } - } - ); - } - - Ok(expansion) + let frame_support = generate_access_from_frame_or_crate("frame-support")?; + let attr = if legacy_ordering { + quote!((legacy_ordering)) + } else { + quote!() + }; + let mut expansion = quote::quote!( + #[frame_support::runtime #attr] + #input + ); + for pallet in definition.pallet_decls.iter() { + let pallet_path = &pallet.path; + let pallet_name = &pallet.name; + let pallet_instance = pallet + .instance + .as_ref() + .map(|instance| quote::quote!(<#instance>)); + expansion = quote::quote!( + #frame_support::__private::tt_call! { + macro = [{ #pallet_path::tt_default_parts_v2 }] + frame_support = [{ #frame_support }] + ~~> #frame_support::match_and_insert! 
{ + target = [{ #expansion }] + pattern = [{ #pallet_name = #pallet_path #pallet_instance }] + } + } + ); + } + + Ok(expansion) } fn construct_runtime_final_expansion( - name: Ident, - definition: ExplicitAllPalletsDeclaration, - runtime_types: Vec, - legacy_ordering: bool, + name: Ident, + definition: ExplicitAllPalletsDeclaration, + runtime_types: Vec, + legacy_ordering: bool, ) -> Result { - let ExplicitAllPalletsDeclaration { mut pallets, name: pallets_name } = definition; - - if !legacy_ordering { - // Ensure that order of hooks is based on the pallet index - pallets.sort_by_key(|p| p.index); - } - - let system_pallet = - pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { - syn::Error::new( - pallets_name.span(), - "`System` pallet declaration is missing. \ + let ExplicitAllPalletsDeclaration { + mut pallets, + name: pallets_name, + } = definition; + + if !legacy_ordering { + // Ensure that order of hooks is based on the pallet index + pallets.sort_by_key(|p| p.index); + } + + let system_pallet = pallets + .iter() + .find(|decl| decl.name == SYSTEM_PALLET_NAME) + .ok_or_else(|| { + syn::Error::new( + pallets_name.span(), + "`System` pallet declaration is missing. 
\ Please add this line: `pub type System = frame_system;`", - ) - })?; - if !system_pallet.cfg_pattern.is_empty() { - return Err(syn::Error::new( - system_pallet.name.span(), - "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", - )) - } - - let features = pallets - .iter() - .filter_map(|decl| { - (!decl.cfg_pattern.is_empty()).then(|| { - decl.cfg_pattern.iter().flat_map(|attr| { - attr.predicates().filter_map(|pred| match pred { - Predicate::Feature(feat) => Some(feat), - Predicate::Test => Some("test"), - _ => None, - }) - }) - }) - }) - .flatten() - .collect::>(); - - let hidden_crate_name = "construct_runtime"; - let scrate = generate_crate_access(hidden_crate_name, "frame-support"); - let scrate_decl = generate_hidden_includes(hidden_crate_name, "frame-support"); - - let frame_system = generate_access_from_frame_or_crate("frame-system")?; - let block = quote!(<#name as #frame_system::Config>::Block); - let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); - - let mut dispatch = None; - let mut outer_event = None; - let mut outer_error = None; - let mut outer_origin = None; - let mut freeze_reason = None; - let mut hold_reason = None; - let mut slash_reason = None; - let mut lock_id = None; - let mut task = None; - - for runtime_type in runtime_types.iter() { - match runtime_type { - RuntimeType::RuntimeCall(_) => { - dispatch = - Some(expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate)); - }, - RuntimeType::RuntimeEvent(_) => { - outer_event = Some(expand::expand_outer_enum( - &name, - &pallets, - &scrate, - expand::OuterEnumType::Event, - )?); - }, - RuntimeType::RuntimeError(_) => { - outer_error = Some(expand::expand_outer_enum( - &name, - &pallets, - &scrate, - expand::OuterEnumType::Error, - )?); - }, - RuntimeType::RuntimeOrigin(_) => { - outer_origin = - Some(expand::expand_outer_origin(&name, system_pallet, &pallets, &scrate)?); - }, - 
RuntimeType::RuntimeFreezeReason(_) => { - freeze_reason = Some(expand::expand_outer_freeze_reason(&pallets, &scrate)); - }, - RuntimeType::RuntimeHoldReason(_) => { - hold_reason = Some(expand::expand_outer_hold_reason(&pallets, &scrate)); - }, - RuntimeType::RuntimeSlashReason(_) => { - slash_reason = Some(expand::expand_outer_slash_reason(&pallets, &scrate)); - }, - RuntimeType::RuntimeLockId(_) => { - lock_id = Some(expand::expand_outer_lock_id(&pallets, &scrate)); - }, - RuntimeType::RuntimeTask(_) => { - task = Some(expand::expand_outer_task(&name, &pallets, &scrate)); - }, - } - } - - let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); - let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); - - let metadata = expand::expand_runtime_metadata( - &name, - &pallets, - &scrate, - &unchecked_extrinsic, - &system_pallet.path, - ); - let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); - let inherent = - expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); - let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); - let integrity_test = decl_integrity_test(&scrate); - let static_assertions = decl_static_assertions(&name, &pallets, &scrate); - - let res = quote!( - #scrate_decl - - // Prevent UncheckedExtrinsic to print unused warning. - const _: () = { - #[allow(unused)] - type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; - }; - - #[derive( - Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, - #scrate::__private::scale_info::TypeInfo - )] - pub struct #name; - impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { - type RuntimeBlock = #block; - } - - // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. - // The function is implemented by calling `impl_runtime_apis!`. - // - // However, the `runtime` may be used without calling `impl_runtime_apis!`. 
- // Rely on the `Deref` trait to differentiate between a runtime that implements - // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro runtime). - // - // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. - // `InternalConstructRuntime` is implemented by the `runtime` for Runtime references (`& Runtime`), - // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). - // - // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` - // when both macros are called; and will resolve an empty `runtime_metadata` when only the `runtime` - // is used. - - #[doc(hidden)] - trait InternalConstructRuntime { - #[inline(always)] - fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { - Default::default() - } - } - #[doc(hidden)] - impl InternalConstructRuntime for &#name {} - - #outer_event - - #outer_error - - #outer_origin - - #all_pallets - - #pallet_to_index - - #dispatch - - #task - - #metadata - - #outer_config - - #inherent - - #validate_unsigned - - #freeze_reason - - #hold_reason - - #lock_id - - #slash_reason - - #integrity_test - - #static_assertions - ); + ) + })?; + if !system_pallet.cfg_pattern.is_empty() { + return Err(syn::Error::new( + system_pallet.name.span(), + "`System` pallet declaration is feature gated, please remove any `#[cfg]` attributes", + )); + } + + let features = pallets + .iter() + .filter_map(|decl| { + (!decl.cfg_pattern.is_empty()).then(|| { + decl.cfg_pattern.iter().flat_map(|attr| { + attr.predicates().filter_map(|pred| match pred { + Predicate::Feature(feat) => Some(feat), + Predicate::Test => Some("test"), + _ => None, + }) + }) + }) + }) + .flatten() + .collect::>(); + + let hidden_crate_name = "construct_runtime"; + let scrate = generate_crate_access(hidden_crate_name, "frame-support"); + let scrate_decl 
= generate_hidden_includes(hidden_crate_name, "frame-support"); + + let frame_system = generate_access_from_frame_or_crate("frame-system")?; + let block = quote!(<#name as #frame_system::Config>::Block); + let unchecked_extrinsic = quote!(<#block as #scrate::sp_runtime::traits::Block>::Extrinsic); + + let mut dispatch = None; + let mut outer_event = None; + let mut outer_error = None; + let mut outer_origin = None; + let mut freeze_reason = None; + let mut hold_reason = None; + let mut slash_reason = None; + let mut lock_id = None; + let mut task = None; + + for runtime_type in runtime_types.iter() { + match runtime_type { + RuntimeType::RuntimeCall(_) => { + dispatch = Some(expand::expand_outer_dispatch( + &name, + system_pallet, + &pallets, + &scrate, + )); + } + RuntimeType::RuntimeEvent(_) => { + outer_event = Some(expand::expand_outer_enum( + &name, + &pallets, + &scrate, + expand::OuterEnumType::Event, + )?); + } + RuntimeType::RuntimeError(_) => { + outer_error = Some(expand::expand_outer_enum( + &name, + &pallets, + &scrate, + expand::OuterEnumType::Error, + )?); + } + RuntimeType::RuntimeOrigin(_) => { + outer_origin = Some(expand::expand_outer_origin( + &name, + system_pallet, + &pallets, + &scrate, + )?); + } + RuntimeType::RuntimeFreezeReason(_) => { + freeze_reason = Some(expand::expand_outer_freeze_reason(&pallets, &scrate)); + } + RuntimeType::RuntimeHoldReason(_) => { + hold_reason = Some(expand::expand_outer_hold_reason(&pallets, &scrate)); + } + RuntimeType::RuntimeSlashReason(_) => { + slash_reason = Some(expand::expand_outer_slash_reason(&pallets, &scrate)); + } + RuntimeType::RuntimeLockId(_) => { + lock_id = Some(expand::expand_outer_lock_id(&pallets, &scrate)); + } + RuntimeType::RuntimeTask(_) => { + task = Some(expand::expand_outer_task(&name, &pallets, &scrate)); + } + } + } + + let all_pallets = decl_all_pallets(&name, pallets.iter(), &features); + let pallet_to_index = decl_pallet_runtime_setup(&name, &pallets, &scrate); + + let metadata 
= expand::expand_runtime_metadata( + &name, + &pallets, + &scrate, + &unchecked_extrinsic, + &system_pallet.path, + ); + let outer_config = expand::expand_outer_config(&name, &pallets, &scrate); + let inherent = + expand::expand_outer_inherent(&name, &block, &unchecked_extrinsic, &pallets, &scrate); + let validate_unsigned = expand::expand_outer_validate_unsigned(&name, &pallets, &scrate); + let integrity_test = decl_integrity_test(&scrate); + let static_assertions = decl_static_assertions(&name, &pallets, &scrate); + + let res = quote!( + #scrate_decl + + // Prevent UncheckedExtrinsic to print unused warning. + const _: () = { + #[allow(unused)] + type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + }; + + #[derive( + Clone, Copy, PartialEq, Eq, #scrate::sp_runtime::RuntimeDebug, + #scrate::__private::scale_info::TypeInfo + )] + pub struct #name; + impl #scrate::sp_runtime::traits::GetRuntimeBlockType for #name { + type RuntimeBlock = #block; + } + + // Each runtime must expose the `runtime_metadata()` to fetch the runtime API metadata. + // The function is implemented by calling `impl_runtime_apis!`. + // + // However, the `runtime` may be used without calling `impl_runtime_apis!`. + // Rely on the `Deref` trait to differentiate between a runtime that implements + // APIs (by macro impl_runtime_apis!) and a runtime that is simply created (by macro runtime). + // + // Both `InternalConstructRuntime` and `InternalImplRuntimeApis` expose a `runtime_metadata()` function. + // `InternalConstructRuntime` is implemented by the `runtime` for Runtime references (`& Runtime`), + // while `InternalImplRuntimeApis` is implemented by the `impl_runtime_apis!` for Runtime (`Runtime`). + // + // Therefore, the `Deref` trait will resolve the `runtime_metadata` from `impl_runtime_apis!` + // when both macros are called; and will resolve an empty `runtime_metadata` when only the `runtime` + // is used. 
+ + #[doc(hidden)] + trait InternalConstructRuntime { + #[inline(always)] + fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + Default::default() + } + } + #[doc(hidden)] + impl InternalConstructRuntime for &#name {} + + #outer_event + + #outer_error + + #outer_origin + + #all_pallets + + #pallet_to_index + + #dispatch + + #task + + #metadata + + #outer_config + + #inherent + + #validate_unsigned + + #freeze_reason + + #hold_reason + + #lock_id + + #slash_reason + + #integrity_test + + #static_assertions + ); - Ok(res) + Ok(res) } diff --git a/support/procedural-fork/src/runtime/mod.rs b/support/procedural-fork/src/runtime/mod.rs index aaae579eb..589acff6c 100644 --- a/support/procedural-fork/src/runtime/mod.rs +++ b/support/procedural-fork/src/runtime/mod.rs @@ -210,27 +210,27 @@ mod expand; mod parse; mod keyword { - syn::custom_keyword!(legacy_ordering); + syn::custom_keyword!(legacy_ordering); } pub fn runtime(attr: TokenStream, tokens: TokenStream) -> TokenStream { - let mut legacy_ordering = false; - if !attr.is_empty() { - if let Ok(_) = syn::parse::(attr.clone()) { - legacy_ordering = true; - } else { - let msg = "Invalid runtime macro call: unexpected attribute. Macro call must be \ + let mut legacy_ordering = false; + if !attr.is_empty() { + if let Ok(_) = syn::parse::(attr.clone()) { + legacy_ordering = true; + } else { + let msg = "Invalid runtime macro call: unexpected attribute. 
Macro call must be \ bare, such as `#[frame_support::runtime]` or `#[runtime]`, or must specify the \ `legacy_ordering` attribute, such as `#[frame_support::runtime(legacy_ordering)]` or \ #[runtime(legacy_ordering)]."; - let span = proc_macro2::TokenStream::from(attr).span(); - return syn::Error::new(span, msg).to_compile_error().into() - } - } + let span = proc_macro2::TokenStream::from(attr).span(); + return syn::Error::new(span, msg).to_compile_error().into(); + } + } - let item = syn::parse_macro_input!(tokens as syn::ItemMod); - match parse::Def::try_from(item) { - Ok(def) => expand::expand(def, legacy_ordering).into(), - Err(e) => e.to_compile_error().into(), - } + let item = syn::parse_macro_input!(tokens as syn::ItemMod); + match parse::Def::try_from(item) { + Ok(def) => expand::expand(def, legacy_ordering).into(), + Err(e) => e.to_compile_error().into(), + } } diff --git a/support/procedural-fork/src/runtime/parse/helper.rs b/support/procedural-fork/src/runtime/parse/helper.rs index f05395f9b..17e362410 100644 --- a/support/procedural-fork/src/runtime/parse/helper.rs +++ b/support/procedural-fork/src/runtime/parse/helper.rs @@ -19,19 +19,26 @@ use crate::pallet::parse::helper::MutItemAttrs; use quote::ToTokens; pub(crate) fn take_first_item_runtime_attr( - item: &mut impl MutItemAttrs, + item: &mut impl MutItemAttrs, ) -> syn::Result> where - Attr: syn::parse::Parse, + Attr: syn::parse::Parse, { - let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; + let attrs = if let Some(attrs) = item.mut_item_attrs() { + attrs + } else { + return Ok(None); + }; - if let Some(index) = attrs.iter().position(|attr| { - attr.path().segments.first().map_or(false, |segment| segment.ident == "runtime") - }) { - let runtime_attr = attrs.remove(index); - Ok(Some(syn::parse2(runtime_attr.into_token_stream())?)) - } else { - Ok(None) - } + if let Some(index) = attrs.iter().position(|attr| { + attr.path() + .segments + .first() + 
.map_or(false, |segment| segment.ident == "runtime") + }) { + let runtime_attr = attrs.remove(index); + Ok(Some(syn::parse2(runtime_attr.into_token_stream())?)) + } else { + Ok(None) + } } diff --git a/support/procedural-fork/src/runtime/parse/mod.rs b/support/procedural-fork/src/runtime/parse/mod.rs index 893cb4726..79cf894e8 100644 --- a/support/procedural-fork/src/runtime/parse/mod.rs +++ b/support/procedural-fork/src/runtime/parse/mod.rs @@ -32,235 +32,244 @@ use frame_support_procedural_tools::syn_ext as ext; use runtime_types::RuntimeType; mod keyword { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(runtime); - custom_keyword!(derive); - custom_keyword!(pallet_index); - custom_keyword!(disable_call); - custom_keyword!(disable_unsigned); + custom_keyword!(runtime); + custom_keyword!(derive); + custom_keyword!(pallet_index); + custom_keyword!(disable_call); + custom_keyword!(disable_unsigned); } enum RuntimeAttr { - Runtime(proc_macro2::Span), - Derive(proc_macro2::Span, Vec), - PalletIndex(proc_macro2::Span, u8), - DisableCall(proc_macro2::Span), - DisableUnsigned(proc_macro2::Span), + Runtime(proc_macro2::Span), + Derive(proc_macro2::Span, Vec), + PalletIndex(proc_macro2::Span, u8), + DisableCall(proc_macro2::Span), + DisableUnsigned(proc_macro2::Span), } impl RuntimeAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::Runtime(span) => *span, - Self::Derive(span, _) => *span, - Self::PalletIndex(span, _) => *span, - Self::DisableCall(span) => *span, - Self::DisableUnsigned(span) => *span, - } - } + fn span(&self) -> proc_macro2::Span { + match self { + Self::Runtime(span) => *span, + Self::Derive(span, _) => *span, + Self::PalletIndex(span, _) => *span, + Self::DisableCall(span) => *span, + Self::DisableUnsigned(span) => *span, + } + } } impl syn::parse::Parse for RuntimeAttr { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - input.parse::()?; - let content; - syn::bracketed!(content in input); - 
content.parse::()?; - content.parse::()?; + fn parse(input: syn::parse::ParseStream) -> syn::Result { + input.parse::()?; + let content; + syn::bracketed!(content in input); + content.parse::()?; + content.parse::()?; - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::runtime) { - Ok(RuntimeAttr::Runtime(content.parse::()?.span())) - } else if lookahead.peek(keyword::derive) { - let _ = content.parse::(); - let derive_content; - syn::parenthesized!(derive_content in content); - let runtime_types = - derive_content.parse::>()?; - let runtime_types = runtime_types.inner.into_iter().collect(); - Ok(RuntimeAttr::Derive(derive_content.span(), runtime_types)) - } else if lookahead.peek(keyword::pallet_index) { - let _ = content.parse::(); - let pallet_index_content; - syn::parenthesized!(pallet_index_content in content); - let pallet_index = pallet_index_content.parse::()?; - if !pallet_index.suffix().is_empty() { - let msg = "Number literal must not have a suffix"; - return Err(syn::Error::new(pallet_index.span(), msg)) - } - Ok(RuntimeAttr::PalletIndex(pallet_index.span(), pallet_index.base10_parse()?)) - } else if lookahead.peek(keyword::disable_call) { - Ok(RuntimeAttr::DisableCall(content.parse::()?.span())) - } else if lookahead.peek(keyword::disable_unsigned) { - Ok(RuntimeAttr::DisableUnsigned(content.parse::()?.span())) - } else { - Err(lookahead.error()) - } - } + let lookahead = content.lookahead1(); + if lookahead.peek(keyword::runtime) { + Ok(RuntimeAttr::Runtime( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::derive) { + let _ = content.parse::(); + let derive_content; + syn::parenthesized!(derive_content in content); + let runtime_types = + derive_content.parse::>()?; + let runtime_types = runtime_types.inner.into_iter().collect(); + Ok(RuntimeAttr::Derive(derive_content.span(), runtime_types)) + } else if lookahead.peek(keyword::pallet_index) { + let _ = content.parse::(); + let pallet_index_content; + 
syn::parenthesized!(pallet_index_content in content); + let pallet_index = pallet_index_content.parse::()?; + if !pallet_index.suffix().is_empty() { + let msg = "Number literal must not have a suffix"; + return Err(syn::Error::new(pallet_index.span(), msg)); + } + Ok(RuntimeAttr::PalletIndex( + pallet_index.span(), + pallet_index.base10_parse()?, + )) + } else if lookahead.peek(keyword::disable_call) { + Ok(RuntimeAttr::DisableCall( + content.parse::()?.span(), + )) + } else if lookahead.peek(keyword::disable_unsigned) { + Ok(RuntimeAttr::DisableUnsigned( + content.parse::()?.span(), + )) + } else { + Err(lookahead.error()) + } + } } #[derive(Debug, Clone)] pub enum AllPalletsDeclaration { - Implicit(ImplicitAllPalletsDeclaration), - Explicit(ExplicitAllPalletsDeclaration), + Implicit(ImplicitAllPalletsDeclaration), + Explicit(ExplicitAllPalletsDeclaration), } /// Declaration of a runtime with some pallet with implicit declaration of parts. #[derive(Debug, Clone)] pub struct ImplicitAllPalletsDeclaration { - pub name: Ident, - pub pallet_decls: Vec, - pub pallet_count: usize, + pub name: Ident, + pub pallet_decls: Vec, + pub pallet_count: usize, } /// Declaration of a runtime with all pallet having explicit declaration of parts. 
#[derive(Debug, Clone)] pub struct ExplicitAllPalletsDeclaration { - pub name: Ident, - pub pallets: Vec, + pub name: Ident, + pub pallets: Vec, } pub struct Def { - pub input: TokenStream2, - pub item: syn::ItemMod, - pub runtime_struct: runtime_struct::RuntimeStructDef, - pub pallets: AllPalletsDeclaration, - pub runtime_types: Vec, + pub input: TokenStream2, + pub item: syn::ItemMod, + pub runtime_struct: runtime_struct::RuntimeStructDef, + pub pallets: AllPalletsDeclaration, + pub runtime_types: Vec, } impl Def { - pub fn try_from(mut item: syn::ItemMod) -> syn::Result { - let input: TokenStream2 = item.to_token_stream().into(); - let item_span = item.span(); - let items = &mut item - .content - .as_mut() - .ok_or_else(|| { - let msg = "Invalid runtime definition, expected mod to be inlined."; - syn::Error::new(item_span, msg) - })? - .1; + pub fn try_from(mut item: syn::ItemMod) -> syn::Result { + let input: TokenStream2 = item.to_token_stream().into(); + let item_span = item.span(); + let items = &mut item + .content + .as_mut() + .ok_or_else(|| { + let msg = "Invalid runtime definition, expected mod to be inlined."; + syn::Error::new(item_span, msg) + })? + .1; - let mut runtime_struct = None; - let mut runtime_types = None; + let mut runtime_struct = None; + let mut runtime_types = None; - let mut indices = HashMap::new(); - let mut names = HashMap::new(); + let mut indices = HashMap::new(); + let mut names = HashMap::new(); - let mut pallet_decls = vec![]; - let mut pallets = vec![]; + let mut pallet_decls = vec![]; + let mut pallets = vec![]; - for item in items.iter_mut() { - let mut pallet_item = None; - let mut pallet_index = 0; + for item in items.iter_mut() { + let mut pallet_item = None; + let mut pallet_index = 0; - let mut disable_call = false; - let mut disable_unsigned = false; + let mut disable_call = false; + let mut disable_unsigned = false; - while let Some(runtime_attr) = - helper::take_first_item_runtime_attr::(item)? 
- { - match runtime_attr { - RuntimeAttr::Runtime(span) if runtime_struct.is_none() => { - let p = runtime_struct::RuntimeStructDef::try_from(span, item)?; - runtime_struct = Some(p); - }, - RuntimeAttr::Derive(_, types) if runtime_types.is_none() => { - runtime_types = Some(types); - }, - RuntimeAttr::PalletIndex(span, index) => { - pallet_index = index; - pallet_item = if let syn::Item::Type(item) = item { - Some(item.clone()) - } else { - let msg = "Invalid runtime::pallet_index, expected type definition"; - return Err(syn::Error::new(span, msg)) - }; - }, - RuntimeAttr::DisableCall(_) => disable_call = true, - RuntimeAttr::DisableUnsigned(_) => disable_unsigned = true, - attr => { - let msg = "Invalid duplicated attribute"; - return Err(syn::Error::new(attr.span(), msg)) - }, - } - } + while let Some(runtime_attr) = + helper::take_first_item_runtime_attr::(item)? + { + match runtime_attr { + RuntimeAttr::Runtime(span) if runtime_struct.is_none() => { + let p = runtime_struct::RuntimeStructDef::try_from(span, item)?; + runtime_struct = Some(p); + } + RuntimeAttr::Derive(_, types) if runtime_types.is_none() => { + runtime_types = Some(types); + } + RuntimeAttr::PalletIndex(span, index) => { + pallet_index = index; + pallet_item = if let syn::Item::Type(item) = item { + Some(item.clone()) + } else { + let msg = "Invalid runtime::pallet_index, expected type definition"; + return Err(syn::Error::new(span, msg)); + }; + } + RuntimeAttr::DisableCall(_) => disable_call = true, + RuntimeAttr::DisableUnsigned(_) => disable_unsigned = true, + attr => { + let msg = "Invalid duplicated attribute"; + return Err(syn::Error::new(attr.span(), msg)); + } + } + } - if let Some(pallet_item) = pallet_item { - match *pallet_item.ty.clone() { - syn::Type::Path(ref path) => { - let pallet_decl = - PalletDeclaration::try_from(item.span(), &pallet_item, path)?; + if let Some(pallet_item) = pallet_item { + match *pallet_item.ty.clone() { + syn::Type::Path(ref path) => { + let pallet_decl 
= + PalletDeclaration::try_from(item.span(), &pallet_item, path)?; - if let Some(used_pallet) = - names.insert(pallet_decl.name.clone(), pallet_decl.name.span()) - { - let msg = "Two pallets with the same name!"; + if let Some(used_pallet) = + names.insert(pallet_decl.name.clone(), pallet_decl.name.span()) + { + let msg = "Two pallets with the same name!"; - let mut err = syn::Error::new(used_pallet, &msg); - err.combine(syn::Error::new(pallet_decl.name.span(), &msg)); - return Err(err) - } + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet_decl.name.span(), &msg)); + return Err(err); + } - pallet_decls.push(pallet_decl); - }, - syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) => { - let pallet = Pallet::try_from( - item.span(), - &pallet_item, - pallet_index, - disable_call, - disable_unsigned, - &bounds, - )?; + pallet_decls.push(pallet_decl); + } + syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) => { + let pallet = Pallet::try_from( + item.span(), + &pallet_item, + pallet_index, + disable_call, + disable_unsigned, + &bounds, + )?; - if let Some(used_pallet) = indices.insert(pallet.index, pallet.name.clone()) - { - let msg = format!( + if let Some(used_pallet) = indices.insert(pallet.index, pallet.name.clone()) + { + let msg = format!( "Pallet indices are conflicting: Both pallets {} and {} are at index {}", used_pallet, pallet.name, pallet.index, ); - let mut err = syn::Error::new(used_pallet.span(), &msg); - err.combine(syn::Error::new(pallet.name.span(), msg)); - return Err(err) - } + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err); + } - pallets.push(pallet); - }, - _ => continue, - } - } - } + pallets.push(pallet); + } + _ => continue, + } + } + } - let name = item.ident.clone(); - let decl_count = pallet_decls.len(); - let pallets = if decl_count > 0 { - AllPalletsDeclaration::Implicit(ImplicitAllPalletsDeclaration 
{ - name, - pallet_decls, - pallet_count: decl_count.saturating_add(pallets.len()), - }) - } else { - AllPalletsDeclaration::Explicit(ExplicitAllPalletsDeclaration { name, pallets }) - }; + let name = item.ident.clone(); + let decl_count = pallet_decls.len(); + let pallets = if decl_count > 0 { + AllPalletsDeclaration::Implicit(ImplicitAllPalletsDeclaration { + name, + pallet_decls, + pallet_count: decl_count.saturating_add(pallets.len()), + }) + } else { + AllPalletsDeclaration::Explicit(ExplicitAllPalletsDeclaration { name, pallets }) + }; - let def = Def { - input, - item, - runtime_struct: runtime_struct.ok_or_else(|| { - syn::Error::new(item_span, + let def = Def { + input, + item, + runtime_struct: runtime_struct.ok_or_else(|| { + syn::Error::new(item_span, "Missing Runtime. Please add a struct inside the module and annotate it with `#[runtime::runtime]`" ) - })?, - pallets, - runtime_types: runtime_types.ok_or_else(|| { - syn::Error::new(item_span, + })?, + pallets, + runtime_types: runtime_types.ok_or_else(|| { + syn::Error::new(item_span, "Missing Runtime Types. 
Please annotate the runtime struct with `#[runtime::derive]`" ) - })?, - }; + })?, + }; - Ok(def) - } + Ok(def) + } } diff --git a/support/procedural-fork/src/runtime/parse/pallet.rs b/support/procedural-fork/src/runtime/parse/pallet.rs index d2f1857fb..039e2631b 100644 --- a/support/procedural-fork/src/runtime/parse/pallet.rs +++ b/support/procedural-fork/src/runtime/parse/pallet.rs @@ -20,80 +20,88 @@ use quote::ToTokens; use syn::{punctuated::Punctuated, spanned::Spanned, token, Error, Ident, PathArguments}; impl Pallet { - pub fn try_from( - attr_span: proc_macro2::Span, - item: &syn::ItemType, - pallet_index: u8, - disable_call: bool, - disable_unsigned: bool, - bounds: &Punctuated, - ) -> syn::Result { - let name = item.ident.clone(); + pub fn try_from( + attr_span: proc_macro2::Span, + item: &syn::ItemType, + pallet_index: u8, + disable_call: bool, + disable_unsigned: bool, + bounds: &Punctuated, + ) -> syn::Result { + let name = item.ident.clone(); - let mut pallet_path = None; - let mut pallet_parts = vec![]; + let mut pallet_path = None; + let mut pallet_parts = vec![]; - for (index, bound) in bounds.into_iter().enumerate() { - if let syn::TypeParamBound::Trait(syn::TraitBound { path, .. }) = bound { - if index == 0 { - pallet_path = Some(PalletPath { inner: path.clone() }); - } else { - let pallet_part = syn::parse2::(bound.into_token_stream())?; - pallet_parts.push(pallet_part); - } - } else { - return Err(Error::new( - attr_span, - "Invalid pallet declaration, expected a path or a trait object", - )) - }; - } + for (index, bound) in bounds.into_iter().enumerate() { + if let syn::TypeParamBound::Trait(syn::TraitBound { path, .. 
}) = bound { + if index == 0 { + pallet_path = Some(PalletPath { + inner: path.clone(), + }); + } else { + let pallet_part = syn::parse2::(bound.into_token_stream())?; + pallet_parts.push(pallet_part); + } + } else { + return Err(Error::new( + attr_span, + "Invalid pallet declaration, expected a path or a trait object", + )); + }; + } - let mut path = pallet_path.ok_or(Error::new( - attr_span, - "Invalid pallet declaration, expected a path or a trait object", - ))?; + let mut path = pallet_path.ok_or(Error::new( + attr_span, + "Invalid pallet declaration, expected a path or a trait object", + ))?; - let mut instance = None; - if let Some(segment) = path.inner.segments.iter_mut().find(|seg| !seg.arguments.is_empty()) - { - if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { - args, .. - }) = segment.arguments.clone() - { - if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { - instance = - Some(Ident::new(&arg_path.to_token_stream().to_string(), arg_path.span())); - segment.arguments = PathArguments::None; - } - } - } + let mut instance = None; + if let Some(segment) = path + .inner + .segments + .iter_mut() + .find(|seg| !seg.arguments.is_empty()) + { + if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { + args, .. 
+ }) = segment.arguments.clone() + { + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { + instance = Some(Ident::new( + &arg_path.to_token_stream().to_string(), + arg_path.span(), + )); + segment.arguments = PathArguments::None; + } + } + } - pallet_parts = pallet_parts - .into_iter() - .filter(|part| { - if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { - false - } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = - (disable_unsigned, &part.keyword) - { - false - } else { - true - } - }) - .collect(); + pallet_parts = pallet_parts + .into_iter() + .filter(|part| { + if let (true, &PalletPartKeyword::Call(_)) = (disable_call, &part.keyword) { + false + } else if let (true, &PalletPartKeyword::ValidateUnsigned(_)) = + (disable_unsigned, &part.keyword) + { + false + } else { + true + } + }) + .collect(); - let cfg_pattern = vec![]; + let cfg_pattern = vec![]; - Ok(Pallet { - is_expanded: true, - name, - index: pallet_index, - path, - instance, - cfg_pattern, - pallet_parts, - }) - } + Ok(Pallet { + is_expanded: true, + name, + index: pallet_index, + path, + instance, + cfg_pattern, + pallet_parts, + }) + } } diff --git a/support/procedural-fork/src/runtime/parse/pallet_decl.rs b/support/procedural-fork/src/runtime/parse/pallet_decl.rs index 437a163cf..bb1246606 100644 --- a/support/procedural-fork/src/runtime/parse/pallet_decl.rs +++ b/support/procedural-fork/src/runtime/parse/pallet_decl.rs @@ -21,40 +21,51 @@ use syn::{spanned::Spanned, Attribute, Ident, PathArguments}; /// The declaration of a pallet. #[derive(Debug, Clone)] pub struct PalletDeclaration { - /// The name of the pallet, e.g.`System` in `System: frame_system`. - pub name: Ident, - /// Optional attributes tagged right above a pallet declaration. - pub attrs: Vec, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. - pub path: syn::Path, - /// The instance of the pallet, e.g. 
`Instance1` in `Council: pallet_collective::`. - pub instance: Option, + /// The name of the pallet, e.g.`System` in `System: frame_system`. + pub name: Ident, + /// Optional attributes tagged right above a pallet declaration. + pub attrs: Vec, + /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + pub path: syn::Path, + /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + pub instance: Option, } impl PalletDeclaration { - pub fn try_from( - _attr_span: proc_macro2::Span, - item: &syn::ItemType, - path: &syn::TypePath, - ) -> syn::Result { - let name = item.ident.clone(); - - let mut path = path.path.clone(); - - let mut instance = None; - if let Some(segment) = path.segments.iter_mut().find(|seg| !seg.arguments.is_empty()) { - if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { - args, .. - }) = segment.arguments.clone() - { - if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { - instance = - Some(Ident::new(&arg_path.to_token_stream().to_string(), arg_path.span())); - segment.arguments = PathArguments::None; - } - } - } - - Ok(Self { name, path, instance, attrs: item.attrs.clone() }) - } + pub fn try_from( + _attr_span: proc_macro2::Span, + item: &syn::ItemType, + path: &syn::TypePath, + ) -> syn::Result { + let name = item.ident.clone(); + + let mut path = path.path.clone(); + + let mut instance = None; + if let Some(segment) = path + .segments + .iter_mut() + .find(|seg| !seg.arguments.is_empty()) + { + if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { + args, .. 
+ }) = segment.arguments.clone() + { + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { + instance = Some(Ident::new( + &arg_path.to_token_stream().to_string(), + arg_path.span(), + )); + segment.arguments = PathArguments::None; + } + } + } + + Ok(Self { + name, + path, + instance, + attrs: item.attrs.clone(), + }) + } } diff --git a/support/procedural-fork/src/runtime/parse/runtime_struct.rs b/support/procedural-fork/src/runtime/parse/runtime_struct.rs index 8fa746ee8..7ddbdcfeb 100644 --- a/support/procedural-fork/src/runtime/parse/runtime_struct.rs +++ b/support/procedural-fork/src/runtime/parse/runtime_struct.rs @@ -17,19 +17,22 @@ use syn::spanned::Spanned; pub struct RuntimeStructDef { - pub ident: syn::Ident, - pub attr_span: proc_macro2::Span, + pub ident: syn::Ident, + pub attr_span: proc_macro2::Span, } impl RuntimeStructDef { - pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { - let item = if let syn::Item::Struct(item) = item { - item - } else { - let msg = "Invalid runtime::runtime, expected struct definition"; - return Err(syn::Error::new(item.span(), msg)) - }; + pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { + let item = if let syn::Item::Struct(item) = item { + item + } else { + let msg = "Invalid runtime::runtime, expected struct definition"; + return Err(syn::Error::new(item.span(), msg)); + }; - Ok(Self { ident: item.ident.clone(), attr_span }) - } + Ok(Self { + ident: item.ident.clone(), + attr_span, + }) + } } diff --git a/support/procedural-fork/src/runtime/parse/runtime_types.rs b/support/procedural-fork/src/runtime/parse/runtime_types.rs index a4480e2a1..4d8c8358c 100644 --- a/support/procedural-fork/src/runtime/parse/runtime_types.rs +++ b/support/procedural-fork/src/runtime/parse/runtime_types.rs @@ -16,61 +16,61 @@ // limitations under the License. 
use syn::{ - parse::{Parse, ParseStream}, - Result, + parse::{Parse, ParseStream}, + Result, }; mod keyword { - use syn::custom_keyword; + use syn::custom_keyword; - custom_keyword!(RuntimeCall); - custom_keyword!(RuntimeEvent); - custom_keyword!(RuntimeError); - custom_keyword!(RuntimeOrigin); - custom_keyword!(RuntimeFreezeReason); - custom_keyword!(RuntimeHoldReason); - custom_keyword!(RuntimeSlashReason); - custom_keyword!(RuntimeLockId); - custom_keyword!(RuntimeTask); + custom_keyword!(RuntimeCall); + custom_keyword!(RuntimeEvent); + custom_keyword!(RuntimeError); + custom_keyword!(RuntimeOrigin); + custom_keyword!(RuntimeFreezeReason); + custom_keyword!(RuntimeHoldReason); + custom_keyword!(RuntimeSlashReason); + custom_keyword!(RuntimeLockId); + custom_keyword!(RuntimeTask); } #[derive(Debug, Clone, PartialEq)] pub enum RuntimeType { - RuntimeCall(keyword::RuntimeCall), - RuntimeEvent(keyword::RuntimeEvent), - RuntimeError(keyword::RuntimeError), - RuntimeOrigin(keyword::RuntimeOrigin), - RuntimeFreezeReason(keyword::RuntimeFreezeReason), - RuntimeHoldReason(keyword::RuntimeHoldReason), - RuntimeSlashReason(keyword::RuntimeSlashReason), - RuntimeLockId(keyword::RuntimeLockId), - RuntimeTask(keyword::RuntimeTask), + RuntimeCall(keyword::RuntimeCall), + RuntimeEvent(keyword::RuntimeEvent), + RuntimeError(keyword::RuntimeError), + RuntimeOrigin(keyword::RuntimeOrigin), + RuntimeFreezeReason(keyword::RuntimeFreezeReason), + RuntimeHoldReason(keyword::RuntimeHoldReason), + RuntimeSlashReason(keyword::RuntimeSlashReason), + RuntimeLockId(keyword::RuntimeLockId), + RuntimeTask(keyword::RuntimeTask), } impl Parse for RuntimeType { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); - if lookahead.peek(keyword::RuntimeCall) { - Ok(Self::RuntimeCall(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeEvent) { - Ok(Self::RuntimeEvent(input.parse()?)) - 
} else if lookahead.peek(keyword::RuntimeError) { - Ok(Self::RuntimeError(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeOrigin) { - Ok(Self::RuntimeOrigin(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeFreezeReason) { - Ok(Self::RuntimeFreezeReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeHoldReason) { - Ok(Self::RuntimeHoldReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeSlashReason) { - Ok(Self::RuntimeSlashReason(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeLockId) { - Ok(Self::RuntimeLockId(input.parse()?)) - } else if lookahead.peek(keyword::RuntimeTask) { - Ok(Self::RuntimeTask(input.parse()?)) - } else { - Err(lookahead.error()) - } - } + if lookahead.peek(keyword::RuntimeCall) { + Ok(Self::RuntimeCall(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeEvent) { + Ok(Self::RuntimeEvent(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeError) { + Ok(Self::RuntimeError(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeOrigin) { + Ok(Self::RuntimeOrigin(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeFreezeReason) { + Ok(Self::RuntimeFreezeReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeHoldReason) { + Ok(Self::RuntimeHoldReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeSlashReason) { + Ok(Self::RuntimeSlashReason(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeLockId) { + Ok(Self::RuntimeLockId(input.parse()?)) + } else if lookahead.peek(keyword::RuntimeTask) { + Ok(Self::RuntimeTask(input.parse()?)) + } else { + Err(lookahead.error()) + } + } } diff --git a/support/procedural-fork/src/storage_alias.rs b/support/procedural-fork/src/storage_alias.rs index 06f62768f..7099239f9 100644 --- a/support/procedural-fork/src/storage_alias.rs +++ b/support/procedural-fork/src/storage_alias.rs @@ -22,655 +22,688 @@ use frame_support_procedural_tools::generate_access_from_frame_or_crate; use 
proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens}; use syn::{ - parenthesized, - parse::{Parse, ParseStream}, - punctuated::Punctuated, - spanned::Spanned, - token, - visit::Visit, - Attribute, Error, Ident, Result, Token, Type, TypeParam, Visibility, WhereClause, + parenthesized, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned, + token, + visit::Visit, + Attribute, Error, Ident, Result, Token, Type, TypeParam, Visibility, WhereClause, }; /// Extension trait for [`Type`]. trait TypeExt { - fn get_ident(&self) -> Option<&Ident>; - fn contains_ident(&self, ident: &Ident) -> bool; + fn get_ident(&self) -> Option<&Ident>; + fn contains_ident(&self, ident: &Ident) -> bool; } impl TypeExt for Type { - fn get_ident(&self) -> Option<&Ident> { - match self { - Type::Path(p) => match &p.qself { - Some(qself) => qself.ty.get_ident(), - None => p.path.get_ident(), - }, - _ => None, - } - } - - fn contains_ident(&self, ident: &Ident) -> bool { - struct ContainsIdent<'a> { - ident: &'a Ident, - found: bool, - } - impl<'a, 'ast> Visit<'ast> for ContainsIdent<'a> { - fn visit_ident(&mut self, i: &'ast Ident) { - if i == self.ident { - self.found = true; - } - } - } - - let mut visitor = ContainsIdent { ident, found: false }; - syn::visit::visit_type(&mut visitor, self); - visitor.found - } + fn get_ident(&self) -> Option<&Ident> { + match self { + Type::Path(p) => match &p.qself { + Some(qself) => qself.ty.get_ident(), + None => p.path.get_ident(), + }, + _ => None, + } + } + + fn contains_ident(&self, ident: &Ident) -> bool { + struct ContainsIdent<'a> { + ident: &'a Ident, + found: bool, + } + impl<'a, 'ast> Visit<'ast> for ContainsIdent<'a> { + fn visit_ident(&mut self, i: &'ast Ident) { + if i == self.ident { + self.found = true; + } + } + } + + let mut visitor = ContainsIdent { + ident, + found: false, + }; + syn::visit::visit_type(&mut visitor, self); + visitor.found + } } /// Represents generics which only support [`TypeParam`] 
separated by commas. struct SimpleGenerics { - lt_token: Token![<], - params: Punctuated, - gt_token: Token![>], + lt_token: Token![<], + params: Punctuated, + gt_token: Token![>], } impl SimpleGenerics { - /// Returns the generics for types declarations etc. - fn type_generics(&self) -> impl Iterator { - self.params.iter().map(|p| &p.ident) - } - - /// Returns the generics for the `impl` block. - fn impl_generics(&self) -> impl Iterator { - self.params.iter() - } + /// Returns the generics for types declarations etc. + fn type_generics(&self) -> impl Iterator { + self.params.iter().map(|p| &p.ident) + } + + /// Returns the generics for the `impl` block. + fn impl_generics(&self) -> impl Iterator { + self.params.iter() + } } impl Parse for SimpleGenerics { - fn parse(input: ParseStream<'_>) -> Result { - Ok(Self { - lt_token: input.parse()?, - params: Punctuated::parse_separated_nonempty(input)?, - gt_token: input.parse()?, - }) - } + fn parse(input: ParseStream<'_>) -> Result { + Ok(Self { + lt_token: input.parse()?, + params: Punctuated::parse_separated_nonempty(input)?, + gt_token: input.parse()?, + }) + } } impl ToTokens for SimpleGenerics { - fn to_tokens(&self, tokens: &mut TokenStream) { - self.lt_token.to_tokens(tokens); - self.params.to_tokens(tokens); - self.gt_token.to_tokens(tokens); - } + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lt_token.to_tokens(tokens); + self.params.to_tokens(tokens); + self.gt_token.to_tokens(tokens); + } } mod storage_types { - syn::custom_keyword!(StorageValue); - syn::custom_keyword!(StorageMap); - syn::custom_keyword!(CountedStorageMap); - syn::custom_keyword!(StorageDoubleMap); - syn::custom_keyword!(StorageNMap); + syn::custom_keyword!(StorageValue); + syn::custom_keyword!(StorageMap); + syn::custom_keyword!(CountedStorageMap); + syn::custom_keyword!(StorageDoubleMap); + syn::custom_keyword!(StorageNMap); } /// The types of prefixes the storage alias macro supports. 
mod prefix_types { - // Use the verbatim/unmodified input name as the prefix. - syn::custom_keyword!(verbatim); - // The input type is a pallet and its pallet name should be used as the prefix. - syn::custom_keyword!(pallet_name); - // The input type implements `Get<'static str>` and this `str` should be used as the prefix. - syn::custom_keyword!(dynamic); + // Use the verbatim/unmodified input name as the prefix. + syn::custom_keyword!(verbatim); + // The input type is a pallet and its pallet name should be used as the prefix. + syn::custom_keyword!(pallet_name); + // The input type implements `Get<'static str>` and this `str` should be used as the prefix. + syn::custom_keyword!(dynamic); } /// The supported storage types enum StorageType { - Value { - _kw: storage_types::StorageValue, - _lt_token: Token![<], - prefix: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - Map { - _kw: storage_types::StorageMap, - _lt_token: Token![<], - prefix: Type, - _hasher_comma: Token![,], - hasher_ty: Type, - _key_comma: Token![,], - key_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - CountedMap { - _kw: storage_types::CountedStorageMap, - _lt_token: Token![<], - prefix: Type, - _hasher_comma: Token![,], - hasher_ty: Type, - _key_comma: Token![,], - key_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, - DoubleMap { - _kw: storage_types::StorageDoubleMap, - _lt_token: Token![<], - prefix: Type, - _hasher1_comma: Token![,], - hasher1_ty: Type, - _key1_comma: Token![,], - key1_ty: Type, - _hasher2_comma: Token![,], - hasher2_ty: Type, - _key2_comma: Token![,], - key2_ty: Type, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - 
_trailing_comma: Option, - _gt_token: Token![>], - }, - NMap { - _kw: storage_types::StorageNMap, - _lt_token: Token![<], - prefix: Type, - _paren_comma: Token![,], - _paren_token: token::Paren, - key_types: Punctuated, - _value_comma: Token![,], - value_ty: Type, - query_type: Option<(Token![,], Type)>, - _trailing_comma: Option, - _gt_token: Token![>], - }, + Value { + _kw: storage_types::StorageValue, + _lt_token: Token![<], + prefix: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + Map { + _kw: storage_types::StorageMap, + _lt_token: Token![<], + prefix: Type, + _hasher_comma: Token![,], + hasher_ty: Type, + _key_comma: Token![,], + key_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + CountedMap { + _kw: storage_types::CountedStorageMap, + _lt_token: Token![<], + prefix: Type, + _hasher_comma: Token![,], + hasher_ty: Type, + _key_comma: Token![,], + key_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + DoubleMap { + _kw: storage_types::StorageDoubleMap, + _lt_token: Token![<], + prefix: Type, + _hasher1_comma: Token![,], + hasher1_ty: Type, + _key1_comma: Token![,], + key1_ty: Type, + _hasher2_comma: Token![,], + hasher2_ty: Type, + _key2_comma: Token![,], + key2_ty: Type, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, + NMap { + _kw: storage_types::StorageNMap, + _lt_token: Token![<], + prefix: Type, + _paren_comma: Token![,], + _paren_token: token::Paren, + key_types: Punctuated, + _value_comma: Token![,], + value_ty: Type, + query_type: Option<(Token![,], Type)>, + _trailing_comma: Option, + _gt_token: Token![>], + }, } impl StorageType { - /// Generate the 
actual type declaration. - fn generate_type_declaration( - &self, - crate_: &syn::Path, - storage_instance: &StorageInstance, - storage_name: &Ident, - storage_generics: Option<&SimpleGenerics>, - visibility: &Visibility, - attributes: &[Attribute], - ) -> TokenStream { - let storage_instance_generics = &storage_instance.generics; - let storage_instance = &storage_instance.name; - let attributes = attributes.iter(); - let storage_generics = storage_generics.map(|g| { - let generics = g.type_generics(); - - quote!( < #( #generics ),* > ) - }); - - match self { - Self::Value { value_ty, query_type, .. } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageValue< - #storage_instance #storage_instance_generics, - #value_ty - #query_type - >; - } - }, - Self::CountedMap { value_ty, query_type, hasher_ty, key_ty, .. } | - Self::Map { value_ty, query_type, hasher_ty, key_ty, .. } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - let map_type = Ident::new( - match self { - Self::Map { .. } => "StorageMap", - _ => "CountedStorageMap", - }, - Span::call_site(), - ); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::#map_type< - #storage_instance #storage_instance_generics, - #hasher_ty, - #key_ty, - #value_ty - #query_type - >; - } - }, - Self::DoubleMap { - value_ty, - query_type, - hasher1_ty, - key1_ty, - hasher2_ty, - key2_ty, - .. - } => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageDoubleMap< - #storage_instance #storage_instance_generics, - #hasher1_ty, - #key1_ty, - #hasher2_ty, - #key2_ty, - #value_ty - #query_type - >; - } - }, - Self::NMap { value_ty, query_type, key_types, .. 
} => { - let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); - let key_types = key_types.iter(); - - quote! { - #( #attributes )* - #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageNMap< - #storage_instance #storage_instance_generics, - ( #( #key_types ),* ), - #value_ty - #query_type - >; - } - }, - } - } - - /// The prefix for this storage type. - fn prefix(&self) -> &Type { - match self { - Self::Value { prefix, .. } | - Self::Map { prefix, .. } | - Self::CountedMap { prefix, .. } | - Self::NMap { prefix, .. } | - Self::DoubleMap { prefix, .. } => prefix, - } - } + /// Generate the actual type declaration. + fn generate_type_declaration( + &self, + crate_: &syn::Path, + storage_instance: &StorageInstance, + storage_name: &Ident, + storage_generics: Option<&SimpleGenerics>, + visibility: &Visibility, + attributes: &[Attribute], + ) -> TokenStream { + let storage_instance_generics = &storage_instance.generics; + let storage_instance = &storage_instance.name; + let attributes = attributes.iter(); + let storage_generics = storage_generics.map(|g| { + let generics = g.type_generics(); + + quote!( < #( #generics ),* > ) + }); + + match self { + Self::Value { + value_ty, + query_type, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageValue< + #storage_instance #storage_instance_generics, + #value_ty + #query_type + >; + } + } + Self::CountedMap { + value_ty, + query_type, + hasher_ty, + key_ty, + .. + } + | Self::Map { + value_ty, + query_type, + hasher_ty, + key_ty, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + let map_type = Ident::new( + match self { + Self::Map { .. } => "StorageMap", + _ => "CountedStorageMap", + }, + Span::call_site(), + ); + + quote! 
{ + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::#map_type< + #storage_instance #storage_instance_generics, + #hasher_ty, + #key_ty, + #value_ty + #query_type + >; + } + } + Self::DoubleMap { + value_ty, + query_type, + hasher1_ty, + key1_ty, + hasher2_ty, + key2_ty, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageDoubleMap< + #storage_instance #storage_instance_generics, + #hasher1_ty, + #key1_ty, + #hasher2_ty, + #key2_ty, + #value_ty + #query_type + >; + } + } + Self::NMap { + value_ty, + query_type, + key_types, + .. + } => { + let query_type = query_type.as_ref().map(|(c, t)| quote!(#c #t)); + let key_types = key_types.iter(); + + quote! { + #( #attributes )* + #visibility type #storage_name #storage_generics = #crate_::storage::types::StorageNMap< + #storage_instance #storage_instance_generics, + ( #( #key_types ),* ), + #value_ty + #query_type + >; + } + } + } + } + + /// The prefix for this storage type. + fn prefix(&self) -> &Type { + match self { + Self::Value { prefix, .. } + | Self::Map { prefix, .. } + | Self::CountedMap { prefix, .. } + | Self::NMap { prefix, .. } + | Self::DoubleMap { prefix, .. 
} => prefix, + } + } } impl Parse for StorageType { - fn parse(input: ParseStream<'_>) -> Result { - let lookahead = input.lookahead1(); - - let parse_query_type = |input: ParseStream<'_>| -> Result> { - if input.peek(Token![,]) && !input.peek2(Token![>]) { - Ok(Some((input.parse()?, input.parse()?))) - } else { - Ok(None) - } - }; - - if lookahead.peek(storage_types::StorageValue) { - Ok(Self::Value { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageMap) { - Ok(Self::Map { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher_comma: input.parse()?, - hasher_ty: input.parse()?, - _key_comma: input.parse()?, - key_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::CountedStorageMap) { - Ok(Self::CountedMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher_comma: input.parse()?, - hasher_ty: input.parse()?, - _key_comma: input.parse()?, - key_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageDoubleMap) { - Ok(Self::DoubleMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _hasher1_comma: input.parse()?, - hasher1_ty: input.parse()?, - _key1_comma: input.parse()?, - key1_ty: input.parse()?, - _hasher2_comma: input.parse()?, - 
hasher2_ty: input.parse()?, - _key2_comma: input.parse()?, - key2_ty: input.parse()?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else if lookahead.peek(storage_types::StorageNMap) { - let content; - Ok(Self::NMap { - _kw: input.parse()?, - _lt_token: input.parse()?, - prefix: input.parse()?, - _paren_comma: input.parse()?, - _paren_token: parenthesized!(content in input), - key_types: Punctuated::parse_terminated(&content)?, - _value_comma: input.parse()?, - value_ty: input.parse()?, - query_type: parse_query_type(input)?, - _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, - _gt_token: input.parse()?, - }) - } else { - Err(lookahead.error()) - } - } + fn parse(input: ParseStream<'_>) -> Result { + let lookahead = input.lookahead1(); + + let parse_query_type = |input: ParseStream<'_>| -> Result> { + if input.peek(Token![,]) && !input.peek2(Token![>]) { + Ok(Some((input.parse()?, input.parse()?))) + } else { + Ok(None) + } + }; + + if lookahead.peek(storage_types::StorageValue) { + Ok(Self::Value { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageMap) { + Ok(Self::Map { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher_comma: input.parse()?, + hasher_ty: input.parse()?, + _key_comma: input.parse()?, + key_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if 
lookahead.peek(storage_types::CountedStorageMap) { + Ok(Self::CountedMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher_comma: input.parse()?, + hasher_ty: input.parse()?, + _key_comma: input.parse()?, + key_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageDoubleMap) { + Ok(Self::DoubleMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _hasher1_comma: input.parse()?, + hasher1_ty: input.parse()?, + _key1_comma: input.parse()?, + key1_ty: input.parse()?, + _hasher2_comma: input.parse()?, + hasher2_ty: input.parse()?, + _key2_comma: input.parse()?, + key2_ty: input.parse()?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else if lookahead.peek(storage_types::StorageNMap) { + let content; + Ok(Self::NMap { + _kw: input.parse()?, + _lt_token: input.parse()?, + prefix: input.parse()?, + _paren_comma: input.parse()?, + _paren_token: parenthesized!(content in input), + key_types: Punctuated::parse_terminated(&content)?, + _value_comma: input.parse()?, + value_ty: input.parse()?, + query_type: parse_query_type(input)?, + _trailing_comma: input.peek(Token![,]).then(|| input.parse()).transpose()?, + _gt_token: input.parse()?, + }) + } else { + Err(lookahead.error()) + } + } } /// The input expected by this macro. 
struct Input { - attributes: Vec, - visibility: Visibility, - _type: Token![type], - storage_name: Ident, - storage_generics: Option, - where_clause: Option, - _equal: Token![=], - storage_type: StorageType, - _semicolon: Token![;], + attributes: Vec, + visibility: Visibility, + _type: Token![type], + storage_name: Ident, + storage_generics: Option, + where_clause: Option, + _equal: Token![=], + storage_type: StorageType, + _semicolon: Token![;], } impl Parse for Input { - fn parse(input: ParseStream<'_>) -> Result { - let attributes = input.call(Attribute::parse_outer)?; - let visibility = input.parse()?; - let _type = input.parse()?; - let storage_name = input.parse()?; - - let lookahead = input.lookahead1(); - let storage_generics = if lookahead.peek(Token![<]) { - Some(input.parse()?) - } else if lookahead.peek(Token![=]) { - None - } else { - return Err(lookahead.error()) - }; - - let lookahead = input.lookahead1(); - let where_clause = if lookahead.peek(Token![where]) { - Some(input.parse()?) - } else if lookahead.peek(Token![=]) { - None - } else { - return Err(lookahead.error()) - }; - - let _equal = input.parse()?; - - let storage_type = input.parse()?; - - let _semicolon = input.parse()?; - - Ok(Self { - attributes, - visibility, - _type, - storage_name, - storage_generics, - _equal, - storage_type, - where_clause, - _semicolon, - }) - } + fn parse(input: ParseStream<'_>) -> Result { + let attributes = input.call(Attribute::parse_outer)?; + let visibility = input.parse()?; + let _type = input.parse()?; + let storage_name = input.parse()?; + + let lookahead = input.lookahead1(); + let storage_generics = if lookahead.peek(Token![<]) { + Some(input.parse()?) + } else if lookahead.peek(Token![=]) { + None + } else { + return Err(lookahead.error()); + }; + + let lookahead = input.lookahead1(); + let where_clause = if lookahead.peek(Token![where]) { + Some(input.parse()?) 
+ } else if lookahead.peek(Token![=]) { + None + } else { + return Err(lookahead.error()); + }; + + let _equal = input.parse()?; + + let storage_type = input.parse()?; + + let _semicolon = input.parse()?; + + Ok(Self { + attributes, + visibility, + _type, + storage_name, + storage_generics, + _equal, + storage_type, + where_clause, + _semicolon, + }) + } } /// Defines which type of prefix the storage alias is using. #[derive(Clone, Copy)] enum PrefixType { - /// An appropriate prefix will be determined automatically. - /// - /// If generics are passed, this is assumed to be a pallet and the pallet name should be used. - /// Otherwise use the verbatim passed name as prefix. - Compatibility, - /// The provided ident/name will be used as the prefix. - Verbatim, - /// The provided type will be used to determine the prefix. This type must - /// implement `PalletInfoAccess` which specifies the proper name. This - /// name is then used as the prefix. - PalletName, - /// Uses the provided type implementing `Get<'static str>` to determine the prefix. - Dynamic, + /// An appropriate prefix will be determined automatically. + /// + /// If generics are passed, this is assumed to be a pallet and the pallet name should be used. + /// Otherwise use the verbatim passed name as prefix. + Compatibility, + /// The provided ident/name will be used as the prefix. + Verbatim, + /// The provided type will be used to determine the prefix. This type must + /// implement `PalletInfoAccess` which specifies the proper name. This + /// name is then used as the prefix. + PalletName, + /// Uses the provided type implementing `Get<'static str>` to determine the prefix. + Dynamic, } /// Implementation of the `storage_alias` attribute macro. 
pub fn storage_alias(attributes: TokenStream, input: TokenStream) -> Result { - let input = syn::parse2::(input)?; - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - - let prefix_type = if attributes.is_empty() { - PrefixType::Compatibility - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::Verbatim - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::PalletName - } else if syn::parse2::(attributes.clone()).is_ok() { - PrefixType::Dynamic - } else { - return Err(Error::new(attributes.span(), "Unknown attributes")) - }; - - let storage_instance = generate_storage_instance( - &crate_, - &input.storage_name, - input.storage_generics.as_ref(), - input.where_clause.as_ref(), - input.storage_type.prefix(), - &input.visibility, - matches!(input.storage_type, StorageType::CountedMap { .. }), - prefix_type, - )?; - - let definition = input.storage_type.generate_type_declaration( - &crate_, - &storage_instance, - &input.storage_name, - input.storage_generics.as_ref(), - &input.visibility, - &input.attributes, - ); - - let storage_instance_code = storage_instance.code; - - Ok(quote! { - #storage_instance_code - - #definition - }) + let input = syn::parse2::(input)?; + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + + let prefix_type = if attributes.is_empty() { + PrefixType::Compatibility + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::Verbatim + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::PalletName + } else if syn::parse2::(attributes.clone()).is_ok() { + PrefixType::Dynamic + } else { + return Err(Error::new(attributes.span(), "Unknown attributes")); + }; + + let storage_instance = generate_storage_instance( + &crate_, + &input.storage_name, + input.storage_generics.as_ref(), + input.where_clause.as_ref(), + input.storage_type.prefix(), + &input.visibility, + matches!(input.storage_type, StorageType::CountedMap { .. 
}), + prefix_type, + )?; + + let definition = input.storage_type.generate_type_declaration( + &crate_, + &storage_instance, + &input.storage_name, + input.storage_generics.as_ref(), + &input.visibility, + &input.attributes, + ); + + let storage_instance_code = storage_instance.code; + + Ok(quote! { + #storage_instance_code + + #definition + }) } /// The storage instance to use for the storage alias. struct StorageInstance { - name: Ident, - generics: TokenStream, - code: TokenStream, + name: Ident, + generics: TokenStream, + code: TokenStream, } /// Generate the [`StorageInstance`] for the storage alias. fn generate_storage_instance( - crate_: &syn::Path, - storage_name: &Ident, - storage_generics: Option<&SimpleGenerics>, - storage_where_clause: Option<&WhereClause>, - prefix: &Type, - visibility: &Visibility, - is_counted_map: bool, - prefix_type: PrefixType, + crate_: &syn::Path, + storage_name: &Ident, + storage_generics: Option<&SimpleGenerics>, + storage_where_clause: Option<&WhereClause>, + prefix: &Type, + visibility: &Visibility, + is_counted_map: bool, + prefix_type: PrefixType, ) -> Result { - if let Type::Infer(_) = prefix { - return Err(Error::new(prefix.span(), "`_` is not allowed as prefix by `storage_alias`.")) - } - - let impl_generics_used_by_prefix = storage_generics - .as_ref() - .map(|g| { - g.impl_generics() - .filter(|g| prefix.contains_ident(&g.ident)) - .collect::>() - }) - .unwrap_or_default(); - - let (pallet_prefix, impl_generics, type_generics) = match prefix_type { - PrefixType::Compatibility => - if !impl_generics_used_by_prefix.is_empty() { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! 
{ - < #prefix as #crate_::traits::PalletInfoAccess>::name() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - } else if let Some(prefix) = prefix.get_ident() { - let prefix_str = prefix.to_string(); - - (quote!(#prefix_str), quote!(), quote!()) - } else { - return Err(Error::new_spanned( - prefix, - "If there are no generics, the prefix is only allowed to be an identifier.", - )) - }, - PrefixType::Verbatim => { - let prefix_str = match prefix.get_ident() { - Some(p) => p.to_string(), - None => - return Err(Error::new_spanned( - prefix, - "Prefix type `verbatim` requires that the prefix is an ident.", - )), - }; - - (quote!(#prefix_str), quote!(), quote!()) - }, - PrefixType::PalletName => { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! { - <#prefix as #crate_::traits::PalletInfoAccess>::name() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - }, - PrefixType::Dynamic => { - let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); - let impl_generics = impl_generics_used_by_prefix.iter(); - - ( - quote! { - <#prefix as #crate_::traits::Get<_>>::get() - }, - quote!( #( #impl_generics ),* ), - quote!( #( #type_generics ),* ), - ) - }, - }; - - let where_clause = storage_where_clause.map(|w| quote!(#w)).unwrap_or_default(); - - let name_str = format!("{}_Storage_Instance", storage_name); - let name = Ident::new(&name_str, Span::call_site()); - let storage_name_str = storage_name.to_string(); - - let counter_code = is_counted_map.then(|| { - let counter_name = Ident::new(&counter_prefix(&name_str), Span::call_site()); - let counter_storage_name_str = counter_prefix(&storage_name_str); - let storage_prefix_hash = helper::two128_str(&counter_storage_name_str); - - quote! 
{ - #visibility struct #counter_name< #impl_generics >( - ::core::marker::PhantomData<(#type_generics)> - ) #where_clause; - - impl<#impl_generics> #crate_::traits::StorageInstance - for #counter_name< #type_generics > #where_clause - { - fn pallet_prefix() -> &'static str { - #pallet_prefix - } - - const STORAGE_PREFIX: &'static str = #counter_storage_name_str; - fn storage_prefix_hash() -> [u8; 16] { - #storage_prefix_hash - } - } - - impl<#impl_generics> #crate_::storage::types::CountedStorageMapInstance - for #name< #type_generics > #where_clause - { - type CounterPrefix = #counter_name < #type_generics >; - } - } - }); - - let storage_prefix_hash = helper::two128_str(&storage_name_str); - - // Implement `StorageInstance` trait. - let code = quote! { - #[allow(non_camel_case_types)] - #visibility struct #name< #impl_generics >( - ::core::marker::PhantomData<(#type_generics)> - ) #where_clause; - - impl<#impl_generics> #crate_::traits::StorageInstance - for #name< #type_generics > #where_clause - { - fn pallet_prefix() -> &'static str { - #pallet_prefix - } - - const STORAGE_PREFIX: &'static str = #storage_name_str; - fn storage_prefix_hash() -> [u8; 16] { - #storage_prefix_hash - } - } - - #counter_code - }; - - Ok(StorageInstance { name, code, generics: quote!( < #type_generics > ) }) + if let Type::Infer(_) = prefix { + return Err(Error::new( + prefix.span(), + "`_` is not allowed as prefix by `storage_alias`.", + )); + } + + let impl_generics_used_by_prefix = storage_generics + .as_ref() + .map(|g| { + g.impl_generics() + .filter(|g| prefix.contains_ident(&g.ident)) + .collect::>() + }) + .unwrap_or_default(); + + let (pallet_prefix, impl_generics, type_generics) = match prefix_type { + PrefixType::Compatibility => { + if !impl_generics_used_by_prefix.is_empty() { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! 
{ + < #prefix as #crate_::traits::PalletInfoAccess>::name() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + } else if let Some(prefix) = prefix.get_ident() { + let prefix_str = prefix.to_string(); + + (quote!(#prefix_str), quote!(), quote!()) + } else { + return Err(Error::new_spanned( + prefix, + "If there are no generics, the prefix is only allowed to be an identifier.", + )); + } + } + PrefixType::Verbatim => { + let prefix_str = match prefix.get_ident() { + Some(p) => p.to_string(), + None => { + return Err(Error::new_spanned( + prefix, + "Prefix type `verbatim` requires that the prefix is an ident.", + )) + } + }; + + (quote!(#prefix_str), quote!(), quote!()) + } + PrefixType::PalletName => { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! { + <#prefix as #crate_::traits::PalletInfoAccess>::name() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + } + PrefixType::Dynamic => { + let type_generics = impl_generics_used_by_prefix.iter().map(|g| &g.ident); + let impl_generics = impl_generics_used_by_prefix.iter(); + + ( + quote! { + <#prefix as #crate_::traits::Get<_>>::get() + }, + quote!( #( #impl_generics ),* ), + quote!( #( #type_generics ),* ), + ) + } + }; + + let where_clause = storage_where_clause.map(|w| quote!(#w)).unwrap_or_default(); + + let name_str = format!("{}_Storage_Instance", storage_name); + let name = Ident::new(&name_str, Span::call_site()); + let storage_name_str = storage_name.to_string(); + + let counter_code = is_counted_map.then(|| { + let counter_name = Ident::new(&counter_prefix(&name_str), Span::call_site()); + let counter_storage_name_str = counter_prefix(&storage_name_str); + let storage_prefix_hash = helper::two128_str(&counter_storage_name_str); + + quote! 
{ + #visibility struct #counter_name< #impl_generics >( + ::core::marker::PhantomData<(#type_generics)> + ) #where_clause; + + impl<#impl_generics> #crate_::traits::StorageInstance + for #counter_name< #type_generics > #where_clause + { + fn pallet_prefix() -> &'static str { + #pallet_prefix + } + + const STORAGE_PREFIX: &'static str = #counter_storage_name_str; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + + impl<#impl_generics> #crate_::storage::types::CountedStorageMapInstance + for #name< #type_generics > #where_clause + { + type CounterPrefix = #counter_name < #type_generics >; + } + } + }); + + let storage_prefix_hash = helper::two128_str(&storage_name_str); + + // Implement `StorageInstance` trait. + let code = quote! { + #[allow(non_camel_case_types)] + #visibility struct #name< #impl_generics >( + ::core::marker::PhantomData<(#type_generics)> + ) #where_clause; + + impl<#impl_generics> #crate_::traits::StorageInstance + for #name< #type_generics > #where_clause + { + fn pallet_prefix() -> &'static str { + #pallet_prefix + } + + const STORAGE_PREFIX: &'static str = #storage_name_str; + fn storage_prefix_hash() -> [u8; 16] { + #storage_prefix_hash + } + } + + #counter_code + }; + + Ok(StorageInstance { + name, + code, + generics: quote!( < #type_generics > ), + }) } diff --git a/support/procedural-fork/src/transactional.rs b/support/procedural-fork/src/transactional.rs index e9d4f84b7..73a841d9b 100644 --- a/support/procedural-fork/src/transactional.rs +++ b/support/procedural-fork/src/transactional.rs @@ -21,40 +21,50 @@ use quote::quote; use syn::{ItemFn, Result}; pub fn transactional(_attr: TokenStream, input: TokenStream) -> Result { - let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; - - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - let output = quote! 
{ - #(#attrs)* - #vis #sig { - use #crate_::storage::{with_transaction, TransactionOutcome}; - with_transaction(|| { - let r = (|| { #block })(); - if r.is_ok() { - TransactionOutcome::Commit(r) - } else { - TransactionOutcome::Rollback(r) - } - }) - } - }; - - Ok(output.into()) + let ItemFn { + attrs, + vis, + sig, + block, + } = syn::parse(input)?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let output = quote! { + #(#attrs)* + #vis #sig { + use #crate_::storage::{with_transaction, TransactionOutcome}; + with_transaction(|| { + let r = (|| { #block })(); + if r.is_ok() { + TransactionOutcome::Commit(r) + } else { + TransactionOutcome::Rollback(r) + } + }) + } + }; + + Ok(output.into()) } pub fn require_transactional(_attr: TokenStream, input: TokenStream) -> Result { - let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; - - let crate_ = generate_access_from_frame_or_crate("frame-support")?; - let output = quote! { - #(#attrs)* - #vis #sig { - if !#crate_::storage::transactional::is_transactional() { - return Err(#crate_::sp_runtime::TransactionalError::NoLayer.into()); - } - #block - } - }; - - Ok(output.into()) + let ItemFn { + attrs, + vis, + sig, + block, + } = syn::parse(input)?; + + let crate_ = generate_access_from_frame_or_crate("frame-support")?; + let output = quote! 
{ + #(#attrs)* + #vis #sig { + if !#crate_::storage::transactional::is_transactional() { + return Err(#crate_::sp_runtime::TransactionalError::NoLayer.into()); + } + #block + } + }; + + Ok(output.into()) } diff --git a/support/procedural-fork/src/tt_macro.rs b/support/procedural-fork/src/tt_macro.rs index d37127421..3f280013f 100644 --- a/support/procedural-fork/src/tt_macro.rs +++ b/support/procedural-fork/src/tt_macro.rs @@ -22,29 +22,29 @@ use proc_macro2::{Ident, TokenStream}; use quote::format_ident; struct CreateTtReturnMacroDef { - name: Ident, - args: Vec<(Ident, TokenStream)>, + name: Ident, + args: Vec<(Ident, TokenStream)>, } impl syn::parse::Parse for CreateTtReturnMacroDef { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let name = input.parse()?; - let _ = input.parse::()?; + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let name = input.parse()?; + let _ = input.parse::()?; - let mut args = Vec::new(); - while !input.is_empty() { - let mut value; - let key: Ident = input.parse()?; - let _ = input.parse::()?; - let _: syn::token::Bracket = syn::bracketed!(value in input); - let _: syn::token::Brace = syn::braced!(value in value); - let value: TokenStream = value.parse()?; + let mut args = Vec::new(); + while !input.is_empty() { + let mut value; + let key: Ident = input.parse()?; + let _ = input.parse::()?; + let _: syn::token::Bracket = syn::bracketed!(value in input); + let _: syn::token::Brace = syn::braced!(value in value); + let value: TokenStream = value.parse()?; - args.push((key, value)) - } + args.push((key, value)) + } - Ok(Self { name, args }) - } + Ok(Self { name, args }) + } } /// A proc macro that accepts a name and any number of key-value pairs, to be used to create a @@ -74,32 +74,32 @@ impl syn::parse::Parse for CreateTtReturnMacroDef { /// } /// ``` pub fn create_tt_return_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let CreateTtReturnMacroDef { name, args } = - 
syn::parse_macro_input!(input as CreateTtReturnMacroDef); + let CreateTtReturnMacroDef { name, args } = + syn::parse_macro_input!(input as CreateTtReturnMacroDef); - let (keys, values): (Vec<_>, Vec<_>) = args.into_iter().unzip(); - let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let unique_name = format_ident!("{}_{}", name, count); + let (keys, values): (Vec<_>, Vec<_>) = args.into_iter().unzip(); + let count = COUNTER.with(|counter| counter.borrow_mut().inc()); + let unique_name = format_ident!("{}_{}", name, count); - let decl_macro = quote::quote! { - #[macro_export] - #[doc(hidden)] - macro_rules! #unique_name { - { - $caller:tt - $(your_tt_return = [{ $my_tt_macro:path }])? - } => { - $my_tt_return! { - $caller - #( - #keys = [{ #values }] - )* - } - } - } + let decl_macro = quote::quote! { + #[macro_export] + #[doc(hidden)] + macro_rules! #unique_name { + { + $caller:tt + $(your_tt_return = [{ $my_tt_macro:path }])? + } => { + $my_tt_return! { + $caller + #( + #keys = [{ #values }] + )* + } + } + } - pub use #unique_name as #name; - }; + pub use #unique_name as #name; + }; - decl_macro.into() + decl_macro.into() } From af79b30ad55449c8729d6118ea30ea2c07b2dc55 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Thu, 19 Sep 2024 11:49:01 -0400 Subject: [PATCH 100/213] Update to Polkadot SDK 1.16.0-rc1, wip: benchmarks don't work --- Cargo.lock | 3003 ++++++++++++++++++++++-------------- Cargo.toml | 142 +- node/src/chain_spec/mod.rs | 4 +- node/src/command.rs | 75 +- node/src/rpc.rs | 7 +- node/src/service.rs | 82 +- runtime/src/lib.rs | 57 +- scripts/localnet.sh | 8 +- 8 files changed, 2040 insertions(+), 1338 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ee0933379..c19682774 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,18 +23,18 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" dependencies = [ - "gimli 0.29.0", + "gimli 0.31.0", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "aead" @@ -68,7 +68,7 @@ dependencies = [ "cipher 0.4.4", "ctr", "ghash", - "subtle 2.6.0", + "subtle 2.6.1", ] [[package]] @@ -77,7 +77,7 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.15", + "getrandom", "once_cell", "version_check", ] @@ -89,7 +89,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom 0.2.15", + "getrandom", "once_cell", "version_check", "zerocopy", @@ -125,20 +125,11 @@ dependencies = [ "libc", ] -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - [[package]] name = "anstream" -version = "0.6.14" +version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" dependencies = [ "anstyle", "anstyle-parse", @@ -151,33 +142,33 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-parse" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.3" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" dependencies = [ "anstyle", "windows-sys 0.52.0", @@ -185,9 +176,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" [[package]] name = "approx" @@ -198,20 +189,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "aquamarine" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1da02abba9f9063d786eab1509833ebb2fac0f966862ca59439c76b9c566760" -dependencies = [ - "include_dir", - "itertools 0.10.5", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "aquamarine" version = "0.5.0" @@ -223,7 +200,7 @@ dependencies = [ 
"proc-macro-error", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -384,7 +361,7 @@ dependencies = [ "num-bigint", "num-traits", "paste", - "rustc_version 0.4.0", + "rustc_version 0.4.1", "zeroize", ] @@ -454,7 +431,7 @@ dependencies = [ [[package]] name = "ark-secret-scalar" version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" +source = "git+https://github.com/w3f/ring-vrf?rev=0fef826#0fef8266d851932ad25d6b41bc4b34d834d1e11d" dependencies = [ "ark-ec", "ark-ff", @@ -503,22 +480,16 @@ dependencies = [ [[package]] name = "ark-transcript" version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" +source = "git+https://github.com/w3f/ring-vrf?rev=0fef826#0fef8266d851932ad25d6b41bc4b34d834d1e11d" dependencies = [ "ark-ff", "ark-serialize", "ark-std", "digest 0.10.7", - "rand_core 0.6.4", + "rand_core", "sha3", ] -[[package]] -name = "array-bytes" -version = "4.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52f63c5c1316a16a4b35eaac8b76a98248961a533f061684cb2a7cb0eafb6c6" - [[package]] name = "array-bytes" version = "6.2.3" @@ -527,15 +498,15 @@ checksum = "5d5dde061bd34119e902bbb2d9b90c5692635cf59fb91d582c2b68043f1b8293" [[package]] name = "arrayref" -version = "0.3.7" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" [[package]] name = "arrayvec" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "asn1-rs" @@ -543,8 +514,24 @@ version = "0.5.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7f6fd5ddaf0351dff5b8da21b2fb4ff8e08ddd02857f0bf69c47639106c0fff0" dependencies = [ - "asn1-rs-derive", - "asn1-rs-impl", + "asn1-rs-derive 0.4.0", + "asn1-rs-impl 0.1.0", + "displaydoc", + "nom", + "num-traits", + "rusticata-macros", + "thiserror", + "time", +] + +[[package]] +name = "asn1-rs" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5493c3bedbacf7fd7382c6346bbd66687d12bbaad3a89a2d2c303ee6cf20b048" +dependencies = [ + "asn1-rs-derive 0.5.1", + "asn1-rs-impl 0.2.0", "displaydoc", "nom", "num-traits", @@ -562,7 +549,19 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "synstructure", + "synstructure 0.12.6", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", + "synstructure 0.13.1", ] [[package]] @@ -576,6 +575,17 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "asn1-rs-impl" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] + [[package]] name = "async-channel" version = "1.9.0" @@ -589,9 +599,9 @@ dependencies = [ [[package]] name = "async-io" -version = "2.3.3" +version = "2.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" +checksum = "444b0228950ee6501b3568d3c93bf1176a1fdbc3b758dcd9475046d30f4dc7e8" dependencies = [ "async-lock", "cfg-if", @@ -600,10 +610,10 @@ dependencies = [ "futures-lite", "parking", "polling", - "rustix 0.38.34", + "rustix 0.38.37", "slab", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] 
@@ -614,18 +624,18 @@ checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ "event-listener 5.3.1", "event-listener-strategy", - "pin-project-lite 0.2.14", + "pin-project-lite", ] [[package]] name = "async-trait" -version = "0.1.80" +version = "0.1.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -638,7 +648,24 @@ dependencies = [ "futures-sink", "futures-util", "memchr", - "pin-project-lite 0.2.14", + "pin-project-lite", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "attohttpc" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9a9bf8b79a749ee0b911b91b671cc2b6c670bdbc7e3dfd537576ddc94bb2a2" +dependencies = [ + "http 0.2.12", + "log", + "url", ] [[package]] @@ -649,23 +676,23 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ - "addr2line 0.22.0", - "cc", + "addr2line 0.24.1", "cfg-if", "libc", "miniz_oxide", - "object 0.36.0", + "object 0.36.4", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] name = "bandersnatch_vrfs" version = "0.0.4" -source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" +source = 
"git+https://github.com/w3f/ring-vrf?rev=0fef826#0fef8266d851932ad25d6b41bc4b34d834d1e11d" dependencies = [ "ark-bls12-381", "ark-ec", @@ -674,10 +701,8 @@ dependencies = [ "ark-serialize", "ark-std", "dleq_vrf", - "fflonk", - "merlin", "rand_chacha", - "rand_core 0.6.4", + "rand_core", "ring 0.1.0", "sha2 0.10.8", "sp-ark-bls12-381", @@ -710,19 +735,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] -name = "base64ct" -version = "1.6.0" +name = "base64" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] -name = "beef" -version = "0.5.2" +name = "base64ct" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" -dependencies = [ - "serde", -] +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "bincode" @@ -745,13 +767,13 @@ dependencies = [ "lazy_static", "lazycell", "peeking_take_while", - "prettyplease 0.2.20", + "prettyplease 0.2.22", "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -778,9 +800,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "bitvec" @@ -839,9 +861,9 @@ dependencies = [ [[package]] name = "blake3" -version = "1.5.1" +version = "1.5.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "30cca6d3674597c30ddf2c587bf8d9d65c9a84d2326d941cc79c9842dfe0ef52" +checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7" dependencies = [ "arrayref", "arrayvec", @@ -924,9 +946,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "bytemuck" -version = "1.16.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b236fc92302c97ed75b38da1f4917b5cdda4984745740f153a5d3059e48d725e" +checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae" [[package]] name = "byteorder" @@ -936,9 +958,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.6.0" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" [[package]] name = "bzip2-sys" @@ -963,9 +985,9 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.7" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" +checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" dependencies = [ "serde", ] @@ -995,13 +1017,13 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.99" +version = "1.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" +checksum = "07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0" dependencies = [ "jobserver", "libc", - "once_cell", + "shlex", ] [[package]] @@ -1080,7 +1102,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] 
[[package]] @@ -1091,9 +1113,22 @@ checksum = "b9b68e3193982cd54187d71afdb2a271ad4cf8af157858e9cb911b91321de143" dependencies = [ "core2", "multibase", - "multihash", + "multihash 0.17.0", + "serde", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "cid" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd94671561e36e4e7de75f753f577edafb0e7c05d6e4547229fdf7938fbcd2c3" +dependencies = [ + "core2", + "multibase", + "multihash 0.18.1", "serde", - "unsigned-varint", + "unsigned-varint 0.7.2", ] [[package]] @@ -1129,9 +1164,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.7" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5db83dced34638ad474f39f250d7fea9598bdd239eaced1bdf45d597da0f433f" +checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" dependencies = [ "clap_builder", "clap_derive", @@ -1139,9 +1174,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.7" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e204572485eb3fbf28f871612191521df159bc3e15a9f5064c66dba3a8c05f" +checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" dependencies = [ "anstream", "anstyle", @@ -1152,21 +1187,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.5" +version = "4.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6" +checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = 
"1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" [[package]] name = "codespan-reporting" @@ -1180,9 +1215,19 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.1" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" + +[[package]] +name = "combine" +version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] [[package]] name = "comfy-table" @@ -1190,7 +1235,7 @@ version = "7.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b34115915337defe99b2aff5c2ce6771e5fbc4079f4b506301f5cf394c8452f7" dependencies = [ - "strum 0.26.2", + "strum 0.26.3", "strum_macros 0.26.4", "unicode-width", ] @@ -1198,7 +1243,7 @@ dependencies = [ [[package]] name = "common" version = "0.1.0" -source = "git+https://github.com/w3f/ring-proof#665f5f51af5734c7b6d90b985dd6861d4c5b4752" +source = "git+https://github.com/w3f/ring-proof?rev=665f5f5#665f5f51af5734c7b6d90b985dd6861d4c5b4752" dependencies = [ "ark-ec", "ark-ff", @@ -1260,16 +1305,16 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.15", + "getrandom", "once_cell", "tiny-keccak", ] [[package]] name = "constant_time_eq" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] name = "constcat" @@ -1295,9 +1340,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = 
"0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "core2" @@ -1319,9 +1364,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.12" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" dependencies = [ "libc", ] @@ -1424,6 +1469,21 @@ dependencies = [ "wasmtime-types", ] +[[package]] +name = "crc" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" version = "1.4.2" @@ -1471,8 +1531,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array 0.14.7", - "rand_core 0.6.4", - "subtle 2.6.0", + "rand_core", + "subtle 2.6.1", "zeroize", ] @@ -1483,7 +1543,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array 0.14.7", - "rand_core 0.6.4", + "rand_core", "typenum 1.17.0", ] @@ -1504,7 +1564,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ "generic-array 0.14.7", - "subtle 2.6.0", + "subtle 2.6.1", ] [[package]] @@ 
-1516,19 +1576,6 @@ dependencies = [ "cipher 0.4.4", ] -[[package]] -name = "curve25519-dalek" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "subtle 2.6.0", - "zeroize", -] - [[package]] name = "curve25519-dalek" version = "4.1.3" @@ -1540,8 +1587,8 @@ dependencies = [ "curve25519-dalek-derive", "digest 0.10.7", "fiat-crypto", - "rustc_version 0.4.0", - "subtle 2.6.0", + "rustc_version 0.4.1", + "subtle 2.6.1", "zeroize", ] @@ -1553,14 +1600,14 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "cxx" -version = "1.0.124" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "273dcfd3acd4e1e276af13ed2a43eea7001318823e7a726a6b3ed39b4acc0b82" +checksum = "54ccead7d199d584d139148b04b4a368d1ec7556a1d9ea2548febb1b9d49f9a4" dependencies = [ "cc", "cxxbridge-flags", @@ -1570,9 +1617,9 @@ dependencies = [ [[package]] name = "cxx-build" -version = "1.0.124" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b2766fbd92be34e9ed143898fce6c572dc009de39506ed6903e5a05b68914e" +checksum = "c77953e99f01508f89f55c494bfa867171ef3a6c8cea03d26975368f2121a5c1" dependencies = [ "cc", "codespan-reporting", @@ -1580,31 +1627,31 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "cxxbridge-flags" -version = "1.0.124" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "839fcd5e43464614ffaa989eaf1c139ef1f0c51672a1ed08023307fa1b909ccd" +checksum = "65777e06cc48f0cb0152024c77d6cf9e4bdb4408e7b48bea993d42fa0f5b02b6" [[package]] name = "cxxbridge-macro" -version = "1.0.124" +version = "1.0.128" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2c1c1776b986979be68bb2285da855f8d8a35851a769fca8740df7c3d07877" +checksum = "98532a60dedaebc4848cb2cba5023337cc9ea3af16a5b062633fabfd9f18fb60" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "darling" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83b2eb4d90d12bdda5ed17de686c2acb4c57914f8f921b8da7e112b5a36f3fe1" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -1612,27 +1659,27 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "darling_macro" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -1690,7 +1737,21 @@ version = "8.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbd676fbbab537128ef0278adb5576cf363cff6aa22a7b24effe97347cfab61e" dependencies = [ - "asn1-rs", + "asn1-rs 0.5.2", + "displaydoc", + "nom", + "num-bigint", + "num-traits", + "rusticata-macros", +] + +[[package]] +name = "der-parser" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cd0a5c643689626bec213c4d8bd4d96acc8ffdb4ad4bb6bc16abf27d5f4b553" 
+dependencies = [ + "asn1-rs 0.6.2", "displaydoc", "nom", "num-bigint", @@ -1719,17 +1780,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "derive-syn-parse" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79116f119dd1dba1abf1f3405f03b9b0e79a27a3883864bfebded8a3dc768cd" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "derive-syn-parse" version = "0.2.0" @@ -1738,7 +1788,7 @@ checksum = "d65d7ce8132b7c0e54497a4d9a55a1c2a0912a0d786cf894472ba818fba45762" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -1750,8 +1800,8 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "rustc_version 0.4.0", - "syn 2.0.71", + "rustc_version 0.4.1", + "syn 2.0.77", ] [[package]] @@ -1787,7 +1837,7 @@ dependencies = [ "block-buffer 0.10.4", "const-oid", "crypto-common", - "subtle 2.6.0", + "subtle 2.6.1", ] [[package]] @@ -1840,13 +1890,13 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "dleq_vrf" version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" +source = "git+https://github.com/w3f/ring-vrf?rev=0fef826#0fef8266d851932ad25d6b41bc4b34d834d1e11d" dependencies = [ "ark-ec", "ark-ff", @@ -1875,14 +1925,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a081e51fb188742f5a7a1164ad752121abcb22874b21e2c3b0dd040c515fdad" dependencies = [ "common-path", - "derive-syn-parse 0.2.0", + "derive-syn-parse", "once_cell", "proc-macro2", "quote", "regex", - "syn 2.0.71", + "syn 2.0.77", "termcolor", - "toml 0.8.14", + "toml 0.8.19", "walkdir", ] @@ -1956,34 +2006,35 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" 
dependencies = [ - "curve25519-dalek 4.1.3", + "curve25519-dalek", "ed25519", - "rand_core 0.6.4", + "rand_core", "serde", "sha2 0.10.8", - "subtle 2.6.0", + "subtle 2.6.1", "zeroize", ] [[package]] name = "ed25519-zebra" -version = "3.1.0" +version = "4.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c24f403d068ad0b359e577a77f92392118be3f3c927538f2bb544a5ecd828c6" +checksum = "7d9ce6874da5d4415896cd45ffbc4d1cfc0c4f9c079427bd870742c30f2f65a9" dependencies = [ - "curve25519-dalek 3.2.0", - "hashbrown 0.12.3", + "curve25519-dalek", + "ed25519", + "hashbrown 0.14.5", "hex", - "rand_core 0.6.4", - "sha2 0.9.9", + "rand_core", + "sha2 0.10.8", "zeroize", ] [[package]] name = "either" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "elliptic-curve" @@ -1998,10 +2049,10 @@ dependencies = [ "generic-array 0.14.7", "group", "pkcs8", - "rand_core 0.6.4", + "rand_core", "sec1", "serdect", - "subtle 2.6.0", + "subtle 2.6.1", "zeroize", ] @@ -2023,6 +2074,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "enum-as-inner" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.77", +] + [[package]] name = "enumflags2" version = "0.7.10" @@ -2040,7 +2103,7 @@ checksum = "de0d48a183585823424a4ce1aa132d174a6a81bd540895822eb4c8373a8e49e8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -2119,7 +2182,7 @@ checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" dependencies = [ "concurrent-queue", "parking", - "pin-project-lite 0.2.14", + "pin-project-lite", ] 
[[package]] @@ -2129,7 +2192,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ "event-listener 5.3.1", - "pin-project-lite 0.2.14", + "pin-project-lite", ] [[package]] @@ -2150,10 +2213,10 @@ dependencies = [ "blake2 0.10.6", "file-guard", "fs-err", - "prettyplease 0.2.20", + "prettyplease 0.2.22", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -2170,9 +2233,9 @@ checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] name = "fastrand" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" [[package]] name = "fdlimit" @@ -2190,8 +2253,8 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ - "rand_core 0.6.4", - "subtle 2.6.0", + "rand_core", + "subtle 2.6.1", ] [[package]] @@ -2235,14 +2298,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", - "windows-sys 0.52.0", + "libredox", + "windows-sys 0.59.0", ] [[package]] @@ -2279,17 +2342,6 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" -[[package]] -name = "flate2" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" -dependencies = [ - "crc32fast", - "libz-sys", - "miniz_oxide", -] - [[package]] name = "float-cmp" version = "0.9.0" @@ -2305,10 +2357,25 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "fork-tree" version = "12.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", ] @@ -2322,6 +2389,16 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "forwarded-header-value" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835f84f38484cc86f110a805655697908257fb9a7af005234060891557198e9" +dependencies = [ + "nonempty", + "thiserror", +] + [[package]] name = "fragile" version = "2.0.0" @@ -2331,7 +2408,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-support", "frame-support-procedural", @@ 
-2347,19 +2424,18 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "static_assertions", ] [[package]] name = "frame-benchmarking-cli" version = "32.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "Inflector", - "array-bytes 6.2.3", + "array-bytes", "chrono", "clap", "comfy-table", @@ -2368,7 +2444,7 @@ dependencies = [ "frame-system", "gethostname", "handlebars", - "itertools 0.10.5", + "itertools 0.11.0", "lazy_static", "linked-hash-map", "log", @@ -2376,6 +2452,7 @@ dependencies = [ "rand", "rand_pcg", "sc-block-builder", + "sc-chain-spec", "sc-cli", "sc-client-api", "sc-client-db", @@ -2388,15 +2465,16 @@ dependencies = [ "sp-blockchain", "sp-core", "sp-database", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-genesis-builder", "sp-inherents", "sp-io", "sp-keystore", "sp-runtime", "sp-state-machine", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-trie", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface 20.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "thiserror", "thousands", ] @@ -2404,9 +2482,9 @@ dependencies = [ [[package]] name = "frame-executive" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "aquamarine 0.3.3", + "aquamarine", "frame-support", "frame-system", "frame-try-runtime", @@ -2416,8 +2494,7 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", ] [[package]] @@ -2435,9 +2512,9 @@ dependencies = [ [[package]] name = "frame-metadata-hash-extension" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "docify", "frame-support", "frame-system", @@ -2450,10 +2527,10 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "aquamarine 0.5.0", - "array-bytes 6.2.3", + "aquamarine", + "array-bytes", "bitflags 1.3.2", "docify", "environmental", @@ -2473,7 +2550,7 @@ dependencies = [ "sp-arithmetic", "sp-core", "sp-crypto-hashing-proc-macro", - "sp-debug-derive 14.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-genesis-builder", "sp-inherents", "sp-io", @@ -2481,8 +2558,8 @@ dependencies = [ "sp-runtime", "sp-staking", "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-weights", "static_assertions", "tt-call", @@ -2491,48 +2568,49 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "Inflector", "cfg-expr", - "derive-syn-parse 0.2.0", + "derive-syn-parse", + "docify", "expander", "frame-support-procedural-tools", - "itertools 0.10.5", + "itertools 0.11.0", "macro_magic", - "proc-macro-warning", + "proc-macro-warning 1.0.2", "proc-macro2", "quote", "sp-crypto-hashing", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-support-procedural-tools-derive", - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "cfg-if", "docify", @@ -2544,7 +2622,7 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-version", "sp-weights", ] @@ -2552,7 +2630,7 @@ dependencies = [ [[package]] name = "frame-system-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-benchmarking", "frame-support", @@ -2561,14 +2639,14 @@ dependencies = [ "scale-info", "sp-core", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "frame-system-rpc-runtime-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ + "docify", "parity-scale-codec", "sp-api", ] @@ -2576,13 +2654,12 @@ dependencies = [ [[package]] name = "frame-try-runtime" version = "0.34.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-support", "parity-scale-codec", "sp-api", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] @@ -2625,6 +2702,16 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-bounded" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b07bbbe7d7e78809544c6f718d875627addc73a7c3582447abc052cd3dc67e0" +dependencies = [ + "futures-timer", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.30" @@ -2666,7 +2753,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" dependencies = [ "futures-core", - "pin-project-lite 0.2.14", + "pin-project-lite", ] [[package]] @@ -2677,18 +2764,17 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "futures-rustls" -version = "0.22.2" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2411eed028cdf8c8034eaf21f9915f956b6c3abec4d4c7949ee67f0721127bd" +checksum = "35bd3cf68c183738046838e300353e4716c674dc5e56890de4826801a6622a28" dependencies = [ "futures-io", - "rustls 0.20.9", - "webpki", + "rustls 0.21.12", ] [[package]] @@ -2722,7 +2808,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.14", + "pin-project-lite", "pin-utils", "slab", ] @@ -2766,17 +2852,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - [[package]] name = "getrandom" version = "0.2.15" @@ -2785,7 +2860,7 @@ checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", ] [[package]] @@ -2795,7 +2870,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea1015b5a70616b688dc230cfe50c8af89d972cb132d5a622814d29773b10b9" dependencies = [ "rand", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -2831,9 +2906,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" [[package]] name = "glob" @@ -2868,8 +2943,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", - "rand_core 0.6.4", - "subtle 2.6.0", + "rand_core", + "subtle 2.6.1", ] [[package]] @@ -2883,8 +2958,27 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http", - "indexmap 2.2.6", + "http 0.2.12", + "indexmap 2.5.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", + "indexmap 2.5.0", "slab", "tokio", "tokio-util", @@ -3070,21 +3164,49 @@ dependencies = [ ] [[package]] -name = "http-body" +name = "http" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", - "pin-project-lite 0.2.14", + "http 0.2.12", + "pin-project-lite", ] [[package]] -name = "http-range-header" -version = "0.3.1" +name = "http-body" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "pin-project-lite", +] [[package]] name = "httparse" @@ -3106,21 +3228,21 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.29" +version = "0.14.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" +checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2", - "http", - "http-body", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", - "pin-project-lite 0.2.14", + "pin-project-lite", "socket2 0.5.7", "tokio", "tower-service", @@ -3128,6 +3250,26 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", +] + [[package]] name = "hyper-rustls" version = "0.24.2" @@ -3135,8 +3277,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", - "http", - "hyper", + "http 0.2.12", + "hyper 0.14.30", "log", "rustls 0.21.12", "rustls-native-certs", @@ -3144,11 +3286,28 @@ dependencies = [ "tokio-rustls", ] +[[package]] +name = "hyper-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da62f120a8a37763efb0cf8fdf264b884c7b8b9ac8660b900c8661030c00e6ba" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "hyper 1.4.1", + "pin-project-lite", + "tokio", + "tower", + "tower-service", +] + [[package]] name = "iana-time-zone" -version = "0.1.60" +version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -3184,6 +3343,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.5.0" @@ -3223,6 +3392,25 @@ dependencies = [ "windows", ] +[[package]] +name = "igd-next" +version = "0.14.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "064d90fec10d541084e7b39ead8875a5a80d9114a2b18791565253bae25f49e4" +dependencies = [ + "async-trait", + "attohttpc", + "bytes", + "futures", + "http 0.2.12", + "hyper 0.14.30", + "log", + "rand", + "tokio", + "url", + "xmltree", +] + [[package]] name = "impl-codec" version = "0.6.0" @@ -3293,9 +3481,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" dependencies = [ "equivalent", "hashbrown 0.14.5", @@ -3359,26 +3547,26 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" [[package]] name = "is-terminal" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi 0.4.0", "libc", "windows-sys 0.52.0", ] [[package]] name = "is_terminal_polyfill" -version = "1.70.0" +version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" @@ -3389,6 +3577,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.12.1" @@ -3406,27 +3603,27 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.69" +version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" dependencies = [ "wasm-bindgen", ] [[package]] name = "jsonrpsee" -version = "0.22.5" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfdb12a2381ea5b2e68c3469ec604a007b367778cdb14d09612c8069ebd616ad" +checksum = "8fd1ead9fb95614e8dc5556d12a8681c2f6d352d0c1d3efc8708c7ccbba47bc6" dependencies = [ "jsonrpsee-core", "jsonrpsee-proc-macros", @@ -3438,19 +3635,20 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.22.5" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4b257e1ec385e07b0255dde0b933f948b5c8b8c28d42afda9587c3a967b896d" +checksum = "ff79651479f69ada7bda604ef2acf3f1aa50755d97cc36d25ff04c2664f9d96f" dependencies = [ - "anyhow", "async-trait", - "beef", + "bytes", "futures-util", - "hyper", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", "jsonrpsee-types", "parking_lot 0.12.3", "rand", - "rustc-hash", + "rustc-hash 2.0.0", "serde", "serde_json", "thiserror", @@ -3460,26 +3658,29 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.22.5" +version = "0.24.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d0bb047e79a143b32ea03974a6bf59b62c2a4c5f5d42a381c907a8bbb3f75c0" +checksum = "a0d4c6bec4909c966f59f52db3655c0e9d4685faae8b49185973d9d7389bb884" dependencies = [ - "heck 0.4.1", - "proc-macro-crate 3.1.0", + "heck 0.5.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "jsonrpsee-server" -version = "0.22.5" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12d8b6a9674422a8572e0b0abb12feeb3f2aeda86528c80d0350c2bd0923ab41" +checksum = "ebe2198e5fd96cf2153ecc123364f699b6e2151317ea09c7bf799c43c2fe1415" dependencies = [ "futures-util", - "http", - "hyper", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", + "hyper-util", "jsonrpsee-core", "jsonrpsee-types", "pin-project", @@ -3497,12 +3698,11 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.22.5" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "150d6168405890a7a3231a3c74843f58b8959471f6df76078db2619ddee1d07d" +checksum = "531e386460425e49679587871a056f2895a47dade21457324ad1262cd78ef6d9" dependencies = [ - "anyhow", - "beef", + "http 1.1.0", "serde", "serde_json", "thiserror", @@ -3572,9 +3772,9 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lazycell" @@ -3584,18 +3784,18 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.155" +version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = 
"d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" [[package]] name = "libloading" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -3606,14 +3806,15 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libp2p" -version = "0.51.4" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f35eae38201a993ece6bdc823292d6abd1bffed1c4d0f4a3517d2bd8e1d917fe" +checksum = "e94495eb319a85b70a68b85e2389a95bb3555c71c49025b78c691a854a7e6464" dependencies = [ "bytes", + "either", "futures", "futures-timer", - "getrandom 0.2.15", + "getrandom", "instant", "libp2p-allow-block-list", "libp2p-connection-limits", @@ -3630,18 +3831,21 @@ dependencies = [ "libp2p-request-response", "libp2p-swarm", "libp2p-tcp", + "libp2p-upnp", "libp2p-wasm-ext", "libp2p-websocket", "libp2p-yamux", - "multiaddr", + "multiaddr 0.18.1", "pin-project", + "rw-stream-sink", + "thiserror", ] [[package]] name = "libp2p-allow-block-list" -version = "0.1.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "510daa05efbc25184458db837f6f9a5143888f1caa742426d92e1833ddd38a50" +checksum = "55b46558c5c0bf99d3e2a1a38fd54ff5476ca66dd1737b12466a1824dd219311" dependencies = [ "libp2p-core", "libp2p-identity", @@ -3651,9 +3855,9 @@ dependencies = [ [[package]] name = "libp2p-connection-limits" -version = "0.1.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4caa33f1d26ed664c4fe2cca81a08c8e07d4c1c04f2f4ac7655c2dd85467fda0" +checksum = "2f5107ad45cb20b2f6c3628c7b6014b996fcb13a88053f4569c872c6e30abf58" dependencies = [ 
"libp2p-core", "libp2p-identity", @@ -3663,9 +3867,9 @@ dependencies = [ [[package]] name = "libp2p-core" -version = "0.39.2" +version = "0.40.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c1df63c0b582aa434fb09b2d86897fa2b419ffeccf934b36f87fcedc8e835c2" +checksum = "dd44289ab25e4c9230d9246c475a22241e301b23e8f4061d3bdef304a1a99713" dependencies = [ "either", "fnv", @@ -3674,8 +3878,8 @@ dependencies = [ "instant", "libp2p-identity", "log", - "multiaddr", - "multihash", + "multiaddr 0.18.1", + "multihash 0.19.1", "multistream-select", "once_cell", "parking_lot 0.12.3", @@ -3685,18 +3889,20 @@ dependencies = [ "rw-stream-sink", "smallvec", "thiserror", - "unsigned-varint", + "unsigned-varint 0.7.2", "void", ] [[package]] name = "libp2p-dns" -version = "0.39.0" +version = "0.40.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146ff7034daae62077c415c2376b8057368042df6ab95f5432ad5e88568b1554" +checksum = "e6a18db73084b4da2871438f6239fef35190b05023de7656e877c18a00541a3b" dependencies = [ + "async-trait", "futures", "libp2p-core", + "libp2p-identity", "log", "parking_lot 0.12.3", "smallvec", @@ -3705,19 +3911,20 @@ dependencies = [ [[package]] name = "libp2p-identify" -version = "0.42.2" +version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5455f472243e63b9c497ff320ded0314254a9eb751799a39c283c6f20b793f3c" +checksum = "45a96638a0a176bec0a4bcaebc1afa8cf909b114477209d7456ade52c61cd9cd" dependencies = [ "asynchronous-codec", "either", "futures", + "futures-bounded", "futures-timer", "libp2p-core", "libp2p-identity", "libp2p-swarm", "log", - "lru 0.10.1", + "lru 0.12.4", "quick-protobuf", "quick-protobuf-codec", "smallvec", @@ -3727,27 +3934,27 @@ dependencies = [ [[package]] name = "libp2p-identity" -version = "0.1.3" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"276bb57e7af15d8f100d3c11cbdd32c6752b7eef4ba7a18ecf464972c07abcce" +checksum = "55cca1eb2bc1fd29f099f3daaab7effd01e1a54b7c577d0ed082521034d912e8" dependencies = [ - "bs58 0.4.0", + "bs58 0.5.1", "ed25519-dalek", - "log", - "multiaddr", - "multihash", + "hkdf", + "multihash 0.19.1", "quick-protobuf", "rand", "sha2 0.10.8", "thiserror", + "tracing", "zeroize", ] [[package]] name = "libp2p-kad" -version = "0.43.3" +version = "0.44.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39d5ef876a2b2323d63c258e63c2f8e36f205fe5a11f0b3095d59635650790ff" +checksum = "16ea178dabba6dde6ffc260a8e0452ccdc8f79becf544946692fff9d412fc29d" dependencies = [ "arrayvec", "asynchronous-codec", @@ -3762,20 +3969,21 @@ dependencies = [ "libp2p-swarm", "log", "quick-protobuf", + "quick-protobuf-codec", "rand", "sha2 0.10.8", "smallvec", "thiserror", "uint", - "unsigned-varint", + "unsigned-varint 0.7.2", "void", ] [[package]] name = "libp2p-mdns" -version = "0.43.1" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19983e1f949f979a928f2c603de1cf180cc0dc23e4ac93a62651ccb18341460b" +checksum = "42a2567c305232f5ef54185e9604579a894fd0674819402bb0ac0246da82f52a" dependencies = [ "data-encoding", "futures", @@ -3786,38 +3994,43 @@ dependencies = [ "log", "rand", "smallvec", - "socket2 0.4.10", + "socket2 0.5.7", "tokio", - "trust-dns-proto", + "trust-dns-proto 0.22.0", "void", ] [[package]] name = "libp2p-metrics" -version = "0.12.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a42ec91e227d7d0dafa4ce88b333cdf5f277253873ab087555c92798db2ddd46" +checksum = "239ba7d28f8d0b5d77760dc6619c05c7e88e74ec8fbbe97f856f20a56745e620" dependencies = [ + "instant", "libp2p-core", "libp2p-identify", + "libp2p-identity", "libp2p-kad", "libp2p-ping", "libp2p-swarm", + "once_cell", "prometheus-client", ] [[package]] name = "libp2p-noise" -version = "0.42.2" +version = "0.43.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c3673da89d29936bc6435bafc638e2f184180d554ce844db65915113f86ec5e" +checksum = "d2eeec39ad3ad0677551907dd304b2f13f17208ccebe333bef194076cd2e8921" dependencies = [ "bytes", - "curve25519-dalek 3.2.0", + "curve25519-dalek", "futures", "libp2p-core", "libp2p-identity", "log", + "multiaddr 0.18.1", + "multihash 0.19.1", "once_cell", "quick-protobuf", "rand", @@ -3825,21 +4038,22 @@ dependencies = [ "snow", "static_assertions", "thiserror", - "x25519-dalek 1.1.1", + "x25519-dalek", "zeroize", ] [[package]] name = "libp2p-ping" -version = "0.42.0" +version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e57759c19c28a73ef1eb3585ca410cefb72c1a709fcf6de1612a378e4219202" +checksum = "e702d75cd0827dfa15f8fd92d15b9932abe38d10d21f47c50438c71dd1b5dae3" dependencies = [ "either", "futures", "futures-timer", "instant", "libp2p-core", + "libp2p-identity", "libp2p-swarm", "log", "rand", @@ -3848,9 +4062,9 @@ dependencies = [ [[package]] name = "libp2p-quic" -version = "0.7.0-alpha.3" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6b26abd81cd2398382a1edfe739b539775be8a90fa6914f39b2ab49571ec735" +checksum = "130d451d83f21b81eb7b35b360bc7972aeafb15177784adc56528db082e6b927" dependencies = [ "bytes", "futures", @@ -3861,18 +4075,20 @@ dependencies = [ "libp2p-tls", "log", "parking_lot 0.12.3", - "quinn-proto", + "quinn 0.10.2", "rand", - "rustls 0.20.9", + "ring 0.16.20", + "rustls 0.21.12", + "socket2 0.5.7", "thiserror", "tokio", ] [[package]] name = "libp2p-request-response" -version = "0.24.1" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffdb374267d42dc5ed5bc53f6e601d4a64ac5964779c6e40bb9e4f14c1e30d5" +checksum = "d8e3b4d67870478db72bac87bfc260ee6641d0734e0e3e275798f089c3fecfd4" dependencies = [ "async-trait", "futures", @@ -3880,15 +4096,17 @@ dependencies = [ 
"libp2p-core", "libp2p-identity", "libp2p-swarm", + "log", "rand", "smallvec", + "void", ] [[package]] name = "libp2p-swarm" -version = "0.42.2" +version = "0.43.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "903b3d592d7694e56204d211f29d31bc004be99386644ba8731fc3e3ef27b296" +checksum = "580189e0074af847df90e75ef54f3f30059aedda37ea5a1659e8b9fca05c0141" dependencies = [ "either", "fnv", @@ -3899,6 +4117,8 @@ dependencies = [ "libp2p-identity", "libp2p-swarm-derive", "log", + "multistream-select", + "once_cell", "rand", "smallvec", "tokio", @@ -3907,36 +4127,39 @@ dependencies = [ [[package]] name = "libp2p-swarm-derive" -version = "0.32.0" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fba456131824ab6acd4c7bf61e9c0f0a3014b5fc9868ccb8e10d344594cdc4f" +checksum = "c4d5ec2a3df00c7836d7696c136274c9c59705bac69133253696a6c932cd1d74" dependencies = [ "heck 0.4.1", + "proc-macro-warning 0.4.2", + "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.77", ] [[package]] name = "libp2p-tcp" -version = "0.39.0" +version = "0.40.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d33698596d7722d85d3ab0c86c2c322254fce1241e91208e3679b4eb3026cf" +checksum = "b558dd40d1bcd1aaaed9de898e9ec6a436019ecc2420dd0016e712fbb61c5508" dependencies = [ "futures", "futures-timer", "if-watch", "libc", "libp2p-core", + "libp2p-identity", "log", - "socket2 0.4.10", + "socket2 0.5.7", "tokio", ] [[package]] name = "libp2p-tls" -version = "0.1.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff08d13d0dc66e5e9ba6279c1de417b84fa0d0adc3b03e5732928c180ec02781" +checksum = "8218d1d5482b122ccae396bbf38abdcb283ecc96fa54760e1dfd251f0546ac61" dependencies = [ "futures", "futures-rustls", @@ -3944,51 +4167,69 @@ dependencies = [ "libp2p-identity", "rcgen", "ring 0.16.20", - "rustls 0.20.9", + "rustls 0.21.12", + "rustls-webpki", "thiserror", - 
"webpki", - "x509-parser", + "x509-parser 0.15.1", "yasna", ] +[[package]] +name = "libp2p-upnp" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82775a47b34f10f787ad3e2a22e2c1541e6ebef4fe9f28f3ac553921554c94c1" +dependencies = [ + "futures", + "futures-timer", + "igd-next", + "libp2p-core", + "libp2p-swarm", + "log", + "tokio", + "void", +] + [[package]] name = "libp2p-wasm-ext" -version = "0.39.0" +version = "0.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77dff9d32353a5887adb86c8afc1de1a94d9e8c3bc6df8b2201d7cdf5c848f43" +checksum = "1e5d8e3a9e07da0ef5b55a9f26c009c8fb3c725d492d8bb4b431715786eea79c" dependencies = [ "futures", "js-sys", "libp2p-core", - "parity-send-wrapper", + "send_wrapper", "wasm-bindgen", "wasm-bindgen-futures", ] [[package]] name = "libp2p-websocket" -version = "0.41.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "111273f7b3d3510524c752e8b7a5314b7f7a1fee7e68161c01a7d72cbb06db9f" +checksum = "004ee9c4a4631435169aee6aad2f62e3984dc031c43b6d29731e8e82a016c538" dependencies = [ "either", "futures", "futures-rustls", "libp2p-core", + "libp2p-identity", "log", "parking_lot 0.12.3", - "quicksink", + "pin-project-lite", "rw-stream-sink", "soketto", + "thiserror", "url", "webpki-roots", ] [[package]] name = "libp2p-yamux" -version = "0.43.1" +version = "0.44.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd21d950662700a385d4c6d68e2f5f54d778e97068cdd718522222ef513bda" +checksum = "8eedcb62824c4300efb9cfd4e2a6edaf3ca097b9e68b36dabe45a44469fd6a85" dependencies = [ "futures", "libp2p-core", @@ -4003,8 +4244,9 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", + "redox_syscall 0.5.4", ] [[package]] @@ -4049,7 
+4291,7 @@ checksum = "5be9b9bb642d8522a44d533eab56c16c738301965504753b03ad1de3425d5451" dependencies = [ "crunchy", "digest 0.9.0", - "subtle 2.6.0", + "subtle 2.6.1", ] [[package]] @@ -4072,9 +4314,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.18" +version = "1.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" +checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" dependencies = [ "cc", "pkg-config", @@ -4138,6 +4380,61 @@ dependencies = [ "keystream", ] +[[package]] +name = "litep2p" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f46c51c205264b834ceed95c8b195026e700494bc3991aaba3b4ea9e20626d9" +dependencies = [ + "async-trait", + "bs58 0.4.0", + "bytes", + "cid 0.10.1", + "ed25519-dalek", + "futures", + "futures-timer", + "hex-literal", + "indexmap 2.5.0", + "libc", + "mockall 0.12.1", + "multiaddr 0.17.1", + "multihash 0.17.0", + "network-interface", + "nohash-hasher", + "parking_lot 0.12.3", + "pin-project", + "prost 0.12.6", + "prost-build 0.11.9", + "quinn 0.9.4", + "rand", + "rcgen", + "ring 0.16.20", + "rustls 0.20.9", + "serde", + "sha2 0.10.8", + "simple-dns", + "smallvec", + "snow", + "socket2 0.5.7", + "static_assertions", + "str0m", + "thiserror", + "tokio", + "tokio-stream", + "tokio-tungstenite", + "tokio-util", + "tracing", + "trust-dns-resolver", + "uint", + "unsigned-varint 0.8.0", + "url", + "webpki", + "x25519-dalek", + "x509-parser 0.16.0", + "yasna", + "zeroize", +] + [[package]] name = "lock_api" version = "0.4.12" @@ -4165,11 +4462,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.10.1" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718e8fae447df0c7e1ba7f5189829e63fd536945c8988d61444c19039f16b670" +checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904" 
dependencies = [ - "hashbrown 0.13.2", + "hashbrown 0.14.5", ] [[package]] @@ -4183,19 +4480,18 @@ dependencies = [ [[package]] name = "lz4" -version = "1.25.0" +version = "1.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6eab492fe7f8651add23237ea56dbf11b3c4ff762ab83d40a47f11433421f91" +checksum = "a231296ca742e418c43660cb68e082486ff2538e8db432bc818580f3965025ed" dependencies = [ - "libc", "lz4-sys", ] [[package]] name = "lz4-sys" -version = "1.9.5" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9764018d143cc854c9f17f0b907de70f14393b1f502da6375dce70f00514eb3" +checksum = "fcb44a01837a858d47e5a630d2ccf304c8efcc4b83b8f9f75b7a9ee4fcc6e57d" dependencies = [ "cc", "libc", @@ -4212,50 +4508,50 @@ dependencies = [ [[package]] name = "macro_magic" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e03844fc635e92f3a0067e25fa4bf3e3dbf3f2927bf3aa01bb7bc8f1c428949d" +checksum = "cc33f9f0351468d26fbc53d9ce00a096c8522ecb42f19b50f34f2c422f76d21d" dependencies = [ "macro_magic_core", "macro_magic_macros", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "macro_magic_core" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "468155613a44cfd825f1fb0ffa532b018253920d404e6fca1e8d43155198a46d" +checksum = "1687dc887e42f352865a393acae7cf79d98fab6351cde1f58e9e057da89bf150" dependencies = [ "const-random", - "derive-syn-parse 0.1.5", + "derive-syn-parse", "macro_magic_core_macros", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "macro_magic_core_macros" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ea73aa640dc01d62a590d48c0c3521ed739d53b27f919b25c3551e233481654" +checksum = "b02abfe41815b5bd98dbd4260173db2c116dda171dc0fe7838cb206333b83308" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "macro_magic_macros" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef9d79ae96aaba821963320eb2b6e34d17df1e5a83d8a1985c29cc5be59577b3" +checksum = "73ea28ee64b88876bf45277ed9a5817c1817df061a74f2b988971a12570e5869" dependencies = [ "macro_magic_core", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -4264,15 +4560,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" -[[package]] -name = "matchers" -version = "0.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f099785f7595cc4b4553a174ce30dd7589ef93391ff414dbb67f62392b9e0ce1" -dependencies = [ - "regex-automata 0.1.10", -] - [[package]] name = "matchers" version = "0.1.0" @@ -4290,9 +4577,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] name = "matrixmultiply" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7574c1cf36da4798ab73da5b215bbf444f50718207754cb522201d78d1cd0ff2" +checksum = "9380b911e3e96d10c1f415da0876389aaf1b56759054eeb0de7df940c456ba1a" dependencies = [ "autocfg", "rawpointer", @@ -4310,7 +4597,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" dependencies = [ - "rustix 0.38.34", + "rustix 0.38.37", ] [[package]] @@ -4324,9 +4611,9 @@ dependencies = [ [[package]] name = "memmap2" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" +checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f" dependencies = [ "libc", ] @@ -4355,7 +4642,7 @@ 
version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f313fcff1d2a4bcaa2deeaa00bf7530d77d5f7bd0467a117dde2e29a75a7a17a" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "blake3", "frame-metadata", "parity-scale-codec", @@ -4371,7 +4658,7 @@ checksum = "58c38e2799fc0978b65dfff8023ec7843e2330bb462f19198840b34b6582397d" dependencies = [ "byteorder", "keccak", - "rand_core 0.6.4", + "rand_core", "zeroize", ] @@ -4383,22 +4670,23 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ - "adler", + "adler2", ] [[package]] name = "mio" -version = "0.8.11" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ + "hermit-abi 0.3.9", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.48.0", + "wasi", + "windows-sys 0.52.0", ] [[package]] @@ -4412,7 +4700,7 @@ dependencies = [ "bitflags 1.3.2", "blake2 0.10.6", "c2-chacha", - "curve25519-dalek 4.1.3", + "curve25519-dalek", "either", "hashlink", "lioness", @@ -4421,7 +4709,7 @@ dependencies = [ "rand", "rand_chacha", "rand_distr", - "subtle 2.6.0", + "subtle 2.6.1", "thiserror", "zeroize", ] @@ -4436,8 +4724,23 @@ dependencies = [ "downcast", "fragile", "lazy_static", - "mockall_derive", - "predicates", + "mockall_derive 0.11.4", + "predicates 2.1.5", + "predicates-tree", +] + +[[package]] +name = "mockall" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"43766c2b5203b10de348ffe19f7e54564b64f3d6018ff7648d1e2d6d3a0f0a48" +dependencies = [ + "cfg-if", + "downcast", + "fragile", + "lazy_static", + "mockall_derive 0.12.1", + "predicates 3.1.2", "predicates-tree", ] @@ -4453,6 +4756,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "mockall_derive" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cbce79ec385a1d4f54baa90a76401eb15d9cab93685f62e7e9f942aa00ae2" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.77", +] + [[package]] name = "multiaddr" version = "0.17.1" @@ -4464,11 +4779,30 @@ dependencies = [ "data-encoding", "log", "multibase", - "multihash", + "multihash 0.17.0", + "percent-encoding", + "serde", + "static_assertions", + "unsigned-varint 0.7.2", + "url", +] + +[[package]] +name = "multiaddr" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b852bc02a2da5feed68cd14fa50d0774b92790a5bdbfa932a813926c8472070" +dependencies = [ + "arrayref", + "byteorder", + "data-encoding", + "libp2p-identity", + "multibase", + "multihash 0.19.1", "percent-encoding", "serde", "static_assertions", - "unsigned-varint", + "unsigned-varint 0.7.2", "url", ] @@ -4497,7 +4831,34 @@ dependencies = [ "multihash-derive", "sha2 0.10.8", "sha3", - "unsigned-varint", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "multihash" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfd8a792c1694c6da4f68db0a9d707c72bd260994da179e6030a5dcee00bb815" +dependencies = [ + "blake2b_simd", + "blake2s_simd", + "blake3", + "core2", + "digest 0.10.7", + "multihash-derive", + "sha2 0.10.8", + "sha3", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "multihash" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "076d548d76a0e2a0d4ab471d0b1c36c577786dfc4471242035d97a12a735c492" +dependencies = [ + "core2", + "unsigned-varint 
0.7.2", ] [[package]] @@ -4511,7 +4872,7 @@ dependencies = [ "proc-macro2", "quote", "syn 1.0.109", - "synstructure", + "synstructure 0.12.6", ] [[package]] @@ -4520,18 +4881,24 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +[[package]] +name = "multimap" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" + [[package]] name = "multistream-select" -version = "0.12.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8552ab875c1313b97b8d20cb857b9fd63e2d1d6a0a1b53ce9821e575405f27a" +checksum = "ea0df8e5eec2298a62b326ee4f0d7fe1a6b90a09dfcf9df37b38f947a8c42f19" dependencies = [ "bytes", "futures", "log", "pin-project", "smallvec", - "unsigned-varint", + "unsigned-varint 0.7.2", ] [[package]] @@ -4552,13 +4919,13 @@ dependencies = [ [[package]] name = "nalgebra-macros" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91761aed67d03ad966ef783ae962ef9bbaca728d2dd7ceb7939ec110fffad998" +checksum = "254a5372af8fc138e36684761d3c0cdb758a4410e938babcff1c860ce14ddbfc" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.77", ] [[package]] @@ -4649,6 +5016,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "network-interface" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a43439bf756eed340bdf8feba761e2d50c7d47175d87545cd5cbe4a137c4d1" +dependencies = [ + "cc", + "libc", + "thiserror", + "winapi", +] + [[package]] name = "nix" version = "0.24.3" @@ -4677,7 +5056,7 @@ dependencies = [ "frame-system", "futures", "jsonrpsee", - "memmap2 0.9.4", + "memmap2 0.9.5", "node-subtensor-runtime", "pallet-commitments", "pallet-transaction-payment", @@ -4770,9 +5149,9 @@ 
dependencies = [ "sp-offchain", "sp-runtime", "sp-session", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-transaction-pool", "sp-version", "substrate-wasm-builder", @@ -4796,6 +5175,12 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nonempty" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9e591e719385e6ebaeb5ce5d3887f7d5676fceca6411d1925ccc95745f3d6f7" + [[package]] name = "nonzero_ext" version = "0.3.0" @@ -4820,9 +5205,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", @@ -4915,9 +5300,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.0" +version = "0.36.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "576dfe1fc8f9df304abb159d767a29d0476f7750fbf8aa7ad07816004a207434" +checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" dependencies = [ "memchr", ] @@ -4928,7 +5313,16 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bedf36ffb6ba96c2eb7144ef6270557b52e54b20c0a8e1eb2ff99a6c6959bff" dependencies = [ - "asn1-rs", + "asn1-rs 0.5.2", +] + +[[package]] +name = "oid-registry" +version = 
"0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d8034d9489cdaf79228eb9f6a3b8d7bb32ba00d6645ebd48eef4077ceb5bd9" +dependencies = [ + "asn1-rs 0.6.2", ] [[package]] @@ -4949,12 +5343,60 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "openssl" +version = "0.10.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +dependencies = [ + "bitflags 2.6.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] + [[package]] name = "openssl-probe" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +[[package]] +name = "openssl-src" +version = "300.3.2+3.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a211a18d945ef7e648cc6e0058f4c548ee46aab922ea203e0d30e966ea23647b" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -4984,8 +5426,8 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-weights", "substrate-fixed", "subtensor-macros", @@ -4994,7 +5436,7 @@ dependencies = [ [[package]] name = "pallet-aura" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-support", "frame-system", @@ -5005,13 +5447,12 @@ dependencies = [ "sp-application-crypto", "sp-consensus-aura", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-authorship" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-support", "frame-system", @@ -5019,13 +5460,12 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-balances" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "docify", "frame-benchmarking", @@ -5035,7 +5475,6 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] @@ -5051,7 +5490,7 @@ dependencies = [ "sp-core", "sp-io", 
"sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "subtensor-macros", ] @@ -5069,14 +5508,14 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "subtensor-macros", ] [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-benchmarking", "frame-support", @@ -5093,13 +5532,12 @@ dependencies = [ "sp-runtime", "sp-session", "sp-staking", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-insecure-randomness-collective-flip" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-support", "frame-system", @@ -5107,13 +5545,12 @@ dependencies = [ "safe-mix", "scale-info", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-membership" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-benchmarking", "frame-support", @@ -5124,13 +5561,12 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-multisig" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-benchmarking", "frame-support", @@ -5140,13 +5576,12 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-preimage" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-benchmarking", "frame-support", @@ -5157,13 +5592,12 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-proxy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-benchmarking", "frame-support", @@ -5172,7 +5606,6 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] @@ -5188,14 +5621,14 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "subtensor-macros", ] [[package]] name = "pallet-safe-mode" version = "9.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "docify", "frame-benchmarking", @@ -5208,13 +5641,12 @@ dependencies = [ "scale-info", "sp-arithmetic", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-scheduler" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "docify", "frame-benchmarking", @@ -5225,14 +5657,13 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-weights", ] [[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-support", "frame-system", @@ -5247,7 +5678,6 @@ dependencies = [ "sp-session", "sp-staking", "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "sp-trie", ] @@ -5282,8 +5712,8 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-version", "substrate-fixed", 
"subtensor-macros", @@ -5292,7 +5722,7 @@ dependencies = [ [[package]] name = "pallet-sudo" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "docify", "frame-benchmarking", @@ -5302,13 +5732,12 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-timestamp" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "docify", "frame-benchmarking", @@ -5320,15 +5749,14 @@ dependencies = [ "sp-inherents", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-timestamp", ] [[package]] name = "pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-support", "frame-system", @@ -5338,13 +5766,12 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "pallet-transaction-payment-rpc" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" 
+source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "jsonrpsee", "pallet-transaction-payment-rpc-runtime-api", @@ -5360,7 +5787,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -5372,7 +5799,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-benchmarking", "frame-support", @@ -5382,7 +5809,6 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] @@ -5393,7 +5819,7 @@ checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" dependencies = [ "bitcoin_hashes", "rand", - "rand_core 0.6.4", + "rand_core", "serde", "unicode-normalization", ] @@ -5440,18 +5866,12 @@ version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", "syn 1.0.109", ] -[[package]] -name = "parity-send-wrapper" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa9777aa91b8ad9dd5aaa04a9b6bcb02c7f1deb952fca5a66034d5e63afc5c6f" - [[package]] name = "parity-util-mem" version = "0.12.0" @@ -5478,7 +5898,7 @@ 
checksum = "f557c32c6d268a07c921471619c0295f5efad3a0e76d4f97a05c091a51d110b2" dependencies = [ "proc-macro2", "syn 1.0.109", - "synstructure", + "synstructure 0.12.6", ] [[package]] @@ -5489,9 +5909,9 @@ checksum = "e1ad0aff30c1da14b1254fcb2af73e1fa9a28670e584a626f53a369d0e157304" [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" @@ -5536,9 +5956,9 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.2", + "redox_syscall 0.5.4", "smallvec", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -5554,8 +5974,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" dependencies = [ "base64ct", - "rand_core 0.6.4", - "subtle 2.6.0", + "rand_core", + "subtle 2.6.1", ] [[package]] @@ -5597,9 +6017,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.10" +version = "2.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "560131c633294438da9f7c4b08189194b20946c8274c6b9e38881a7874dc8ee8" +checksum = "9c73c26c01b8c87956cea613c907c9d6ecffd8d18a2a5908e5de0adfaa185cea" dependencies = [ "memchr", "thiserror", @@ -5608,9 +6028,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.10" +version = "2.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26293c9193fbca7b1a3bf9b79dc1e388e927e6cacaa78b4a3ab705a1d3d41459" +checksum = "664d22978e2815783adbdd2c588b455b1bd625299ce36b2a99881ac9627e6d8d" dependencies = [ "pest", "pest_generator", @@ -5618,22 +6038,22 @@ dependencies = [ 
[[package]] name = "pest_generator" -version = "2.7.10" +version = "2.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ec22af7d3fb470a85dd2ca96b7c577a1eb4ef6f1683a9fe9a8c16e136c04687" +checksum = "a2d5487022d5d33f4c30d91c22afa240ce2a644e87fe08caad974d4eab6badbe" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "pest_meta" -version = "2.7.10" +version = "2.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7a240022f37c361ec1878d646fc5b7d7c4d28d5946e1a80ad5a7a4f4ca0bdcd" +checksum = "0091754bbd0ea592c4deb3a122ce8ecbb0753b738aa82bc055fcc2eccc8d8174" dependencies = [ "once_cell", "pest", @@ -5647,7 +6067,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.2.6", + "indexmap 2.5.0", ] [[package]] @@ -5667,15 +6087,9 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] -[[package]] -name = "pin-project-lite" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" - [[package]] name = "pin-project-lite" version = "0.2.14" @@ -5753,7 +6167,7 @@ dependencies = [ "polkavm-common", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -5763,7 +6177,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ba81f7b5faac81e528eb6158a6f3c9e0bb1008e0ffa19653bc8dea925ecb429" dependencies = [ "polkavm-derive-impl", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -5789,17 +6203,17 @@ checksum = "26e85d3456948e650dff0cfc85603915847faf893ed1e66b020bb82ef4557120" [[package]] name = "polling" -version = "3.7.2" +version = "3.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" +checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511" dependencies = [ "cfg-if", "concurrent-queue", "hermit-abi 0.4.0", - "pin-project-lite 0.2.14", - "rustix 0.38.34", + "pin-project-lite", + "rustix 0.38.37", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -5827,9 +6241,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" +checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" [[package]] name = "powerfmt" @@ -5839,9 +6253,12 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "predicates" @@ -5857,17 +6274,27 @@ dependencies = [ "regex", ] +[[package]] +name = "predicates" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97" +dependencies = [ + "anstyle", + "predicates-core", +] + [[package]] name = "predicates-core" -version = "1.0.6" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" +checksum = "ae8177bee8e75d6846599c6b9ff679ed51e882816914eec639944d7c9aa11931" [[package]] name = "predicates-tree" -version = "1.0.9" +version = "1.0.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" +checksum = "41b740d195ed3166cd147c8047ec98db0e22ec019eb8eeb76d343b795304fb13" dependencies = [ "predicates-core", "termtree", @@ -5885,12 +6312,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.20" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" +checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" dependencies = [ "proc-macro2", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -5919,11 +6346,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.1.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit 0.21.1", + "toml_edit", ] [[package]] @@ -5950,6 +6377,17 @@ dependencies = [ "version_check", ] +[[package]] +name = "proc-macro-warning" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d1eaa7fa0aa1929ffdf7eeb6eac234dde6268914a14ad44d23521ab6a9b258e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] + [[package]] name = "proc-macro-warning" version = "1.0.2" @@ -5958,7 +6396,7 @@ checksum = "834da187cfe638ae8abb0203f0b33e5ccdb02a28e7199f2f47b3e2754f50edca" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -5986,9 +6424,9 @@ dependencies = [ [[package]] name = "prometheus-client" -version = "0.19.0" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d6fa99d535dd930d1249e6c79cb3c2915f9172a540fe2b02a4c8f9ca954721e" +checksum = 
"3c99afa9a01501019ac3a14d71d9f94050346f55ca471ce90c799a15c58f61e2" dependencies = [ "dtoa", "itoa", @@ -6004,7 +6442,7 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -6038,17 +6476,38 @@ dependencies = [ "itertools 0.10.5", "lazy_static", "log", - "multimap", + "multimap 0.8.3", "petgraph", "prettyplease 0.1.25", "prost 0.11.9", - "prost-types", + "prost-types 0.11.9", "regex", "syn 1.0.109", "tempfile", "which", ] +[[package]] +name = "prost-build" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" +dependencies = [ + "bytes", + "heck 0.5.0", + "itertools 0.12.1", + "log", + "multimap 0.10.0", + "once_cell", + "petgraph", + "prettyplease 0.2.22", + "prost 0.12.6", + "prost-types 0.12.6", + "regex", + "syn 2.0.77", + "tempfile", +] + [[package]] name = "prost-derive" version = "0.11.9" @@ -6072,7 +6531,7 @@ dependencies = [ "itertools 0.12.1", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -6084,11 +6543,20 @@ dependencies = [ "prost 0.11.9", ] +[[package]] +name = "prost-types" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" +dependencies = [ + "prost 0.12.6", +] + [[package]] name = "psm" -version = "0.1.21" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" +checksum = "aa37f80ca58604976033fae9515a8a2989fc13797d953f7c04fb8fa36a11f205" dependencies = [ "cc", ] @@ -6103,7 +6571,7 @@ dependencies = [ "libc", "once_cell", "raw-cpuid", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "web-sys", "winapi", ] @@ -6125,26 +6593,51 @@ dependencies = [ [[package]] name = 
"quick-protobuf-codec" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1693116345026436eb2f10b677806169c1a1260c1c60eaaffe3fb5a29ae23d8b" +checksum = "f8ededb1cd78531627244d51dd0c7139fbe736c7d57af0092a76f0ffb2f56e98" dependencies = [ "asynchronous-codec", "bytes", "quick-protobuf", "thiserror", - "unsigned-varint", + "unsigned-varint 0.7.2", ] [[package]] -name = "quicksink" -version = "0.1.2" +name = "quinn" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77de3c815e5a160b1539c6592796801df2043ae35e123b46d73380cfa57af858" +checksum = "2e8b432585672228923edbbf64b8b12c14e1112f62e88737655b4a083dbcd78e" dependencies = [ - "futures-core", - "futures-sink", - "pin-project-lite 0.1.12", + "bytes", + "pin-project-lite", + "quinn-proto 0.9.6", + "quinn-udp 0.3.2", + "rustc-hash 1.1.0", + "rustls 0.20.9", + "thiserror", + "tokio", + "tracing", + "webpki", +] + +[[package]] +name = "quinn" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cc2c5017e4b43d5995dcea317bc46c1e09404c0a9664d2908f7f02dfe943d75" +dependencies = [ + "bytes", + "futures-io", + "pin-project-lite", + "quinn-proto 0.10.6", + "quinn-udp 0.4.1", + "rustc-hash 1.1.0", + "rustls 0.21.12", + "thiserror", + "tokio", + "tracing", ] [[package]] @@ -6156,7 +6649,7 @@ dependencies = [ "bytes", "rand", "ring 0.16.20", - "rustc-hash", + "rustc-hash 1.1.0", "rustls 0.20.9", "slab", "thiserror", @@ -6165,11 +6658,54 @@ dependencies = [ "webpki", ] +[[package]] +name = "quinn-proto" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "141bf7dfde2fbc246bfd3fe12f2455aa24b0fbd9af535d8c86c7bd1381ff2b1a" +dependencies = [ + "bytes", + "rand", + "ring 0.16.20", + "rustc-hash 1.1.0", + "rustls 0.21.12", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "641538578b21f5e5c8ea733b736895576d0fe329bb883b937db6f4d163dbaaf4" +dependencies = [ + "libc", + "quinn-proto 0.9.6", + "socket2 0.4.10", + "tracing", + "windows-sys 0.42.0", +] + +[[package]] +name = "quinn-udp" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "055b4e778e8feb9f93c4e439f71dc2156ef13360b432b799e179a8c4cdf0b1d7" +dependencies = [ + "bytes", + "libc", + "socket2 0.5.7", + "tracing", + "windows-sys 0.48.0", +] + [[package]] name = "quote" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] @@ -6188,7 +6724,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -6198,16 +6734,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", + "rand_core", ] [[package]] @@ -6216,7 +6743,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom", ] [[package]] @@ -6235,16 +6762,16 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e" dependencies = 
[ - "rand_core 0.6.4", + "rand_core", ] [[package]] name = "raw-cpuid" -version = "11.0.2" +version = "11.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e29830cbb1290e404f24c73af91c5d8d631ce7e128691e9477556b540cd01ecd" +checksum = "cb9ee317cfe3fbd54b36a511efc1edd42e216903c9cd575e686dd68a2ba90d8d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] @@ -6296,29 +6823,20 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "redox_syscall" -version = "0.5.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" +checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] name = "redox_users" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom 0.2.15", + "getrandom", "libredox", "thiserror", ] @@ -6340,7 +6858,7 @@ checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -6363,16 +6881,16 @@ checksum = "ad156d539c879b7a24a363a2016d77961786e71f48f2e2fc8302a92abd2429a6" dependencies = [ "hashbrown 0.13.2", "log", - "rustc-hash", + "rustc-hash 1.1.0", "slice-group-by", "smallvec", ] [[package]] name = "regex" -version = "1.10.5" +version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", @@ -6429,13 +6947,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ "hmac 0.12.1", - "subtle 2.6.0", + "subtle 2.6.1", ] [[package]] name = "ring" version = "0.1.0" -source = "git+https://github.com/w3f/ring-proof#665f5f51af5734c7b6d90b985dd6861d4c5b4752" +source = "git+https://github.com/w3f/ring-proof?rev=665f5f5#665f5f51af5734c7b6d90b985dd6861d4c5b4752" dependencies = [ "ark-ec", "ark-ff", @@ -6472,7 +6990,7 @@ checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom", "libc", "spin 0.9.8", "untrusted 0.9.0", @@ -6553,6 +7071,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hash" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" + [[package]] name = "rustc-hex" version = "2.1.0" @@ -6570,9 +7094,9 @@ dependencies = [ [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver 1.0.23", ] @@ -6602,11 +7126,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.34" +version = "0.38.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = 
"8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys 0.4.14", @@ -6619,7 +7143,6 @@ version = "0.20.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" dependencies = [ - "log", "ring 0.16.20", "sct", "webpki", @@ -6676,9 +7199,9 @@ checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "rw-stream-sink" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26338f5e09bb721b85b135ea05af7767c90b52f6de4f087d4f4a3a9d64e7dc04" +checksum = "d8c9026ff5d2f23da5e45bbc283f156383001bfb09c4e44256d02c1a685fe9a1" dependencies = [ "futures", "pin-project", @@ -6721,18 +7244,18 @@ dependencies = [ [[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "log", "sp-core", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "thiserror", ] [[package]] name = "sc-basic-authorship" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "futures", "futures-timer", @@ -6754,7 +7277,7 @@ dependencies = [ [[package]] name = "sc-block-builder" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source 
= "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "sp-api", @@ -6768,13 +7291,13 @@ dependencies = [ [[package]] name = "sc-chain-spec" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "28.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "docify", "log", - "memmap2 0.9.4", + "memmap2 0.9.5", "parity-scale-codec", "sc-chain-spec-derive", "sc-client-api", @@ -6790,30 +7313,31 @@ dependencies = [ "sp-io", "sp-runtime", "sp-state-machine", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", ] [[package]] name = "sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "sc-cli" version = "0.36.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "chrono", "clap", "fdlimit", "futures", - "itertools 0.10.5", + "itertools 0.11.0", "libp2p-identity", "log", "names", @@ -6847,7 +7371,7 @@ dependencies = [ [[package]] name = "sc-client-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "fnv", "futures", @@ -6862,11 +7386,11 @@ dependencies = [ "sp-consensus", "sp-core", "sp-database", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-runtime", "sp-state-machine", "sp-statement-store", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-trie", "substrate-prometheus-endpoint", ] @@ -6874,7 +7398,7 @@ dependencies = [ [[package]] name = "sc-client-db" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "hash-db", "kvdb", @@ -6900,16 +7424,15 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "futures", - "futures-timer", - "libp2p-identity", "log", - "mockall", + "mockall 0.11.4", "parking_lot 0.12.3", "sc-client-api", + "sc-network-types", "sc-utils", "serde", "sp-api", @@ -6925,7 +7448,7 @@ dependencies = [ [[package]] name = "sc-consensus-aura" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", 
"futures", @@ -6954,10 +7477,10 @@ dependencies = [ [[package]] name = "sc-consensus-grandpa" version = "0.19.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "ahash 0.8.11", - "array-bytes 6.2.3", + "array-bytes", "async-trait", "dyn-clone", "finality-grandpa", @@ -6976,6 +7499,7 @@ dependencies = [ "sc-network-common", "sc-network-gossip", "sc-network-sync", + "sc-network-types", "sc-telemetry", "sc-transaction-pool-api", "sc-utils", @@ -6997,7 +7521,7 @@ dependencies = [ [[package]] name = "sc-consensus-grandpa-rpc" version = "0.19.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "finality-grandpa", "futures", @@ -7017,7 +7541,7 @@ dependencies = [ [[package]] name = "sc-consensus-slots" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "futures", @@ -7040,7 +7564,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "parking_lot 0.12.3", @@ -7050,25 +7574,25 @@ dependencies = [ "schnellru", "sp-api", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + 
"sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-io", "sp-panic-handler", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-trie", "sp-version", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "tracing", ] [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "polkavm", "sc-allocator", "sp-maybe-compressed-blob", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "thiserror", "wasm-instrument", ] @@ -7076,18 +7600,18 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "log", "polkavm", "sc-executor-common", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", ] [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "anyhow", "cfg-if", @@ -7097,17 +7621,17 @@ dependencies = [ "rustix 0.36.17", "sc-allocator", "sc-executor-common", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "wasmtime", ] [[package]] name = "sc-informant" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "ansi_term", + "console", "futures", "futures-timer", "log", @@ -7122,9 +7646,9 @@ dependencies = [ [[package]] name = "sc-keystore" version = "25.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "parking_lot 0.12.3", "serde_json", "sp-application-crypto", @@ -7136,22 +7660,22 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 4.2.0", + "array-bytes", "arrayvec", "blake2 0.10.6", "bytes", "futures", "futures-timer", - "libp2p-identity", "log", "mixnet", - "multiaddr", + 
"multiaddr 0.18.1", "parity-scale-codec", "parking_lot 0.12.3", "sc-client-api", "sc-network", + "sc-network-types", "sc-transaction-pool-api", "sp-api", "sp-consensus", @@ -7165,13 +7689,14 @@ dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "async-channel", "async-trait", "asynchronous-codec", "bytes", + "cid 0.9.0", "either", "fnv", "futures", @@ -7179,16 +7704,22 @@ dependencies = [ "ip_network", "libp2p", "linked_hash_set", + "litep2p", "log", - "mockall", + "mockall 0.11.4", + "once_cell", "parity-scale-codec", "parking_lot 0.12.3", "partial_sort", "pin-project", + "prost 0.12.6", + "prost-build 0.12.6", "rand", "sc-client-api", "sc-network-common", + "sc-network-types", "sc-utils", + "schnellru", "serde", "serde_json", "smallvec", @@ -7200,43 +7731,25 @@ dependencies = [ "thiserror", "tokio", "tokio-stream", - "unsigned-varint", + "unsigned-varint 0.7.2", + "void", "wasm-timer", "zeroize", ] -[[package]] -name = "sc-network-bitswap" -version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" -dependencies = [ - "async-channel", - "cid", - "futures", - "libp2p-identity", - "log", - "prost 0.12.6", - "prost-build", - "sc-client-api", - "sc-network", - "sp-blockchain", - "sp-runtime", - "thiserror", - "unsigned-varint", -] - [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "bitflags 1.3.2", 
"futures", "libp2p-identity", "parity-scale-codec", - "prost-build", + "prost-build 0.12.6", "sc-consensus", + "sc-network-types", "sp-consensus", "sp-consensus-grandpa", "sp-runtime", @@ -7245,16 +7758,16 @@ dependencies = [ [[package]] name = "sc-network-gossip" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "ahash 0.8.11", "futures", "futures-timer", - "libp2p", "log", "sc-network", "sc-network-common", "sc-network-sync", + "sc-network-types", "schnellru", "sp-runtime", "substrate-prometheus-endpoint", @@ -7264,18 +7777,18 @@ dependencies = [ [[package]] name = "sc-network-light" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "async-channel", "futures", - "libp2p-identity", "log", "parity-scale-codec", "prost 0.12.6", - "prost-build", + "prost-build 0.12.6", "sc-client-api", "sc-network", + "sc-network-types", "sp-blockchain", "sp-core", "sp-runtime", @@ -7285,9 +7798,9 @@ dependencies = [ [[package]] name = "sc-network-sync" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "async-channel", "async-trait", "fork-tree", @@ -7295,14 +7808,15 @@ dependencies = [ "futures-timer", "libp2p", "log", - "mockall", + "mockall 0.11.4", "parity-scale-codec", "prost 0.12.6", - "prost-build", + "prost-build 0.12.6", 
"sc-client-api", "sc-consensus", "sc-network", "sc-network-common", + "sc-network-types", "sc-utils", "schnellru", "smallvec", @@ -7321,35 +7835,51 @@ dependencies = [ [[package]] name = "sc-network-transactions" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "futures", - "libp2p", "log", "parity-scale-codec", "sc-network", "sc-network-common", "sc-network-sync", + "sc-network-types", "sc-utils", "sp-consensus", "sp-runtime", "substrate-prometheus-endpoint", ] +[[package]] +name = "sc-network-types" +version = "0.10.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" +dependencies = [ + "bs58 0.5.1", + "ed25519-dalek", + "libp2p-identity", + "litep2p", + "log", + "multiaddr 0.18.1", + "multihash 0.19.1", + "rand", + "thiserror", + "zeroize", +] + [[package]] name = "sc-offchain" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "bytes", "fnv", "futures", "futures-timer", - "hyper", + "hyper 0.14.30", "hyper-rustls", - "libp2p", "log", "num_cpus", "once_cell", @@ -7359,11 +7889,12 @@ dependencies = [ "sc-client-api", "sc-network", "sc-network-common", + "sc-network-types", "sc-transaction-pool-api", "sc-utils", "sp-api", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-keystore", "sp-offchain", "sp-runtime", @@ 
-7374,7 +7905,7 @@ dependencies = [ [[package]] name = "sc-proposer-metrics" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "log", "substrate-prometheus-endpoint", @@ -7383,7 +7914,7 @@ dependencies = [ [[package]] name = "sc-rpc" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "futures", "jsonrpsee", @@ -7415,7 +7946,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "jsonrpsee", "parity-scale-codec", @@ -7435,14 +7966,20 @@ dependencies = [ [[package]] name = "sc-rpc-server" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ + "dyn-clone", + "forwarded-header-value", "futures", "governor", - "http", - "hyper", + "http 1.1.0", + "http-body-util", + "hyper 1.4.1", + "ip_network", "jsonrpsee", "log", + "sc-rpc-api", + "serde", "serde_json", "substrate-prometheus-endpoint", "tokio", @@ -7453,9 +7990,9 @@ dependencies = [ [[package]] name = "sc-rpc-spec-v2" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "futures", "futures-util", "hex", @@ -7469,6 +8006,7 @@ dependencies = [ "sc-rpc", "sc-transaction-pool-api", "sc-utils", + "schnellru", "serde", "sp-api", "sp-blockchain", @@ -7484,7 +8022,7 @@ dependencies = [ [[package]] name = "sc-service" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "directories", @@ -7505,11 +8043,11 @@ dependencies = [ "sc-informant", "sc-keystore", "sc-network", - "sc-network-bitswap", "sc-network-common", "sc-network-light", "sc-network-sync", "sc-network-transactions", + "sc-network-types", "sc-rpc", "sc-rpc-server", "sc-rpc-spec-v2", @@ -7526,12 +8064,12 @@ dependencies = [ "sp-blockchain", "sp-consensus", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-keystore", "sp-runtime", "sp-session", "sp-state-machine", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-transaction-pool", "sp-transaction-storage-proof", "sp-trie", @@ -7548,7 +8086,7 @@ dependencies = [ [[package]] name = "sc-state-db" version = "0.30.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "log", "parity-scale-codec", @@ -7559,7 +8097,7 @@ dependencies = [ 
[[package]] name = "sc-sysinfo" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "derive_more", "futures", @@ -7574,13 +8112,13 @@ dependencies = [ "sp-core", "sp-crypto-hashing", "sp-io", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", ] [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "chrono", "futures", @@ -7589,6 +8127,7 @@ dependencies = [ "parking_lot 0.12.3", "pin-project", "rand", + "sc-network", "sc-utils", "serde", "serde_json", @@ -7599,18 +8138,17 @@ dependencies = [ [[package]] name = "sc-tracing" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "ansi_term", "chrono", + "console", "is-terminal", "lazy_static", "libc", "log", "parity-scale-codec", "parking_lot 0.12.3", - "regex", - "rustc-hash", + "rustc-hash 1.1.0", "sc-client-api", "sc-tracing-proc-macro", "serde", @@ -7619,28 +8157,28 @@ dependencies = [ "sp-core", "sp-rpc", "sp-runtime", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "thiserror", "tracing", - "tracing-log 0.1.4", - "tracing-subscriber 0.2.25", + "tracing-log", + 
"tracing-subscriber", ] [[package]] name = "sc-tracing-proc-macro" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "sc-transaction-pool" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "futures", @@ -7658,7 +8196,7 @@ dependencies = [ "sp-core", "sp-crypto-hashing", "sp-runtime", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-transaction-pool", "substrate-prometheus-endpoint", "thiserror", @@ -7667,7 +8205,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "futures", @@ -7683,7 +8221,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-channel", "futures", @@ -7738,7 +8276,7 @@ version = "2.11.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" dependencies = [ - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", "syn 1.0.109", @@ -7752,11 +8290,11 @@ checksum = "f0cded6518aa0bd6c1be2b88ac81bf7044992f0f154bfbabd5ad34f43512abcb" [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -7779,13 +8317,13 @@ dependencies = [ "aead", "arrayref", "arrayvec", - "curve25519-dalek 4.1.3", + "curve25519-dalek", "getrandom_or_panic", "merlin", - "rand_core 0.6.4", + "rand_core", "serde_bytes", "sha2 0.10.8", - "subtle 2.6.0", + "subtle 2.6.1", "zeroize", ] @@ -7811,6 +8349,21 @@ dependencies = [ "untrusted 0.9.0", ] +[[package]] +name = "sctp-proto" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6220f78bb44c15f326b0596113305f6101097a18755d53727a575c97e09fb24" +dependencies = [ + "bytes", + "crc", + "fxhash", + "log", + "rand", + "slab", + "thiserror", +] + [[package]] name = "sec1" version = "0.7.3" @@ -7822,7 +8375,7 @@ dependencies = [ "generic-array 0.14.7", "pkcs8", "serdect", - "subtle 2.6.0", + "subtle 2.6.1", "zeroize", ] @@ -7855,11 +8408,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -7868,9 +8421,9 @@ 
dependencies = [ [[package]] name = "security-framework-sys" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" +checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" dependencies = [ "core-foundation-sys", "libc", @@ -7909,11 +8462,17 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + [[package]] name = "serde" -version = "1.0.204" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" dependencies = [ "serde_derive", ] @@ -7929,40 +8488,41 @@ dependencies = [ [[package]] name = "serde_bytes" -version = "0.11.14" +version = "0.11.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" +checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" dependencies = [ "serde", ] [[package]] name = "serde_derive" -version = "1.0.204" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] [[package]] name = "serde_spanned" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" dependencies = [ "serde", ] @@ -7992,7 +8552,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -8007,15 +8567,34 @@ dependencies = [ [[package]] name = "sha-1" -version = "0.9.8" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", + "sha1-asm", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha1-asm" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +checksum = "286acebaf8b67c1130aedffad26f594eff0c1292389158135327d2e23aed582b" dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - "digest 0.9.0", - "opaque-debug 0.3.1", + "cc", ] [[package]] @@ -8083,7 +8662,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -8099,6 +8678,15 @@ dependencies = [ "wide", ] +[[package]] +name = "simple-dns" +version = 
"0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cae9a3fcdadafb6d97f4c0e007e4247b114ee0f119f650c3cbf3a8b3a1479694" +dependencies = [ + "bitflags 2.6.0", +] + [[package]] name = "simple-mermaid" version = "0.1.1" @@ -8147,12 +8735,12 @@ dependencies = [ "aes-gcm", "blake2 0.10.6", "chacha20poly1305", - "curve25519-dalek 4.1.3", - "rand_core 0.6.4", + "curve25519-dalek", + "rand_core", "ring 0.17.8", - "rustc_version 0.4.0", + "rustc_version 0.4.1", "sha2 0.10.8", - "subtle 2.6.0", + "subtle 2.6.1", ] [[package]] @@ -8177,38 +8765,37 @@ dependencies = [ [[package]] name = "soketto" -version = "0.7.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" +checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" dependencies = [ - "base64 0.13.1", + "base64 0.22.1", "bytes", - "flate2", "futures", - "http", + "http 1.1.0", "httparse", "log", "rand", - "sha-1", + "sha1", ] [[package]] name = "sp-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ + "docify", "hash-db", "log", "parity-scale-codec", "scale-info", "sp-api-proc-macro", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-metadata-ir", "sp-runtime", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", 
"sp-trie", "sp-version", "thiserror", @@ -8217,34 +8804,33 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "Inflector", "blake2 0.10.6", "expander", - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "scale-info", "serde", "sp-core", "sp-io", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", ] [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "docify", "integer-sqrt", @@ -8252,7 +8838,6 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", "static_assertions", ] @@ -8277,7 +8862,7 @@ dependencies = [ [[package]] name = "sp-block-builder" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "sp-api", "sp-inherents", @@ -8287,25 +8872,26 @@ dependencies = [ [[package]] name = 
"sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "futures", - "log", "parity-scale-codec", "parking_lot 0.12.3", "schnellru", "sp-api", "sp-consensus", + "sp-core", "sp-database", "sp-runtime", "sp-state-machine", "thiserror", + "tracing", ] [[package]] name = "sp-consensus" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "futures", @@ -8320,7 +8906,7 @@ dependencies = [ [[package]] name = "sp-consensus-aura" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "parity-scale-codec", @@ -8336,7 +8922,7 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "finality-grandpa", "log", @@ -8353,7 +8939,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "scale-info", @@ -8364,9 
+8950,9 @@ dependencies = [ [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "bandersnatch_vrfs", "bitflags 1.3.2", "blake2 0.10.6", @@ -8378,7 +8964,7 @@ dependencies = [ "hash-db", "hash256-std-hasher", "impl-serde", - "itertools 0.10.5", + "itertools 0.11.0", "k256", "libsecp256k1", "log", @@ -8395,11 +8981,11 @@ dependencies = [ "secrecy", "serde", "sp-crypto-hashing", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "ss58-registry", "substrate-bip39", "thiserror", @@ -8411,7 +8997,7 @@ dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" dependencies = [ 
"ark-bls12-377", "ark-bls12-377-ext", @@ -8430,8 +9016,8 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" -version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "blake2b_simd", "byteorder", @@ -8443,18 +9029,18 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" -version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "quote", "sp-crypto-hashing", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "kvdb", "parking_lot 0.12.3", @@ -8463,37 +9049,37 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = 
"sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "environmental", "parity-scale-codec", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", ] [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" dependencies = [ "environmental", "parity-scale-codec", @@ -8502,9 +9088,11 @@ dependencies = [ [[package]] name = "sp-genesis-builder" -version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +version = "0.8.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ + "parity-scale-codec", + "scale-info", "serde_json", "sp-api", "sp-runtime", @@ -8513,7 +9101,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -8526,9 +9114,10 @@ dependencies = [ [[package]] name = "sp-io" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "bytes", + "docify", "ed25519-dalek", "libsecp256k1", "log", @@ -8538,12 +9127,11 @@ dependencies = [ "secp256k1", "sp-core", "sp-crypto-hashing", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-keystore", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-trie", "tracing", "tracing-core", @@ -8552,28 +9140,28 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "sp-core", "sp-runtime", - "strum 0.26.2", + "strum 0.26.3", ] [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "parking_lot 0.12.3", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", ] 
[[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "thiserror", "zstd 0.12.4", @@ -8582,7 +9170,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "frame-metadata", "parity-scale-codec", @@ -8592,7 +9180,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "scale-info", @@ -8603,7 +9191,7 @@ dependencies = [ [[package]] name = "sp-offchain" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "sp-api", "sp-core", @@ -8613,7 +9201,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "backtrace", "lazy_static", @@ -8623,9 +9211,9 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "rustc-hash", + "rustc-hash 1.1.0", "serde", "sp-core", ] @@ -8633,13 +9221,14 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "docify", "either", "hash256-std-hasher", "impl-trait-for-tuples", "log", + "num-traits", "parity-scale-codec", "paste", "rand", @@ -8650,33 +9239,34 @@ dependencies = [ "sp-arithmetic", "sp-core", "sp-io", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-weights", + "tracing", ] [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "bytes", "impl-trait-for-tuples", "parity-scale-codec", "polkavm-derive", "primitive-types", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-runtime-interface-proc-macro 17.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", - "sp-wasm-interface 20.0.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-runtime-interface-proc-macro 17.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", + "sp-wasm-interface 20.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "static_assertions", ] [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8695,33 +9285,33 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "Inflector", "expander", - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" dependencies = [ "Inflector", "expander", - "proc-macro-crate 3.1.0", + "proc-macro-crate 3.2.0", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "sp-session" version = "27.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "scale-info", @@ -8735,7 +9325,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -8748,7 +9338,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "hash-db", "log", @@ -8757,7 +9347,7 @@ dependencies = [ "rand", "smallvec", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-panic-handler", "sp-trie", "thiserror", @@ -8768,10 +9358,10 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "aes-gcm", - "curve25519-dalek 4.1.3", + "curve25519-dalek", "ed25519-dalek", "hkdf", "parity-scale-codec", @@ -8782,39 +9372,39 @@ dependencies = [ "sp-application-crypto", "sp-core", "sp-crypto-hashing", - "sp-externalities 0.25.0 
(git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-runtime", - "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "thiserror", - "x25519-dalek 2.0.1", + "x25519-dalek", ] [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "impl-serde", "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", ] [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8826,7 +9416,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "26.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "parity-scale-codec", @@ -8838,29 +9428,29 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "tracing", "tracing-core", - "tracing-subscriber 0.2.25", + "tracing-subscriber", ] [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" dependencies = [ "parity-scale-codec", "tracing", "tracing-core", - "tracing-subscriber 0.3.18", + "tracing-subscriber", ] [[package]] name = "sp-transaction-pool" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "sp-api", "sp-runtime", @@ -8869,7 +9459,7 @@ dependencies = [ [[package]] name = "sp-transaction-storage-proof" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "async-trait", "parity-scale-codec", @@ -8883,7 +9473,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "ahash 0.8.11", "hash-db", @@ -8896,7 +9486,7 @@ dependencies = [ "scale-info", "schnellru", "sp-core", - "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "thiserror", "tracing", "trie-db", @@ -8906,7 +9496,7 @@ dependencies = [ [[package]] name = "sp-version" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8915,7 +9505,7 @@ dependencies = [ "serde", "sp-crypto-hashing-proc-macro", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-version-proc-macro", "thiserror", ] @@ -8923,18 +9513,18 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "parity-scale-codec", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -8946,8 +9536,9 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#6a5b6e03bfc8d0c6f5f05f3180313902c15aee84" +source = "git+https://github.com/paritytech/polkadot-sdk#310ef5ce1086affdc522c4d1736211de2a7dd99e" dependencies = [ + "anyhow", "impl-trait-for-tuples", "log", "parity-scale-codec", @@ -8956,7 +9547,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "bounded-collections", "parity-scale-codec", @@ -8964,7 +9555,7 @@ dependencies = [ "serde", "smallvec", "sp-arithmetic", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", ] [[package]] @@ -9000,9 +9591,9 @@ dependencies = [ [[package]] name = "ss58-registry" -version = "1.47.0" +version = "1.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4743ce898933fbff7bbf414f497c459a782d496269644b3d650a398ae6a487ba" +checksum = "43fce22ed1df64d04b262351c8f9d5c6da4f76f79f25ad15529792f893fad25d" dependencies = [ "Inflector", "num-format", @@ -9053,6 +9644,26 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "str0m" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6706347e49b13373f7ddfafad47df7583ed52083d6fc8a594eb2c80497ef959d" +dependencies = [ + "combine", + "crc", + "fastrand", + "hmac 0.12.1", + "once_cell", + "openssl", + "openssl-sys", + "sctp-proto", + "serde", + 
"sha-1", + "thiserror", + "tracing", +] + [[package]] name = "strsim" version = "0.11.1" @@ -9067,9 +9678,9 @@ checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" [[package]] name = "strum" -version = "0.26.2" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" dependencies = [ "strum_macros 0.26.4", ] @@ -9097,13 +9708,13 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "hmac 0.12.1", "pbkdf2", @@ -9115,7 +9726,7 @@ dependencies = [ [[package]] name = "substrate-build-script-utils" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" [[package]] name = "substrate-fixed" @@ -9131,8 +9742,9 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ + "docify", "frame-system-rpc-runtime-api", "futures", "jsonrpsee", @@ -9150,9 +9762,11 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "hyper", + "http-body-util", + "hyper 1.4.1", + "hyper-util", "log", "prometheus", "thiserror", @@ -9162,14 +9776,15 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3#8d2f55dfe06bae13e9f47ccf587acfd3fb9cd923" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ - "array-bytes 6.2.3", + "array-bytes", "build-helper", "cargo_metadata", "console", "filetime", "frame-metadata", + "jobserver", "merkleized-metadata", "parity-scale-codec", "parity-wasm", @@ -9178,11 +9793,11 @@ dependencies = [ "sp-core", "sp-io", "sp-maybe-compressed-blob", - "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.10.0-rc3)", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1)", "sp-version", - "strum 0.26.2", + "strum 0.26.3", "tempfile", - "toml 0.8.14", + "toml 0.8.19", "walkdir", "wasm-opt", ] @@ -9197,7 +9812,7 @@ dependencies = [ "quote", "rayon", "subtensor-linting", - "syn 2.0.71", + "syn 2.0.77", "walkdir", ] @@ -9232,7 +9847,7 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -9242,7 +9857,7 @@ dependencies = [ "ahash 0.8.11", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -9252,7 +9867,7 @@ dependencies = [ "anyhow", "clap", "semver 1.0.23", - "toml_edit 0.22.14", + "toml_edit", ] [[package]] @@ -9263,9 +9878,9 @@ checksum = "2d67a5a62ba6e01cb2192ff309324cb4875d0c451d55fe2319433abe7a05a8ee" [[package]] name = "subtle" -version = "2.6.0" +version = "2.6.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d0208408ba0c3df17ed26eb06992cb1a1268d41b2c0e12e65203fbe3972cee5" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -9280,9 +9895,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.71" +version = "2.0.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b146dcf730474b4bcd16c311627b31ede9ab149045db4d6088b3becaea046462" +checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" dependencies = [ "proc-macro2", "quote", @@ -9301,6 +9916,17 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.77", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -9330,20 +9956,21 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.14" +version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "tempfile" -version = "3.10.1" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" dependencies = [ "cfg-if", "fastrand", - "rustix 0.38.34", - "windows-sys 0.52.0", + "once_cell", + "rustix 0.38.37", + "windows-sys 0.59.0", ] [[package]] @@ -9361,7 +9988,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" dependencies = [ - "rustix 0.38.34", + "rustix 0.38.37", "windows-sys 0.48.0", ] @@ -9373,22 +10000,22 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -9468,9 +10095,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -9483,32 +10110,31 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.38.0" +version = "1.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" dependencies = [ "backtrace", "bytes", "libc", "mio", - "num_cpus", "parking_lot 0.12.3", - "pin-project-lite 0.2.14", + "pin-project-lite", "signal-hook-registry", "socket2 0.5.7", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = 
"tokio-macros" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -9523,27 +10149,42 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" dependencies = [ "futures-core", - "pin-project-lite 0.2.14", + "pin-project-lite", "tokio", "tokio-util", ] +[[package]] +name = "tokio-tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +dependencies = [ + "futures-util", + "log", + "rustls 0.21.12", + "rustls-native-certs", + "tokio", + "tokio-rustls", + "tungstenite", +] + [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes", "futures-core", "futures-io", "futures-sink", - "pin-project-lite 0.2.14", + "pin-project-lite", "tokio", ] @@ -9558,47 +10199,36 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 
0.22.14", + "toml_edit", ] [[package]] name = "toml_datetime" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" -dependencies = [ - "indexmap 2.2.6", - "toml_datetime", - "winnow 0.5.40", -] - -[[package]] -name = "toml_edit" -version = "0.22.14" +version = "0.22.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" +checksum = "3b072cee73c449a636ffd6f32bd8de3a9f7119139aff882f44943ce2986dc5cf" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.5.0", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.13", + "winnow", ] [[package]] @@ -9610,7 +10240,8 @@ dependencies = [ "futures-core", "futures-util", "pin-project", - "pin-project-lite 0.2.14", + "pin-project-lite", + "tokio", "tower-layer", "tower-service", "tracing", @@ -9618,33 +10249,31 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.4" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" +checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bytes", - "futures-core", - "futures-util", - "http", - "http-body", - "http-range-header", - "pin-project-lite 0.2.14", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "pin-project-lite", "tower-layer", "tower-service", ] [[package]] name = "tower-layer" -version = "0.3.2" +version = "0.3.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" @@ -9653,7 +10282,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ "log", - "pin-project-lite 0.2.14", + "pin-project-lite", "tracing-attributes", "tracing-core", ] @@ -9666,7 +10295,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -9689,17 +10318,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -9711,65 +10329,33 @@ dependencies = [ "tracing-core", ] -[[package]] -name = "tracing-serde" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" -dependencies = [ - "serde", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" -dependencies = [ - "ansi_term", - "chrono", - "lazy_static", - "matchers 0.0.1", - "parking_lot 
0.11.2", - "regex", - "serde", - "serde_json", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log 0.1.4", - "tracing-serde", -] - [[package]] name = "tracing-subscriber" version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ - "matchers 0.1.0", + "matchers", "nu-ansi-term", "once_cell", + "parking_lot 0.12.3", "regex", "sharded-slab", "smallvec", "thread_local", + "time", "tracing", "tracing-core", - "tracing-log 0.2.0", + "tracing-log", ] [[package]] name = "trie-db" -version = "0.28.0" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff28e0f815c2fea41ebddf148e008b077d2faddb026c9555b29696114d602642" +checksum = "0c992b4f40c234a074d48a757efeabb1a6be88af84c0c23f7ca158950cb0ae7f" dependencies = [ "hash-db", - "hashbrown 0.13.2", "log", "rustc-hex", "smallvec", @@ -9793,7 +10379,7 @@ dependencies = [ "async-trait", "cfg-if", "data-encoding", - "enum-as-inner", + "enum-as-inner 0.5.1", "futures-channel", "futures-io", "futures-util", @@ -9810,24 +10396,50 @@ dependencies = [ "url", ] +[[package]] +name = "trust-dns-proto" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner 0.6.1", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.4.0", + "ipnet", + "once_cell", + "rand", + "smallvec", + "thiserror", + "tinyvec", + "tokio", + "tracing", + "url", +] + [[package]] name = "trust-dns-resolver" -version = "0.22.0" +version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aff21aa4dcefb0a1afbfac26deb0adc93888c7d295fb63ab273ef276ba2b7cfe" +checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6" 
dependencies = [ "cfg-if", "futures-util", "ipconfig", - "lazy_static", "lru-cache", + "once_cell", "parking_lot 0.12.3", + "rand", "resolv-conf", "smallvec", "thiserror", "tokio", "tracing", - "trust-dns-proto", + "trust-dns-proto 0.23.2", ] [[package]] @@ -9842,6 +10454,26 @@ version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4f195fd851901624eee5a58c4bb2b4f06399148fcd0ed336e6f1cb60a9881df" +[[package]] +name = "tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http 0.2.12", + "httparse", + "log", + "rand", + "rustls 0.21.12", + "sha1", + "thiserror", + "url", + "utf-8", +] + [[package]] name = "twox-hash" version = "1.6.3" @@ -9895,9 +10527,9 @@ checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-normalization" @@ -9916,9 +10548,9 @@ checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" [[package]] name = "unicode-xid" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +checksum = "229730647fbc343e3a80e463c1db7f78f3855d3f3739bee0dda773c9a037c90a" [[package]] name = "universal-hash" @@ -9927,7 +10559,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" dependencies = [ "crypto-common", - "subtle 2.6.0", + "subtle 2.6.1", ] [[package]] @@ 
-9942,6 +10574,16 @@ dependencies = [ "futures-util", ] +[[package]] +name = "unsigned-varint" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" +dependencies = [ + "bytes", + "tokio-util", +] + [[package]] name = "untrusted" version = "0.7.1" @@ -9965,6 +10607,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "utf8parse" version = "0.2.2" @@ -9985,9 +10633,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "void" @@ -10012,7 +10660,7 @@ dependencies = [ "digest 0.10.7", "rand", "rand_chacha", - "rand_core 0.6.4", + "rand_core", "sha2 0.10.8", "sha3", "thiserror", @@ -10038,12 +10686,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -10052,34 +10694,35 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = [ 
"cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.42" +version = "0.4.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" dependencies = [ "cfg-if", "js-sys", @@ -10089,9 +10732,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -10099,22 +10742,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +checksum = 
"c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "wasm-instrument" @@ -10387,9 +11030,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.69" +version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" dependencies = [ "js-sys", "wasm-bindgen", @@ -10407,12 +11050,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.6" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" -dependencies = [ - "webpki", -] +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "which" @@ -10423,14 +11063,14 @@ dependencies = [ "either", "home", "once_cell", - "rustix 0.38.34", + "rustix 0.38.37", ] [[package]] name = "wide" -version = "0.7.24" +version = "0.7.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a040b111774ab63a19ef46bbc149398ab372b4ccdcfd719e9814dbd7dfd76c8" +checksum = "b828f995bf1e9622031f8009f8481a85406ce1f4d4588ff746d872043e855690" dependencies = [ "bytemuck", "safe_arch", @@ -10460,11 +11100,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -10498,7 +11138,22 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" 
dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", ] [[package]] @@ -10525,7 +11180,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] @@ -10560,18 +11224,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -10588,9 +11252,9 @@ checksum = 
"2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -10606,9 +11270,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -10624,15 +11288,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -10648,9 +11312,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -10666,9 +11330,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -10684,9 +11348,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -10702,24 +11366,15 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" - -[[package]] -name = "winnow" -version = "0.5.40" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" -dependencies = [ - "memchr", -] +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.13" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" dependencies = [ "memchr", ] @@ -10745,55 +11400,76 @@ 
dependencies = [ [[package]] name = "x25519-dalek" -version = "1.1.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a0c105152107e3b96f6a00a65e86ce82d9b125230e1c4302940eca58ff71f4f" +checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" dependencies = [ - "curve25519-dalek 3.2.0", - "rand_core 0.5.1", + "curve25519-dalek", + "rand_core", + "serde", "zeroize", ] [[package]] -name = "x25519-dalek" -version = "2.0.1" +name = "x509-parser" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" +checksum = "7069fba5b66b9193bd2c5d3d4ff12b839118f6bcbef5328efafafb5395cf63da" dependencies = [ - "curve25519-dalek 4.1.3", - "rand_core 0.6.4", - "serde", - "zeroize", + "asn1-rs 0.5.2", + "data-encoding", + "der-parser 8.2.0", + "lazy_static", + "nom", + "oid-registry 0.6.1", + "rusticata-macros", + "thiserror", + "time", ] [[package]] name = "x509-parser" -version = "0.14.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0ecbeb7b67ce215e40e3cc7f2ff902f94a223acf44995934763467e7b1febc8" +checksum = "fcbc162f30700d6f3f82a24bf7cc62ffe7caea42c0b2cba8bf7f3ae50cf51f69" dependencies = [ - "asn1-rs", - "base64 0.13.1", + "asn1-rs 0.6.2", "data-encoding", - "der-parser", + "der-parser 9.0.0", "lazy_static", "nom", - "oid-registry", + "oid-registry 0.7.1", "rusticata-macros", "thiserror", "time", ] +[[package]] +name = "xml-rs" +version = "0.8.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af4e2e2f7cba5a093896c1e150fbfe177d1883e7448200efb81d40b9d339ef26" + +[[package]] +name = "xmltree" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7d8a75eaf6557bb84a65ace8609883db44a29951042ada9b393151532e41fcb" +dependencies = [ + "xml-rs", +] + [[package]] name = "yamux" -version = 
"0.10.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d9ba232399af1783a58d8eb26f6b5006fbefe2dc9ef36bd283324792d03ea5" +checksum = "9ed0164ae619f2dc144909a9f082187ebb5893693d8c0196e8085283ccd4b776" dependencies = [ "futures", "log", "nohash-hasher", "parking_lot 0.12.3", + "pin-project", "rand", "static_assertions", ] @@ -10809,22 +11485,23 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.7.34" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ + "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.34" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -10844,7 +11521,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.71", + "syn 2.0.77", ] [[package]] @@ -10887,9 +11564,9 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "2.0.11+zstd.1.5.6" +version = "2.0.13+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75652c55c0b6f3e6f12eb786fe1bc960396bf05a1eb3bf1f3691c3610ac2e6d4" +checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index 4f162ca6b..981c50ad3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,7 +51,7 @@ enumflags2 = "0.7.9" futures = "0.3.30" hex = { version = "0.4", default-features = false } hex-literal = "0.4.1" -jsonrpsee = { version = 
"0.22.5", default-features = false } +jsonrpsee = { version = "0.24.4", default-features = false } log = { version = "0.4.21", default-features = false } memmap2 = "0.9.4" ndarray = { version = "0.15.6", default-features = false } @@ -78,81 +78,81 @@ walkdir = "2" subtensor-macros = { path = "support/macros" } -frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-benchmarking-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -frame-executive = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-metadata-hash-extension = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-system-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-system-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -frame-try-runtime = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } +frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +frame-benchmarking-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +frame-executive = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +frame-metadata-hash-extension = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +frame-support = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +frame-system = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +frame-system-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +frame-system-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +frame-try-runtime = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } -pallet-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-insecure-randomness-collective-flip = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-membership = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-multisig = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-preimage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-proxy = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-safe-mode = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-scheduler = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-sudo = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", 
default-features = false } -pallet-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-transaction-payment-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -pallet-utility = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } +pallet-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-insecure-randomness-collective-flip = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-membership = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-multisig = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-preimage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-proxy = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-safe-mode = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-scheduler = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-sudo = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-transaction-payment-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +pallet-utility = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } -sc-basic-authorship = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-client-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus-grandpa-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-chain-spec-derive = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-chain-spec = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-consensus-slots = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-executor = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-keystore = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-network = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-offchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-rpc-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-service = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-telemetry = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sc-transaction-pool-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } +sc-basic-authorship = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-client-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-consensus = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-consensus-grandpa-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-chain-spec-derive = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-chain-spec = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-consensus-slots = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-executor = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-keystore = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-network = { git = 
"https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-offchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-rpc-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-service = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-telemetry = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sc-transaction-pool-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-block-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-blockchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-consensus = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sp-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sp-genesis-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-inherents = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-keyring = { git = "https://github.com/paritytech/polkadot-sdk.git", 
tag = "v1.10.0-rc3" } -sp-offchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-session = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-storage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -sp-tracing = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-version = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } -sp-weights = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3", default-features = false } +sp-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-block-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-blockchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-consensus = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sp-consensus-aura = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = 
"v1.16.0-rc1" } +sp-genesis-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-core = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-inherents = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-io = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-keyring = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sp-offchain = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-session = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-std = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-storage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +sp-tracing = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-transaction-pool = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-version = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } +sp-weights = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1", default-features = false } -substrate-build-script-utils = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" 
} +substrate-build-script-utils = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } substrate-fixed = { git = "https://github.com/opentensor/substrate-fixed.git", tag = "v0.5.9" } -substrate-frame-rpc-system = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } -substrate-wasm-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.10.0-rc3" } +substrate-frame-rpc-system = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } +substrate-wasm-builder = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "v1.16.0-rc1" } frame-metadata = "16" [profile.release] diff --git a/node/src/chain_spec/mod.rs b/node/src/chain_spec/mod.rs index bf34901ae..cf9ce274e 100644 --- a/node/src/chain_spec/mod.rs +++ b/node/src/chain_spec/mod.rs @@ -5,7 +5,7 @@ pub mod finney; pub mod localnet; pub mod testnet; -use node_subtensor_runtime::{AccountId, Block, RuntimeGenesisConfig, Signature, WASM_BINARY}; +use node_subtensor_runtime::{AccountId, Block, Signature, WASM_BINARY}; use sc_chain_spec_derive::ChainSpecExtension; use sc_service::ChainType; use sp_consensus_aura::sr25519::AuthorityId as AuraId; @@ -32,7 +32,7 @@ pub struct Extensions { } /// Specialized `ChainSpec`. This is a specialization of the general Substrate ChainSpec type. -pub type ChainSpec = sc_service::GenericChainSpec; +pub type ChainSpec = sc_service::GenericChainSpec; /// Generate a crypto pair from seed. 
pub fn get_from_seed(seed: &str) -> ::Public { diff --git a/node/src/command.rs b/node/src/command.rs index 2423d1456..3c85937b0 100644 --- a/node/src/command.rs +++ b/node/src/command.rs @@ -12,12 +12,15 @@ pub use frame_benchmarking_cli::{BenchmarkCmd, ExtrinsicFactory, SUBSTRATE_REFER pub use node_subtensor_runtime::EXISTENTIAL_DEPOSIT; #[cfg(feature = "runtime-benchmarks")] pub use sp_keyring::Sr25519Keyring; -#[cfg(feature = "runtime-benchmarks")] -use sp_runtime::traits::HashingFor; use node_subtensor_runtime::Block; use sc_cli::SubstrateCli; -use sc_service::{Configuration, PartialComponents}; +use sc_service::{config::{ + ExecutorConfiguration, + RpcConfiguration + }, + Configuration, PartialComponents +}; impl SubstrateCli for Cli { fn impl_name() -> String { @@ -149,7 +152,9 @@ pub fn run() -> sc_cli::Result<()> { ); } - cmd.run::, service::ExecutorDispatch>(config) + cmd.run_with_spec::, ()>(Some( + config.chain_spec, + )) } BenchmarkCmd::Block(cmd) => { let PartialComponents { client, .. 
} = service::new_partial(&config)?; @@ -206,20 +211,30 @@ pub fn run() -> sc_cli::Result<()> { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run::(&config)) } - None => { - let runner = cli.create_runner(&cli.run)?; - runner.run_node_until_exit(|config| async move { + None => { + let runner = cli.create_runner(&cli.run)?; + runner.run_node_until_exit(|config| async move { let config = override_default_heap_pages(config, 60_000); - service::new_full(config).map_err(sc_cli::Error::Service) - }) - } + match config.network.network_backend { + sc_network::config::NetworkBackendType::Libp2p => service::new_full::< + sc_network::NetworkWorker< + node_subtensor_runtime::opaque::Block, + ::Hash, + >, + >(config) + .map_err(sc_cli::Error::Service), + sc_network::config::NetworkBackendType::Litep2p => + service::new_full::(config) + .map_err(sc_cli::Error::Service), + } + }) + }, } } /// Override default heap pages fn override_default_heap_pages(config: Configuration, pages: u64) -> Configuration { Configuration { - default_heap_pages: Some(pages), impl_name: config.impl_name, impl_version: config.impl_version, role: config.role, @@ -232,20 +247,7 @@ fn override_default_heap_pages(config: Configuration, pages: u64) -> Configurati state_pruning: config.state_pruning, blocks_pruning: config.blocks_pruning, chain_spec: config.chain_spec, - wasm_method: config.wasm_method, wasm_runtime_overrides: config.wasm_runtime_overrides, - rpc_addr: config.rpc_addr, - rpc_max_connections: config.rpc_max_connections, - rpc_cors: config.rpc_cors, - rpc_methods: config.rpc_methods, - rpc_max_request_size: config.rpc_max_request_size, - rpc_max_response_size: config.rpc_max_response_size, - rpc_id_provider: config.rpc_id_provider, - rpc_max_subs_per_conn: config.rpc_max_subs_per_conn, - rpc_port: config.rpc_port, - rpc_message_buffer_capacity: config.rpc_message_buffer_capacity, - rpc_batch_config: config.rpc_batch_config, - rpc_rate_limit: config.rpc_rate_limit, 
prometheus_config: config.prometheus_config, telemetry_endpoints: config.telemetry_endpoints, offchain_worker: config.offchain_worker, @@ -254,11 +256,30 @@ fn override_default_heap_pages(config: Configuration, pages: u64) -> Configurati dev_key_seed: config.dev_key_seed, tracing_targets: config.tracing_targets, tracing_receiver: config.tracing_receiver, - max_runtime_instances: config.max_runtime_instances, announce_block: config.announce_block, data_path: config.data_path, base_path: config.base_path, - informant_output_format: config.informant_output_format, - runtime_cache_size: config.runtime_cache_size, + executor: ExecutorConfiguration { + default_heap_pages: Some(pages), + wasm_method: config.executor.wasm_method, + max_runtime_instances: config.executor.max_runtime_instances, + runtime_cache_size: config.executor.runtime_cache_size, + }, + rpc: RpcConfiguration { + addr: config.rpc.addr, + max_connections: config.rpc.max_connections, + cors: config.rpc.cors, + methods: config.rpc.methods, + max_request_size: config.rpc.max_request_size, + max_response_size: config.rpc.max_response_size, + id_provider: config.rpc.id_provider, + max_subs_per_conn: config.rpc.max_subs_per_conn, + port: config.rpc.port, + message_buffer_capacity: config.rpc.message_buffer_capacity, + batch_config: config.rpc.batch_config, + rate_limit: config.rpc.rate_limit, + rate_limit_whitelisted_ips: config.rpc.rate_limit_whitelisted_ips, + rate_limit_trust_proxy_headers: config.rpc.rate_limit_trust_proxy_headers, + }, } } diff --git a/node/src/rpc.rs b/node/src/rpc.rs index 54f82447f..7563bf834 100644 --- a/node/src/rpc.rs +++ b/node/src/rpc.rs @@ -15,8 +15,6 @@ use sp_api::ProvideRuntimeApi; use sp_block_builder::BlockBuilder; use sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata}; -pub use sc_rpc_api::DenyUnsafe; - /// Dependencies for GRANDPA pub struct GrandpaDeps { /// Voting round info. 
@@ -37,8 +35,6 @@ pub struct FullDeps { pub client: Arc, /// Transaction pool instance. pub pool: Arc

, - /// Whether to deny unsafe calls - pub deny_unsafe: DenyUnsafe, /// Grandpa block import setup. pub grandpa: GrandpaDeps, /// Backend used by the node. @@ -72,7 +68,6 @@ where let FullDeps { client, pool, - deny_unsafe, grandpa, _backend: _, } = deps; @@ -80,7 +75,7 @@ where // Custom RPC methods for Paratensor module.merge(SubtensorCustom::new(client.clone()).into_rpc())?; - module.merge(System::new(client.clone(), pool.clone(), deny_unsafe).into_rpc())?; + module.merge(System::new(client.clone(), pool.clone()).into_rpc())?; module.merge(TransactionPayment::new(client).into_rpc())?; let GrandpaDeps { diff --git a/node/src/service.rs b/node/src/service.rs index 9a19ae354..253227430 100644 --- a/node/src/service.rs +++ b/node/src/service.rs @@ -6,9 +6,8 @@ use sc_client_api::{Backend, BlockBackend}; use sc_consensus_aura::{ImportQueueParams, SlotProportion, StartAuraParams}; use sc_consensus_grandpa::SharedVoterState; use sc_consensus_slots::BackoffAuthoringOnFinalizedHeadLagging; -use sc_executor::sp_wasm_interface::{Function, HostFunctionRegistry, HostFunctions}; -pub use sc_executor::NativeElseWasmExecutor; -use sc_service::{error::Error as ServiceError, Configuration, TaskManager, WarpSyncParams}; +pub use sc_executor::WasmExecutor; +use sc_service::{error::Error as ServiceError, Configuration, TaskManager, WarpSyncConfig}; use sc_telemetry::{Telemetry, TelemetryWorker}; use sc_transaction_pool_api::OffchainTransactionPoolFactory; use sp_consensus_aura::sr25519::AuthorityPair as AuraPair; @@ -18,42 +17,11 @@ use std::{sync::Arc, time::Duration}; /// imported and generated. const GRANDPA_JUSTIFICATION_PERIOD: u32 = 512; -// Our native executor instance. 
-pub struct ExecutorDispatch; - -// appeasing the compiler, this is a no-op -impl HostFunctions for ExecutorDispatch { - fn host_functions() -> Vec<&'static dyn Function> { - vec![] - } - - fn register_static(_registry: &mut T) -> core::result::Result<(), T::Error> - where - T: HostFunctionRegistry, - { - Ok(()) - } -} - -impl sc_executor::NativeExecutionDispatch for ExecutorDispatch { - // Only enable the benchmarking host functions when we actually want to benchmark. - #[cfg(feature = "runtime-benchmarks")] - type ExtendHostFunctions = frame_benchmarking::benchmarking::HostFunctions; - // Otherwise we only use the default Substrate host functions. - #[cfg(not(feature = "runtime-benchmarks"))] - type ExtendHostFunctions = (); - - fn dispatch(method: &str, data: &[u8]) -> Option> { - node_subtensor_runtime::api::dispatch(method, data) - } - - fn native_version() -> sc_executor::NativeVersion { - node_subtensor_runtime::native_version() - } -} - -pub(crate) type FullClient = - sc_service::TFullClient>; +pub(crate) type FullClient = sc_service::TFullClient< + Block, + RuntimeApi, + WasmExecutor +>; type FullBackend = sc_service::TFullBackend; type FullSelectChain = sc_consensus::LongestChain; @@ -90,7 +58,7 @@ pub fn new_partial( }) .transpose()?; - let executor = sc_service::new_native_or_wasm_executor(config); + let executor = sc_service::new_wasm_executor::(&config.executor); let (client, backend, keystore_container, task_manager) = sc_service::new_full_parts::( @@ -163,7 +131,11 @@ pub fn new_partial( } // Builds a new service for a full client. 
-pub fn new_full(config: Configuration) -> Result { +pub fn new_full< + N: sc_network::NetworkBackend::Hash>, +>( + config: Configuration +) -> Result { let sc_service::PartialComponents { client, backend, @@ -175,7 +147,12 @@ pub fn new_full(config: Configuration) -> Result { other: (block_import, grandpa_link, mut telemetry), } = new_partial(&config)?; - let mut net_config = sc_network::config::FullNetworkConfiguration::new(&config.network); + let mut net_config = sc_network::config::FullNetworkConfiguration::< + Block, + ::Hash, + N, + >::new(&config.network, config.prometheus_registry().cloned()); + let metrics = N::register_notification_metrics(config.prometheus_registry()); let grandpa_protocol_name = sc_consensus_grandpa::protocol_standard_name( &client @@ -186,8 +163,13 @@ pub fn new_full(config: Configuration) -> Result { &config.chain_spec, ); - let (grandpa_protocol_config, grandpa_notification_service) = - sc_consensus_grandpa::grandpa_peers_set_config(grandpa_protocol_name.clone()); + let peer_store_handle = net_config.peer_store_handle(); + let (grandpa_protocol_config, grandpa_notification_service) = + sc_consensus_grandpa::grandpa_peers_set_config::<_, N>( + grandpa_protocol_name.clone(), + metrics.clone(), + peer_store_handle, + ); net_config.add_notification_protocol(grandpa_protocol_config); let warp_sync = Arc::new(sc_consensus_grandpa::warp_proof::NetworkProvider::new( @@ -205,8 +187,9 @@ pub fn new_full(config: Configuration) -> Result { spawn_handle: task_manager.spawn_handle(), import_queue, block_announce_validator_builder: None, - warp_sync_params: Some(WarpSyncParams::WithProvider(warp_sync)), - block_relay: None, + warp_sync_config: Some(WarpSyncConfig::WithProvider(warp_sync)), + block_relay: None, + metrics, })?; if config.offchain_worker.enabled { @@ -221,7 +204,7 @@ pub fn new_full(config: Configuration) -> Result { transaction_pool: Some(OffchainTransactionPoolFactory::new( transaction_pool.clone(), )), - network_provider: 
network.clone(), + network_provider: Arc::new(network.clone()), enable_http_requests: true, custom_extensions: |_| vec![], }) @@ -239,7 +222,7 @@ pub fn new_full(config: Configuration) -> Result { let shared_authority_set = grandpa_link.shared_authority_set().clone(); let shared_voter_state = SharedVoterState::empty(); - let role = config.role.clone(); + let role = config.role; let force_authoring = config.force_authoring; let backoff_authoring_blocks = Some(BackoffAuthoringOnFinalizedHeadLagging { unfinalized_slack: 6, @@ -254,11 +237,10 @@ pub fn new_full(config: Configuration) -> Result { let pool = transaction_pool.clone(); Box::new( - move |deny_unsafe, subscription_executor: sc_rpc::SubscriptionTaskExecutor| { + move |subscription_executor: sc_rpc::SubscriptionTaskExecutor| { let deps = crate::rpc::FullDeps { client: client.clone(), pool: pool.clone(), - deny_unsafe, grandpa: crate::rpc::GrandpaDeps { shared_voter_state: shared_voter_state.clone(), shared_authority_set: shared_authority_set.clone(), diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 270b0a069..118238a0f 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -15,12 +15,16 @@ use codec::{Decode, Encode, MaxEncodedLen}; use frame_support::traits::Imbalance; use frame_support::{ dispatch::DispatchResultWithPostInfo, - genesis_builder_helper::{build_config, create_default_config}, + genesis_builder_helper::{build_state, get_preset}, pallet_prelude::Get, - traits::{fungible::HoldConsideration, Contains, LinearStoragePrice, OnUnbalanced}, + traits::{ + fungible::{ + DecreaseIssuance, HoldConsideration, Imbalance as FungibleImbalance, IncreaseIssuance, + }, + Contains, LinearStoragePrice, OnUnbalanced + }, }; use frame_system::{EnsureNever, EnsureRoot, EnsureRootWithSuccess, RawOrigin}; -use pallet_balances::NegativeImbalance; use pallet_commitments::CanCommit; use pallet_grandpa::{ fg_primitives, AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList, @@ -37,7 +41,7 @@ use 
sp_runtime::{ AccountIdLookup, BlakeTwo256, Block as BlockT, IdentifyAccount, NumberFor, One, Verify, }, transaction_validity::{TransactionSource, TransactionValidity}, - ApplyExtrinsicResult, MultiSignature, + AccountId32, ApplyExtrinsicResult, MultiSignature, }; use sp_std::cmp::Ordering; use sp_std::prelude::*; @@ -64,7 +68,7 @@ pub use frame_support::{ pub use frame_system::Call as SystemCall; pub use pallet_balances::Call as BalancesCall; pub use pallet_timestamp::Call as TimestampCall; -use pallet_transaction_payment::{CurrencyAdapter, Multiplier}; +use pallet_transaction_payment::{FungibleAdapter, Multiplier}; #[cfg(any(feature = "std", test))] pub use sp_runtime::BuildStorage; pub use sp_runtime::{Perbill, Permill}; @@ -394,8 +398,22 @@ parameter_types! { /// Deduct the transaction fee from the Subtensor Pallet TotalIssuance when dropping the transaction /// fee. pub struct TransactionFeeHandler; -impl OnUnbalanced> for TransactionFeeHandler { - fn on_nonzero_unbalanced(credit: NegativeImbalance) { +impl + OnUnbalanced< + FungibleImbalance< + u64, + DecreaseIssuance>, + IncreaseIssuance>, + >, + > for TransactionFeeHandler +{ + fn on_nonzero_unbalanced( + credit: FungibleImbalance< + u64, + DecreaseIssuance>, + IncreaseIssuance>, + >, + ) { let ti_before = pallet_subtensor::TotalIssuance::::get(); pallet_subtensor::TotalIssuance::::put(ti_before.saturating_sub(credit.peek())); drop(credit); @@ -406,7 +424,7 @@ impl pallet_transaction_payment::Config for Runtime { type RuntimeEvent = RuntimeEvent; //type TransactionByteFee = TransactionByteFee; - type OnChargeTransaction = CurrencyAdapter; + type OnChargeTransaction = FungibleAdapter; // Convert dispatch weight to a chargeable fee. 
type WeightToFee = LinearWeightToFee; @@ -1107,11 +1125,12 @@ mod benches { [frame_benchmarking, BaselineBench::] [frame_system, SystemBench::] [pallet_balances, Balances] - [pallet_subtensor, SubtensorModule] [pallet_timestamp, Timestamp] + [pallet_sudo, Sudo] [pallet_registry, Registry] [pallet_commitments, Commitments] [pallet_admin_utils, AdminUtils] + [pallet_subtensor, SubtensorModule] ); } @@ -1165,15 +1184,19 @@ impl_runtime_apis! { } } - impl sp_genesis_builder::GenesisBuilder for Runtime { - fn create_default_config() -> Vec { - create_default_config::() - } + impl sp_genesis_builder::GenesisBuilder for Runtime { + fn build_state(config: Vec) -> sp_genesis_builder::Result { + build_state::(config) + } - fn build_config(config: Vec) -> sp_genesis_builder::Result { - build_config::(config) - } - } + fn get_preset(id: &Option) -> Option> { + get_preset::(id, |_| None) + } + + fn preset_names() -> Vec { + vec![] + } + } impl sp_transaction_pool::runtime_api::TaggedTransactionQueue for Runtime { fn validate_transaction( diff --git a/scripts/localnet.sh b/scripts/localnet.sh index 850a314d8..567514652 100755 --- a/scripts/localnet.sh +++ b/scripts/localnet.sh @@ -25,13 +25,13 @@ if [ "$fast_blocks" == "False" ]; then echo "fast_blocks is Off" : "${CHAIN:=local}" : "${BUILD_BINARY:=1}" - : "${FEATURES:="pow-faucet runtime-benchmarks"}" + : "${FEATURES:="pow-faucet"}" else # Block of code to execute if fast_blocks is not False echo "fast_blocks is On" : "${CHAIN:=local}" : "${BUILD_BINARY:=1}" - : "${FEATURES:="pow-faucet runtime-benchmarks fast-blocks"}" + : "${FEATURES:="pow-faucet fast-blocks"}" fi SPEC_PATH="${SCRIPT_DIR}/specs/" @@ -56,6 +56,10 @@ echo "*** Building chainspec..." 
"$BASE_DIR/target/release/node-subtensor" build-spec --disable-default-bootnode --raw --chain $CHAIN >$FULL_PATH echo "*** Chainspec built and output to file" +# generate node keys +$BASE_DIR/target/release/node-subtensor key generate-node-key --chain="$FULL_PATH" --base-path /tmp/alice +$BASE_DIR/target/release/node-subtensor key generate-node-key --chain="$FULL_PATH" --base-path /tmp/bob + if [ $NO_PURGE -eq 1 ]; then echo "*** Purging previous state skipped..." else From 060787a91785847705b38a901b817f05ce762e19 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 14:37:13 -0400 Subject: [PATCH 101/213] cargo fmt --- .../procedural-fork/src/no_bound/default.rs | 244 +++++++++--------- 1 file changed, 123 insertions(+), 121 deletions(-) diff --git a/support/procedural-fork/src/no_bound/default.rs b/support/procedural-fork/src/no_bound/default.rs index 1c0d90531..3f896da35 100644 --- a/support/procedural-fork/src/no_bound/default.rs +++ b/support/procedural-fork/src/no_bound/default.rs @@ -27,132 +27,134 @@ pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::To let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let impl_ = - match input.data { - Data::Struct(struct_) => match struct_.fields { - Fields::Named(named) => { - let fields = named.named.iter().map(|field| &field.ident).map(|ident| { - quote_spanned! {ident.span() => - #ident: ::core::default::Default::default() - } - }); + let impl_ = match input.data { + Data::Struct(struct_) => match struct_.fields { + Fields::Named(named) => { + let fields = named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! {ident.span() => + #ident: ::core::default::Default::default() + } + }); + + quote!(Self { #( #fields, )* }) + } + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! 
{field.span()=> + ::core::default::Default::default() + } + }); + + quote!(Self( #( #fields, )* )) + } + Fields::Unit => { + quote!(Self) + } + }, + Data::Enum(enum_) => { + if enum_.variants.is_empty() { + return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") + .to_compile_error() + .into(); + } - quote!(Self { #( #fields, )* }) - } - Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().map(|field| { - quote_spanned! {field.span()=> - ::core::default::Default::default() + // all #[default] attrs with the variant they're on; i.e. a var + let default_variants = enum_ + .variants + .into_iter() + .filter(|variant| { + variant + .attrs + .iter() + .any(|attr| attr.path().is_ident("default")) + }) + .collect::>(); + + match &*default_variants { + [] => return syn::Error::new( + name.clone().span(), + "no default declared, make a variant default by placing `#[default]` above it", + ) + .into_compile_error() + .into(), + // only one variant with the #[default] attribute set + [default_variant] => { + let variant_attrs = default_variant + .attrs + .iter() + .filter(|a| a.path().is_ident("default")) + .collect::>(); + + // check that there is only one #[default] attribute on the variant + if let [first_attr, second_attr, additional_attrs @ ..] = &*variant_attrs { + let mut err = + syn::Error::new(Span::call_site(), "multiple `#[default]` attributes"); + + err.combine(syn::Error::new_spanned( + first_attr, + "`#[default]` used here", + )); + + err.extend([second_attr].into_iter().chain(additional_attrs).map( + |variant| { + syn::Error::new_spanned(variant, "`#[default]` used again here") + }, + )); + + return err.into_compile_error().into(); + } + + let variant_ident = &default_variant.ident; + + let fully_qualified_variant_path = quote!(Self::#variant_ident); + + match &default_variant.fields { + Fields::Named(named) => { + let fields = + named.named.iter().map(|field| &field.ident).map(|ident| { + quote_spanned! 
{ident.span()=> + #ident: ::core::default::Default::default() + } + }); + + quote!(#fully_qualified_variant_path { #( #fields, )* }) } - }); - - quote!(Self( #( #fields, )* )) - } - Fields::Unit => { - quote!(Self) - } - }, - Data::Enum(enum_) => { - if enum_.variants.is_empty() { - return syn::Error::new_spanned(name, "cannot derive Default for an empty enum") - .to_compile_error() - .into(); + Fields::Unnamed(unnamed) => { + let fields = unnamed.unnamed.iter().map(|field| { + quote_spanned! {field.span()=> + ::core::default::Default::default() + } + }); + + quote!(#fully_qualified_variant_path( #( #fields, )* )) + } + Fields::Unit => fully_qualified_variant_path, + } } + [first, additional @ ..] => { + let mut err = syn::Error::new(Span::call_site(), "multiple declared defaults"); - // all #[default] attrs with the variant they're on; i.e. a var - let default_variants = enum_ - .variants - .into_iter() - .filter(|variant| { - variant - .attrs - .iter() - .any(|attr| attr.path().is_ident("default")) - }) - .collect::>(); - - match &*default_variants { - [] => return syn::Error::new( - name.clone().span(), - "no default declared, make a variant default by placing `#[default]` above it", - ) - .into_compile_error() - .into(), - // only one variant with the #[default] attribute set - [default_variant] => { - let variant_attrs = default_variant - .attrs - .iter() - .filter(|a| a.path().is_ident("default")) - .collect::>(); - - // check that there is only one #[default] attribute on the variant - if let [first_attr, second_attr, additional_attrs @ ..] 
= &*variant_attrs { - let mut err = - syn::Error::new(Span::call_site(), "multiple `#[default]` attributes"); - - err.combine(syn::Error::new_spanned(first_attr, "`#[default]` used here")); - - err.extend([second_attr].into_iter().chain(additional_attrs).map( - |variant| { - syn::Error::new_spanned(variant, "`#[default]` used again here") - }, - )); - - return err.into_compile_error().into() - } - - let variant_ident = &default_variant.ident; - - let fully_qualified_variant_path = quote!(Self::#variant_ident); - - match &default_variant.fields { - Fields::Named(named) => { - let fields = - named.named.iter().map(|field| &field.ident).map(|ident| { - quote_spanned! {ident.span()=> - #ident: ::core::default::Default::default() - } - }); - - quote!(#fully_qualified_variant_path { #( #fields, )* }) - }, - Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().map(|field| { - quote_spanned! {field.span()=> - ::core::default::Default::default() - } - }); - - quote!(#fully_qualified_variant_path( #( #fields, )* )) - }, - Fields::Unit => fully_qualified_variant_path, - } - }, - [first, additional @ ..] 
=> { - let mut err = syn::Error::new(Span::call_site(), "multiple declared defaults"); - - err.combine(syn::Error::new_spanned(first, "first default")); - - err.extend( - additional - .into_iter() - .map(|variant| syn::Error::new_spanned(variant, "additional default")), - ); - - return err.into_compile_error().into() - }, - } - } - Data::Union(union_) => { - return syn::Error::new_spanned( - union_.union_token, - "Union type not supported by `derive(DefaultNoBound)`", - ) - .to_compile_error() - .into() + err.combine(syn::Error::new_spanned(first, "first default")); + + err.extend( + additional + .into_iter() + .map(|variant| syn::Error::new_spanned(variant, "additional default")), + ); + + return err.into_compile_error().into(); + } } - }; + } + Data::Union(union_) => { + return syn::Error::new_spanned( + union_.union_token, + "Union type not supported by `derive(DefaultNoBound)`", + ) + .to_compile_error() + .into() + } + }; quote!( const _: () = { From be0312cbf2f2106a1c10c9349508eb942f5b9bbf Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 19 Sep 2024 14:37:53 -0400 Subject: [PATCH 102/213] fix newline --- Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index a9788a222..a5b7f2f61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,8 @@ members = [ "runtime", "support/tools", "support/macros", - "support/linting", "support/procedural-fork", + "support/linting", + "support/procedural-fork", ] resolver = "2" From 3a02677c30f0a8351df62b12c2185f73dcdb0472 Mon Sep 17 00:00:00 2001 From: Liam Date: Fri, 20 Sep 2024 11:04:15 +0100 Subject: [PATCH 103/213] allow tiny ti delta --- pallets/subtensor/src/utils/try_state.rs | 16 +++++++++++++--- runtime/src/lib.rs | 7 +++++-- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/pallets/subtensor/src/utils/try_state.rs b/pallets/subtensor/src/utils/try_state.rs index 4763c0484..3c01a9b64 100644 --- a/pallets/subtensor/src/utils/try_state.rs +++ 
b/pallets/subtensor/src/utils/try_state.rs @@ -38,10 +38,20 @@ impl Pallet { .saturating_add(total_staked) .saturating_add(total_subnet_locked); - // Verify that the calculated total issuance matches the stored TotalIssuance + // Verify the diff between calculated TI and actual TI is less than delta + // + // These values can be off slightly due to float rounding errors. + // They are corrected every runtime upgrade. + const DELTA: u64 = 1000; + let diff = if TotalIssuance::::get() > expected_total_issuance { + TotalIssuance::::get().checked_sub(expected_total_issuance) + } else { + expected_total_issuance.checked_sub(TotalIssuance::::get()) + } + .expect("LHS > RHS"); ensure!( - TotalIssuance::::get() == expected_total_issuance, - "TotalIssuance accounting discrepancy", + diff <= DELTA, + "TotalIssuance diff greater than allowable delta", ); Ok(()) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 9622c6e2a..f50a2cd1b 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -1081,10 +1081,13 @@ pub type SignedExtra = ( frame_metadata_hash_extension::CheckMetadataHash, ); -type Migrations = +type Migrations = ( + // Leave this migration in the runtime, so every runtime upgrade tiny rounding errors (fractions of fractions + // of a cent) are cleaned up. These tiny rounding errors occur due to floating point conversion. pallet_subtensor::migrations::migrate_init_total_issuance::initialise_total_issuance::Migration< Runtime, - >; + >, +); // Unchecked extrinsic type as expected by this runtime.
pub type UncheckedExtrinsic = From 5611770512c0ef56cec5eead5185d25f76ca15f4 Mon Sep 17 00:00:00 2001 From: Keith Date: Wed, 28 Aug 2024 21:23:15 -0400 Subject: [PATCH 104/213] Add token argument to the publish script --- scripts/publish.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/publish.sh b/scripts/publish.sh index 3eb0fc6a5..8b2671787 100644 --- a/scripts/publish.sh +++ b/scripts/publish.sh @@ -1,28 +1,28 @@ #!/bin/bash set -ex cd support/macros -cargo publish +cargo publish --token $1 cd ../.. cd pallets/commitments -cargo publish +cargo publish --token $1 cd .. cd collective -cargo publish +cargo publish --token $1 cd .. cd registry -cargo publish +cargo publish --token $1 cd .. cd subtensor -cargo publish +cargo publish --token $1 cd runtime-api -cargo publish +cargo publish --token $1 cd ../.. cd admin-utils -cargo publish +cargo publish --token $1 cd ../.. cd runtime -cargo publish +cargo publish --token $1 cd .. cd node -cargo publish +cargo publish --token $1 echo "published successfully." From d9e7c5e34fc0e3fefb8f46ae7ef9315a88c1b30d Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Fri, 20 Sep 2024 19:36:07 -0400 Subject: [PATCH 105/213] Rebase onto devnet-ready --- pallets/subtensor/src/benchmarks.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pallets/subtensor/src/benchmarks.rs b/pallets/subtensor/src/benchmarks.rs index 4915bb3ac..c9ff37686 100644 --- a/pallets/subtensor/src/benchmarks.rs +++ b/pallets/subtensor/src/benchmarks.rs @@ -311,8 +311,14 @@ benchmarks! 
{ let amount: u64 = 1; let amount_to_be_staked = 100_000_000_000_000u64; Subtensor::::add_balance_to_coldkey_account(&coldkey.clone(), amount_to_be_staked); +<<<<<<< HEAD assert_ok!(Subtensor::::register_network(RawOrigin::Signed(coldkey.clone()).into())); }: dissolve_network(RawOrigin::Root, coldkey.clone(), 1) +======= + assert_ok!(Subtensor::::register_network(RawOrigin::Signed(coldkey.clone()).into(), None)); + let c1 = coldkey.clone(); + }: dissolve_network(RawOrigin::Signed(c1), coldkey, 1) +>>>>>>> 3404a5bc (fix benchmark_dissolve_network) // swap_hotkey { From d0ce4d2b036f310a6b20d7d044a35da3b411459f Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 27 Aug 2024 14:31:31 -0400 Subject: [PATCH 106/213] simplify cargo test CI step, always check benchmarks + other feats --- .github/workflows/check-rust.yml | 53 ++------------------------------ 1 file changed, 2 insertions(+), 51 deletions(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 797ad4df4..b088744cb 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -254,57 +254,8 @@ jobs: with: key: ${{ matrix.os }}-${{ env.RUST_BIN_DIR }} - - name: cargo test --workspace - run: cargo test --workspace - - # runs cargo test --workspace --features=runtime-benchmarks - cargo-test-benchmarks: - name: cargo test w/benchmarks - runs-on: SubtensorCI - strategy: - matrix: - rust-branch: - - stable - rust-target: - - x86_64-unknown-linux-gnu - # - x86_64-apple-darwin - os: - - ubuntu-latest - # - macos-latest - include: - - os: ubuntu-latest - # - os: macos-latest - env: - RELEASE_NAME: development - # RUSTFLAGS: -A warnings - RUSTV: ${{ matrix.rust-branch }} - RUST_BACKTRACE: full - RUST_BIN_DIR: target/${{ matrix.rust-target }} - SKIP_WASM_BUILD: 1 - TARGET: ${{ matrix.rust-target }} - steps: - - name: Check-out repository under $GITHUB_WORKSPACE - uses: actions/checkout@v4 - - - name: Install dependencies - run: | - sudo apt-get update && - 
sudo apt-get install -y clang curl libssl-dev llvm libudev-dev protobuf-compiler - - - name: Install Rust ${{ matrix.rust-branch }} - uses: actions-rs/toolchain@v1.0.6 - with: - toolchain: ${{ matrix.rust-branch }} - components: rustfmt, clippy - profile: minimal - - - name: Utilize Rust shared cached - uses: Swatinem/rust-cache@v2.2.1 - with: - key: ${{ matrix.os }}-${{ env.RUST_BIN_DIR }} - - - name: cargo test --workspace --features=runtime-benchmarks - run: cargo test --workspace --features=runtime-benchmarks + - name: cargo test --workspace --all-features + run: cargo test --workspace --all-features # ensures cargo fix has no trivial changes that can be applied cargo-fix: From 9fe01d5e5fdf3aad87a5c682bc1a521ddb1f7e4f Mon Sep 17 00:00:00 2001 From: Cameron Fairchild Date: Tue, 27 Aug 2024 17:38:17 -0400 Subject: [PATCH 107/213] add root weights proxy --- runtime/src/lib.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 118238a0f..8a01832d0 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -647,6 +647,7 @@ pub enum ProxyType { Registration, Transfer, SmallTransfer, + RootWeights, } // Transfers below SMALL_TRANSFER_LIMIT are considered small transfers pub const SMALL_TRANSFER_LIMIT: Balance = 500_000_000; // 0.5 TAO @@ -691,6 +692,7 @@ impl InstanceFilter for ProxyType { | RuntimeCall::SubtensorModule(pallet_subtensor::Call::root_register { .. }) | RuntimeCall::SubtensorModule(pallet_subtensor::Call::burned_register { .. }) | RuntimeCall::Triumvirate(..) + | RuntimeCall::RootWeights(..) ), ProxyType::Triumvirate => matches!( c, @@ -713,6 +715,10 @@ impl InstanceFilter for ProxyType { RuntimeCall::SubtensorModule(pallet_subtensor::Call::burned_register { .. }) | RuntimeCall::SubtensorModule(pallet_subtensor::Call::register { .. }) ), + ProxyType::RootWeights => matches!( + c, + RuntimeCall::SubtensorModule(pallet_subtensor::Call::set_root_weights { .. 
}) + ), } } fn is_superset(&self, o: &Self) -> bool { From 8a1683f691e9acf6aef1b6c06bf12856c3b24f65 Mon Sep 17 00:00:00 2001 From: Cameron Fairchild Date: Tue, 27 Aug 2024 17:38:26 -0400 Subject: [PATCH 108/213] fmt --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 8a01832d0..af7114598 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -692,7 +692,7 @@ impl InstanceFilter for ProxyType { | RuntimeCall::SubtensorModule(pallet_subtensor::Call::root_register { .. }) | RuntimeCall::SubtensorModule(pallet_subtensor::Call::burned_register { .. }) | RuntimeCall::Triumvirate(..) - | RuntimeCall::RootWeights(..) + | RuntimeCall::RootWeights(..) ), ProxyType::Triumvirate => matches!( c, From 6840702ba45b8e1e8aa1cfe23326d5375f0bd39c Mon Sep 17 00:00:00 2001 From: Cameron Fairchild Date: Tue, 27 Aug 2024 17:45:11 -0400 Subject: [PATCH 109/213] fix typo --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index af7114598..1cd1613ed 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -692,7 +692,7 @@ impl InstanceFilter for ProxyType { | RuntimeCall::SubtensorModule(pallet_subtensor::Call::root_register { .. }) | RuntimeCall::SubtensorModule(pallet_subtensor::Call::burned_register { .. }) | RuntimeCall::Triumvirate(..) - | RuntimeCall::RootWeights(..) + | RuntimeCall::SubtensorModule(pallet_subtensor::Call::set_root_weights { .. 
}) ), ProxyType::Triumvirate => matches!( c, From d6e5f964c5c02e5f181303b0eaa03daba0c3c962 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Fri, 20 Sep 2024 19:38:02 -0400 Subject: [PATCH 110/213] Rebase onto devnet-ready --- Dockerfile | 2 +- node/src/service.rs | 39 +++++++++++++++++++++++++++++++++++++++ scripts/build.sh | 3 +++ scripts/localnet.sh | 8 ++++---- 4 files changed, 47 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2dd2e2370..9edb2749b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,7 +28,7 @@ COPY . /build WORKDIR /build # Build the project -RUN cargo build -p node-subtensor --profile production --features="runtime-benchmarks metadata-hash" --locked +RUN cargo build -p node-subtensor --profile production --features="metadata-hash" --locked # Verify the binary was produced RUN test -e /build/target/production/node-subtensor diff --git a/node/src/service.rs b/node/src/service.rs index 253227430..039bd942e 100644 --- a/node/src/service.rs +++ b/node/src/service.rs @@ -17,11 +17,50 @@ use std::{sync::Arc, time::Duration}; /// imported and generated. const GRANDPA_JUSTIFICATION_PERIOD: u32 = 512; +<<<<<<< HEAD pub(crate) type FullClient = sc_service::TFullClient< Block, RuntimeApi, WasmExecutor >; +======= +// Our native executor instance. +pub struct ExecutorDispatch; + +// appeasing the compiler, this is a no-op +impl HostFunctions for ExecutorDispatch { + fn host_functions() -> Vec<&'static dyn Function> { + vec![] + } + + fn register_static(_registry: &mut T) -> core::result::Result<(), T::Error> + where + T: HostFunctionRegistry, + { + Ok(()) + } +} + +impl sc_executor::NativeExecutionDispatch for ExecutorDispatch { + // Always enable runtime benchmark host functions, the genesis state + // was built with them so we're stuck with them forever. + // + // They're just a noop, never actually get used if the runtime was not compiled with + // `runtime-benchmarks`. 
+ type ExtendHostFunctions = frame_benchmarking::benchmarking::HostFunctions; + + fn dispatch(method: &str, data: &[u8]) -> Option> { + node_subtensor_runtime::api::dispatch(method, data) + } + + fn native_version() -> sc_executor::NativeVersion { + node_subtensor_runtime::native_version() + } +} + +pub(crate) type FullClient = + sc_service::TFullClient>; +>>>>>>> f37d9c1b (fix node dep on runtime-benchmarks) type FullBackend = sc_service::TFullBackend; type FullSelectChain = sc_consensus::LongestChain; diff --git a/scripts/build.sh b/scripts/build.sh index 68bc62c68..be69f801d 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -1,2 +1,5 @@ cargo build --profile production --features "metadata-hash" +<<<<<<< HEAD +======= +>>>>>>> f37d9c1b (fix node dep on runtime-benchmarks) diff --git a/scripts/localnet.sh b/scripts/localnet.sh index 567514652..85c4edfa2 100755 --- a/scripts/localnet.sh +++ b/scripts/localnet.sh @@ -3,10 +3,10 @@ # Check if `--no-purge` passed as a parameter NO_PURGE=0 for arg in "$@"; do - if [ "$arg" = "--no-purge" ]; then - NO_PURGE=1 - break - fi + if [ "$arg" = "--no-purge" ]; then + NO_PURGE=1 + break + fi done # Determine the directory this script resides in. This allows invoking it from any location. From db4164890253ddc7c10e907556b84bd422c79201 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 10:01:26 -0400 Subject: [PATCH 111/213] import original script --- scripts/merged_script.sh | 102 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 scripts/merged_script.sh diff --git a/scripts/merged_script.sh b/scripts/merged_script.sh new file mode 100644 index 000000000..030fd8efa --- /dev/null +++ b/scripts/merged_script.sh @@ -0,0 +1,102 @@ +#!/bin/bash + + +usage() { + echo "Usage: $0 -v " + exit 1 +} + +while getopts ":v:" opt; do + case ${opt} in + v) + version=${OPTARG} + ;; + \?) 
+ usage + ;; + esac +done + + +if [ -z "$version" ]; then + usage +fi + +echo "[INFO] Starting the process for version $version" + + +echo "[INFO] Building the project with wasm-pack" + +# Build command +wasm-pack build --release + +# Wasm Blob file path +artifact_path="pkg/my_wasm_project_bg.wasm" + + +if [ ! -f "$artifact_path" ]; then + echo "[ERROR] Artifact not found: $artifact_path" + exit 1 +fi + +echo "[INFO] Artifact found at $artifact_path" + +git checkout main + +git fetch origin --tags + +latest_tag=$(git describe --tags $(git rev-list --tags --max-count=1)) + +if [ -z "$latest_tag" ]; then + echo "No tags found in the repository." + exit 1 +fi + +commits=$(git log ${latest_tag}..main --pretty=format:"%H %ci %s" --reverse) + +# List PRs merged since the last release +pr_list=() +while read -r hash date time timezone message; do + if [[ $message =~ Merge\ pull\ request\ \#([0-9]+) ]]; then + pr_number=${BASH_REMATCH[1]} + pr_list+=("$pr_number") + fi +done <<< "$commits" + + +if [ ${#pr_list[@]} -eq 0 ]; then + echo "[ERROR] No PRs found since the last release" + exit 1 +fi + +echo -e "[INFO] PRs found: ${pr_list[*]}" + + +get_pr_title() { + local pr_number=$1 + gh pr view "$pr_number" --json title --jq '.title' +} + + +formatted_pr_list="PR numbers and their titles merged into main since the last tag ($latest_tag):\n\n" + +for pr_number in "${pr_list[@]}"; do + pr_title=$(get_pr_title "$pr_number") + formatted_pr_list+="* PR #${pr_number} - ${pr_title}\n" +done + + +formatted_pr_list=$(printf "%b" "$formatted_pr_list") + +echo -e "$formatted_pr_list" + + +echo "[INFO] Creating a new release with version $version and uploading the artifact" +if gh release create "$version" "$artifact_path" --title "$version" --notes "$formatted_pr_list"; then + echo "[INFO] Release created successfully" +else + echo "[ERROR] Failed to create the release" + exit 1 +fi + +echo "[INFO] Release $version created and tagged successfully." 
From 1ffd1fda79a8ef4768ffe6d72cdbdf4fc1cffc9c Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 10:09:58 -0400 Subject: [PATCH 112/213] scaffold --- scripts/release_notes.rs | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100755 scripts/release_notes.rs diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs new file mode 100755 index 000000000..c9cb6e7b9 --- /dev/null +++ b/scripts/release_notes.rs @@ -0,0 +1,6 @@ +#!/usr/bin/env rust-script +// ^ `cargo install rust-script` to be able to run this script + +fn main() { + println!("hello world"); +} From d837a6f6ca3d28c8fdb60810df83c4b88701aaf1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 10:19:30 -0400 Subject: [PATCH 113/213] parsing of previous tag --- scripts/release_notes.rs | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index c9cb6e7b9..7277a1cb7 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -1,6 +1,24 @@ #!/usr/bin/env rust-script // ^ `cargo install rust-script` to be able to run this script +use core::fmt::Display; +use std::{env, process::Command}; + +fn eval(cmd: impl Display) -> String { + let output = Command::new("sh") + .arg("-c") + .arg(cmd.to_string()) + .output() + .expect("failed to execute process"); + String::from_utf8(output.stdout).unwrap().trim().to_string() +} + fn main() { - println!("hello world"); + let previous_tag = env::var("PREVIOUS_TAG").unwrap_or_else(|_| { + eval("git describe --abbrev=0 --tags $(git rev-list --tags --skip=1 --max-count=1)") + }); + if previous_tag.is_empty() { + panic!("PREVIOUS_TAG is not specified or invalid"); + } + println!("Previous tag: {}", previous_tag); } From 28c1ede88426df37b230bfd813523c9663beff6b Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 15:26:02 -0400 Subject: [PATCH 114/213] parse network and all tags --- scripts/release_notes.rs | 38 
++++++++++++++++++++++++++++++-------- 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 7277a1cb7..d3b42665c 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -1,7 +1,7 @@ #!/usr/bin/env rust-script // ^ `cargo install rust-script` to be able to run this script -use core::fmt::Display; +use core::{fmt::Display, str::FromStr}; use std::{env, process::Command}; fn eval(cmd: impl Display) -> String { @@ -13,12 +13,34 @@ fn eval(cmd: impl Display) -> String { String::from_utf8(output.stdout).unwrap().trim().to_string() } -fn main() { - let previous_tag = env::var("PREVIOUS_TAG").unwrap_or_else(|_| { - eval("git describe --abbrev=0 --tags $(git rev-list --tags --skip=1 --max-count=1)") - }); - if previous_tag.is_empty() { - panic!("PREVIOUS_TAG is not specified or invalid"); +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum Network { + Mainnet, + Testnet, +} + +impl FromStr for Network { + type Err = (); + + fn from_str(s: &str) -> Result { + match s { + "mainnet" => Ok(Network::Mainnet), + "testnet" => Ok(Network::Testnet), + _ => Err(()), + } } - println!("Previous tag: {}", previous_tag); +} + +fn main() { + let network = env::var("NETWORK") + .unwrap_or_else(|_| "mainnet".to_string()) + .parse::() + .unwrap_or_else(|_| panic!("Invalid NETWORK value")); + println!("Network: {:?}", network); + + let all_tags = env::var("PREVIOUS_TAG") + .unwrap_or_else(|_| eval("git tag --sort=-creatordate")) + .split("\n") + .map(|s| s.trim().to_string()) + .collect::>(); } From 9323877954b90a8a18c13e1fb9662099866721a3 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 16:03:41 -0400 Subject: [PATCH 115/213] resolve previous tag --- scripts/release_notes.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index d3b42665c..28279807e 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -43,4 
+43,17 @@ fn main() { .split("\n") .map(|s| s.trim().to_string()) .collect::>(); + + let previous_tag = match network { + Network::Mainnet => all_tags + .iter() + .find(|tag| tag.starts_with("v") && !tag.ends_with("-pre-release")) + .expect("could not find a valid mainnet tag!"), + Network::Testnet => all_tags + .iter() + .find(|tag| tag.starts_with("v") && tag.ends_with("-pre-release")) + .expect("could not find a valid testnet tag!"), + }; + + println!("Previous Release Tag: {}", previous_tag); } From 9daafe1423d3fb92f2dd27c6c36e7fc858a7cca2 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 16:50:00 -0400 Subject: [PATCH 116/213] narrow down to filtered merges --- scripts/release_notes.rs | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 28279807e..99f94a7c7 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -54,6 +54,32 @@ fn main() { .find(|tag| tag.starts_with("v") && tag.ends_with("-pre-release")) .expect("could not find a valid testnet tag!"), }; + println!("Previous release tag: {}", previous_tag); - println!("Previous Release Tag: {}", previous_tag); + println!(""); + println!( + "Generating release notes for all merges since {}...", + previous_tag, + ); + + let merges = eval(format!( + "git log --merges --pretty=format:'%s' {}..HEAD", + previous_tag + )) + .split("\n") + .map(|s| s.trim().to_string()) + .filter(|s| { + !s.is_empty() + && s.starts_with("Merge pull request #") + && !s.ends_with("from opentensor/devnet-ready") + && !s.ends_with("from opentensor/testnet-ready") + && !s.ends_with("from opentensor/devnet") + && !s.ends_with("from opentensor/testnet") + }) + .collect::>(); + + println!(""); + println!("Filtered merges:\n{}", merges.join("\n")); + + println!(""); } From c9bfdd24534560ddac308822991bd5522ed56ad7 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 17:48:42 -0400 Subject: [PATCH 
117/213] branch selection, fallible eval method --- scripts/release_notes.rs | 39 +++++++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 8 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 99f94a7c7..54b5b3dec 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -4,13 +4,23 @@ use core::{fmt::Display, str::FromStr}; use std::{env, process::Command}; -fn eval(cmd: impl Display) -> String { +fn eval(cmd: impl Display, print: bool) -> Result { + if print { + println!("$ {}", cmd); + } let output = Command::new("sh") .arg("-c") .arg(cmd.to_string()) .output() .expect("failed to execute process"); - String::from_utf8(output.stdout).unwrap().trim().to_string() + if print { + println!("{}", String::from_utf8(output.stdout.clone()).unwrap()); + eprintln!("{}", String::from_utf8(output.stderr.clone()).unwrap()); + } + if !output.status.success() { + return Err(String::from_utf8(output.stderr).unwrap()); + } + Ok(String::from_utf8(output.stdout).unwrap().trim().to_string()) } #[derive(Copy, Clone, PartialEq, Eq, Debug)] @@ -39,7 +49,7 @@ fn main() { println!("Network: {:?}", network); let all_tags = env::var("PREVIOUS_TAG") - .unwrap_or_else(|_| eval("git tag --sort=-creatordate")) + .unwrap_or_else(|_| eval("git tag --sort=-creatordate", false).unwrap()) .split("\n") .map(|s| s.trim().to_string()) .collect::>(); @@ -56,16 +66,29 @@ fn main() { }; println!("Previous release tag: {}", previous_tag); + let branch = env::var("BRANCH").unwrap_or( + match network { + Network::Mainnet => "main", + Network::Testnet => "testnet", + } + .to_string(), + ); + println!("Branch: {}", branch); + eval(format!("git checkout {}", branch), true).unwrap(); + println!(""); println!( "Generating release notes for all merges since {}...", previous_tag, ); - - let merges = eval(format!( - "git log --merges --pretty=format:'%s' {}..HEAD", - previous_tag - )) + let merges = eval( + format!( + "git log --merges 
--pretty=format:'%s' {}..HEAD", + previous_tag + ), + false, + ) + .unwrap() .split("\n") .map(|s| s.trim().to_string()) .filter(|s| { From 550f547e3ceb2803411ed5c42e4b54d0f2ad0dfc Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 17:51:12 -0400 Subject: [PATCH 118/213] tweak --- scripts/release_notes.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 54b5b3dec..c990c9db4 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -76,7 +76,6 @@ fn main() { println!("Branch: {}", branch); eval(format!("git checkout {}", branch), true).unwrap(); - println!(""); println!( "Generating release notes for all merges since {}...", previous_tag, From b57dafeb7fd159cad24090ded74dcd53370d5fd7 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 9 Sep 2024 18:47:09 -0400 Subject: [PATCH 119/213] WIP --- scripts/release_notes.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index c990c9db4..3192a7e27 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -68,13 +68,12 @@ fn main() { let branch = env::var("BRANCH").unwrap_or( match network { - Network::Mainnet => "main", - Network::Testnet => "testnet", + Network::Mainnet => "testnet", + Network::Testnet => "devnet", } .to_string(), ); println!("Branch: {}", branch); - eval(format!("git checkout {}", branch), true).unwrap(); println!( "Generating release notes for all merges since {}...", @@ -82,8 +81,9 @@ fn main() { ); let merges = eval( format!( - "git log --merges --pretty=format:'%s' {}..HEAD", - previous_tag + "git log --merges --pretty=format:'%s' {}..{}", + previous_tag, + branch // Replace HEAD with branch variable ), false, ) From 9219693ba4a7fc7225c32960dc4da4b11c8981f9 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 09:52:56 -0400 Subject: [PATCH 120/213] get PR titles --- scripts/release_notes.rs | 17 
+++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 3192a7e27..fec8c88df 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -104,4 +104,21 @@ fn main() { println!("Filtered merges:\n{}", merges.join("\n")); println!(""); + let pr_numbers = merges + .iter() + .map(|s| s.split(" ").collect::>()[3].trim_start_matches("#")) + .collect::>(); + println!("PR numbers:\n{}", pr_numbers.join("\n")); + + println!(""); + let pr_titles = pr_numbers + .iter() + .map(|pr_number| { + eval(format!("gh pr view {} --json title", pr_number), false) + .unwrap() + .trim() + .to_string() + }) + .collect::>(); + println!("PR titles:\n{}", pr_titles.join("\n")); } From f00159a24e5c5d9b3f59303f5aab2379172166c3 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 10:35:15 -0400 Subject: [PATCH 121/213] resolve PR titles properly --- scripts/release_notes.rs | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index fec8c88df..02d391593 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -108,17 +108,28 @@ fn main() { .iter() .map(|s| s.split(" ").collect::>()[3].trim_start_matches("#")) .collect::>(); - println!("PR numbers:\n{}", pr_numbers.join("\n")); + println!("PR numbers:\n{:?}", pr_numbers); println!(""); + println!("Fetching PR titles..."); let pr_titles = pr_numbers .iter() .map(|pr_number| { - eval(format!("gh pr view {} --json title", pr_number), false) + print!("#{}: ", pr_number); + let title = eval(format!("gh pr view {} --json title", pr_number), false) .unwrap() .trim() - .to_string() + .to_string(); + if !title.starts_with("{\"title\":\"") { + panic!("Malformed PR title: {}", title); + } + let title = title + .trim_start_matches("{\"title\":\"") + .trim_end_matches("\"}") + .trim() + .to_string(); + println!("{}", title); + title }) .collect::>(); - println!("PR 
titles:\n{}", pr_titles.join("\n")); } From a91843529b0e72ffbf4caa7ae74d879b97b4f26a Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 11:25:37 -0400 Subject: [PATCH 122/213] authors working :tada: --- scripts/release_notes.rs | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 02d391593..72ca53ada 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -132,4 +132,38 @@ fn main() { title }) .collect::>(); + + println!(""); + println!("Fetching PR authors..."); + let pr_authors = pr_numbers + .iter() + .map(|pr_number| { + print!("#{}: ", pr_number); + let author = eval( + format!("gh pr view {} --json author | jq .author.login", pr_number), + false, + ) + .unwrap() + .trim() + .trim_start_matches("\"") + .trim_end_matches("\"") + .to_string(); + println!("{}", author); + author + }) + .collect::>(); + + println!(""); + println!("generated release notes:"); + let release_notes = "\n## What's Changed\n".to_string(); + let release_notes = release_notes + + &pr_numbers + .iter() + .zip(pr_titles.iter()) + .zip(pr_authors.iter()) + .map(|((pr_number, pr_title), pr_author)| { + format!("- {} in #{} by @{}\n", pr_title, pr_number, pr_author) + }) + .collect::(); + println!("{}", release_notes); } From e81e5ea32e582c312496398e1a77531d6d5a2310 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 11:37:19 -0400 Subject: [PATCH 123/213] working --- scripts/release_notes.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 72ca53ada..90ef17868 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -82,8 +82,7 @@ fn main() { let merges = eval( format!( "git log --merges --pretty=format:'%s' {}..{}", - previous_tag, - branch // Replace HEAD with branch variable + branch, previous_tag, ), false, ) From 26c8d19fc51919e1577c6af15c95160550b35c4a Mon Sep 17 
00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 11:58:36 -0400 Subject: [PATCH 124/213] write release notes to /tmp/release_notes.md --- scripts/release_notes.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/scripts/release_notes.rs b/scripts/release_notes.rs index 90ef17868..73cbde4d9 100755 --- a/scripts/release_notes.rs +++ b/scripts/release_notes.rs @@ -154,7 +154,7 @@ fn main() { println!(""); println!("generated release notes:"); - let release_notes = "\n## What's Changed\n".to_string(); + let release_notes = "## What's Changed\n".to_string(); let release_notes = release_notes + &pr_numbers .iter() @@ -165,4 +165,9 @@ fn main() { }) .collect::(); println!("{}", release_notes); + + println!(""); + println!("writing release notes to /tmp/release_notes.md"); + std::fs::write("/tmp/release_notes.md", release_notes).unwrap(); + println!("done!"); } From 322618c4bd01322f080f1e1416c870ad26222ba1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:13:06 -0400 Subject: [PATCH 125/213] automatically re-run deployment check when labels changed --- .github/workflows/check-devnet.yml | 2 ++ .github/workflows/check-finney.yml | 2 ++ .github/workflows/check-testnet.yml | 2 ++ 3 files changed, 6 insertions(+) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 3d7f17723..1a7ae4aa3 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -3,6 +3,8 @@ name: Devnet Deploy Check on: pull_request: branches: [devnet, devnet-ready] + pull_request_target: + types: [labeled, unlabeled] env: CARGO_TERM_COLOR: always diff --git a/.github/workflows/check-finney.yml b/.github/workflows/check-finney.yml index 23ef93cdf..d6ffd2ba4 100644 --- a/.github/workflows/check-finney.yml +++ b/.github/workflows/check-finney.yml @@ -3,6 +3,8 @@ name: Finney Deploy Check on: pull_request: branches: [finney, main] + pull_request_target: + types: [labeled, unlabeled] env: 
CARGO_TERM_COLOR: always diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index c18b45ac2..6c3ebcfdf 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -3,6 +3,8 @@ name: Testnet Deploy Check on: pull_request: branches: [testnet, testnet-ready] + pull_request_target: + types: [labeled, unlabeled] env: CARGO_TERM_COLOR: always From 9b2803742e70d24e0405225c60d001c0bacba0c2 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:20:48 -0400 Subject: [PATCH 126/213] tweak --- .github/workflows/check-testnet.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 6c3ebcfdf..39847767b 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -13,7 +13,7 @@ jobs: check-spec-version: name: Check spec_version bump runs-on: SubtensorCI - if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-spec-version-bump') }} + if: ${{ github.event.pull_request.labels | contains('no-spec-version-bump') == false }} steps: - name: Dependencies run: | From 40323e8043a5e2e682aefe8f4363431400ea2ba1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:33:10 -0400 Subject: [PATCH 127/213] debug --- .github/workflows/check-testnet.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 39847767b..09963b152 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -10,6 +10,13 @@ env: CARGO_TERM_COLOR: always jobs: + debug-labels: + name: Debug Labels + runs-on: SubtensorCI + steps: + - name: Debug Labels + run: | + echo "Labels: ${{ toJson(github.event.pull_request.labels) }}" check-spec-version: name: Check spec_version bump runs-on: SubtensorCI From ff84f27ec0c93e87013d87fbc6bceb6641c71553 Mon Sep 17 00:00:00 2001 From: Sam 
Johnson Date: Tue, 10 Sep 2024 12:36:15 -0400 Subject: [PATCH 128/213] bump CI From dce490849e0f0cc5168ac1693147e04e43b7afb4 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:37:56 -0400 Subject: [PATCH 129/213] whoops --- .github/workflows/check-devnet.yml | 7 +++++++ .github/workflows/check-testnet.yml | 7 ------- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 1a7ae4aa3..65a4c88dc 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -10,6 +10,13 @@ env: CARGO_TERM_COLOR: always jobs: + debug-labels: + name: Debug Labels + runs-on: SubtensorCI + steps: + - name: Debug Labels + run: | + echo "Labels: ${{ toJson(github.event.pull_request.labels) }}" check-spec-version: name: Check spec_version bump runs-on: SubtensorCI diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 09963b152..39847767b 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -10,13 +10,6 @@ env: CARGO_TERM_COLOR: always jobs: - debug-labels: - name: Debug Labels - runs-on: SubtensorCI - steps: - - name: Debug Labels - run: | - echo "Labels: ${{ toJson(github.event.pull_request.labels) }}" check-spec-version: name: Check spec_version bump runs-on: SubtensorCI From 4f6113d876511e59b4ebbc117e76497815ec65c1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:39:27 -0400 Subject: [PATCH 130/213] fix conditional --- .github/workflows/check-testnet.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 39847767b..6c3ebcfdf 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -13,7 +13,7 @@ jobs: check-spec-version: name: Check spec_version bump runs-on: SubtensorCI - if: ${{ github.event.pull_request.labels | 
contains('no-spec-version-bump') == false }} + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-spec-version-bump') }} steps: - name: Dependencies run: | From 5c1d194b6327e29a8a227a409bcefc7ea51484f0 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:39:45 -0400 Subject: [PATCH 131/213] remove label debug step --- .github/workflows/check-devnet.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 65a4c88dc..1a7ae4aa3 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -10,13 +10,6 @@ env: CARGO_TERM_COLOR: always jobs: - debug-labels: - name: Debug Labels - runs-on: SubtensorCI - steps: - - name: Debug Labels - run: | - echo "Labels: ${{ toJson(github.event.pull_request.labels) }}" check-spec-version: name: Check spec_version bump runs-on: SubtensorCI From 75d5c1660a69bad7f43a0553be43ce22f058e10e Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:43:28 -0400 Subject: [PATCH 132/213] tweak triggers --- .github/workflows/check-devnet.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 1a7ae4aa3..b27542d94 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -3,8 +3,9 @@ name: Devnet Deploy Check on: pull_request: branches: [devnet, devnet-ready] - pull_request_target: types: [labeled, unlabeled] + push: + branches: [devnet, devnet-ready] env: CARGO_TERM_COLOR: always From da7054faef46f8432001d19cf60bd42fb39caa6b Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:46:53 -0400 Subject: [PATCH 133/213] check on-push From 0686dd29c5928dc52c4913cba6eda316b84d129f Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:48:45 -0400 Subject: [PATCH 134/213] fix --- .github/workflows/check-devnet.yml | 4 +--- 
.github/workflows/check-finney.yml | 3 +-- .github/workflows/check-testnet.yml | 3 +-- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index b27542d94..2cb586348 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -3,9 +3,7 @@ name: Devnet Deploy Check on: pull_request: branches: [devnet, devnet-ready] - types: [labeled, unlabeled] - push: - branches: [devnet, devnet-ready] + types: [labeled, unlabeled, synchronize] env: CARGO_TERM_COLOR: always diff --git a/.github/workflows/check-finney.yml b/.github/workflows/check-finney.yml index d6ffd2ba4..318ce85de 100644 --- a/.github/workflows/check-finney.yml +++ b/.github/workflows/check-finney.yml @@ -3,8 +3,7 @@ name: Finney Deploy Check on: pull_request: branches: [finney, main] - pull_request_target: - types: [labeled, unlabeled] + types: [labeled, unlabeled, synchronize] env: CARGO_TERM_COLOR: always diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 6c3ebcfdf..95277c94a 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -3,8 +3,7 @@ name: Testnet Deploy Check on: pull_request: branches: [testnet, testnet-ready] - pull_request_target: - types: [labeled, unlabeled] + types: [labeled, unlabeled, synchronize] env: CARGO_TERM_COLOR: always From f5053817d7c1d02484856f2335b9bf13bbf33f26 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 12:49:43 -0400 Subject: [PATCH 135/213] remove merged_script.sh --- scripts/merged_script.sh | 102 --------------------------------------- 1 file changed, 102 deletions(-) delete mode 100644 scripts/merged_script.sh diff --git a/scripts/merged_script.sh b/scripts/merged_script.sh deleted file mode 100644 index 030fd8efa..000000000 --- a/scripts/merged_script.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash - - -usage() { - echo "Usage: $0 -v " - exit 1 -} - -while 
getopts ":v:" opt; do - case ${opt} in - v) - version=${OPTARG} - ;; - \?) - usage - ;; - esac -done - - -if [ -z "$version" ]; then - usage -fi - -echo "[INFO] Starting the process for version $version" - - -echo "[INFO] Building the project with wasm-pack" - -# Build command -wasm-pack build --release - -# Wasm Blob file path -artifact_path="pkg/my_wasm_project_bg.wasm" - - -if [ ! -f "$artifact_path" ]; then - echo "[ERROR] Artifact not found: $artifact_path" - exit 1 -fi - -echo "[INFO] Artifact found at $artifact_path" - -git checkout main - -git fetch origin --tags - -latest_tag=$(git describe --tags $(git rev-list --tags --max-count=1)) - -if [ -z "$latest_tag" ]; then - echo "No tags found in the repository." - exit 1 -fi - -commits=$(git log ${latest_tag}..main --pretty=format:"%H %ci %s" --reverse) - -# List PRs merged since the last release -pr_list=() -while read -r hash date time timezone message; do - if [[ $message =~ Merge\ pull\ request\ \#([0-9]+) ]]; then - pr_number=${BASH_REMATCH[1]} - pr_list+=("$pr_number") - fi -done <<< "$commits" - - -if [ ${#pr_list[@]} -eq 0 ]; then - echo "[ERROR] No PRs found since the last release" - exit 1 -fi - -echo -e "[INFO] PRs found: ${pr_list[*]}" - - -get_pr_title() { - local pr_number=$1 - gh pr view "$pr_number" --json title --jq '.title' -} - - -formatted_pr_list="PR numbers and their titles merged into main since the last tag ($latest_tag):\n\n" - -for pr_number in "${pr_list[@]}"; do - pr_title=$(get_pr_title "$pr_number") - formatted_pr_list+="* PR #${pr_number} - ${pr_title}\n" -done - - -formatted_pr_list=$(printf "%b" "$formatted_pr_list") - -echo -e "$formatted_pr_list" - - -echo "[INFO] Creating a new release with version $version and uploading the artifact" -if gh release create "$version" "$artifact_path" --title "$version" --notes "$formatted_pr_list"; then - echo "[INFO] Release created successfully" -else - echo "[ERROR] Failed to create the release" - exit 1 -fi - -echo "[INFO] Release 
$version created and tagged successfully." From 9b233cbb84ca368210034c18519769b3f9e57d36 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 10 Sep 2024 17:43:14 -0400 Subject: [PATCH 136/213] fix workspace lint error --- build.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/build.rs b/build.rs index 10cac0ea7..4739a8aca 100644 --- a/build.rs +++ b/build.rs @@ -75,7 +75,9 @@ fn collect_rust_files(dir: &Path) -> Vec { let mut rust_files = Vec::new(); for entry in WalkDir::new(dir) { - let entry = entry.unwrap(); + let Ok(entry) = entry else { + continue; + }; let path = entry.path(); // Skip any path that contains "target" directory From 5ba62d0d6f0bdd91441306a89e3fef16714c7b36 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Thu, 19 Sep 2024 11:49:01 -0400 Subject: [PATCH 137/213] Update to Polkadot SDK 1.16.0-rc1, wip: benchmarks don't work --- node/src/service.rs | 39 --------------------------------------- 1 file changed, 39 deletions(-) diff --git a/node/src/service.rs b/node/src/service.rs index 039bd942e..253227430 100644 --- a/node/src/service.rs +++ b/node/src/service.rs @@ -17,50 +17,11 @@ use std::{sync::Arc, time::Duration}; /// imported and generated. const GRANDPA_JUSTIFICATION_PERIOD: u32 = 512; -<<<<<<< HEAD pub(crate) type FullClient = sc_service::TFullClient< Block, RuntimeApi, WasmExecutor >; -======= -// Our native executor instance. -pub struct ExecutorDispatch; - -// appeasing the compiler, this is a no-op -impl HostFunctions for ExecutorDispatch { - fn host_functions() -> Vec<&'static dyn Function> { - vec![] - } - - fn register_static(_registry: &mut T) -> core::result::Result<(), T::Error> - where - T: HostFunctionRegistry, - { - Ok(()) - } -} - -impl sc_executor::NativeExecutionDispatch for ExecutorDispatch { - // Always enable runtime benchmark host functions, the genesis state - // was built with them so we're stuck with them forever. 
- // - // They're just a noop, never actually get used if the runtime was not compiled with - // `runtime-benchmarks`. - type ExtendHostFunctions = frame_benchmarking::benchmarking::HostFunctions; - - fn dispatch(method: &str, data: &[u8]) -> Option> { - node_subtensor_runtime::api::dispatch(method, data) - } - - fn native_version() -> sc_executor::NativeVersion { - node_subtensor_runtime::native_version() - } -} - -pub(crate) type FullClient = - sc_service::TFullClient>; ->>>>>>> f37d9c1b (fix node dep on runtime-benchmarks) type FullBackend = sc_service::TFullBackend; type FullSelectChain = sc_consensus::LongestChain; From 457b497eba5c308a3aecc4848d283d45b17a05b5 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Fri, 20 Sep 2024 19:43:33 -0400 Subject: [PATCH 138/213] Format --- node/src/command.rs | 26 +++++++++---------- node/src/service.rs | 39 +++++++++++++---------------- pallets/subtensor/src/benchmarks.rs | 6 ----- runtime/src/lib.rs | 24 +++++++++--------- 4 files changed, 42 insertions(+), 53 deletions(-) diff --git a/node/src/command.rs b/node/src/command.rs index 3c85937b0..0f3914239 100644 --- a/node/src/command.rs +++ b/node/src/command.rs @@ -15,11 +15,9 @@ pub use sp_keyring::Sr25519Keyring; use node_subtensor_runtime::Block; use sc_cli::SubstrateCli; -use sc_service::{config::{ - ExecutorConfiguration, - RpcConfiguration - }, - Configuration, PartialComponents +use sc_service::{ + config::{ExecutorConfiguration, RpcConfiguration}, + Configuration, PartialComponents, }; impl SubstrateCli for Cli { @@ -152,9 +150,9 @@ pub fn run() -> sc_cli::Result<()> { ); } - cmd.run_with_spec::, ()>(Some( - config.chain_spec, - )) + cmd.run_with_spec::, ()>(Some( + config.chain_spec, + )) } BenchmarkCmd::Block(cmd) => { let PartialComponents { client, .. 
} = service::new_partial(&config)?; @@ -211,11 +209,11 @@ pub fn run() -> sc_cli::Result<()> { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run::(&config)) } - None => { - let runner = cli.create_runner(&cli.run)?; - runner.run_node_until_exit(|config| async move { + None => { + let runner = cli.create_runner(&cli.run)?; + runner.run_node_until_exit(|config| async move { let config = override_default_heap_pages(config, 60_000); - match config.network.network_backend { + match config.network.network_backend { sc_network::config::NetworkBackendType::Libp2p => service::new_full::< sc_network::NetworkWorker< node_subtensor_runtime::opaque::Block, @@ -227,8 +225,8 @@ pub fn run() -> sc_cli::Result<()> { service::new_full::(config) .map_err(sc_cli::Error::Service), } - }) - }, + }) + } } } diff --git a/node/src/service.rs b/node/src/service.rs index 253227430..cc0bf2862 100644 --- a/node/src/service.rs +++ b/node/src/service.rs @@ -17,11 +17,8 @@ use std::{sync::Arc, time::Duration}; /// imported and generated. 
const GRANDPA_JUSTIFICATION_PERIOD: u32 = 512; -pub(crate) type FullClient = sc_service::TFullClient< - Block, - RuntimeApi, - WasmExecutor ->; +pub(crate) type FullClient = + sc_service::TFullClient>; type FullBackend = sc_service::TFullBackend; type FullSelectChain = sc_consensus::LongestChain; @@ -134,7 +131,7 @@ pub fn new_partial( pub fn new_full< N: sc_network::NetworkBackend::Hash>, >( - config: Configuration + config: Configuration, ) -> Result { let sc_service::PartialComponents { client, @@ -147,12 +144,12 @@ pub fn new_full< other: (block_import, grandpa_link, mut telemetry), } = new_partial(&config)?; - let mut net_config = sc_network::config::FullNetworkConfiguration::< - Block, - ::Hash, - N, - >::new(&config.network, config.prometheus_registry().cloned()); - let metrics = N::register_notification_metrics(config.prometheus_registry()); + let mut net_config = sc_network::config::FullNetworkConfiguration::< + Block, + ::Hash, + N, + >::new(&config.network, config.prometheus_registry().cloned()); + let metrics = N::register_notification_metrics(config.prometheus_registry()); let grandpa_protocol_name = sc_consensus_grandpa::protocol_standard_name( &client @@ -164,12 +161,12 @@ pub fn new_full< ); let peer_store_handle = net_config.peer_store_handle(); - let (grandpa_protocol_config, grandpa_notification_service) = - sc_consensus_grandpa::grandpa_peers_set_config::<_, N>( - grandpa_protocol_name.clone(), - metrics.clone(), - peer_store_handle, - ); + let (grandpa_protocol_config, grandpa_notification_service) = + sc_consensus_grandpa::grandpa_peers_set_config::<_, N>( + grandpa_protocol_name.clone(), + metrics.clone(), + peer_store_handle, + ); net_config.add_notification_protocol(grandpa_protocol_config); let warp_sync = Arc::new(sc_consensus_grandpa::warp_proof::NetworkProvider::new( @@ -187,9 +184,9 @@ pub fn new_full< spawn_handle: task_manager.spawn_handle(), import_queue, block_announce_validator_builder: None, - warp_sync_config: 
Some(WarpSyncConfig::WithProvider(warp_sync)), - block_relay: None, - metrics, + warp_sync_config: Some(WarpSyncConfig::WithProvider(warp_sync)), + block_relay: None, + metrics, })?; if config.offchain_worker.enabled { diff --git a/pallets/subtensor/src/benchmarks.rs b/pallets/subtensor/src/benchmarks.rs index c9ff37686..4915bb3ac 100644 --- a/pallets/subtensor/src/benchmarks.rs +++ b/pallets/subtensor/src/benchmarks.rs @@ -311,14 +311,8 @@ benchmarks! { let amount: u64 = 1; let amount_to_be_staked = 100_000_000_000_000u64; Subtensor::::add_balance_to_coldkey_account(&coldkey.clone(), amount_to_be_staked); -<<<<<<< HEAD assert_ok!(Subtensor::::register_network(RawOrigin::Signed(coldkey.clone()).into())); }: dissolve_network(RawOrigin::Root, coldkey.clone(), 1) -======= - assert_ok!(Subtensor::::register_network(RawOrigin::Signed(coldkey.clone()).into(), None)); - let c1 = coldkey.clone(); - }: dissolve_network(RawOrigin::Signed(c1), coldkey, 1) ->>>>>>> 3404a5bc (fix benchmark_dissolve_network) // swap_hotkey { diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 1cd1613ed..f0f73e1d0 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -21,7 +21,7 @@ use frame_support::{ fungible::{ DecreaseIssuance, HoldConsideration, Imbalance as FungibleImbalance, IncreaseIssuance, }, - Contains, LinearStoragePrice, OnUnbalanced + Contains, LinearStoragePrice, OnUnbalanced, }, }; use frame_system::{EnsureNever, EnsureRoot, EnsureRootWithSuccess, RawOrigin}; @@ -1190,19 +1190,19 @@ impl_runtime_apis! 
{ } } - impl sp_genesis_builder::GenesisBuilder for Runtime { - fn build_state(config: Vec) -> sp_genesis_builder::Result { - build_state::(config) - } + impl sp_genesis_builder::GenesisBuilder for Runtime { + fn build_state(config: Vec) -> sp_genesis_builder::Result { + build_state::(config) + } - fn get_preset(id: &Option) -> Option> { - get_preset::(id, |_| None) - } + fn get_preset(id: &Option) -> Option> { + get_preset::(id, |_| None) + } - fn preset_names() -> Vec { - vec![] - } - } + fn preset_names() -> Vec { + vec![] + } + } impl sp_transaction_pool::runtime_api::TaggedTransactionQueue for Runtime { fn validate_transaction( From 001bec23c14ad2f27b4b6dd9fab3e6265252c168 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Fri, 20 Sep 2024 19:51:10 -0400 Subject: [PATCH 139/213] Remove unused import in tests --- pallets/commitments/src/tests.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/pallets/commitments/src/tests.rs b/pallets/commitments/src/tests.rs index 7449003f4..058b5faf0 100644 --- a/pallets/commitments/src/tests.rs +++ b/pallets/commitments/src/tests.rs @@ -1,6 +1,5 @@ #![allow(non_camel_case_types)] -use super::*; use crate as pallet_commitments; use frame_support::derive_impl; use frame_support::traits::ConstU64; From bf5e375017432a89c071701f6a0641f6d8e919cf Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Fri, 20 Sep 2024 20:08:11 -0400 Subject: [PATCH 140/213] Revert changes pulled from main --- .github/workflows/check-finney.yml | 6 +- .../subtensor/src/coinbase/run_coinbase.rs | 72 +- pallets/subtensor/src/lib.rs | 37 +- .../migrate_fix_pending_emission.rs | 501 ------- pallets/subtensor/src/migrations/mod.rs | 1 - pallets/subtensor/src/rpc_info/neuron_info.rs | 11 +- pallets/subtensor/src/staking/add_stake.rs | 6 +- pallets/subtensor/src/staking/helpers.rs | 6 - pallets/subtensor/src/staking/remove_stake.rs | 5 - pallets/subtensor/src/subnets/uids.rs | 2 +- pallets/subtensor/src/subnets/weights.rs | 4 +- 
pallets/subtensor/src/swap/swap_coldkey.rs | 21 +- pallets/subtensor/src/swap/swap_hotkey.rs | 44 +- pallets/subtensor/src/utils/try_state.rs | 2 +- pallets/subtensor/tests/children.rs | 191 --- pallets/subtensor/tests/coinbase.rs | 1296 +---------------- pallets/subtensor/tests/migration.rs | 159 +- pallets/subtensor/tests/staking.rs | 103 -- pallets/subtensor/tests/swap_coldkey.rs | 38 - pallets/subtensor/tests/swap_hotkey.rs | 89 -- pallets/subtensor/tests/weights.rs | 6 +- 21 files changed, 74 insertions(+), 2526 deletions(-) delete mode 100644 pallets/subtensor/src/migrations/migrate_fix_pending_emission.rs diff --git a/.github/workflows/check-finney.yml b/.github/workflows/check-finney.yml index 318ce85de..c24f7587d 100644 --- a/.github/workflows/check-finney.yml +++ b/.github/workflows/check-finney.yml @@ -31,7 +31,7 @@ jobs: - name: Check that spec_version has been bumped run: | - spec_version=$(PATH=$PATH:$HOME/.cargo/.bin substrate-spec-version ${{ vars.NUCLEUS_ARCHIVE_NODE }} | tr -d '\n') + spec_version=$(PATH=$PATH:$HOME/.cargo/.bin substrate-spec-version wss://entrypoint-finney.opentensor.ai:443 | tr -d '\n') echo "network spec_version: $spec_version" : ${spec_version:?bad spec version} local_spec_version=$(cargo run -p node-subtensor-runtime --bin spec_version | tr -d '\n') @@ -50,6 +50,6 @@ jobs: uses: "paritytech/try-runtime-gha@v0.1.0" with: runtime-package: "node-subtensor-runtime" - node-uri: ${{ vars.NUCLEUS_ARCHIVE_NODE }} + node-uri: "wss://entrypoint-finney.opentensor.ai:443" checks: "pre-and-post" - extra-args: "--disable-spec-version-check --no-weight-warnings" + extra-args: "--disable-spec-version-check --no-weight-warnings" \ No newline at end of file diff --git a/pallets/subtensor/src/coinbase/run_coinbase.rs b/pallets/subtensor/src/coinbase/run_coinbase.rs index 4486356a4..751767db9 100644 --- a/pallets/subtensor/src/coinbase/run_coinbase.rs +++ b/pallets/subtensor/src/coinbase/run_coinbase.rs @@ -48,9 +48,6 @@ impl Pallet { // --- 3. 
Drain the subnet block emission and accumulate it as subnet emission, which increases until the tempo is reached in #4. // subnet_blockwise_emission -> subnet_pending_emission for netuid in subnets.clone().iter() { - if *netuid == 0 { - continue; - } // --- 3.1 Get the network's block-wise emission amount. // This value is newly minted TAO which has not reached staking accounts yet. let subnet_blockwise_emission: u64 = EmissionValues::::get(*netuid); @@ -249,20 +246,6 @@ impl Pallet { }); } - /// Calculates the nonviable stake for a nominator. - /// The nonviable stake is the stake that was added by the nominator since the last emission drain. - /// This stake will not receive emission until the next emission drain. - /// Note: if the stake delta is below zero, we return zero. We don't allow more stake than the nominator has. - pub fn get_nonviable_stake(hotkey: &T::AccountId, nominator: &T::AccountId) -> u64 { - let stake_delta = StakeDeltaSinceLastEmissionDrain::::get(hotkey, nominator); - if stake_delta.is_negative() { - 0 - } else { - // Should never fail the into, but we handle it anyway. - stake_delta.try_into().unwrap_or(u64::MAX) - } - } - //. --- 4. Drains the accumulated hotkey emission through to the nominators. The hotkey takes a proportion of the emission. /// The remainder is drained through to the nominators keeping track of the last stake increase event to ensure that the hotkey does not /// gain more emission than it's stake since the last drain. @@ -282,67 +265,68 @@ impl Pallet { // --- 1.0 Drain the hotkey emission. PendingdHotkeyEmission::::insert(hotkey, 0); - // --- 2 Update the block value to the current block number. + // --- 2 Retrieve the last time this hotkey's emissions were drained. + let last_emission_drain: u64 = LastHotkeyEmissionDrain::::get(hotkey); + + // --- 3 Update the block value to the current block number. 
LastHotkeyEmissionDrain::::insert(hotkey, block_number); - // --- 3 Retrieve the total stake for the hotkey from all nominations. + // --- 4 Retrieve the total stake for the hotkey from all nominations. let total_hotkey_stake: u64 = Self::get_total_stake_for_hotkey(hotkey); - // --- 4 Calculate the emission take for the hotkey. + // --- 5 Calculate the emission take for the hotkey. let take_proportion: I64F64 = I64F64::from_num(Delegates::::get(hotkey)) .saturating_div(I64F64::from_num(u16::MAX)); let hotkey_take: u64 = (take_proportion.saturating_mul(I64F64::from_num(emission))).to_num::(); - // --- 5 Compute the remaining emission after deducting the hotkey's take. + // --- 6 Compute the remaining emission after deducting the hotkey's take. let emission_minus_take: u64 = emission.saturating_sub(hotkey_take); - // --- 6 Calculate the remaining emission after the hotkey's take. + // --- 7 Calculate the remaining emission after the hotkey's take. let mut remainder: u64 = emission_minus_take; - // --- 7 Iterate over each nominator and get all viable stake. + // --- 8 Iterate over each nominator and get all viable stake. let mut total_viable_nominator_stake: u64 = total_hotkey_stake; - for (nominator, _) in Stake::::iter_prefix(hotkey) { - let nonviable_nomintaor_stake = Self::get_nonviable_stake(hotkey, &nominator); - - total_viable_nominator_stake = - total_viable_nominator_stake.saturating_sub(nonviable_nomintaor_stake); + for (nominator, nominator_stake) in Stake::::iter_prefix(hotkey) { + if LastAddStakeIncrease::::get(hotkey, nominator) > last_emission_drain { + total_viable_nominator_stake = + total_viable_nominator_stake.saturating_sub(nominator_stake); + } } - // --- 8 Iterate over each nominator. + // --- 9 Iterate over each nominator. if total_viable_nominator_stake != 0 { for (nominator, nominator_stake) in Stake::::iter_prefix(hotkey) { - // --- 9 Skip emission for any stake the was added by the nominator since the last emission drain. 
- // This means the nominator will get emission on existing stake, but not on new stake, until the next emission drain. - let viable_nominator_stake = - nominator_stake.saturating_sub(Self::get_nonviable_stake(hotkey, &nominator)); + // --- 10 Check if the stake was manually increased by the user since the last emission drain for this hotkey. + // If it was, skip this nominator as they will not receive their proportion of the emission. + if LastAddStakeIncrease::::get(hotkey, nominator.clone()) > last_emission_drain { + continue; + } - // --- 10 Calculate this nominator's share of the emission. - let nominator_emission: I64F64 = I64F64::from_num(viable_nominator_stake) + // --- 11 Calculate this nominator's share of the emission. + let nominator_emission: I64F64 = I64F64::from_num(emission_minus_take) + .saturating_mul(I64F64::from_num(nominator_stake)) .checked_div(I64F64::from_num(total_viable_nominator_stake)) - .unwrap_or(I64F64::from_num(0)) - .saturating_mul(I64F64::from_num(emission_minus_take)); + .unwrap_or(I64F64::from_num(0)); - // --- 11 Increase the stake for the nominator. + // --- 12 Increase the stake for the nominator. Self::increase_stake_on_coldkey_hotkey_account( &nominator, hotkey, nominator_emission.to_num::(), ); - // --- 12* Record event and Subtract the nominator's emission from the remainder. + // --- 13* Record event and Subtract the nominator's emission from the remainder. total_new_tao = total_new_tao.saturating_add(nominator_emission.to_num::()); remainder = remainder.saturating_sub(nominator_emission.to_num::()); } } - // --- 13 Finally, add the stake to the hotkey itself, including its take and the remaining emission. + // --- 14 Finally, add the stake to the hotkey itself, including its take and the remaining emission. let hotkey_new_tao: u64 = hotkey_take.saturating_add(remainder); Self::increase_stake_on_hotkey_account(hotkey, hotkey_new_tao); - // --- 14 Reset the stake delta for the hotkey. 
- let _ = StakeDeltaSinceLastEmissionDrain::::clear_prefix(hotkey, u32::MAX, None); - // --- 15 Record new tao creation event and return the amount created. total_new_tao = total_new_tao.saturating_add(hotkey_new_tao); total_new_tao @@ -398,4 +382,4 @@ impl Pallet { let remainder = block_plus_netuid.rem_euclid(tempo_plus_one); (tempo as u64).saturating_sub(remainder) } -} +} \ No newline at end of file diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 6c8da7a69..28b76d6c6 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -226,11 +226,6 @@ pub mod pallet { 0 } #[pallet::type_value] - /// Default stake delta. - pub fn DefaultStakeDelta() -> i128 { - 0 - } - #[pallet::type_value] /// Default stakes per interval. pub fn DefaultStakesPerInterval() -> (u64, u64) { (0, 0) @@ -775,18 +770,6 @@ pub mod pallet { DefaultAccountTake, >; #[pallet::storage] - /// Map ( hot, cold ) --> stake: i128 | Stake added/removed since last emission drain. - pub type StakeDeltaSinceLastEmissionDrain = StorageDoubleMap< - _, - Blake2_128Concat, - T::AccountId, - Identity, - T::AccountId, - i128, - ValueQuery, - DefaultStakeDelta, - >; - #[pallet::storage] /// DMAP ( parent, netuid ) --> Vec<(proportion,child)> pub type ChildKeys = StorageDoubleMap< _, @@ -1267,7 +1250,7 @@ pub mod pallet { /// Returns the transaction priority for setting weights. 
pub fn get_priority_set_weights(hotkey: &T::AccountId, netuid: u16) -> u64 { if let Ok(uid) = Self::get_uid_for_net_and_hotkey(netuid, hotkey) { - let _stake = Self::get_stake_for_hotkey_on_subnet(hotkey, netuid); + let _stake = Self::get_total_stake_for_hotkey(hotkey); let current_block_number: u64 = Self::get_current_block_as_u64(); let default_priority: u64 = current_block_number.saturating_sub(Self::get_last_update_for_uid(netuid, uid)); @@ -1277,9 +1260,9 @@ pub mod pallet { } /// Is the caller allowed to set weights - pub fn check_weights_min_stake(hotkey: &T::AccountId, netuid: u16) -> bool { + pub fn check_weights_min_stake(hotkey: &T::AccountId) -> bool { // Blacklist weights transactions for low stake peers. - Self::get_stake_for_hotkey_on_subnet(hotkey, netuid) >= Self::get_weights_min_stake() + Self::get_total_stake_for_hotkey(hotkey) >= Self::get_weights_min_stake() } /// Helper function to check if register is allowed @@ -1372,8 +1355,8 @@ where Pallet::::get_priority_set_weights(who, netuid) } - pub fn check_weights_min_stake(who: &T::AccountId, netuid: u16) -> bool { - Pallet::::check_weights_min_stake(who, netuid) + pub fn check_weights_min_stake(who: &T::AccountId) -> bool { + Pallet::::check_weights_min_stake(who) } } @@ -1411,7 +1394,7 @@ where ) -> TransactionValidity { match call.is_sub_type() { Some(Call::commit_weights { netuid, .. }) => { - if Self::check_weights_min_stake(who, *netuid) { + if Self::check_weights_min_stake(who) { let priority: u64 = Self::get_priority_set_weights(who, *netuid); Ok(ValidTransaction { priority, @@ -1423,7 +1406,7 @@ where } } Some(Call::reveal_weights { netuid, .. }) => { - if Self::check_weights_min_stake(who, *netuid) { + if Self::check_weights_min_stake(who) { let priority: u64 = Self::get_priority_set_weights(who, *netuid); Ok(ValidTransaction { priority, @@ -1435,7 +1418,7 @@ where } } Some(Call::set_weights { netuid, .. 
}) => { - if Self::check_weights_min_stake(who, *netuid) { + if Self::check_weights_min_stake(who) { let priority: u64 = Self::get_priority_set_weights(who, *netuid); Ok(ValidTransaction { priority, @@ -1447,7 +1430,7 @@ where } } Some(Call::set_root_weights { netuid, hotkey, .. }) => { - if Self::check_weights_min_stake(hotkey, *netuid) { + if Self::check_weights_min_stake(hotkey) { let priority: u64 = Self::get_priority_set_weights(hotkey, *netuid); Ok(ValidTransaction { priority, @@ -1677,4 +1660,4 @@ impl CollectiveInterface for () { fn add_vote(_: &T, _: H, _: P, _: bool) -> Result { Ok(true) } -} +} \ No newline at end of file diff --git a/pallets/subtensor/src/migrations/migrate_fix_pending_emission.rs b/pallets/subtensor/src/migrations/migrate_fix_pending_emission.rs deleted file mode 100644 index b5e833aeb..000000000 --- a/pallets/subtensor/src/migrations/migrate_fix_pending_emission.rs +++ /dev/null @@ -1,501 +0,0 @@ -use super::*; -use alloc::string::String; -use frame_support::{traits::Get, weights::Weight}; -use sp_core::crypto::Ss58Codec; -use sp_runtime::AccountId32; - -fn get_account_id_from_ss58(ss58_str: &str) -> Result { - let account = - AccountId32::from_ss58check(ss58_str).map_err(|_| codec::Error::from("Invalid SS58"))?; - let onchain_account = T::AccountId::decode(&mut account.as_ref())?; - - Ok(onchain_account) -} - -/** - * Migrates the pending emissions from the old hotkey to the new hotkey. - * Also migrates the stake entry of (old_hotkey, 0x000) to the pending emissions of the new hotkey. 
- */ -fn migrate_pending_emissions_including_null_stake( - old_hotkey: &T::AccountId, - new_hotkey: &T::AccountId, - migration_account: &T::AccountId, -) -> Weight { - let mut weight = T::DbWeight::get().reads(0); - let null_account = &DefaultAccount::::get(); - weight.saturating_accrue(T::DbWeight::get().reads(1)); - - // Get the pending emissions for the OLD hotkey - let pending_emissions_old: u64 = PendingdHotkeyEmission::::get(old_hotkey); - PendingdHotkeyEmission::::remove(old_hotkey); - weight.saturating_accrue(T::DbWeight::get().reads(1)); - - // Get the stake for the 0x000 key - let null_stake = Stake::::get(old_hotkey, null_account); - weight.saturating_accrue(T::DbWeight::get().reads(1)); - // Remove - Stake::::remove(old_hotkey, null_account); - weight.saturating_accrue(T::DbWeight::get().writes(1)); - - let new_total_coldkey_stake = - TotalColdkeyStake::::get(null_account).saturating_sub(null_stake); - if new_total_coldkey_stake == 0 { - TotalColdkeyStake::::remove(null_account); - } else { - TotalColdkeyStake::::insert(null_account, new_total_coldkey_stake); - } - weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 1)); - - let new_staking_hotkeys = StakingHotkeys::::get(null_account); - let new_staking_hotkeys = new_staking_hotkeys - .into_iter() - .filter(|hk| hk != old_hotkey) - .collect::>(); - StakingHotkeys::::insert(null_account, new_staking_hotkeys); - weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 1)); - - // Insert the stake from the null account to the MIGRATION account under the OLD hotkey - Stake::::insert(old_hotkey, migration_account, null_stake); - TotalColdkeyStake::::insert( - migration_account, - TotalColdkeyStake::::get(migration_account).saturating_add(null_stake), - ); - let mut new_staking_hotkeys = StakingHotkeys::::get(migration_account); - if !new_staking_hotkeys.contains(old_hotkey) { - new_staking_hotkeys.push(old_hotkey.clone()); - } - StakingHotkeys::::insert(migration_account, new_staking_hotkeys); - 
weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 3)); - - // Get the pending emissions for the NEW hotkey - let pending_emissions_new: u64 = PendingdHotkeyEmission::::get(new_hotkey); - weight.saturating_accrue(T::DbWeight::get().reads(1)); - - // Add the pending emissions for the new hotkey and the old hotkey - PendingdHotkeyEmission::::insert( - new_hotkey, - pending_emissions_new.saturating_add(pending_emissions_old), - ); - weight.saturating_accrue(T::DbWeight::get().writes(1)); - - weight -} - -// This executes the migration to fix the pending emissions -// This also migrates the stake entry of (old_hotkey, 0x000) to the Migration Account for -// both the old hotkeys. -pub fn do_migrate_fix_pending_emission() -> Weight { - // Initialize the weight with one read operation. - let mut weight = T::DbWeight::get().reads(1); - - let taostats_old_hotkey = "5Hddm3iBFD2GLT5ik7LZnT3XJUnRnN8PoeCFgGQgawUVKNm8"; - let taostats_new_hotkey = "5GKH9FPPnWSUoeeTJp19wVtd84XqFW4pyK2ijV2GsFbhTrP1"; - let migration_coldkey = "5GeRjQYsobRWFnrbBmGe5ugme3rfnDVF69N45YtdBpUFsJG8"; - - let taostats_old_hk_account = get_account_id_from_ss58::(taostats_old_hotkey); - let taostats_new_hk_account = get_account_id_from_ss58::(taostats_new_hotkey); - let migration_ck_account = get_account_id_from_ss58::(migration_coldkey); - - match ( - taostats_old_hk_account, - taostats_new_hk_account, - migration_ck_account.clone(), - ) { - (Ok(taostats_old_hk_acct), Ok(taostats_new_hk_acct), Ok(migration_ck_account)) => { - weight.saturating_accrue(migrate_pending_emissions_including_null_stake::( - &taostats_old_hk_acct, - &taostats_new_hk_acct, - &migration_ck_account, - )); - log::info!("Migrated pending emissions from taostats old hotkey to new hotkey"); - } - _ => { - log::warn!("Failed to get account id from ss58 for taostats hotkeys"); - return weight; - } - } - - let datura_old_hotkey = "5FKstHjZkh4v3qAMSBa1oJcHCLjxYZ8SNTSz1opTv4hR7gVB"; - let datura_new_hotkey = 
"5GP7c3fFazW9GXK8Up3qgu2DJBk8inu4aK9TZy3RuoSWVCMi"; - - let datura_old_hk_account = get_account_id_from_ss58::(datura_old_hotkey); - let datura_new_hk_account = get_account_id_from_ss58::(datura_new_hotkey); - - match ( - datura_old_hk_account, - datura_new_hk_account, - migration_ck_account, - ) { - (Ok(datura_old_hk_acct), Ok(datura_new_hk_acct), Ok(migration_ck_account)) => { - weight.saturating_accrue(migrate_pending_emissions_including_null_stake::( - &datura_old_hk_acct, - &datura_new_hk_acct, - &migration_ck_account, - )); - log::info!("Migrated pending emissions from datura old hotkey to new hotkey"); - } - _ => { - log::warn!("Failed to get account id from ss58 for datura hotkeys"); - return weight; - } - } - - weight -} - -/// Collection of storage item formats from the previous storage version. -/// -/// Required so we can read values in the v0 storage format during the migration. -#[cfg(feature = "try-runtime")] -mod v0 { - use subtensor_macros::freeze_struct; - - #[freeze_struct("2228babfc0580c62")] - #[derive(codec::Encode, codec::Decode, Clone, PartialEq, Debug)] - pub struct OldStorage { - pub total_issuance_before: u64, - pub total_stake_before: u64, - pub expected_taostats_new_hk_pending_emission: u64, - pub expected_datura_new_hk_pending_emission: u64, - pub old_migration_stake_taostats: u64, - pub old_null_stake_taostats: u64, - pub old_migration_stake_datura: u64, - pub old_null_stake_datura: u64, - } -} - -impl Pallet { - #[cfg(feature = "try-runtime")] - fn check_null_stake_invariants( - old_storage: v0::OldStorage, - ) -> Result<(), sp_runtime::TryRuntimeError> { - let null_account = &DefaultAccount::::get(); - - let taostats_old_hotkey = "5Hddm3iBFD2GLT5ik7LZnT3XJUnRnN8PoeCFgGQgawUVKNm8"; - let taostats_new_hotkey = "5GKH9FPPnWSUoeeTJp19wVtd84XqFW4pyK2ijV2GsFbhTrP1"; - let migration_coldkey = "5GeRjQYsobRWFnrbBmGe5ugme3rfnDVF69N45YtdBpUFsJG8"; - - let taostats_old_hk_account = &get_account_id_from_ss58::(taostats_old_hotkey); - let 
taostats_new_hk_account = &get_account_id_from_ss58::(taostats_new_hotkey); - let migration_ck_account = &get_account_id_from_ss58::(migration_coldkey); - - let old = old_storage; - let null_stake_total = old - .old_null_stake_taostats - .saturating_add(old.old_null_stake_datura) - .saturating_add(old.old_migration_stake_taostats) - .saturating_add(old.old_migration_stake_datura); - - match ( - taostats_old_hk_account, - taostats_new_hk_account, - migration_ck_account, - ) { - (Ok(taostats_old_hk_acct), Ok(taostats_new_hk_acct), Ok(migration_ck_acct)) => { - // Check the pending emission is added to new the TaoStats hotkey - assert_eq!( - PendingdHotkeyEmission::::get(taostats_new_hk_acct), - old.expected_taostats_new_hk_pending_emission - ); - - assert_eq!(PendingdHotkeyEmission::::get(taostats_old_hk_acct), 0); - - assert_eq!(Stake::::get(taostats_old_hk_acct, null_account), 0); - - assert!(StakingHotkeys::::get(migration_ck_acct).contains(taostats_old_hk_acct)); - - assert_eq!( - Self::get_stake_for_coldkey_and_hotkey(null_account, taostats_old_hk_acct), - 0 - ); - - // Check the total hotkey stake is the same - assert_eq!( - TotalHotkeyStake::::get(taostats_old_hk_acct), - old.old_null_stake_taostats - .saturating_add(old.old_migration_stake_taostats) - ); - - let new_null_stake_taostats = - Self::get_stake_for_coldkey_and_hotkey(migration_ck_acct, taostats_old_hk_acct); - - assert_eq!( - new_null_stake_taostats, - old.old_null_stake_taostats - .saturating_add(old.old_migration_stake_taostats) - ); - } - _ => { - log::warn!("Failed to get account id from ss58 for taostats hotkeys"); - return Err("Failed to get account id from ss58 for taostats hotkeys".into()); - } - } - - let datura_old_hotkey = "5FKstHjZkh4v3qAMSBa1oJcHCLjxYZ8SNTSz1opTv4hR7gVB"; - let datura_new_hotkey = "5GP7c3fFazW9GXK8Up3qgu2DJBk8inu4aK9TZy3RuoSWVCMi"; - - let datura_old_hk_account = &get_account_id_from_ss58::(datura_old_hotkey); - let datura_new_hk_account = 
&get_account_id_from_ss58::(datura_new_hotkey); - - match ( - datura_old_hk_account, - datura_new_hk_account, - migration_ck_account, - ) { - (Ok(datura_old_hk_acct), Ok(datura_new_hk_acct), Ok(migration_ck_acct)) => { - // Check the pending emission is added to new Datura hotkey - assert_eq!( - crate::PendingdHotkeyEmission::::get(datura_new_hk_acct), - old.expected_datura_new_hk_pending_emission - ); - - // Check the pending emission is removed from old ones - assert_eq!(PendingdHotkeyEmission::::get(datura_old_hk_acct), 0); - - // Check the stake entry is removed - assert_eq!(Stake::::get(datura_old_hk_acct, null_account), 0); - - assert!(StakingHotkeys::::get(migration_ck_acct).contains(datura_old_hk_acct)); - - assert_eq!( - Self::get_stake_for_coldkey_and_hotkey(null_account, datura_old_hk_acct), - 0 - ); - - // Check the total hotkey stake is the same - assert_eq!( - TotalHotkeyStake::::get(datura_old_hk_acct), - old.old_null_stake_datura - .saturating_add(old.old_migration_stake_datura) - ); - - let new_null_stake_datura = - Self::get_stake_for_coldkey_and_hotkey(migration_ck_acct, datura_old_hk_acct); - - assert_eq!( - new_null_stake_datura, - old.old_null_stake_datura - .saturating_add(old.old_migration_stake_datura) - ); - } - _ => { - log::warn!("Failed to get account id from ss58 for datura hotkeys"); - return Err("Failed to get account id from ss58 for datura hotkeys".into()); - } - } - - match migration_ck_account { - Ok(migration_ck_acct) => { - // Check the migration key has stake with both *old* hotkeys - assert_eq!( - TotalColdkeyStake::::get(migration_ck_acct), - null_stake_total - ); - } - _ => { - log::warn!("Failed to get account id from ss58 for migration coldkey"); - return Err("Failed to get account id from ss58 for migration coldkey".into()); - } - } - - // Check the total issuance is the SAME following migration (no TAO issued) - let expected_total_issuance = old.total_issuance_before; - let expected_total_stake = old.total_stake_before; 
- assert_eq!(Self::get_total_issuance(), expected_total_issuance); - - // Check total stake is the SAME following the migration (no new TAO staked) - assert_eq!(TotalStake::::get(), expected_total_stake); - // Check the total stake maps are updated following the migration (removal of old null_account stake entries) - assert_eq!(TotalColdkeyStake::::get(null_account), 0); - - // Check staking hotkeys is updated - assert_eq!(StakingHotkeys::::get(null_account), vec![]); - - Ok(()) - } -} - -pub mod migration { - use frame_support::pallet_prelude::Weight; - use frame_support::traits::OnRuntimeUpgrade; - use sp_core::Get; - - use super::*; - - pub struct Migration(PhantomData); - - #[cfg(feature = "try-runtime")] - fn get_old_storage_values() -> Result { - log::info!("Getting old storage values for migration"); - - let null_account = &DefaultAccount::::get(); - let migration_coldkey = "5GeRjQYsobRWFnrbBmGe5ugme3rfnDVF69N45YtdBpUFsJG8"; - let migration_account = &get_account_id_from_ss58::(migration_coldkey); - - let taostats_old_hotkey = "5Hddm3iBFD2GLT5ik7LZnT3XJUnRnN8PoeCFgGQgawUVKNm8"; - let taostats_new_hotkey = "5GKH9FPPnWSUoeeTJp19wVtd84XqFW4pyK2ijV2GsFbhTrP1"; - - let taostats_old_hk_account = &get_account_id_from_ss58::(taostats_old_hotkey); - let taostats_new_hk_account = &get_account_id_from_ss58::(taostats_new_hotkey); - - let total_issuance_before = crate::Pallet::::get_total_issuance(); - let mut expected_taostats_new_hk_pending_emission: u64 = 0; - let mut expected_datura_new_hk_pending_emission: u64 = 0; - let (old_null_stake_taostats, old_migration_stake_taostats) = match ( - taostats_old_hk_account, - taostats_new_hk_account, - migration_account, - ) { - (Ok(taostats_old_hk_acct), Ok(taostats_new_hk_acct), Ok(migration_acct)) => { - expected_taostats_new_hk_pending_emission = - expected_taostats_new_hk_pending_emission - .saturating_add(PendingdHotkeyEmission::::get(taostats_old_hk_acct)) - 
.saturating_add(PendingdHotkeyEmission::::get(taostats_new_hk_acct)); - - Ok::<(u64, u64), sp_runtime::TryRuntimeError>(( - crate::Pallet::::get_stake_for_coldkey_and_hotkey( - null_account, - taostats_old_hk_acct, - ), - crate::Pallet::::get_stake_for_coldkey_and_hotkey( - migration_acct, - taostats_old_hk_acct, - ), - )) - } - _ => { - log::warn!("Failed to get account id from ss58 for taostats hotkeys"); - Err("Failed to get account id from ss58 for taostats hotkeys".into()) - } - }?; - - let datura_old_hotkey = "5FKstHjZkh4v3qAMSBa1oJcHCLjxYZ8SNTSz1opTv4hR7gVB"; - let datura_new_hotkey = "5GP7c3fFazW9GXK8Up3qgu2DJBk8inu4aK9TZy3RuoSWVCMi"; - - let datura_old_hk_account = &get_account_id_from_ss58::(datura_old_hotkey); - let datura_new_hk_account = &get_account_id_from_ss58::(datura_new_hotkey); - - let (old_null_stake_datura, old_migration_stake_datura) = match ( - datura_old_hk_account, - datura_new_hk_account, - migration_account, - ) { - (Ok(datura_old_hk_acct), Ok(datura_new_hk_acct), Ok(migration_acct)) => { - expected_datura_new_hk_pending_emission = expected_datura_new_hk_pending_emission - .saturating_add(PendingdHotkeyEmission::::get(datura_old_hk_acct)) - .saturating_add(PendingdHotkeyEmission::::get(datura_new_hk_acct)); - - Ok::<(u64, u64), sp_runtime::TryRuntimeError>(( - crate::Pallet::::get_stake_for_coldkey_and_hotkey( - null_account, - datura_old_hk_acct, - ), - crate::Pallet::::get_stake_for_coldkey_and_hotkey( - migration_acct, - datura_old_hk_acct, - ), - )) - } - _ => { - log::warn!("Failed to get account id from ss58 for datura hotkeys"); - Err("Failed to get account id from ss58 for datura hotkeys".into()) - } - }?; - - let total_stake_before: u64 = crate::Pallet::::get_total_stake(); - - let result = v0::OldStorage { - total_issuance_before, - total_stake_before, - expected_taostats_new_hk_pending_emission, - expected_datura_new_hk_pending_emission, - old_migration_stake_taostats, - old_null_stake_taostats, - old_migration_stake_datura, - 
old_null_stake_datura, - }; - - log::info!("Got old storage values for migration"); - - Ok(result) - } - - impl OnRuntimeUpgrade for Migration { - /// Runs the migration to fix the pending emissions. - #[cfg(feature = "try-runtime")] - fn pre_upgrade() -> Result, sp_runtime::TryRuntimeError> { - use codec::Encode; - - // Get the old storage values - match get_old_storage_values::() { - Ok(old_storage) => { - log::info!("Successfully got old storage values for migration"); - let encoded = old_storage.encode(); - - Ok(encoded) - } - Err(e) => { - log::error!("Failed to get old storage values for migration: {:?}", e); - Err("Failed to get old storage values for migration".into()) - } - } - } - - // Runs the migrate function for the fix_pending_emission migration - fn on_runtime_upgrade() -> Weight { - let migration_name = b"fix_pending_emission".to_vec(); - - // Initialize the weight with one read operation. - let mut weight = T::DbWeight::get().reads(1); - - // Check if the migration has already run - if HasMigrationRun::::get(&migration_name) { - log::info!( - "Migration '{:?}' has already run. Skipping.", - migration_name - ); - return Weight::zero(); - } - - log::info!( - "Running migration '{}'", - String::from_utf8_lossy(&migration_name) - ); - - // Run the migration - weight.saturating_accrue( - migrations::migrate_fix_pending_emission::do_migrate_fix_pending_emission::(), - ); - - // Mark the migration as completed - HasMigrationRun::::insert(&migration_name, true); - weight.saturating_accrue(T::DbWeight::get().writes(1)); - - log::info!( - "Migration '{:?}' completed. Marked in storage.", - String::from_utf8_lossy(&migration_name) - ); - - // Return the migration weight. - weight - } - - /// Performs post-upgrade checks to ensure the migration was successful. - /// - /// This function is only compiled when the "try-runtime" feature is enabled. 
- #[cfg(feature = "try-runtime")] - fn post_upgrade(state: Vec) -> Result<(), sp_runtime::TryRuntimeError> { - use codec::Decode; - - let old_storage: v0::OldStorage = - v0::OldStorage::decode(&mut &state[..]).map_err(|_| { - sp_runtime::TryRuntimeError::Other("Failed to decode old value from storage") - })?; - - // Verify that all null stake invariants are satisfied after the migration - crate::Pallet::::check_null_stake_invariants(old_storage)?; - - Ok(()) - } - } -} diff --git a/pallets/subtensor/src/migrations/mod.rs b/pallets/subtensor/src/migrations/mod.rs index d62983f96..6036b23e0 100644 --- a/pallets/subtensor/src/migrations/mod.rs +++ b/pallets/subtensor/src/migrations/mod.rs @@ -3,7 +3,6 @@ pub mod migrate_chain_identity; pub mod migrate_create_root_network; pub mod migrate_delete_subnet_21; pub mod migrate_delete_subnet_3; -pub mod migrate_fix_pending_emission; pub mod migrate_fix_total_coldkey_stake; pub mod migrate_init_total_issuance; pub mod migrate_populate_owned_hotkeys; diff --git a/pallets/subtensor/src/rpc_info/neuron_info.rs b/pallets/subtensor/src/rpc_info/neuron_info.rs index be367a566..cadd4b6e3 100644 --- a/pallets/subtensor/src/rpc_info/neuron_info.rs +++ b/pallets/subtensor/src/rpc_info/neuron_info.rs @@ -1,5 +1,6 @@ use super::*; use frame_support::pallet_prelude::{Decode, Encode}; +use frame_support::storage::IterableStorageDoubleMap; extern crate alloc; use codec::Compact; @@ -178,10 +179,12 @@ impl Pallet { let last_update = Self::get_last_update_for_uid(netuid, uid); let validator_permit = Self::get_validator_permit_for_uid(netuid, uid); - let stake: Vec<(T::AccountId, Compact)> = vec![( - coldkey.clone(), - Self::get_stake_for_hotkey_on_subnet(&hotkey, netuid).into(), - )]; + let stake: Vec<(T::AccountId, Compact)> = + as IterableStorageDoubleMap>::iter_prefix( + hotkey.clone(), + ) + .map(|(coldkey, stake)| (coldkey, stake.into())) + .collect(); let neuron = NeuronInfoLite { hotkey: hotkey.clone(), diff --git 
a/pallets/subtensor/src/staking/add_stake.rs b/pallets/subtensor/src/staking/add_stake.rs index 72d8374bc..c9cbd7e04 100644 --- a/pallets/subtensor/src/staking/add_stake.rs +++ b/pallets/subtensor/src/staking/add_stake.rs @@ -70,10 +70,8 @@ impl Pallet { Error::::StakeRateLimitExceeded ); - // Track this addition in the stake delta. - StakeDeltaSinceLastEmissionDrain::::mutate(&hotkey, &coldkey, |stake_delta| { - *stake_delta = stake_delta.saturating_add_unsigned(stake_to_be_added as u128); - }); + // Set the last time the stake increased for nominator drain protection. + LastAddStakeIncrease::::insert(&hotkey, &coldkey, Self::get_current_block_as_u64()); // If coldkey is not owner of the hotkey, it's a nomination stake. if !Self::coldkey_owns_hotkey(&coldkey, &hotkey) { diff --git a/pallets/subtensor/src/staking/helpers.rs b/pallets/subtensor/src/staking/helpers.rs index 9fd60ea51..0328d94e6 100644 --- a/pallets/subtensor/src/staking/helpers.rs +++ b/pallets/subtensor/src/staking/helpers.rs @@ -297,9 +297,6 @@ impl Pallet { staking_hotkeys.retain(|h| h != hotkey); StakingHotkeys::::insert(coldkey, staking_hotkeys); - // Update stake delta - StakeDeltaSinceLastEmissionDrain::::remove(hotkey, coldkey); - current_stake } @@ -434,9 +431,6 @@ impl Pallet { // Add the balance to the coldkey account. Self::add_balance_to_coldkey_account(&delegate_coldkey_i, stake_i); - - // Remove stake delta - StakeDeltaSinceLastEmissionDrain::::remove(hotkey, &delegate_coldkey_i); } } } diff --git a/pallets/subtensor/src/staking/remove_stake.rs b/pallets/subtensor/src/staking/remove_stake.rs index 587583f5e..4118e8d07 100644 --- a/pallets/subtensor/src/staking/remove_stake.rs +++ b/pallets/subtensor/src/staking/remove_stake.rs @@ -76,11 +76,6 @@ impl Pallet { // We remove the balance from the hotkey. Self::decrease_stake_on_coldkey_hotkey_account(&coldkey, &hotkey, stake_to_be_removed); - // Track this removal in the stake delta. 
- StakeDeltaSinceLastEmissionDrain::::mutate(&hotkey, &coldkey, |stake_delta| { - *stake_delta = stake_delta.saturating_sub_unsigned(stake_to_be_removed as u128); - }); - // We add the balance to the coldkey. If the above fails we will not credit this coldkey. Self::add_balance_to_coldkey_account(&coldkey, stake_to_be_removed); diff --git a/pallets/subtensor/src/subnets/uids.rs b/pallets/subtensor/src/subnets/uids.rs index 0dd3b3ddc..fff358f1c 100644 --- a/pallets/subtensor/src/subnets/uids.rs +++ b/pallets/subtensor/src/subnets/uids.rs @@ -117,7 +117,7 @@ impl Pallet { /// pub fn get_stake_for_uid_and_subnetwork(netuid: u16, neuron_uid: u16) -> u64 { if let Ok(hotkey) = Self::get_hotkey_for_net_and_uid(netuid, neuron_uid) { - Self::get_stake_for_hotkey_on_subnet(&hotkey, netuid) + Self::get_total_stake_for_hotkey(&hotkey) } else { 0 } diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index bcb70e183..1a53e44cc 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -226,9 +226,9 @@ impl Pallet { Error::::HotKeyNotRegisteredInSubNet ); - // --- 6. Check to see if the hotkey has enough stake to set weights. + // --- 6. Check to see if the hotkey has enought stake to set weights. ensure!( - Self::check_weights_min_stake(&hotkey, netuid), + Self::get_total_stake_for_hotkey(&hotkey) >= Self::get_weights_min_stake(), Error::::NotEnoughStakeToSetWeights ); diff --git a/pallets/subtensor/src/swap/swap_coldkey.rs b/pallets/subtensor/src/swap/swap_coldkey.rs index 4742c3fca..bcbd2a330 100644 --- a/pallets/subtensor/src/swap/swap_coldkey.rs +++ b/pallets/subtensor/src/swap/swap_coldkey.rs @@ -169,20 +169,7 @@ impl Pallet { weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); } - // 4. 
Swap StakeDeltaSinceLastEmissionDrain - for hotkey in StakingHotkeys::::get(old_coldkey) { - let old_stake_delta = StakeDeltaSinceLastEmissionDrain::::get(&hotkey, old_coldkey); - let new_stake_delta = StakeDeltaSinceLastEmissionDrain::::get(&hotkey, new_coldkey); - StakeDeltaSinceLastEmissionDrain::::insert( - &hotkey, - new_coldkey, - new_stake_delta.saturating_add(old_stake_delta), - ); - StakeDeltaSinceLastEmissionDrain::::remove(&hotkey, old_coldkey); - weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); - } - - // 5. Swap total coldkey stake. + // 4. Swap total coldkey stake. // TotalColdkeyStake: MAP ( coldkey ) --> u64 | Total stake of the coldkey. let old_coldkey_stake: u64 = TotalColdkeyStake::::get(old_coldkey); // Get the stake of the new coldkey. @@ -196,7 +183,7 @@ impl Pallet { ); weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); - // 6. Swap StakingHotkeys. + // 5. Swap StakingHotkeys. // StakingHotkeys: MAP ( coldkey ) --> Vec | Hotkeys staking for the coldkey. let old_staking_hotkeys: Vec = StakingHotkeys::::get(old_coldkey); let mut new_staking_hotkeys: Vec = StakingHotkeys::::get(new_coldkey); @@ -210,7 +197,7 @@ impl Pallet { StakingHotkeys::::insert(new_coldkey, new_staking_hotkeys); weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); - // 7. Swap hotkey owners. + // 6. Swap hotkey owners. // Owner: MAP ( hotkey ) --> coldkey | Owner of the hotkey. // OwnedHotkeys: MAP ( coldkey ) --> Vec | Hotkeys owned by the coldkey. let old_owned_hotkeys: Vec = OwnedHotkeys::::get(old_coldkey); @@ -229,7 +216,7 @@ impl Pallet { OwnedHotkeys::::insert(new_coldkey, new_owned_hotkeys); weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); - // 8. Transfer remaining balance. + // 7. Transfer remaining balance. // Balance: MAP ( coldkey ) --> u64 | Balance of the coldkey. 
// Transfer any remaining balance from old_coldkey to new_coldkey let remaining_balance = Self::get_coldkey_balance(old_coldkey); diff --git a/pallets/subtensor/src/swap/swap_hotkey.rs b/pallets/subtensor/src/swap/swap_hotkey.rs index 54095d5fb..793e34bff 100644 --- a/pallets/subtensor/src/swap/swap_hotkey.rs +++ b/pallets/subtensor/src/swap/swap_hotkey.rs @@ -206,30 +206,21 @@ impl Pallet { Delegates::::insert(new_hotkey, old_delegate_take); weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); } - - // 9. swap PendingdHotkeyEmission - if PendingdHotkeyEmission::::contains_key(old_hotkey) { - let old_pending_hotkey_emission = PendingdHotkeyEmission::::get(old_hotkey); - PendingdHotkeyEmission::::remove(old_hotkey); - PendingdHotkeyEmission::::insert(new_hotkey, old_pending_hotkey_emission); - weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); - } - - // 10. Swap all subnet specific info. + // 9. Swap all subnet specific info. let all_netuids: Vec = Self::get_all_subnet_netuids(); for netuid in all_netuids { - // 10.1 Remove the previous hotkey and insert the new hotkey from membership. + // 9.1 Remove the previous hotkey and insert the new hotkey from membership. // IsNetworkMember( hotkey, netuid ) -> bool -- is the hotkey a subnet member. let is_network_member: bool = IsNetworkMember::::get(old_hotkey, netuid); IsNetworkMember::::remove(old_hotkey, netuid); IsNetworkMember::::insert(new_hotkey, netuid, is_network_member); weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 2)); - // 10.2 Swap Uids + Keys. + // 9.2 Swap Uids + Keys. // Keys( netuid, hotkey ) -> uid -- the uid the hotkey has in the network if it is a member. // Uids( netuid, hotkey ) -> uid -- the uids that the hotkey has. 
if is_network_member { - // 10.2.1 Swap the UIDS + // 9.2.1 Swap the UIDS if let Ok(old_uid) = Uids::::try_get(netuid, old_hotkey) { Uids::::remove(netuid, old_hotkey); Uids::::insert(netuid, new_hotkey, old_uid); @@ -241,7 +232,7 @@ impl Pallet { } } - // 10.3 Swap Prometheus. + // 9.3 Swap Prometheus. // Prometheus( netuid, hotkey ) -> prometheus -- the prometheus data that a hotkey has in the network. if is_network_member { if let Ok(old_prometheus_info) = Prometheus::::try_get(netuid, old_hotkey) { @@ -251,7 +242,7 @@ impl Pallet { } } - // 10.4. Swap axons. + // 9.4. Swap axons. // Axons( netuid, hotkey ) -> axon -- the axon that the hotkey has. if is_network_member { if let Ok(old_axon_info) = Axons::::try_get(netuid, old_hotkey) { @@ -261,7 +252,7 @@ impl Pallet { } } - // 10.5 Swap WeightCommits + // 9.5 Swap WeightCommits // WeightCommits( hotkey ) --> Vec -- the weight commits for the hotkey. if is_network_member { if let Ok(old_weight_commits) = WeightCommits::::try_get(netuid, old_hotkey) { @@ -271,7 +262,7 @@ impl Pallet { } } - // 10.6. Swap the subnet loaded emission. + // 9.6. Swap the subnet loaded emission. // LoadedEmission( netuid ) --> Vec<(hotkey, u64)> -- the loaded emission for the subnet. if is_network_member { if let Some(mut old_loaded_emission) = LoadedEmission::::get(netuid) { @@ -287,7 +278,7 @@ impl Pallet { } } - // 11. Swap Stake. + // 10. Swap Stake. // Stake( hotkey, coldkey ) -> stake -- the stake that the hotkey controls on behalf of the coldkey. let stakes: Vec<(T::AccountId, u64)> = Stake::::iter_prefix(old_hotkey).collect(); // Clear the entire old prefix here. @@ -317,7 +308,7 @@ impl Pallet { weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 1)); } - // 12. Swap ChildKeys. + // 11. Swap ChildKeys. // ChildKeys( parent, netuid ) --> Vec<(proportion,child)> -- the child keys of the parent. 
for netuid in Self::get_all_subnet_netuids() { // Get the children of the old hotkey for this subnet @@ -328,7 +319,7 @@ impl Pallet { ChildKeys::::insert(new_hotkey, netuid, my_children); } - // 13. Swap ParentKeys. + // 12. Swap ParentKeys. // ParentKeys( child, netuid ) --> Vec<(proportion,parent)> -- the parent keys of the child. for netuid in Self::get_all_subnet_netuids() { // Get the parents of the old hotkey for this subnet @@ -352,19 +343,6 @@ impl Pallet { } } - // 13. Swap Stake Delta for all coldkeys. - for (coldkey, stake_delta) in StakeDeltaSinceLastEmissionDrain::::iter_prefix(old_hotkey) - { - let new_stake_delta = StakeDeltaSinceLastEmissionDrain::::get(new_hotkey, &coldkey); - StakeDeltaSinceLastEmissionDrain::::insert( - new_hotkey, - &coldkey, - new_stake_delta.saturating_add(stake_delta), - ); - StakeDeltaSinceLastEmissionDrain::::remove(old_hotkey, &coldkey); - weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); - } - // Return successful after swapping all the relevant terms. 
Ok(()) } diff --git a/pallets/subtensor/src/utils/try_state.rs b/pallets/subtensor/src/utils/try_state.rs index 10abf970f..4763c0484 100644 --- a/pallets/subtensor/src/utils/try_state.rs +++ b/pallets/subtensor/src/utils/try_state.rs @@ -17,7 +17,7 @@ impl Pallet { // Calculate the total staked amount let mut total_staked: u64 = 0; - for (_hotkey, _coldkey, stake) in Stake::::iter() { + for (_account, _netuid, stake) in Stake::::iter() { total_staked = total_staked.saturating_add(stake); } diff --git a/pallets/subtensor/tests/children.rs b/pallets/subtensor/tests/children.rs index 0182888c0..2b99030ab 100644 --- a/pallets/subtensor/tests/children.rs +++ b/pallets/subtensor/tests/children.rs @@ -3237,194 +3237,3 @@ fn test_rank_trust_incentive_calculation_with_parent_child() { }); } - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test children -- test_childkey_set_weights_single_parent --exact --nocapture -#[test] -fn test_childkey_set_weights_single_parent() { - new_test_ext(1).execute_with(|| { - let netuid: u16 = 1; - add_network(netuid, 1, 0); - - // Define hotkeys - let parent: U256 = U256::from(1); - let child: U256 = U256::from(2); - let weight_setter: U256 = U256::from(3); - - // Define coldkeys with more readable names - let coldkey_parent: U256 = U256::from(100); - let coldkey_child: U256 = U256::from(101); - let coldkey_weight_setter: U256 = U256::from(102); - - let stake_to_give_child = 109_999; - - // Register parent with minimal stake and child with high stake - SubtensorModule::add_balance_to_coldkey_account(&coldkey_parent, 1); - SubtensorModule::add_balance_to_coldkey_account(&coldkey_child, stake_to_give_child + 10); - SubtensorModule::add_balance_to_coldkey_account(&coldkey_weight_setter, 1_000_000); - - // Add neurons for parent, child and weight_setter - register_ok_neuron(netuid, parent, coldkey_parent, 1); - register_ok_neuron(netuid, child, coldkey_child, 1); - register_ok_neuron(netuid, weight_setter, 
coldkey_weight_setter, 1); - - SubtensorModule::increase_stake_on_coldkey_hotkey_account( - &coldkey_parent, - &parent, - stake_to_give_child, - ); - SubtensorModule::increase_stake_on_coldkey_hotkey_account( - &coldkey_weight_setter, - &weight_setter, - 1_000_000, - ); - - SubtensorModule::set_weights_set_rate_limit(netuid, 0); - - // Set parent-child relationship - assert_ok!(SubtensorModule::do_set_children( - RuntimeOrigin::signed(coldkey_parent), - parent, - netuid, - vec![(u64::MAX, child)] - )); - step_block(7200 + 1); - // Set weights on the child using the weight_setter account - let origin = RuntimeOrigin::signed(weight_setter); - let uids: Vec = vec![1]; // Only set weight for the child (UID 1) - let values: Vec = vec![u16::MAX]; // Use maximum value for u16 - let version_key = SubtensorModule::get_weights_version_key(netuid); - assert_ok!(SubtensorModule::set_weights( - origin, - netuid, - uids.clone(), - values.clone(), - version_key - )); - - // Set the min stake very high - SubtensorModule::set_weights_min_stake(stake_to_give_child * 5); - - // Check the child has less stake than required - assert!( - SubtensorModule::get_stake_for_hotkey_on_subnet(&child, netuid) - < SubtensorModule::get_weights_min_stake() - ); - - // Check the child cannot set weights - assert_noop!( - SubtensorModule::set_weights( - RuntimeOrigin::signed(child), - netuid, - uids.clone(), - values.clone(), - version_key - ), - Error::::NotEnoughStakeToSetWeights - ); - - assert!(!SubtensorModule::check_weights_min_stake(&child, netuid)); - - // Set a minimum stake to set weights - SubtensorModule::set_weights_min_stake(stake_to_give_child - 5); - - // Check if the stake for the child is above - assert!( - SubtensorModule::get_stake_for_hotkey_on_subnet(&child, netuid) - >= SubtensorModule::get_weights_min_stake() - ); - - // Check the child can set weights - assert_ok!(SubtensorModule::set_weights( - RuntimeOrigin::signed(child), - netuid, - uids, - values, - version_key - )); - - 
assert!(SubtensorModule::check_weights_min_stake(&child, netuid)); - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test children -- test_set_weights_no_parent --exact --nocapture -#[test] -fn test_set_weights_no_parent() { - // Verify that a regular key without a parent delegation is effected by the minimum stake requirements - new_test_ext(1).execute_with(|| { - let netuid: u16 = 1; - add_network(netuid, 1, 0); - - let hotkey: U256 = U256::from(2); - let spare_hk: U256 = U256::from(3); - - let coldkey: U256 = U256::from(101); - let spare_ck = U256::from(102); - - let stake_to_give_child = 109_999; - - SubtensorModule::add_balance_to_coldkey_account(&coldkey, stake_to_give_child + 10); - - // Is registered - register_ok_neuron(netuid, hotkey, coldkey, 1); - // Register a spare key - register_ok_neuron(netuid, spare_hk, spare_ck, 1); - - SubtensorModule::increase_stake_on_coldkey_hotkey_account( - &coldkey, - &hotkey, - stake_to_give_child, - ); - - SubtensorModule::set_weights_set_rate_limit(netuid, 0); - - // Has stake and no parent - step_block(7200 + 1); - - let uids: Vec = vec![1]; // Set weights on the other hotkey - let values: Vec = vec![u16::MAX]; // Use maximum value for u16 - let version_key = SubtensorModule::get_weights_version_key(netuid); - - // Set the min stake very high - SubtensorModule::set_weights_min_stake(stake_to_give_child * 5); - - // Check the key has less stake than required - assert!( - SubtensorModule::get_stake_for_hotkey_on_subnet(&hotkey, netuid) - < SubtensorModule::get_weights_min_stake() - ); - - // Check the hotkey cannot set weights - assert_noop!( - SubtensorModule::set_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - values.clone(), - version_key - ), - Error::::NotEnoughStakeToSetWeights - ); - - assert!(!SubtensorModule::check_weights_min_stake(&hotkey, netuid)); - - // Set a minimum stake to set weights - SubtensorModule::set_weights_min_stake(stake_to_give_child 
- 5); - - // Check if the stake for the hotkey is above - assert!( - SubtensorModule::get_stake_for_hotkey_on_subnet(&hotkey, netuid) - >= SubtensorModule::get_weights_min_stake() - ); - - // Check the hotkey can set weights - assert_ok!(SubtensorModule::set_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids, - values, - version_key - )); - - assert!(SubtensorModule::check_weights_min_stake(&hotkey, netuid)); - }); -} diff --git a/pallets/subtensor/tests/coinbase.rs b/pallets/subtensor/tests/coinbase.rs index f82ebd0ed..a6c1acde1 100644 --- a/pallets/subtensor/tests/coinbase.rs +++ b/pallets/subtensor/tests/coinbase.rs @@ -1,11 +1,8 @@ #![allow(unused, clippy::indexing_slicing, clippy::panic, clippy::unwrap_used)] use crate::mock::*; mod mock; -use frame_support::assert_ok; - -use pallet_subtensor::TargetStakesPerInterval; +// use frame_support::{assert_err, assert_ok}; use sp_core::U256; -use substrate_fixed::types::I64F64; // Test the ability to hash all sorts of hotkeys. #[test] @@ -157,1294 +154,3 @@ fn test_set_and_get_hotkey_emission_tempo() { assert_eq!(updated_tempo, new_tempo); }); } - -// Test getting nonviable stake -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --test coinbase test_get_nonviable_stake -- --nocapture -#[test] -fn test_get_nonviable_stake() { - new_test_ext(1).execute_with(|| { - let netuid = 1u16; - let delegate_coldkey = U256::from(1); - let delegate_hotkey = U256::from(2); - let delegator = U256::from(3); - - let owner_added_stake = 123; - let owner_removed_stake = 456; - let owner_stake = 1_000 + owner_removed_stake; - // Add more than removed to test that the delta is updated correctly - let owner_adds_more_stake = owner_removed_stake + 1; - - let delegator_added_stake = 999; - - // Set stake rate limit very high - TargetStakesPerInterval::::put(1e9 as u64); - - add_network(netuid, 0, 0); - register_ok_neuron(netuid, delegate_hotkey, delegate_coldkey, 0); - // Give extra stake to the owner - 
SubtensorModule::increase_stake_on_coldkey_hotkey_account( - &delegate_coldkey, - &delegate_hotkey, - owner_stake, - ); - - // Register as a delegate - assert_ok!(SubtensorModule::become_delegate( - RuntimeOrigin::signed(delegate_coldkey), - delegate_hotkey - )); - - // Verify that the key starts with 0 nonviable stake - assert_eq!( - SubtensorModule::get_nonviable_stake(&delegate_hotkey, &delegate_coldkey), - 0 - ); - - // Give the coldkey some balance; extra just in-case - SubtensorModule::add_balance_to_coldkey_account( - &delegate_coldkey, - owner_added_stake + owner_adds_more_stake, - ); - - // Add some stake - assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(delegate_coldkey), - delegate_hotkey, - owner_added_stake - )); - - // Verify the nonviable stake is the same as the added stake - assert_eq!( - SubtensorModule::get_nonviable_stake(&delegate_hotkey, &delegate_coldkey), - owner_added_stake - ); - - // Add some stake from a delegator - SubtensorModule::add_balance_to_coldkey_account(&delegator, delegator_added_stake); - assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(delegator), - delegate_hotkey, - delegator_added_stake - )); - - // Verify that the nonviable stake doesn't change when a different account adds stake - assert_eq!( - SubtensorModule::get_nonviable_stake(&delegate_hotkey, &delegate_coldkey), - owner_added_stake - ); - - // Remove some stake - assert_ok!(SubtensorModule::remove_stake( - RuntimeOrigin::signed(delegate_coldkey), - delegate_hotkey, - owner_removed_stake - )); - - // The stake delta is negative, so the nonviable stake should be 0 - assert_eq!( - SubtensorModule::get_nonviable_stake(&delegate_hotkey, &delegate_coldkey), - 0 - ); - - // Add more stake than was removed - assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(delegate_coldkey), - delegate_hotkey, - owner_adds_more_stake - )); - - // Verify that the nonviable stake is the net of the operations - assert_eq!( - 
SubtensorModule::get_nonviable_stake(&delegate_hotkey, &delegate_coldkey), - owner_adds_more_stake - owner_removed_stake + owner_added_stake - ); - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test coinbase test_coinbase_nominator_drainage_overflow -- --nocapture -#[test] -fn test_coinbase_nominator_drainage_overflow() { - new_test_ext(1).execute_with(|| { - // 1. Set up the network and accounts - let netuid: u16 = 1; - let hotkey = U256::from(0); - let coldkey = U256::from(3); - let nominator1 = U256::from(1); - let nominator2 = U256::from(2); - - log::debug!("Setting up network with netuid: {}", netuid); - log::debug!("Hotkey: {:?}, Coldkey: {:?}", hotkey, coldkey); - log::debug!("Nominators: {:?}, {:?}", nominator1, nominator2); - - // 2. Create network and register neuron - add_network(netuid, 1, 0); - register_ok_neuron(netuid, hotkey, coldkey, 100000); - SubtensorModule::create_account_if_non_existent(&coldkey, &hotkey); - - log::debug!("Network created and neuron registered"); - - // 3. Set up balances and stakes - SubtensorModule::add_balance_to_coldkey_account(&coldkey, 1000); - SubtensorModule::add_balance_to_coldkey_account(&nominator1, 1500); - SubtensorModule::add_balance_to_coldkey_account(&nominator2, 1500); - - log::debug!("Balances added to accounts"); - - // 4. 
Make the hotkey a delegate - let vali_take = (u16::MAX as u64 / 10); - assert_ok!(SubtensorModule::do_become_delegate( - RuntimeOrigin::signed(coldkey), - hotkey, - vali_take as u16 - )); - - log::debug!("Hotkey became a delegate with minimum take"); - - // Add stakes for nominators - // Add the stake directly to their coldkey-hotkey account - // This bypasses the accounting in stake delta - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator1, &hotkey, 5e9 as u64); - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator2, &hotkey, 5e9 as u64); - let initial_stake = 5e9 as u64; - - // Log the stakes for hotkey, nominator1, and nominator2 - log::debug!( - "Initial stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey) - ); - log::debug!("Stakes added for nominators"); - - // 5. Set emission and verify initial states - let to_emit = 20_000e9 as u64; - SubtensorModule::set_emission_values(&[netuid], vec![to_emit]).unwrap(); - assert_eq!(SubtensorModule::get_subnet_emission_value(netuid), to_emit); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - assert_eq!( - SubtensorModule::get_total_stake_for_hotkey(&hotkey), - initial_stake * 2 - ); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - - log::debug!("Emission set and initial states verified"); - - // 6. Set hotkey emission tempo - SubtensorModule::set_hotkey_emission_tempo(1); - log::debug!("Hotkey emission tempo set to 1"); - - // 7. 
Simulate blocks and check emissions - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), to_emit); - log::debug!( - "After first block, pending emission: {}", - SubtensorModule::get_pending_emission(netuid) - ); - - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - log::debug!("After second block, pending emission drained"); - - // 8. Check final stakes - let hotkey_stake = SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey); - let nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - log::debug!( - "Final stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - hotkey_stake, - nominator1_stake, - nominator2_stake - ); - - // 9. Verify distribution - let total_emission = to_emit * 2; // to_emit per block for 2 blocks - let hotkey_emission = (I64F64::from_num(total_emission) / I64F64::from_num(u16::MAX) - * I64F64::from_num(vali_take)) - .to_num::(); - let remaining_emission = total_emission - hotkey_emission; - let nominator_emission = remaining_emission / 2; - - log::debug!( - "Calculated emissions - Hotkey: {}, Each Nominator: {}", - hotkey_emission, - nominator_emission - ); - - // Debug: Print the actual stakes - log::debug!("Actual hotkey stake: {}", hotkey_stake); - log::debug!("Actual nominator1 stake: {}", nominator1_stake); - log::debug!("Actual nominator2 stake: {}", nominator2_stake); - - // Debug: Check the total stake for the hotkey - let total_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - log::debug!("Total stake for hotkey: {}", total_stake); - - // Assertions - let expected_hotkey_stake = 4_000e9 as u64; - let eps = 0.5e9 as u64; - assert!( - hotkey_stake >= expected_hotkey_stake - eps - && hotkey_stake <= expected_hotkey_stake + eps, - "Hotkey stake 
mismatch - expected: {}, actual: {}", - expected_hotkey_stake, - hotkey_stake - ); - assert_eq!( - nominator1_stake, - initial_stake + nominator_emission, - "Nominator1 stake mismatch" - ); - assert_eq!( - nominator2_stake, - initial_stake + nominator_emission, - "Nominator2 stake mismatch" - ); - - // 10. Check total stake - assert_eq!( - total_stake, - initial_stake + initial_stake + total_emission, - "Total stake mismatch" - ); - - log::debug!("Test completed"); - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test coinbase test_coinbase_nominator_drainage_no_deltas -- --nocapture -#[test] -fn test_coinbase_nominator_drainage_no_deltas() { - new_test_ext(1).execute_with(|| { - // 1. Set up the network and accounts - let netuid: u16 = 1; - let hotkey = U256::from(0); - let coldkey = U256::from(3); - let nominator1 = U256::from(1); - let nominator2 = U256::from(2); - - log::debug!("Setting up network with netuid: {}", netuid); - log::debug!("Hotkey: {:?}, Coldkey: {:?}", hotkey, coldkey); - log::debug!("Nominators: {:?}, {:?}", nominator1, nominator2); - - // 2. Create network and register neuron - add_network(netuid, 1, 0); - register_ok_neuron(netuid, hotkey, coldkey, 100000); - SubtensorModule::create_account_if_non_existent(&coldkey, &hotkey); - - log::debug!("Network created and neuron registered"); - - // 3. Set up balances and stakes - SubtensorModule::add_balance_to_coldkey_account(&coldkey, 1000); - SubtensorModule::add_balance_to_coldkey_account(&nominator1, 1500); - SubtensorModule::add_balance_to_coldkey_account(&nominator2, 1500); - - log::debug!("Balances added to accounts"); - - // 4. 
Make the hotkey a delegate - let val_take = (u16::MAX as u64 / 10); - assert_ok!(SubtensorModule::do_become_delegate( - RuntimeOrigin::signed(coldkey), - hotkey, - val_take as u16 - )); - - log::debug!("Hotkey became a delegate with minimum take"); - - // Add stakes for nominators - // Add the stake directly to their coldkey-hotkey account - // This bypasses the accounting in stake delta - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator1, &hotkey, 100); - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator2, &hotkey, 100); - - // Log the stakes for hotkey, nominator1, and nominator2 - log::debug!( - "Initial stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey) - ); - log::debug!("Stakes added for nominators"); - - // 5. Set emission and verify initial states - SubtensorModule::set_emission_values(&[netuid], vec![10]).unwrap(); - assert_eq!(SubtensorModule::get_subnet_emission_value(netuid), 10); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - assert_eq!(SubtensorModule::get_total_stake_for_hotkey(&hotkey), 200); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - - log::debug!("Emission set and initial states verified"); - - // 6. Set hotkey emission tempo - SubtensorModule::set_hotkey_emission_tempo(1); - log::debug!("Hotkey emission tempo set to 1"); - - // 7. 
Simulate blocks and check emissions - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 10); - log::debug!( - "After first block, pending emission: {}", - SubtensorModule::get_pending_emission(netuid) - ); - - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - log::debug!("After second block, pending emission drained"); - - // 8. Check final stakes - let hotkey_stake = SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey); - let nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - log::debug!( - "Final stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - hotkey_stake, - nominator1_stake, - nominator2_stake - ); - - // 9. Verify distribution - let min_take = val_take; - let total_emission = 20; // 10 per block for 2 blocks - let hotkey_emission = total_emission * min_take / u16::MAX as u64; - let remaining_emission = total_emission - hotkey_emission; - let nominator_emission = remaining_emission / 2; - - log::debug!( - "Calculated emissions - Hotkey: {}, Each Nominator: {}", - hotkey_emission, - nominator_emission - ); - - // Debug: Print the actual stakes - log::debug!("Actual hotkey stake: {}", hotkey_stake); - log::debug!("Actual nominator1 stake: {}", nominator1_stake); - log::debug!("Actual nominator2 stake: {}", nominator2_stake); - - // Debug: Check the total stake for the hotkey - let total_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - log::debug!("Total stake for hotkey: {}", total_stake); - - // Assertions - assert_eq!(hotkey_stake, 2, "Hotkey stake mismatch"); - assert_eq!( - nominator1_stake, - 100 + nominator_emission, - "Nominator1 stake mismatch" - ); - assert_eq!( - nominator2_stake, - 100 + nominator_emission, - "Nominator2 stake mismatch" - ); - - 
// 10. Check total stake - assert_eq!(total_stake, 200 + total_emission, "Total stake mismatch"); - - log::debug!("Test completed"); - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test coinbase test_coinbase_nominator_drainage_with_positive_delta -- --nocapture -#[test] -fn test_coinbase_nominator_drainage_with_positive_delta() { - new_test_ext(1).execute_with(|| { - // 1. Set up the network and accounts - let netuid: u16 = 1; - let hotkey = U256::from(0); - let coldkey = U256::from(3); - let nominator1 = U256::from(1); - let nominator2 = U256::from(2); - - log::debug!("Setting up network with netuid: {}", netuid); - log::debug!("Hotkey: {:?}, Coldkey: {:?}", hotkey, coldkey); - log::debug!("Nominators: {:?}, {:?}", nominator1, nominator2); - - // 2. Create network and register neuron - add_network(netuid, 1, 0); - register_ok_neuron(netuid, hotkey, coldkey, 100000); - SubtensorModule::create_account_if_non_existent(&coldkey, &hotkey); - - log::debug!("Network created and neuron registered"); - - // 3. Set up balances and stakes - SubtensorModule::add_balance_to_coldkey_account(&coldkey, 1000); - SubtensorModule::add_balance_to_coldkey_account(&nominator1, 1500); - SubtensorModule::add_balance_to_coldkey_account(&nominator2, 1500); - - log::debug!("Balances added to accounts"); - - // 4. 
Make the hotkey a delegate - let val_take = (u16::MAX as u64 / 10); - assert_ok!(SubtensorModule::do_become_delegate( - RuntimeOrigin::signed(coldkey), - hotkey, - val_take as u16 - )); - - log::debug!("Hotkey became a delegate with minimum take"); - - // Add stakes for nominators - // Add the stake directly to their coldkey-hotkey account - // This bypasses the accounting in stake delta - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator1, &hotkey, 100); - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator2, &hotkey, 100); - - // Do an add_stake for nominator 1 - assert_ok!(SubtensorModule::do_add_stake( - RuntimeOrigin::signed(nominator1), - hotkey, - 123 - )); // We should not expect this to impact the emissions - - // Log the stakes for hotkey, nominator1, and nominator2 - log::debug!( - "Initial stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey) - ); - log::debug!("Stakes added for nominators"); - - let nominator1_stake_before = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - assert_eq!(nominator1_stake_before, 100 + 123); // The stake should include the added stake - - // 5. Set emission and verify initial states - SubtensorModule::set_emission_values(&[netuid], vec![10]).unwrap(); - assert_eq!(SubtensorModule::get_subnet_emission_value(netuid), 10); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - assert_eq!( - SubtensorModule::get_total_stake_for_hotkey(&hotkey), - 200 + 123 - ); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - - log::debug!("Emission set and initial states verified"); - - // 6. Set hotkey emission tempo - SubtensorModule::set_hotkey_emission_tempo(1); - log::debug!("Hotkey emission tempo set to 1"); - - // 7. 
Simulate blocks and check emissions - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 10); - log::debug!( - "After first block, pending emission: {}", - SubtensorModule::get_pending_emission(netuid) - ); - - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - log::debug!("After second block, pending emission drained"); - - // 8. Check final stakes - let hotkey_stake = SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey); - let nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - log::debug!( - "Final stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - hotkey_stake, - nominator1_stake, - nominator2_stake - ); - - // 9. Verify distribution - let min_take = val_take; - let total_emission = 20; // 10 per block for 2 blocks - let hotkey_emission = total_emission * min_take / u16::MAX as u64; - let remaining_emission = total_emission - hotkey_emission; - let nominator_emission = remaining_emission / 2; - // Notice that nominator emission is equal for both nominators, even though nominator1 added stake - - log::debug!( - "Calculated emissions - Hotkey: {}, Each Nominator: {}", - hotkey_emission, - nominator_emission - ); - - // Debug: Print the actual stakes - log::debug!("Actual hotkey stake: {}", hotkey_stake); - log::debug!("Actual nominator1 stake: {}", nominator1_stake); - log::debug!("Actual nominator2 stake: {}", nominator2_stake); - - // Debug: Check the total stake for the hotkey - let total_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - log::debug!("Total stake for hotkey: {}", total_stake); - - // Assertions - assert_eq!(hotkey_stake, 2, "Hotkey stake mismatch"); - assert_eq!( - nominator1_stake, - 100 + 123 + nominator_emission, - "Nominator1 stake mismatch" 
- ); - assert_eq!( - nominator2_stake, - 100 + nominator_emission, - "Nominator2 stake mismatch" - ); - - // 10. Check total stake - // Includes the added stake from nominator1 - assert_eq!( - total_stake, - 200 + 123 + total_emission, - "Total stake mismatch" - ); - - log::debug!("Test completed"); - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test coinbase test_coinbase_nominator_drainage_with_negative_delta -- --nocapture -#[test] -fn test_coinbase_nominator_drainage_with_negative_delta() { - new_test_ext(1).execute_with(|| { - // 1. Set up the network and accounts - let netuid: u16 = 1; - let hotkey = U256::from(0); - let coldkey = U256::from(3); - let nominator1 = U256::from(1); - let nominator2 = U256::from(2); - - log::debug!("Setting up network with netuid: {}", netuid); - log::debug!("Hotkey: {:?}, Coldkey: {:?}", hotkey, coldkey); - log::debug!("Nominators: {:?}, {:?}", nominator1, nominator2); - - // 2. Create network and register neuron - add_network(netuid, 1, 0); - register_ok_neuron(netuid, hotkey, coldkey, 100000); - SubtensorModule::create_account_if_non_existent(&coldkey, &hotkey); - - log::debug!("Network created and neuron registered"); - - // 3. Set up balances and stakes - SubtensorModule::add_balance_to_coldkey_account(&coldkey, 1000); - SubtensorModule::add_balance_to_coldkey_account(&nominator1, 1500); - SubtensorModule::add_balance_to_coldkey_account(&nominator2, 1500); - - log::debug!("Balances added to accounts"); - - // 4. 
Make the hotkey a delegate - let val_take = (u16::MAX as u64 / 10); - assert_ok!(SubtensorModule::do_become_delegate( - RuntimeOrigin::signed(coldkey), - hotkey, - val_take as u16 - )); - - log::debug!("Hotkey became a delegate with minimum take"); - - // Add stakes for nominators - // Add the stake directly to their coldkey-hotkey account - // This bypasses the accounting in stake delta - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator1, &hotkey, 100); - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator2, &hotkey, 100); - - // Do an remove_stake for nominator 1 - assert_ok!(SubtensorModule::remove_stake( - RuntimeOrigin::signed(nominator1), - hotkey, - 12 - )); // We should expect the emissions to be impacted; - // The viable stake should be the *new* stake for nominator 1 - - // Log the stakes for hotkey, nominator1, and nominator2 - log::debug!( - "Initial stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey) - ); - log::debug!("Stakes added for nominators"); - - let nominator_1_stake_before = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - // Notice that nominator1 stake is the new stake, including the removed stake - assert_eq!(nominator_1_stake_before, 100 - 12); - - // 5. Set emission and verify initial states - SubtensorModule::set_emission_values(&[netuid], vec![10]).unwrap(); - assert_eq!(SubtensorModule::get_subnet_emission_value(netuid), 10); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - assert_eq!( - SubtensorModule::get_total_stake_for_hotkey(&hotkey), - 200 - 12 - ); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - - log::debug!("Emission set and initial states verified"); - - // 6. 
Set hotkey emission tempo - SubtensorModule::set_hotkey_emission_tempo(1); - log::debug!("Hotkey emission tempo set to 1"); - - // 7. Simulate blocks and check emissions - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 10); - log::debug!( - "After first block, pending emission: {}", - SubtensorModule::get_pending_emission(netuid) - ); - - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - log::debug!("After second block, pending emission drained"); - - // 8. Check final stakes - let delegate_stake = SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey); - let total_hotkey_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - let nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - log::debug!( - "Final stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}, Total Hotkey Stake: {}", - delegate_stake, - nominator1_stake, - nominator2_stake, - total_hotkey_stake - ); - - // 9. 
Verify distribution - let min_take = val_take; - let total_emission = 20; // 10 per block for 2 blocks - let hotkey_emission = total_emission * min_take / u16::MAX as u64; - let remaining_emission = total_emission - hotkey_emission; - - let nominator_1_emission = remaining_emission * nominator1_stake / total_hotkey_stake; - let nominator_2_emission = remaining_emission * nominator2_stake / total_hotkey_stake; - - log::debug!( - "Calculated emissions - Hotkey: {}, Each Nominator: 1;{}, 2;{}", - hotkey_emission, - nominator_1_emission, - nominator_2_emission - ); - - // Debug: Print the actual stakes - log::debug!("Actual hotkey stake: {}", delegate_stake); - log::debug!("Actual nominator1 stake: {}", nominator1_stake); - log::debug!("Actual nominator2 stake: {}", nominator2_stake); - - // Debug: Check the total stake for the hotkey - let total_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - log::debug!("Total stake for hotkey: {}", total_stake); - - // Assertions - assert_eq!(delegate_stake, 2, "Hotkey stake mismatch"); - assert_eq!( - nominator1_stake, - 100 - 12 + nominator_1_emission, - "Nominator1 stake mismatch" - ); - assert_eq!( - nominator2_stake, - 100 + nominator_2_emission, - "Nominator2 stake mismatch" - ); - - // 10. Check total stake - assert_eq!( - total_stake, - 200 - 12 + total_emission, - "Total stake mismatch" - ); - - log::debug!("Test completed"); - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test coinbase test_coinbase_nominator_drainage_with_neutral_delta -- --nocapture -#[test] -fn test_coinbase_nominator_drainage_with_neutral_delta() { - new_test_ext(1).execute_with(|| { - // 1. 
Set up the network and accounts - let netuid: u16 = 1; - let hotkey = U256::from(0); - let coldkey = U256::from(3); - let nominator1 = U256::from(1); - let nominator2 = U256::from(2); - - log::debug!("Setting up network with netuid: {}", netuid); - log::debug!("Hotkey: {:?}, Coldkey: {:?}", hotkey, coldkey); - log::debug!("Nominators: {:?}, {:?}", nominator1, nominator2); - - // 2. Create network and register neuron - add_network(netuid, 1, 0); - register_ok_neuron(netuid, hotkey, coldkey, 100000); - SubtensorModule::create_account_if_non_existent(&coldkey, &hotkey); - - log::debug!("Network created and neuron registered"); - - // 3. Set up balances and stakes - SubtensorModule::add_balance_to_coldkey_account(&coldkey, 1000); - SubtensorModule::add_balance_to_coldkey_account(&nominator1, 1500); - SubtensorModule::add_balance_to_coldkey_account(&nominator2, 1500); - - log::debug!("Balances added to accounts"); - - // 4. Make the hotkey a delegate - let val_take = (u16::MAX as u64 / 10); - assert_ok!(SubtensorModule::do_become_delegate( - RuntimeOrigin::signed(coldkey), - hotkey, - val_take as u16 - )); - - log::debug!("Hotkey became a delegate with minimum take"); - - // Add stakes for nominators - // Add the stake directly to their coldkey-hotkey account - // This bypasses the accounting in stake delta - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator1, &hotkey, 100); - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator2, &hotkey, 100); - - // Do an remove_stake for nominator 1 - assert_ok!(SubtensorModule::remove_stake( - RuntimeOrigin::signed(nominator1), - hotkey, - 12 - )); - // Do an add_stake for nominator 1 of the same amount - assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(nominator1), - hotkey, - 12 - )); // The viable stake should match the initial stake, because the delta is 0 - - // Log the stakes for hotkey, nominator1, and nominator2 - log::debug!( - "Initial stakes - Hotkey: {}, Nominator1: {}, 
Nominator2: {}", - SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey) - ); - log::debug!("Stakes added for nominators"); - - let nominator1_stake_before = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - // Notice that nominator1 stake is the unchanged from the initial stake - assert_eq!(nominator1_stake_before, 100); - - // 5. Set emission and verify initial states - SubtensorModule::set_emission_values(&[netuid], vec![10]).unwrap(); - assert_eq!(SubtensorModule::get_subnet_emission_value(netuid), 10); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - assert_eq!(SubtensorModule::get_total_stake_for_hotkey(&hotkey), 200); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - - log::debug!("Emission set and initial states verified"); - - // 6. Set hotkey emission tempo - SubtensorModule::set_hotkey_emission_tempo(1); - log::debug!("Hotkey emission tempo set to 1"); - - // 7. Simulate blocks and check emissions - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 10); - log::debug!( - "After first block, pending emission: {}", - SubtensorModule::get_pending_emission(netuid) - ); - - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - log::debug!("After second block, pending emission drained"); - - // 8. 
Check final stakes - let delegate_stake = SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey); - let total_hotkey_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - let nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - log::debug!( - "Final stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}, Total Hotkey Stake: {}", - delegate_stake, - nominator1_stake, - nominator2_stake, - total_hotkey_stake - ); - - // 9. Verify distribution - let min_take = val_take; - let total_emission = 20; // 10 per block for 2 blocks - let hotkey_emission = total_emission * min_take / u16::MAX as u64; - let remaining_emission = total_emission - hotkey_emission; - - let nominator_1_emission = remaining_emission * nominator1_stake / total_hotkey_stake; - let nominator_2_emission = remaining_emission * nominator2_stake / total_hotkey_stake; - - log::debug!( - "Calculated emissions - Hotkey: {}, Each Nominator: 1;{}, 2;{}", - hotkey_emission, - nominator_1_emission, - nominator_2_emission - ); - - // Debug: Print the actual stakes - log::debug!("Actual hotkey stake: {}", delegate_stake); - log::debug!("Actual nominator1 stake: {}", nominator1_stake); - log::debug!("Actual nominator2 stake: {}", nominator2_stake); - - // Debug: Check the total stake for the hotkey - let total_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - log::debug!("Total stake for hotkey: {}", total_stake); - - // Assertions - assert_eq!(delegate_stake, 2, "Hotkey stake mismatch"); - assert_eq!( - nominator1_stake, - 100 + nominator_1_emission, // We expect the emission to be calculated based on the initial stake - // Because the delta is 0. - "Nominator1 stake mismatch" - ); - assert_eq!( - nominator2_stake, - 100 + nominator_2_emission, - "Nominator2 stake mismatch" - ); - - // 10. 
Check total stake - assert_eq!(total_stake, 200 + total_emission, "Total stake mismatch"); - - log::debug!("Test completed"); - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test coinbase test_coinbase_nominator_drainage_with_net_positive_delta -- --nocapture -#[test] -fn test_coinbase_nominator_drainage_with_net_positive_delta() { - new_test_ext(1).execute_with(|| { - // 1. Set up the network and accounts - let netuid: u16 = 1; - let hotkey = U256::from(0); - let coldkey = U256::from(3); - let nominator1 = U256::from(1); - let nominator2 = U256::from(2); - - log::debug!("Setting up network with netuid: {}", netuid); - log::debug!("Hotkey: {:?}, Coldkey: {:?}", hotkey, coldkey); - log::debug!("Nominators: {:?}, {:?}", nominator1, nominator2); - - // 2. Create network and register neuron - add_network(netuid, 1, 0); - register_ok_neuron(netuid, hotkey, coldkey, 100000); - SubtensorModule::create_account_if_non_existent(&coldkey, &hotkey); - - log::debug!("Network created and neuron registered"); - - // 3. Set up balances and stakes - SubtensorModule::add_balance_to_coldkey_account(&coldkey, 1000); - SubtensorModule::add_balance_to_coldkey_account(&nominator1, 1500); - SubtensorModule::add_balance_to_coldkey_account(&nominator2, 1500); - - log::debug!("Balances added to accounts"); - - // 4. 
Make the hotkey a delegate - let val_take = (u16::MAX as u64 / 10); - assert_ok!(SubtensorModule::do_become_delegate( - RuntimeOrigin::signed(coldkey), - hotkey, - val_take as u16 - )); - - log::debug!("Hotkey became a delegate with minimum take"); - - // Add stakes for nominators - // Add the stake directly to their coldkey-hotkey account - // This bypasses the accounting in stake delta - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator1, &hotkey, 100); - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator2, &hotkey, 100); - - let initial_nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let intial_total_hotkey_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - let initial_nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - assert_eq!(initial_nominator1_stake, initial_nominator2_stake); // Initial stakes should be equal - - let removed_stake = 12; - // Do an add_stake for nominator 1 of MORE than was removed - let added_stake = removed_stake + 1; - let net_change: i128 = i128::from(added_stake) - i128::from(removed_stake); // Positive net change - - // Do an remove_stake for nominator 1 - assert_ok!(SubtensorModule::remove_stake( - RuntimeOrigin::signed(nominator1), - hotkey, - removed_stake - )); - - // Do an add_stake for nominator 1 of MORE than was removed - assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(nominator1), - hotkey, - added_stake - )); // We should expect the emissions to be impacted; - // The viable stake should be the same initial stake for nominator 1 - // NOT the new stake amount, because the delta is net positive - - // Log the stakes for hotkey, nominator1, and nominator2 - log::debug!( - "Initial stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, 
&hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey) - ); - log::debug!("Stakes added for nominators"); - - let nominator_1_stake_before = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - // Notice that nominator1 stake is the new stake, including the removed stake - assert_eq!( - nominator_1_stake_before, - u64::try_from(100 + net_change).unwrap() - ); - - // 5. Set emission and verify initial states - SubtensorModule::set_emission_values(&[netuid], vec![10]).unwrap(); - assert_eq!(SubtensorModule::get_subnet_emission_value(netuid), 10); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - assert_eq!( - SubtensorModule::get_total_stake_for_hotkey(&hotkey), - u64::try_from(200 + net_change).unwrap() - ); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - - log::debug!("Emission set and initial states verified"); - - // 6. Set hotkey emission tempo - SubtensorModule::set_hotkey_emission_tempo(1); - log::debug!("Hotkey emission tempo set to 1"); - - // 7. Simulate blocks and check emissions - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 10); - log::debug!( - "After first block, pending emission: {}", - SubtensorModule::get_pending_emission(netuid) - ); - - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - log::debug!("After second block, pending emission drained"); - - // 8. 
Check final stakes - let delegate_stake = SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey); - let total_hotkey_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - let nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - log::debug!( - "Final stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}, Total Hotkey Stake: {}", - delegate_stake, - nominator1_stake, - nominator2_stake, - total_hotkey_stake - ); - - // 9. Verify distribution - let min_take = val_take; - let total_emission = 20; // 10 per block for 2 blocks - let hotkey_emission = total_emission * min_take / u16::MAX as u64; - let remaining_emission = total_emission - hotkey_emission; - - // We expect to distribute using the initial stake for nominator 1; because the delta is net positive - // We also use the INITIAL total hotkey stake - let nominator_1_emission = - remaining_emission * initial_nominator1_stake / intial_total_hotkey_stake; - let nominator_2_emission = - remaining_emission * initial_nominator2_stake / intial_total_hotkey_stake; - - log::debug!( - "Calculated emissions - Hotkey: {}, Each Nominator: 1;{}, 2;{}", - hotkey_emission, - nominator_1_emission, - nominator_2_emission - ); - - // Debug: Print the actual stakes - log::debug!("Actual hotkey stake: {}", delegate_stake); - log::debug!("Actual nominator1 stake: {}", nominator1_stake); - log::debug!("Actual nominator2 stake: {}", nominator2_stake); - - // Debug: Check the total stake for the hotkey - let total_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - log::debug!("Total stake for hotkey: {}", total_stake); - - // Assertions - assert_eq!(delegate_stake, 2, "Hotkey stake mismatch"); - assert_eq!( - nominator1_stake, - u64::try_from( - net_change - .checked_add_unsigned(100 + nominator_1_emission as u128) - .unwrap() - ) - .unwrap(), - "Nominator1 
stake mismatch" - ); - assert_eq!( - nominator2_stake, - initial_nominator2_stake + nominator_2_emission, - "Nominator2 stake mismatch" - ); - - // 10. Check total stake - assert_eq!( - total_stake, - u64::try_from( - net_change - .checked_add_unsigned(200 + total_emission as u128) - .unwrap() - ) - .unwrap(), - "Total stake mismatch" - ); - - log::debug!("Test completed"); - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --package pallet-subtensor --test coinbase test_coinbase_nominator_drainage_with_net_negative_delta -- --nocapture -#[test] -fn test_coinbase_nominator_drainage_with_net_negative_delta() { - new_test_ext(1).execute_with(|| { - // 1. Set up the network and accounts - let netuid: u16 = 1; - let hotkey = U256::from(0); - let coldkey = U256::from(3); - let nominator1 = U256::from(1); - let nominator2 = U256::from(2); - - log::debug!("Setting up network with netuid: {}", netuid); - log::debug!("Hotkey: {:?}, Coldkey: {:?}", hotkey, coldkey); - log::debug!("Nominators: {:?}, {:?}", nominator1, nominator2); - - // 2. Create network and register neuron - add_network(netuid, 1, 0); - register_ok_neuron(netuid, hotkey, coldkey, 100000); - SubtensorModule::create_account_if_non_existent(&coldkey, &hotkey); - - log::debug!("Network created and neuron registered"); - - // 3. Set up balances and stakes - SubtensorModule::add_balance_to_coldkey_account(&coldkey, 1000); - SubtensorModule::add_balance_to_coldkey_account(&nominator1, 1500); - SubtensorModule::add_balance_to_coldkey_account(&nominator2, 1500); - - log::debug!("Balances added to accounts"); - - // 4. 
Make the hotkey a delegate - let val_take = (u16::MAX as u64 / 10); - assert_ok!(SubtensorModule::do_become_delegate( - RuntimeOrigin::signed(coldkey), - hotkey, - val_take as u16 - )); - - log::debug!("Hotkey became a delegate with minimum take"); - - // Add stakes for nominators - // Add the stake directly to their coldkey-hotkey account - // This bypasses the accounting in stake delta - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator1, &hotkey, 300); - SubtensorModule::increase_stake_on_coldkey_hotkey_account(&nominator2, &hotkey, 300); - - let initial_nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let intial_total_hotkey_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - let initial_nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - assert_eq!(initial_nominator1_stake, initial_nominator2_stake); // Initial stakes should be equal - - let removed_stake = 220; - // Do an add_stake for nominator 1 of LESS than was removed - let added_stake = removed_stake - 188; - let net_change: i128 = i128::from(added_stake) - i128::from(removed_stake); // Negative net change - assert!(net_change < 0); - - // Do an remove_stake for nominator 1 - assert_ok!(SubtensorModule::remove_stake( - RuntimeOrigin::signed(nominator1), - hotkey, - removed_stake - )); - - // Do an add_stake for nominator 1 of MORE than was removed - assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(nominator1), - hotkey, - added_stake - )); // We should expect the emissions to be impacted; - // The viable stake should be the LESS than the initial stake for nominator 1 - // Which IS the new stake amount, because the delta is net negative - - // Log the stakes for hotkey, nominator1, and nominator2 - log::debug!( - "Initial stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}", - SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey), - 
SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey), - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey) - ); - log::debug!("Stakes added for nominators"); - - let total_stake_before = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - let nominator_1_stake_before = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - // Notice that nominator1 stake is the new stake, including the removed stake - assert_eq!( - nominator_1_stake_before, - u64::try_from(300 + net_change).unwrap() - ); - - // 5. Set emission and verify initial states - let to_emit = 10_000e9 as u64; - SubtensorModule::set_emission_values(&[netuid], vec![to_emit]).unwrap(); - assert_eq!(SubtensorModule::get_subnet_emission_value(netuid), to_emit); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - assert_eq!( - SubtensorModule::get_total_stake_for_hotkey(&hotkey), - u64::try_from(600 + net_change).unwrap() - ); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - - log::debug!("Emission set and initial states verified"); - - // 6. Set hotkey emission tempo - SubtensorModule::set_hotkey_emission_tempo(1); - log::debug!("Hotkey emission tempo set to 1"); - - // 7. Simulate blocks and check emissions - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), to_emit); - log::debug!( - "After first block, pending emission: {}", - SubtensorModule::get_pending_emission(netuid) - ); - - next_block(); - assert_eq!(SubtensorModule::get_pending_emission(netuid), 0); - assert_eq!(SubtensorModule::get_pending_hotkey_emission(&hotkey), 0); - log::debug!("After second block, pending emission drained"); - - // 8. 
Check final stakes - let delegate_stake = SubtensorModule::get_stake_for_coldkey_and_hotkey(&coldkey, &hotkey); - let total_hotkey_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - let nominator1_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator1, &hotkey); - let nominator2_stake = - SubtensorModule::get_stake_for_coldkey_and_hotkey(&nominator2, &hotkey); - - log::debug!( - "Final stakes - Hotkey: {}, Nominator1: {}, Nominator2: {}, Total Hotkey Stake: {}", - delegate_stake, - nominator1_stake, - nominator2_stake, - total_hotkey_stake - ); - - // 9. Verify distribution - let min_take = val_take; - let total_emission = to_emit * 2; // 10 per block for 2 blocks - let hotkey_emission = total_emission * min_take / u16::MAX as u64; - let remaining_emission = total_emission - hotkey_emission; - - // We expect to distribute using the NEW stake for nominator 1; because the delta is net negative - // We also use the INITIAL total hotkey stake - // Note: nominator_1_stake_before is the new stake for nominator 1, before the epochs run - let nominator_1_emission = - remaining_emission * nominator_1_stake_before / total_stake_before; - let nominator_2_emission = - remaining_emission * initial_nominator2_stake / total_stake_before; - - log::debug!( - "Calculated emissions - Hotkey: {}, Each Nominator: 1;{}, 2;{}", - hotkey_emission, - nominator_1_emission, - nominator_2_emission - ); - - // Debug: Print the actual stakes - log::debug!("Actual hotkey stake: {}", delegate_stake); - log::debug!("Actual nominator1 stake: {}", nominator1_stake); - log::debug!("Actual nominator2 stake: {}", nominator2_stake); - - // Debug: Check the total stake for the hotkey - let total_stake = SubtensorModule::get_total_stake_for_hotkey(&hotkey); - log::debug!("Total stake for hotkey: {}", total_stake); - - // Do a fuzzy check on the final stakes - let eps = 0.2e9 as u64; - - let expected_delegate_stake: u64 = 2_000e9 as u64; - assert!( - expected_delegate_stake - 
eps <= delegate_stake - && expected_delegate_stake + eps >= delegate_stake, - "Hotkey stake mismatch - Expected: {}, Actual: {}", - expected_delegate_stake, - delegate_stake - ); - - let expected_1_stake = u64::try_from( - net_change - .checked_add_unsigned((initial_nominator1_stake + nominator_1_emission) as u128) - .unwrap(), - ) - .unwrap(); - assert!( - expected_1_stake - eps <= nominator1_stake - && expected_1_stake + eps >= nominator1_stake, - "Nominator1 stake mismatch - Expected: {}, Actual: {}", - expected_1_stake, - nominator1_stake - ); - let expected_2_stake = initial_nominator2_stake + nominator_2_emission; - assert!( - expected_2_stake - eps <= nominator2_stake - && expected_2_stake + eps >= nominator2_stake, - "Nominator2 stake mismatch - Expected: {}, Actual: {}", - expected_2_stake, - nominator2_stake - ); - - // 10. Check total stake - assert_eq!( - total_stake, - u64::try_from( - net_change - .checked_add_unsigned( - (initial_nominator2_stake + initial_nominator1_stake + total_emission) - as u128 - ) - .unwrap() - ) - .unwrap(), - "Total stake mismatch" - ); - - log::debug!("Test completed"); - }); -} diff --git a/pallets/subtensor/tests/migration.rs b/pallets/subtensor/tests/migration.rs index 5cce9dfde..6c40d7d78 100644 --- a/pallets/subtensor/tests/migration.rs +++ b/pallets/subtensor/tests/migration.rs @@ -1,12 +1,10 @@ #![allow(unused, clippy::indexing_slicing, clippy::panic, clippy::unwrap_used)] mod mock; -use codec::{Decode, Encode}; use frame_support::{assert_ok, weights::Weight}; use frame_system::Config; use mock::*; use pallet_subtensor::*; -use sp_core::{crypto::Ss58Codec, U256}; -use substrate_fixed::types::extra::U2; +use sp_core::U256; #[test] fn test_initialise_ti() { @@ -432,158 +430,3 @@ fn run_migration_and_check(migration_name: &'static str) -> frame_support::weigh // Return the weight of the executed migration weight } - -fn run_pending_emissions_migration_and_check( - migration_name: &'static str, -) -> 
frame_support::weights::Weight { - use frame_support::traits::OnRuntimeUpgrade; - - // Execute the migration and store its weight - let weight: frame_support::weights::Weight = - pallet_subtensor::migrations::migrate_fix_pending_emission::migration::Migration::::on_runtime_upgrade(); - - // Check if the migration has been marked as completed - assert!(HasMigrationRun::::get( - migration_name.as_bytes().to_vec() - )); - - // Return the weight of the executed migration - weight -} - -fn get_account_id_from_ss58(ss58_str: &str) -> U256 { - let account_id = sp_core::crypto::AccountId32::from_ss58check(ss58_str).unwrap(); - let account_id = AccountId::decode(&mut account_id.as_ref()).unwrap(); - account_id -} - -// SKIP_WASM_BUILD=1 RUST_LOG=info cargo test --package pallet-subtensor --test migration -- test_migrate_fix_pending_emissions --exact --nocapture -#[test] -fn test_migrate_fix_pending_emissions() { - new_test_ext(1).execute_with(|| { - let migration_name = "fix_pending_emission"; - - let null_account = &U256::from(0); // The null account - - let taostats_old_hotkey = "5Hddm3iBFD2GLT5ik7LZnT3XJUnRnN8PoeCFgGQgawUVKNm8"; - let taostats_new_hotkey = "5GKH9FPPnWSUoeeTJp19wVtd84XqFW4pyK2ijV2GsFbhTrP1"; - - let taostats_old_hk_account: &AccountId = &get_account_id_from_ss58(taostats_old_hotkey); - let taostats_new_hk_account: &AccountId = &get_account_id_from_ss58(taostats_new_hotkey); - - let datura_old_hotkey = "5FKstHjZkh4v3qAMSBa1oJcHCLjxYZ8SNTSz1opTv4hR7gVB"; - let datura_new_hotkey = "5GP7c3fFazW9GXK8Up3qgu2DJBk8inu4aK9TZy3RuoSWVCMi"; - - let datura_old_hk_account: &AccountId = &get_account_id_from_ss58(datura_old_hotkey); - let datura_new_hk_account: &AccountId = &get_account_id_from_ss58(datura_new_hotkey); - - let migration_coldkey = "5GeRjQYsobRWFnrbBmGe5ugme3rfnDVF69N45YtdBpUFsJG8"; - let migration_account: &AccountId = &get_account_id_from_ss58(migration_coldkey); - - // "Issue" the TAO we're going to insert to stake - let null_stake_datura = 123_456_789; 
- let null_stake_tao_stats = 123_456_789; - let null_stake_total = null_stake_datura + null_stake_tao_stats; - SubtensorModule::set_total_issuance(null_stake_total); - TotalStake::::put(null_stake_total); - TotalColdkeyStake::::insert(null_account, null_stake_total); - TotalHotkeyStake::::insert(datura_old_hk_account, null_stake_datura); - TotalHotkeyStake::::insert(taostats_old_hk_account, null_stake_tao_stats); - - // Setup the old Datura hotkey with a pending emission - PendingdHotkeyEmission::::insert(datura_old_hk_account, 10_000); - // Setup the NEW Datura hotkey with a pending emission - PendingdHotkeyEmission::::insert(datura_new_hk_account, 123_456_789); - Stake::::insert(datura_old_hk_account, null_account, null_stake_datura); - let expected_datura_new_hk_pending_emission: u64 = 123_456_789 + 10_000; - - // Setup the old TaoStats hotkey with a pending emission - PendingdHotkeyEmission::::insert(taostats_old_hk_account, 987_654); - // Setup the new TaoStats hotkey with a pending emission - PendingdHotkeyEmission::::insert(taostats_new_hk_account, 100_000); - // Setup the old TaoStats hotkey with a null-key stake entry - Stake::::insert(taostats_old_hk_account, null_account, null_stake_tao_stats); - let expected_taostats_new_hk_pending_emission: u64 = 987_654 + 100_000; - - let total_issuance_before = SubtensorModule::get_total_issuance(); - - // Run migration - let first_weight = run_pending_emissions_migration_and_check(migration_name); - assert!(first_weight != Weight::zero()); - - // Check the pending emission is added to new Datura hotkey - assert_eq!( - PendingdHotkeyEmission::::get(datura_new_hk_account), - expected_datura_new_hk_pending_emission - ); - - // Check the pending emission is added to new the TaoStats hotkey - assert_eq!( - PendingdHotkeyEmission::::get(taostats_new_hk_account), - expected_taostats_new_hk_pending_emission - ); - - // Check the pending emission is removed from old ones - assert_eq!( - 
PendingdHotkeyEmission::::get(datura_old_hk_account), - 0 - ); - - assert_eq!( - PendingdHotkeyEmission::::get(taostats_old_hk_account), - 0 - ); - - // Check the stake entry is removed - assert_eq!(Stake::::get(datura_old_hk_account, null_account), 0); - assert_eq!(Stake::::get(taostats_old_hk_account, null_account), 0); - - // Check the total issuance is the SAME following migration (no TAO issued) - let expected_total_issuance = total_issuance_before; - assert_eq!( - SubtensorModule::get_total_issuance(), - expected_total_issuance - ); - - // Check total stake is the SAME following the migration (no new TAO staked) - assert_eq!(TotalStake::::get(), expected_total_issuance); - // Check the total stake maps are updated following the migration (removal of old null_account stake entries) - assert_eq!(TotalColdkeyStake::::get(null_account), 0); - assert_eq!( - SubtensorModule::get_stake_for_coldkey_and_hotkey(null_account, datura_old_hk_account), - 0 - ); - assert_eq!( - SubtensorModule::get_stake_for_coldkey_and_hotkey( - null_account, - taostats_old_hk_account - ), - 0 - ); - - // Check staking hotkeys is updated - assert_eq!(StakingHotkeys::::get(null_account), vec![]); - - // Check the migration key has stake with both *old* hotkeys - assert_eq!( - SubtensorModule::get_stake_for_coldkey_and_hotkey( - migration_account, - datura_old_hk_account - ), - null_stake_datura - ); - assert_eq!( - SubtensorModule::get_stake_for_coldkey_and_hotkey( - migration_account, - taostats_old_hk_account - ), - null_stake_tao_stats - ); - assert_eq!( - TotalColdkeyStake::::get(migration_account), - null_stake_total - ); - assert!(StakingHotkeys::::get(migration_account).contains(datura_old_hk_account)); - assert!(StakingHotkeys::::get(migration_account).contains(taostats_old_hk_account)); - }) -} diff --git a/pallets/subtensor/tests/staking.rs b/pallets/subtensor/tests/staking.rs index a55db996b..f053c7ca6 100644 --- a/pallets/subtensor/tests/staking.rs +++ 
b/pallets/subtensor/tests/staking.rs @@ -2306,106 +2306,3 @@ fn test_get_total_delegated_stake_exclude_owner_stake() { ); }); } - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --test staking -- test_stake_delta_tracks_adds_and_removes --exact --nocapture -#[test] -fn test_stake_delta_tracks_adds_and_removes() { - new_test_ext(1).execute_with(|| { - let netuid = 1u16; - let delegate_coldkey = U256::from(1); - let delegate_hotkey = U256::from(2); - let delegator = U256::from(3); - - let owner_stake = 1000; - let owner_added_stake = 123; - let owner_removed_stake = 456; - // Add more than removed to test that the delta is updated correctly - let owner_adds_more_stake = owner_removed_stake + 1; - - let delegator_added_stake = 999; - - // Set stake rate limit very high - TargetStakesPerInterval::::put(1e9 as u64); - - add_network(netuid, 0, 0); - register_ok_neuron(netuid, delegate_hotkey, delegate_coldkey, 0); - // Give extra stake to the owner - SubtensorModule::increase_stake_on_coldkey_hotkey_account( - &delegate_coldkey, - &delegate_hotkey, - owner_stake, - ); - - // Register as a delegate - assert_ok!(SubtensorModule::become_delegate( - RuntimeOrigin::signed(delegate_coldkey), - delegate_hotkey - )); - - // Verify that the stake delta is empty - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(delegate_hotkey, delegate_coldkey), - 0 - ); - - // Give the coldkey some balance; extra just in case - SubtensorModule::add_balance_to_coldkey_account( - &delegate_coldkey, - owner_added_stake + owner_adds_more_stake, - ); - - // Add some stake - assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(delegate_coldkey), - delegate_hotkey, - owner_added_stake - )); - - // Verify that the stake delta is correct - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(delegate_hotkey, delegate_coldkey), - i128::from(owner_added_stake) - ); - - // Add some stake from a delegator - SubtensorModule::add_balance_to_coldkey_account(&delegator, delegator_added_stake); - 
assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(delegator), - delegate_hotkey, - delegator_added_stake - )); - - // Verify that the stake delta is unchanged for the owner - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(delegate_hotkey, delegate_coldkey), - i128::from(owner_added_stake) - ); - - // Remove some stake - assert_ok!(SubtensorModule::remove_stake( - RuntimeOrigin::signed(delegate_coldkey), - delegate_hotkey, - owner_removed_stake - )); - - // Verify that the stake delta is correct - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(delegate_hotkey, delegate_coldkey), - i128::from(owner_added_stake).saturating_sub_unsigned(owner_removed_stake.into()) - ); - - // Add more stake than was removed - assert_ok!(SubtensorModule::add_stake( - RuntimeOrigin::signed(delegate_coldkey), - delegate_hotkey, - owner_adds_more_stake - )); - - // Verify that the stake delta is correct - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(delegate_hotkey, delegate_coldkey), - i128::from(owner_added_stake) - .saturating_add_unsigned((owner_adds_more_stake - owner_removed_stake).into()) - ); - }); -} diff --git a/pallets/subtensor/tests/swap_coldkey.rs b/pallets/subtensor/tests/swap_coldkey.rs index 37467646f..0fe601cab 100644 --- a/pallets/subtensor/tests/swap_coldkey.rs +++ b/pallets/subtensor/tests/swap_coldkey.rs @@ -1628,41 +1628,3 @@ fn test_coldkey_swap_no_identity_no_changes_newcoldkey_exists() { assert!(Identities::::get(new_coldkey).is_some()); }); } - -// SKIP_WASM_BUILD=1 RUST_LOG=info cargo test --test swap_coldkey -- test_coldkey_swap_stake_delta --exact --nocapture -#[test] -fn test_coldkey_swap_stake_delta() { - new_test_ext(1).execute_with(|| { - let old_coldkey = U256::from(3); - let new_coldkey = U256::from(4); - let hotkey = U256::from(5); - - let netuid = 1; - let burn_cost = 10; - let tempo = 1; - - // Give the old coldkey a stake delta on hotkey - StakeDeltaSinceLastEmissionDrain::::insert(hotkey, old_coldkey, 123); - // 
Give the new coldkey a stake delta on hotkey - StakeDeltaSinceLastEmissionDrain::::insert(hotkey, new_coldkey, 456); - let expected_stake_delta = 123 + 456; - // Add StakingHotkeys entry - StakingHotkeys::::insert(old_coldkey, vec![hotkey]); - - // Give balance for the swap fees - SubtensorModule::add_balance_to_coldkey_account(&old_coldkey, 100e9 as u64); - - // Perform the coldkey swap - assert_ok!(SubtensorModule::do_swap_coldkey(&old_coldkey, &new_coldkey)); - - // Ensure the stake delta is correctly transferred - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(hotkey, new_coldkey), - expected_stake_delta - ); - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(hotkey, old_coldkey), - 0 - ); - }); -} diff --git a/pallets/subtensor/tests/swap_hotkey.rs b/pallets/subtensor/tests/swap_hotkey.rs index 57f206452..bff738b86 100644 --- a/pallets/subtensor/tests/swap_hotkey.rs +++ b/pallets/subtensor/tests/swap_hotkey.rs @@ -9,7 +9,6 @@ use mock::*; use pallet_subtensor::*; use sp_core::H256; use sp_core::U256; -use sp_runtime::SaturatedConversion; // SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --test swap_hotkey -- test_swap_owner --exact --nocapture #[test] @@ -1116,91 +1115,3 @@ fn test_swap_complex_parent_child_structure() { ); }); } - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --test swap_hotkey -- test_hotkey_swap_stake_delta --exact --nocapture -#[test] -fn test_hotkey_swap_stake_delta() { - new_test_ext(1).execute_with(|| { - let old_hotkey = U256::from(3); - let new_hotkey = U256::from(4); - let coldkey = U256::from(7); - - let coldkeys = [U256::from(1), U256::from(2), U256::from(5)]; - - let mut weight = Weight::zero(); - - // Set up initial state - // Add stake delta for each coldkey and the old_hotkey - for &coldkey in coldkeys.iter() { - StakeDeltaSinceLastEmissionDrain::::insert( - old_hotkey, - coldkey, - (123 + coldkey.saturated_into::()), - ); - - StakingHotkeys::::insert(coldkey, vec![old_hotkey]); - } - - // Add stake delta for one 
coldkey and the new_hotkey - StakeDeltaSinceLastEmissionDrain::::insert(new_hotkey, coldkeys[0], 456); - // Add corresponding StakingHotkeys - StakingHotkeys::::insert(coldkeys[0], vec![old_hotkey, new_hotkey]); - - // Perform the swap - SubtensorModule::perform_hotkey_swap(&old_hotkey, &new_hotkey, &coldkey, &mut weight); - - // Ensure the stake delta is correctly transferred for each coldkey - // -- coldkey[0] maintains its stake delta from the new_hotkey and the old_hotkey - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(new_hotkey, coldkeys[0]), - 123 + coldkeys[0].saturated_into::() + 456 - ); - // -- coldkey[1..] maintains its stake delta from the old_hotkey - for &coldkey in coldkeys[1..].iter() { - assert_eq!( - StakeDeltaSinceLastEmissionDrain::::get(new_hotkey, coldkey), - 123 + coldkey.saturated_into::() - ); - assert!(!StakeDeltaSinceLastEmissionDrain::::contains_key( - old_hotkey, coldkey - )); - } - }); -} - -// SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --test swap_hotkey -- test_swap_hotkey_with_pending_emissions --exact --nocapture -#[test] -fn test_swap_hotkey_with_pending_emissions() { - new_test_ext(1).execute_with(|| { - let old_hotkey = U256::from(1); - let new_hotkey = U256::from(2); - let coldkey = U256::from(3); - let netuid = 0u16; - let mut weight = Weight::zero(); - - let pending_emission = 123_456_789u64; - - // Set up initial state - add_network(netuid, 0, 1); - - // Set up pending emissions - PendingdHotkeyEmission::::insert(old_hotkey, pending_emission); - // Verify the pending emissions are set - assert_eq!( - PendingdHotkeyEmission::::get(old_hotkey), - pending_emission - ); - // Verify the new hotkey does not have any pending emissions - assert!(!PendingdHotkeyEmission::::contains_key(new_hotkey)); - - // Perform the swap - SubtensorModule::perform_hotkey_swap(&old_hotkey, &new_hotkey, &coldkey, &mut weight); - - // Verify the pending emissions are transferred - assert_eq!( - PendingdHotkeyEmission::::get(new_hotkey), - 
pending_emission - ); - assert!(!PendingdHotkeyEmission::::contains_key(old_hotkey)); - }); -} diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 373c301be..214e3add0 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -477,11 +477,11 @@ fn test_set_weights_min_stake_failed() { // Check the signed extension function. assert_eq!(SubtensorModule::get_weights_min_stake(), 20_000_000_000_000); - assert!(!SubtensorModule::check_weights_min_stake(&hotkey, netuid)); + assert!(!SubtensorModule::check_weights_min_stake(&hotkey)); SubtensorModule::increase_stake_on_hotkey_account(&hotkey, 19_000_000_000_000); - assert!(!SubtensorModule::check_weights_min_stake(&hotkey, netuid)); + assert!(!SubtensorModule::check_weights_min_stake(&hotkey)); SubtensorModule::increase_stake_on_hotkey_account(&hotkey, 20_000_000_000_000); - assert!(SubtensorModule::check_weights_min_stake(&hotkey, netuid)); + assert!(SubtensorModule::check_weights_min_stake(&hotkey)); // Check that it fails at the pallet level. 
SubtensorModule::set_weights_min_stake(100_000_000_000_000); From a388875675e5da534874937204c84b107d831f61 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Fri, 20 Sep 2024 20:09:07 -0400 Subject: [PATCH 141/213] Add a new line to check finney workflow --- .github/workflows/check-finney.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-finney.yml b/.github/workflows/check-finney.yml index c24f7587d..665c9c8a9 100644 --- a/.github/workflows/check-finney.yml +++ b/.github/workflows/check-finney.yml @@ -52,4 +52,4 @@ jobs: runtime-package: "node-subtensor-runtime" node-uri: "wss://entrypoint-finney.opentensor.ai:443" checks: "pre-and-post" - extra-args: "--disable-spec-version-check --no-weight-warnings" \ No newline at end of file + extra-args: "--disable-spec-version-check --no-weight-warnings" From b1266d628d70268e7b15cc2f35537f9d98cd39a9 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Fri, 20 Sep 2024 20:10:36 -0400 Subject: [PATCH 142/213] Cleanup newlines --- pallets/subtensor/src/coinbase/run_coinbase.rs | 2 +- pallets/subtensor/src/lib.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pallets/subtensor/src/coinbase/run_coinbase.rs b/pallets/subtensor/src/coinbase/run_coinbase.rs index 751767db9..723edc423 100644 --- a/pallets/subtensor/src/coinbase/run_coinbase.rs +++ b/pallets/subtensor/src/coinbase/run_coinbase.rs @@ -382,4 +382,4 @@ impl Pallet { let remainder = block_plus_netuid.rem_euclid(tempo_plus_one); (tempo as u64).saturating_sub(remainder) } -} \ No newline at end of file +} diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 28b76d6c6..2985736c8 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -1660,4 +1660,4 @@ impl CollectiveInterface for () { fn add_vote(_: &T, _: H, _: P, _: bool) -> Result { Ok(true) } -} \ No newline at end of file +} From dfe987a5d95c76cfaf33bbc363ad8966c89310de Mon Sep 17 00:00:00 2001 From: Greg 
Zaitsev Date: Fri, 20 Sep 2024 20:16:36 -0400 Subject: [PATCH 143/213] clippy --- runtime/src/lib.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index f0f73e1d0..7d569264a 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -1099,12 +1099,10 @@ pub type SignedExtra = ( frame_metadata_hash_extension::CheckMetadataHash, ); -type Migrations = ( +type Migrations = pallet_subtensor::migrations::migrate_init_total_issuance::initialise_total_issuance::Migration< Runtime, - >, - pallet_subtensor::migrations::migrate_fix_pending_emission::migration::Migration, -); + >; // Unchecked extrinsic type as expected by this runtime. pub type UncheckedExtrinsic = From c368ce34c794249a20985a8cd1cd13de938871c4 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 23 Sep 2024 14:15:38 -0400 Subject: [PATCH 144/213] exclude procedural-fork tests --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index a5b7f2f61..4d77745a8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,6 +35,7 @@ members = [ "support/linting", "support/procedural-fork", ] +exclude = ["support/procedural-fork"] resolver = "2" [workspace.lints.clippy] From 056413898bffea4073385fd9d68909d01ba366c8 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 23 Sep 2024 15:13:33 -0400 Subject: [PATCH 145/213] ignore procedural-fork tests via a cfg gate --- support/procedural-fork/src/lib.rs | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/support/procedural-fork/src/lib.rs b/support/procedural-fork/src/lib.rs index cef3891a0..c7f1472b2 100644 --- a/support/procedural-fork/src/lib.rs +++ b/support/procedural-fork/src/lib.rs @@ -15,40 +15,60 @@ extern crate proc_macro; +#[cfg(not(test))] mod benchmark; +#[cfg(not(test))] mod construct_runtime; +#[cfg(not(test))] mod crate_version; +#[cfg(not(test))] mod derive_impl; +#[cfg(not(test))] mod dummy_part_checker; +#[cfg(not(test))] 
mod dynamic_params; +#[cfg(not(test))] mod key_prefix; +#[cfg(not(test))] mod match_and_insert; +#[cfg(not(test))] mod no_bound; +#[cfg(not(test))] mod pallet; +#[cfg(not(test))] mod pallet_error; +#[cfg(not(test))] mod runtime; +#[cfg(not(test))] mod storage_alias; +#[cfg(not(test))] mod transactional; +#[cfg(not(test))] mod tt_macro; - +#[cfg(not(test))] use std::{cell::RefCell, str::FromStr}; +#[cfg(not(test))] pub(crate) const INHERENT_INSTANCE_NAME: &str = "__InherentHiddenInstance"; /// The number of module instances supported by the runtime, starting at index 1, /// and up to `NUMBER_OF_INSTANCE`. +#[cfg(not(test))] pub(crate) const NUMBER_OF_INSTANCE: u8 = 16; thread_local! { /// A global counter, can be used to generate a relatively unique identifier. + #[cfg(not(test))] static COUNTER: RefCell = const { RefCell::new(Counter(0)) }; } /// Counter to generate a relatively unique identifier for macros. This is necessary because /// declarative macros gets hoisted to the crate root, which shares the namespace with other pallets /// containing the very same macros. +#[cfg(not(test))] struct Counter(u64); +#[cfg(not(test))] impl Counter { fn inc(&mut self) -> u64 { let ret = self.0; @@ -60,6 +80,7 @@ impl Counter { /// Get the value from the given environment variable set by cargo. /// /// The value is parsed into the requested destination type. 
+#[cfg(not(test))] fn get_cargo_env_var(version_env: &str) -> std::result::Result { let version = std::env::var(version_env) .unwrap_or_else(|_| panic!("`{}` is always set by cargo; qed", version_env)); @@ -69,10 +90,12 @@ fn get_cargo_env_var(version_env: &str) -> std::result::Result String { format!("CounterFor{}", prefix) } +#[cfg(not(test))] pub mod exports { pub mod benchmark { pub use crate::benchmark::*; From 4dd15c9a15ba4a403a5cf721b86712f7c5196cea Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 23 Sep 2024 15:15:12 -0400 Subject: [PATCH 146/213] fix lint allow --- Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 4d77745a8..faa8a783f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,6 @@ indexing-slicing = "deny" arithmetic-side-effects = "deny" type_complexity = "allow" unwrap-used = "deny" -manual_inspect = "allow" [workspace.dependencies] cargo-husky = { version = "1", default-features = false } From c42864164721f80865a419489d61ba38f247d8af Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 23 Sep 2024 15:25:18 -0400 Subject: [PATCH 147/213] revert lint change --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index faa8a783f..4d77745a8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,6 +43,7 @@ indexing-slicing = "deny" arithmetic-side-effects = "deny" type_complexity = "allow" unwrap-used = "deny" +manual_inspect = "allow" [workspace.dependencies] cargo-husky = { version = "1", default-features = false } From 00f6a16b861f3726e8145612b411ad73d8ae824a Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Tue, 24 Sep 2024 12:52:13 -0400 Subject: [PATCH 148/213] Add rpc-external flag to localhost.sh for e2e tests --- runtime/src/lib.rs | 2 +- scripts/localnet.sh | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 7d569264a..fee9e8770 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -146,7 +146,7 @@ 
pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. - spec_version: 202, + spec_version: 196, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, diff --git a/scripts/localnet.sh b/scripts/localnet.sh index 85c4edfa2..1b6dad25c 100755 --- a/scripts/localnet.sh +++ b/scripts/localnet.sh @@ -77,6 +77,7 @@ alice_start=( --alice --port 30334 --rpc-port 9946 + --rpc-external --validator --rpc-cors=all --allow-private-ipv4 From 85a506a0d3474e335d5952650f8a441fba33fbdd Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Tue, 24 Sep 2024 18:12:14 -0400 Subject: [PATCH 149/213] Bump version to pass deploy check. Use --unsafe-force-node-key-generation instead of explicit key generation, add rpc-cors to Bob for e2e tests. --- runtime/src/lib.rs | 2 +- scripts/localnet.sh | 8 +++----- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index fee9e8770..810ba20e6 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -146,7 +146,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. - spec_version: 196, + spec_version: 197, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, diff --git a/scripts/localnet.sh b/scripts/localnet.sh index 1b6dad25c..51e3d05a8 100755 --- a/scripts/localnet.sh +++ b/scripts/localnet.sh @@ -56,10 +56,6 @@ echo "*** Building chainspec..." 
"$BASE_DIR/target/release/node-subtensor" build-spec --disable-default-bootnode --raw --chain $CHAIN >$FULL_PATH echo "*** Chainspec built and output to file" -# generate node keys -$BASE_DIR/target/release/node-subtensor key generate-node-key --chain="$FULL_PATH" --base-path /tmp/alice -$BASE_DIR/target/release/node-subtensor key generate-node-key --chain="$FULL_PATH" --base-path /tmp/bob - if [ $NO_PURGE -eq 1 ]; then echo "*** Purging previous state skipped..." else @@ -77,11 +73,11 @@ alice_start=( --alice --port 30334 --rpc-port 9946 - --rpc-external --validator --rpc-cors=all --allow-private-ipv4 --discover-local + --unsafe-force-node-key-generation ) bob_start=( @@ -92,8 +88,10 @@ bob_start=( --port 30335 --rpc-port 9945 --validator + --rpc-cors=all --allow-private-ipv4 --discover-local + --unsafe-force-node-key-generation ) trap 'pkill -P $$' EXIT SIGINT SIGTERM From 961419308245a085669d1b0c00c46fc80a6ff755 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Polewicz?= Date: Wed, 25 Sep 2024 22:32:33 +0200 Subject: [PATCH 150/213] backport clippy silencing as instructed by formalized_tensor --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index f9a7968b9..4f162ca6b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,6 +41,7 @@ indexing-slicing = "deny" arithmetic-side-effects = "deny" type_complexity = "allow" unwrap-used = "deny" +manual_inspect = "allow" [workspace.dependencies] cargo-husky = { version = "1", default-features = false } From eacda1edec13e3d44c9cb486bd7706d7e0553f0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Polewicz?= Date: Thu, 26 Sep 2024 11:26:50 +0300 Subject: [PATCH 151/213] Remove a small refactor in runtime/src/lib.rs --- runtime/src/lib.rs | 40 +++++++++++++++++++++++++++++++++++----- 1 file changed, 35 insertions(+), 5 deletions(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 6696259a3..b7a347c46 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -1369,7 
+1369,13 @@ impl_runtime_apis! { } fn get_delegate(delegate_account_vec: Vec) -> Vec { - SubtensorModule::get_delegate(delegate_account_vec).map(|r| r.encode()).unwrap_or_default() + let _result = SubtensorModule::get_delegate(delegate_account_vec); + if _result.is_some() { + let result = _result.expect("Could not get DelegateInfo"); + result.encode() + } else { + vec![] + } } fn get_delegated(delegatee_account_vec: Vec) -> Vec { @@ -1385,7 +1391,13 @@ impl_runtime_apis! { } fn get_neuron_lite(netuid: u16, uid: u16) -> Vec { - SubtensorModule::get_neuron_lite(netuid, uid).map(|r| r.encode()).unwrap_or_default() + let _result = SubtensorModule::get_neuron_lite(netuid, uid); + if _result.is_some() { + let result = _result.expect("Could not get NeuronInfoLite"); + result.encode() + } else { + vec![] + } } fn get_neurons(netuid: u16) -> Vec { @@ -1394,13 +1406,25 @@ impl_runtime_apis! { } fn get_neuron(netuid: u16, uid: u16) -> Vec { - SubtensorModule::get_neuron(netuid, uid).map(|r| r.encode()).unwrap_or_default() + let _result = SubtensorModule::get_neuron(netuid, uid); + if _result.is_some() { + let result = _result.expect("Could not get NeuronInfo"); + result.encode() + } else { + vec![] + } } } impl subtensor_custom_rpc_runtime_api::SubnetInfoRuntimeApi for Runtime { fn get_subnet_info(netuid: u16) -> Vec { - SubtensorModule::get_subnet_info(netuid).map(|r| r.encode()).unwrap_or_default() + let _result = SubtensorModule::get_subnet_info(netuid); + if _result.is_some() { + let result = _result.expect("Could not get SubnetInfo"); + result.encode() + } else { + vec![] + } } fn get_subnets_info() -> Vec { @@ -1424,7 +1448,13 @@ impl_runtime_apis! 
{ } fn get_subnet_hyperparams(netuid: u16) -> Vec { - SubtensorModule::get_subnet_hyperparams(netuid).map(|r| r.encode()).unwrap_or_default() + let _result = SubtensorModule::get_subnet_hyperparams(netuid); + if _result.is_some() { + let result = _result.expect("Could not get SubnetHyperparams"); + result.encode() + } else { + vec![] + } } } From 7214da1b8369d1b7ff24e5d67c661113cb4a3d9f Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 1 Oct 2024 11:03:11 -0400 Subject: [PATCH 152/213] update tag in update.sh to v1.16.0-rc1 --- support/procedural-fork/update.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/support/procedural-fork/update.sh b/support/procedural-fork/update.sh index a8793b261..0cb933b33 100755 --- a/support/procedural-fork/update.sh +++ b/support/procedural-fork/update.sh @@ -5,7 +5,7 @@ set -e # Set the repository and tag REPO_URL="git@github.com:paritytech/polkadot-sdk.git" -POLKADOT_SDK_TAG="v1.10.0-rc3" +POLKADOT_SDK_TAG="v1.16.0-rc1" # Create a temporary directory for cloning TMP_DIR=$(mktemp -d) From ac6e87d2c2328a5cf6f127ed3d4e45c72c622065 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 1 Oct 2024 11:03:55 -0400 Subject: [PATCH 153/213] update procedural-fork to v1.16.0-rc1 --- support/procedural-fork/src/benchmark.rs | 103 +++++----- .../src/construct_runtime/expand/call.rs | 1 + .../src/construct_runtime/expand/inherent.rs | 6 +- .../src/construct_runtime/expand/metadata.rs | 4 +- .../src/construct_runtime/expand/origin.rs | 34 ++-- .../src/construct_runtime/mod.rs | 24 +-- .../src/construct_runtime/parse.rs | 37 ++-- support/procedural-fork/src/dynamic_params.rs | 4 +- .../src/pallet/expand/constants.rs | 9 +- .../src/pallet/expand/documentation.rs | 4 +- .../src/pallet/expand/error.rs | 44 +++-- .../src/pallet/expand/hooks.rs | 81 ++++---- .../src/pallet/expand/pallet_struct.rs | 14 +- .../src/pallet/expand/storage.rs | 67 +++---- .../src/pallet/expand/tasks.rs | 7 +- .../src/pallet/expand/tt_default_parts.rs 
| 4 +- .../src/pallet/parse/composite.rs | 4 - .../src/pallet/parse/config.rs | 187 ++++++++++-------- .../procedural-fork/src/pallet/parse/error.rs | 4 - .../src/pallet/parse/extra_constants.rs | 5 +- .../src/pallet/parse/genesis_build.rs | 9 +- .../src/pallet/parse/helper.rs | 18 +- .../procedural-fork/src/pallet/parse/hooks.rs | 9 +- .../src/pallet/parse/inherent.rs | 6 +- .../procedural-fork/src/pallet/parse/mod.rs | 19 +- .../src/pallet/parse/origin.rs | 8 +- .../src/pallet/parse/storage.rs | 6 +- .../procedural-fork/src/pallet/parse/tasks.rs | 8 +- .../src/pallet/parse/tests/tasks.rs | 4 +- .../src/pallet/parse/type_value.rs | 12 +- .../src/pallet/parse/validate_unsigned.rs | 17 +- .../procedural-fork/src/runtime/expand/mod.rs | 21 +- support/procedural-fork/src/runtime/mod.rs | 2 - .../procedural-fork/src/runtime/parse/mod.rs | 64 +++--- .../src/runtime/parse/pallet.rs | 154 ++++++++++++--- .../src/runtime/parse/pallet_decl.rs | 145 ++++++++++++-- .../src/runtime/parse/runtime_struct.rs | 4 +- 37 files changed, 679 insertions(+), 470 deletions(-) diff --git a/support/procedural-fork/src/benchmark.rs b/support/procedural-fork/src/benchmark.rs index 376200d6e..0eb3c330a 100644 --- a/support/procedural-fork/src/benchmark.rs +++ b/support/procedural-fork/src/benchmark.rs @@ -323,6 +323,24 @@ fn ensure_valid_return_type(item_fn: &ItemFn) -> Result<()> { Ok(()) } +/// Ensure that the passed statements do not contain any forbidden variable names +fn ensure_no_forbidden_variable_names(stmts: &[Stmt]) -> Result<()> { + const FORBIDDEN_VAR_NAMES: [&str; 2] = ["recording", "verify"]; + for stmt in stmts { + let Stmt::Local(l) = stmt else { continue }; + let Pat::Ident(ident) = &l.pat else { continue }; + if FORBIDDEN_VAR_NAMES.contains(&ident.ident.to_string().as_str()) { + return Err(Error::new( + ident.span(), + format!( + "Variables {FORBIDDEN_VAR_NAMES:?} are reserved for benchmarking internals.", + ), + )); + } + } + Ok(()) +} + /// Parses params such as `x: 
Linear<0, 1>` fn parse_params(item_fn: &ItemFn) -> Result> { let mut params: Vec = Vec::new(); @@ -481,9 +499,12 @@ impl BenchmarkDef { } }; + let setup_stmts = Vec::from(&item_fn.block.stmts[0..i]); + ensure_no_forbidden_variable_names(&setup_stmts)?; + Ok(BenchmarkDef { params, - setup_stmts: Vec::from(&item_fn.block.stmts[0..i]), + setup_stmts, call_def, verify_stmts, last_stmt, @@ -692,18 +713,16 @@ pub fn benchmarks( fn instance( &self, + recording: &mut impl #krate::Recording, components: &[(#krate::BenchmarkParameter, u32)], verify: bool, - ) -> Result< - #krate::__private::Box Result<(), #krate::BenchmarkError>>, - #krate::BenchmarkError, - > { + ) -> Result<(), #krate::BenchmarkError> { match self { #( Self::#benchmark_names => { <#benchmark_names as #krate::BenchmarkingSetup< #type_use_generics - >>::instance(&#benchmark_names, components, verify) + >>::instance(&#benchmark_names, recording, components, verify) } ) * @@ -794,17 +813,7 @@ pub fn benchmarks( #krate::benchmarking::set_whitelist(whitelist.clone()); let mut results: #krate::__private::Vec<#krate::BenchmarkResult> = #krate::__private::Vec::new(); - // Always do at least one internal repeat... - for _ in 0 .. internal_repeats.max(1) { - // Always reset the state after the benchmark. - #krate::__private::defer!(#krate::benchmarking::wipe_db()); - - // Set up the externalities environment for the setup we want to - // benchmark. - let closure_to_benchmark = < - SelectedBenchmark as #krate::BenchmarkingSetup<#type_use_generics> - >::instance(&selected_benchmark, c, verify)?; - + let on_before_start = || { // Set the block number to at least 1 so events are deposited. if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { #frame_system::Pallet::::set_block_number(1u32.into()); @@ -822,6 +831,12 @@ pub fn benchmarks( // Reset the read/write counter so we don't count operations in the setup process. 
#krate::benchmarking::reset_read_write_count(); + }; + + // Always do at least one internal repeat... + for _ in 0 .. internal_repeats.max(1) { + // Always reset the state after the benchmark. + #krate::__private::defer!(#krate::benchmarking::wipe_db()); // Time the extrinsic logic. #krate::__private::log::trace!( @@ -831,20 +846,12 @@ pub fn benchmarks( c ); - let start_pov = #krate::benchmarking::proof_size(); - let start_extrinsic = #krate::benchmarking::current_time(); - - closure_to_benchmark()?; - - let finish_extrinsic = #krate::benchmarking::current_time(); - let end_pov = #krate::benchmarking::proof_size(); + let mut recording = #krate::BenchmarkRecording::new(&on_before_start); + >::instance(&selected_benchmark, &mut recording, c, verify)?; // Calculate the diff caused by the benchmark. - let elapsed_extrinsic = finish_extrinsic.saturating_sub(start_extrinsic); - let diff_pov = match (start_pov, end_pov) { - (Some(start), Some(end)) => end.saturating_sub(start), - _ => Default::default(), - }; + let elapsed_extrinsic = recording.elapsed_extrinsic().expect("elapsed time should be recorded"); + let diff_pov = recording.diff_pov().unwrap_or_default(); // Commit the changes to get proper write count #krate::benchmarking::commit_db(); @@ -1163,9 +1170,10 @@ fn expand_benchmark( fn instance( &self, + recording: &mut impl #krate::Recording, components: &[(#krate::BenchmarkParameter, u32)], verify: bool - ) -> Result<#krate::__private::Box Result<(), #krate::BenchmarkError>>, #krate::BenchmarkError> { + ) -> Result<(), #krate::BenchmarkError> { #( // prepare instance #param_names let #param_names = components.iter() @@ -1179,15 +1187,15 @@ fn expand_benchmark( #setup_stmts )* #pre_call - Ok(#krate::__private::Box::new(move || -> Result<(), #krate::BenchmarkError> { - #post_call - if verify { - #( - #verify_stmts - )* - } - #impl_last_stmt - })) + recording.start(); + #post_call + recording.stop(); + if verify { + #( + #verify_stmts + )* + } + #impl_last_stmt } } 
@@ -1205,18 +1213,15 @@ fn expand_benchmark( // Always reset the state after the benchmark. #krate::__private::defer!(#krate::benchmarking::wipe_db()); - // Set up the benchmark, return execution + verification function. - let closure_to_verify = < - SelectedBenchmark as #krate::BenchmarkingSetup - >::instance(&selected_benchmark, &c, true)?; - - // Set the block number to at least 1 so events are deposited. - if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { - #frame_system::Pallet::::set_block_number(1u32.into()); - } + let on_before_start = || { + // Set the block number to at least 1 so events are deposited. + if #krate::__private::Zero::is_zero(&#frame_system::Pallet::::block_number()) { + #frame_system::Pallet::::set_block_number(1u32.into()); + } + }; // Run execution + verification - closure_to_verify() + >::test_instance(&selected_benchmark, &c, &on_before_start) }; if components.is_empty() { diff --git a/support/procedural-fork/src/construct_runtime/expand/call.rs b/support/procedural-fork/src/construct_runtime/expand/call.rs index 7e8c2e856..cc467c31d 100644 --- a/support/procedural-fork/src/construct_runtime/expand/call.rs +++ b/support/procedural-fork/src/construct_runtime/expand/call.rs @@ -69,6 +69,7 @@ pub fn expand_outer_dispatch( quote! { #( #query_call_part_macros )* + /// The aggregated runtime call type. 
#[derive( Clone, PartialEq, Eq, #scrate::__private::codec::Encode, diff --git a/support/procedural-fork/src/construct_runtime/expand/inherent.rs b/support/procedural-fork/src/construct_runtime/expand/inherent.rs index b58d540fe..9705f9703 100644 --- a/support/procedural-fork/src/construct_runtime/expand/inherent.rs +++ b/support/procedural-fork/src/construct_runtime/expand/inherent.rs @@ -61,17 +61,17 @@ pub fn expand_outer_inherent( trait InherentDataExt { fn create_extrinsics(&self) -> - #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>; + #scrate::__private::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic>; fn check_extrinsics(&self, block: &#block) -> #scrate::inherent::CheckInherentsResult; } impl InherentDataExt for #scrate::inherent::InherentData { fn create_extrinsics(&self) -> - #scrate::__private::sp_std::vec::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> + #scrate::__private::Vec<<#block as #scrate::sp_runtime::traits::Block>::Extrinsic> { use #scrate::inherent::ProvideInherent; - let mut inherents = #scrate::__private::sp_std::vec::Vec::new(); + let mut inherents = #scrate::__private::Vec::new(); #( #pallet_attrs diff --git a/support/procedural-fork/src/construct_runtime/expand/metadata.rs b/support/procedural-fork/src/construct_runtime/expand/metadata.rs index f98c719ca..9f3d9cd4a 100644 --- a/support/procedural-fork/src/construct_runtime/expand/metadata.rs +++ b/support/procedural-fork/src/construct_runtime/expand/metadata.rs @@ -114,7 +114,7 @@ pub fn expand_runtime_metadata( >(); #scrate::__private::metadata_ir::MetadataIR { - pallets: #scrate::__private::sp_std::vec![ #(#pallets),* ], + pallets: #scrate::__private::vec![ #(#pallets),* ], extrinsic: #scrate::__private::metadata_ir::ExtrinsicMetadataIR { ty, version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, @@ -159,7 +159,7 @@ pub fn expand_runtime_metadata( }) } - pub fn metadata_versions() 
-> #scrate::__private::sp_std::vec::Vec { + pub fn metadata_versions() -> #scrate::__private::Vec { #scrate::__private::metadata_ir::supported_versions() } } diff --git a/support/procedural-fork/src/construct_runtime/expand/origin.rs b/support/procedural-fork/src/construct_runtime/expand/origin.rs index 2d50777bf..58c8adec5 100644 --- a/support/procedural-fork/src/construct_runtime/expand/origin.rs +++ b/support/procedural-fork/src/construct_runtime/expand/origin.rs @@ -110,25 +110,25 @@ pub fn expand_outer_origin( #[derive(Clone)] pub struct RuntimeOrigin { pub caller: OriginCaller, - filter: #scrate::__private::sp_std::rc::Rc::RuntimeCall) -> bool>>, + filter: #scrate::__private::Rc<#scrate::__private::Box::RuntimeCall) -> bool>>, } #[cfg(not(feature = "std"))] - impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + impl core::fmt::Debug for RuntimeOrigin { fn fmt( &self, - fmt: &mut #scrate::__private::sp_std::fmt::Formatter, - ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt: &mut core::fmt::Formatter, + ) -> core::result::Result<(), core::fmt::Error> { fmt.write_str("") } } #[cfg(feature = "std")] - impl #scrate::__private::sp_std::fmt::Debug for RuntimeOrigin { + impl core::fmt::Debug for RuntimeOrigin { fn fmt( &self, - fmt: &mut #scrate::__private::sp_std::fmt::Formatter, - ) -> #scrate::__private::sp_std::result::Result<(), #scrate::__private::sp_std::fmt::Error> { + fmt: &mut core::fmt::Formatter, + ) -> core::result::Result<(), core::fmt::Error> { fmt.debug_struct("Origin") .field("caller", &self.caller) .field("filter", &"[function ptr]") @@ -144,7 +144,7 @@ pub fn expand_outer_origin( fn add_filter(&mut self, filter: impl Fn(&Self::Call) -> bool + 'static) { let f = self.filter.clone(); - self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(move |call| { + self.filter = #scrate::__private::Rc::new(#scrate::__private::Box::new(move |call| { f(call) && filter(call) })); } @@ -155,7 
+155,7 @@ pub fn expand_outer_origin( as #scrate::traits::Contains<<#runtime as #system_path::Config>::RuntimeCall> >::contains; - self.filter = #scrate::__private::sp_std::rc::Rc::new(Box::new(filter)); + self.filter = #scrate::__private::Rc::new(#scrate::__private::Box::new(filter)); } fn set_caller_from(&mut self, other: impl Into) { @@ -257,7 +257,7 @@ pub fn expand_outer_origin( impl TryFrom for #system_path::Origin<#runtime> { type Error = OriginCaller; fn try_from(x: OriginCaller) - -> #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, OriginCaller> + -> core::result::Result<#system_path::Origin<#runtime>, OriginCaller> { if let OriginCaller::system(l) = x { Ok(l) @@ -280,7 +280,7 @@ pub fn expand_outer_origin( fn from(x: OriginCaller) -> Self { let mut o = RuntimeOrigin { caller: x, - filter: #scrate::__private::sp_std::rc::Rc::new(Box::new(|_| true)), + filter: #scrate::__private::Rc::new(#scrate::__private::Box::new(|_| true)), }; #scrate::traits::OriginTrait::reset_filter(&mut o); @@ -289,7 +289,7 @@ pub fn expand_outer_origin( } } - impl From for #scrate::__private::sp_std::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { + impl From for core::result::Result<#system_path::Origin<#runtime>, RuntimeOrigin> { /// NOTE: converting to pallet origin loses the origin filter information. fn from(val: RuntimeOrigin) -> Self { if let OriginCaller::system(l) = val.caller { @@ -357,7 +357,7 @@ fn expand_origin_caller_variant( } fn expand_origin_pallet_conversions( - scrate: &TokenStream, + _scrate: &TokenStream, runtime: &Ident, pallet: &Pallet, instance: Option<&Ident>, @@ -405,7 +405,7 @@ fn expand_origin_pallet_conversions( } #attr - impl From for #scrate::__private::sp_std::result::Result<#pallet_origin, RuntimeOrigin> { + impl From for core::result::Result<#pallet_origin, RuntimeOrigin> { /// NOTE: converting to pallet origin loses the origin filter information. 
fn from(val: RuntimeOrigin) -> Self { if let OriginCaller::#variant_name(l) = val.caller { @@ -421,7 +421,7 @@ fn expand_origin_pallet_conversions( type Error = OriginCaller; fn try_from( x: OriginCaller, - ) -> #scrate::__private::sp_std::result::Result<#pallet_origin, OriginCaller> { + ) -> core::result::Result<#pallet_origin, OriginCaller> { if let OriginCaller::#variant_name(l) = x { Ok(l) } else { @@ -435,7 +435,7 @@ fn expand_origin_pallet_conversions( type Error = (); fn try_from( x: &'a OriginCaller, - ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + ) -> core::result::Result<&'a #pallet_origin, ()> { if let OriginCaller::#variant_name(l) = x { Ok(&l) } else { @@ -449,7 +449,7 @@ fn expand_origin_pallet_conversions( type Error = (); fn try_from( x: &'a RuntimeOrigin, - ) -> #scrate::__private::sp_std::result::Result<&'a #pallet_origin, ()> { + ) -> core::result::Result<&'a #pallet_origin, ()> { if let OriginCaller::#variant_name(l) = &x.caller { Ok(&l) } else { diff --git a/support/procedural-fork/src/construct_runtime/mod.rs b/support/procedural-fork/src/construct_runtime/mod.rs index de688b3d6..9bc271fdc 100644 --- a/support/procedural-fork/src/construct_runtime/mod.rs +++ b/support/procedural-fork/src/construct_runtime/mod.rs @@ -491,7 +491,7 @@ fn construct_runtime_final_expansion( #[doc(hidden)] trait InternalConstructRuntime { #[inline(always)] - fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + fn runtime_metadata(&self) -> #scrate::__private::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { Default::default() } } @@ -554,6 +554,7 @@ pub(crate) fn decl_all_pallets<'a>( for pallet_declaration in pallet_declarations { let type_name = &pallet_declaration.name; let pallet = &pallet_declaration.path; + let docs = &pallet_declaration.docs; let mut generics = vec![quote!(#runtime)]; generics.extend( pallet_declaration @@ -567,6 +568,7 @@ 
pub(crate) fn decl_all_pallets<'a>( attrs.extend(TokenStream2::from_str(&feat).expect("was parsed successfully; qed")); } let type_decl = quote!( + #( #[doc = #docs] )* #(#attrs)* pub type #type_name = #pallet::Pallet <#(#generics),*>; ); @@ -703,10 +705,10 @@ pub(crate) fn decl_pallet_runtime_setup( impl #scrate::traits::PalletInfo for PalletInfo { fn index() -> Option { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + let type_id = core::any::TypeId::of::

(); #( #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + if type_id == core::any::TypeId::of::<#names>() { return Some(#indices) } )* @@ -715,10 +717,10 @@ pub(crate) fn decl_pallet_runtime_setup( } fn name() -> Option<&'static str> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + let type_id = core::any::TypeId::of::

(); #( #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + if type_id == core::any::TypeId::of::<#names>() { return Some(#name_strings) } )* @@ -727,10 +729,10 @@ pub(crate) fn decl_pallet_runtime_setup( } fn name_hash() -> Option<[u8; 16]> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + let type_id = core::any::TypeId::of::

(); #( #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + if type_id == core::any::TypeId::of::<#names>() { return Some(#name_hashes) } )* @@ -739,10 +741,10 @@ pub(crate) fn decl_pallet_runtime_setup( } fn module_name() -> Option<&'static str> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + let type_id = core::any::TypeId::of::

(); #( #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + if type_id == core::any::TypeId::of::<#names>() { return Some(#module_names) } )* @@ -751,10 +753,10 @@ pub(crate) fn decl_pallet_runtime_setup( } fn crate_version() -> Option<#scrate::traits::CrateVersion> { - let type_id = #scrate::__private::sp_std::any::TypeId::of::

(); + let type_id = core::any::TypeId::of::

(); #( #pallet_attrs - if type_id == #scrate::__private::sp_std::any::TypeId::of::<#names>() { + if type_id == core::any::TypeId::of::<#names>() { return Some( <#pallet_structs as #scrate::traits::PalletInfoAccess>::crate_version() ) diff --git a/support/procedural-fork/src/construct_runtime/parse.rs b/support/procedural-fork/src/construct_runtime/parse.rs index 173a8dd12..e5e60b3ff 100644 --- a/support/procedural-fork/src/construct_runtime/parse.rs +++ b/support/procedural-fork/src/construct_runtime/parse.rs @@ -65,8 +65,6 @@ pub enum RuntimeDeclaration { /// Declaration of a runtime with some pallet with implicit declaration of parts. #[derive(Debug)] pub struct ImplicitRuntimeDeclaration { - pub name: Ident, - pub where_section: Option, pub pallets: Vec, } @@ -103,8 +101,6 @@ impl Parse for RuntimeDeclaration { match convert_pallets(pallets.content.inner.into_iter().collect())? { PalletsConversion::Implicit(pallets) => { Ok(RuntimeDeclaration::Implicit(ImplicitRuntimeDeclaration { - name, - where_section, pallets, })) } @@ -131,9 +127,6 @@ impl Parse for RuntimeDeclaration { #[derive(Debug)] pub struct WhereSection { pub span: Span, - pub block: syn::TypePath, - pub node_block: syn::TypePath, - pub unchecked_extrinsic: syn::TypePath, } impl Parse for WhereSection { @@ -152,10 +145,9 @@ impl Parse for WhereSection { } input.parse::()?; } - let block = remove_kind(input, WhereKind::Block, &mut definitions)?.value; - let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; - let unchecked_extrinsic = - remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?.value; + remove_kind(input, WhereKind::Block, &mut definitions)?; + remove_kind(input, WhereKind::NodeBlock, &mut definitions)?; + remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?; if let Some(WhereDefinition { ref kind_span, ref kind, @@ -168,12 +160,7 @@ impl Parse for WhereSection { ); return Err(Error::new(*kind_span, msg)); } - Ok(Self { - span: 
input.span(), - block, - node_block, - unchecked_extrinsic, - }) + Ok(Self { span: input.span() }) } } @@ -188,7 +175,6 @@ pub enum WhereKind { pub struct WhereDefinition { pub kind_span: Span, pub kind: WhereKind, - pub value: syn::TypePath, } impl Parse for WhereDefinition { @@ -210,14 +196,10 @@ impl Parse for WhereDefinition { return Err(lookahead.error()); }; - Ok(Self { - kind_span, - kind, - value: { - let _: Token![=] = input.parse()?; - input.parse()? - }, - }) + let _: Token![=] = input.parse()?; + let _: syn::TypePath = input.parse()?; + + Ok(Self { kind_span, kind }) } } @@ -646,6 +628,8 @@ pub struct Pallet { pub pallet_parts: Vec, /// Expressions specified inside of a #[cfg] attribute. pub cfg_pattern: Vec, + /// The doc literals + pub docs: Vec, } impl Pallet { @@ -827,6 +811,7 @@ fn convert_pallets(pallets: Vec) -> syn::Result>>()?; diff --git a/support/procedural-fork/src/dynamic_params.rs b/support/procedural-fork/src/dynamic_params.rs index 70a18bf34..e1f9f626c 100644 --- a/support/procedural-fork/src/dynamic_params.rs +++ b/support/procedural-fork/src/dynamic_params.rs @@ -94,7 +94,7 @@ impl ToTokens for DynamicParamModAttr { let mut quoted_enum = quote! 
{}; for m in self.inner_mods() { let aggregate_name = - syn::Ident::new(&m.ident.to_string().to_class_case(), m.ident.span()); + syn::Ident::new(&m.ident.to_string().to_pascal_case(), m.ident.span()); let mod_name = &m.ident; let mut attrs = m.attrs.clone(); @@ -245,7 +245,7 @@ impl ToTokens for DynamicPalletParamAttr { ); let aggregate_name = syn::Ident::new( - ¶ms_mod.ident.to_string().to_class_case(), + ¶ms_mod.ident.to_string().to_pascal_case(), params_mod.ident.span(), ); let (mod_name, vis) = (¶ms_mod.ident, ¶ms_mod.vis); diff --git a/support/procedural-fork/src/pallet/expand/constants.rs b/support/procedural-fork/src/pallet/expand/constants.rs index 5153ccf49..19862a8a6 100644 --- a/support/procedural-fork/src/pallet/expand/constants.rs +++ b/support/procedural-fork/src/pallet/expand/constants.rs @@ -30,8 +30,7 @@ struct ConstDef { pub metadata_name: Option, } -/// -/// * Impl fn module_constant_metadata for pallet. +/// Implement the `pallet_constants_metadata` function for the pallet. 
pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); @@ -97,7 +96,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { name: #ident_str, ty: #frame_support::__private::scale_info::meta_type::<#const_type>(), value: { #default_byte_impl }, - docs: #frame_support::__private::sp_std::vec![ #( #doc ),* ], + docs: #frame_support::__private::vec![ #( #doc ),* ], } }) }); @@ -107,9 +106,9 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { #[doc(hidden)] pub fn pallet_constants_metadata() - -> #frame_support::__private::sp_std::vec::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> + -> #frame_support::__private::Vec<#frame_support::__private::metadata_ir::PalletConstantMetadataIR> { - #frame_support::__private::sp_std::vec![ #( #consts ),* ] + #frame_support::__private::vec![ #( #consts ),* ] } } ) diff --git a/support/procedural-fork/src/pallet/expand/documentation.rs b/support/procedural-fork/src/pallet/expand/documentation.rs index adc4f7ce9..62b2e8b8b 100644 --- a/support/procedural-fork/src/pallet/expand/documentation.rs +++ b/support/procedural-fork/src/pallet/expand/documentation.rs @@ -166,9 +166,9 @@ pub fn expand_documentation(def: &mut Def) -> proc_macro2::TokenStream { #[doc(hidden)] pub fn pallet_documentation_metadata() - -> #frame_support::__private::sp_std::vec::Vec<&'static str> + -> #frame_support::__private::Vec<&'static str> { - #frame_support::__private::sp_std::vec![ #( #docs ),* ] + #frame_support::__private::vec![ #( #docs ),* ] } } ) diff --git a/support/procedural-fork/src/pallet/expand/error.rs b/support/procedural-fork/src/pallet/expand/error.rs index e2c3f680c..1b76034ef 100644 --- a/support/procedural-fork/src/pallet/expand/error.rs +++ b/support/procedural-fork/src/pallet/expand/error.rs @@ -66,28 +66,30 @@ pub fn expand_error(def: &mut Def) -> 
proc_macro2::TokenStream { #[doc(hidden)] #[codec(skip)] __Ignore( - #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)>, + core::marker::PhantomData<(#type_use_gen)>, #frame_support::Never, ) ); - let as_str_matches = error.variants.iter().map( - |VariantDef { ident: variant, field: field_ty, docs: _, cfg_attrs }| { - let variant_str = variant.to_string(); - let cfg_attrs = cfg_attrs.iter().map(|attr| attr.to_token_stream()); - match field_ty { - Some(VariantField { is_named: true }) => { - quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant { .. } => #variant_str,) - }, - Some(VariantField { is_named: false }) => { - quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant(..) => #variant_str,) - }, - None => { - quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant => #variant_str,) - }, - } - }, - ); + let as_str_matches = + error + .variants + .iter() + .map(|VariantDef { ident: variant, field: field_ty, cfg_attrs }| { + let variant_str = variant.to_string(); + let cfg_attrs = cfg_attrs.iter().map(|attr| attr.to_token_stream()); + match field_ty { + Some(VariantField { is_named: true }) => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant { .. } => #variant_str,) + }, + Some(VariantField { is_named: false }) => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant(..) 
=> #variant_str,) + }, + None => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant => #variant_str,) + }, + } + }); let error_item = { let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; @@ -126,11 +128,11 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { } quote::quote_spanned!(error.attr_span => - impl<#type_impl_gen> #frame_support::__private::sp_std::fmt::Debug for #error_ident<#type_use_gen> + impl<#type_impl_gen> core::fmt::Debug for #error_ident<#type_use_gen> #config_where_clause { - fn fmt(&self, f: &mut #frame_support::__private::sp_std::fmt::Formatter<'_>) - -> #frame_support::__private::sp_std::fmt::Result + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) + -> core::fmt::Result { f.write_str(self.as_str()) } diff --git a/support/procedural-fork/src/pallet/expand/hooks.rs b/support/procedural-fork/src/pallet/expand/hooks.rs index 6967f4c08..8ff0e8f30 100644 --- a/support/procedural-fork/src/pallet/expand/hooks.rs +++ b/support/procedural-fork/src/pallet/expand/hooks.rs @@ -258,24 +258,24 @@ pub fn expand_hooks(def: &mut Def) -> proc_macro2::TokenStream { >::on_runtime_upgrade() } - #[cfg(feature = "try-runtime")] - fn pre_upgrade() -> Result<#frame_support::__private::sp_std::vec::Vec, #frame_support::sp_runtime::TryRuntimeError> { - < - Self - as - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - >::pre_upgrade() - } + #frame_support::try_runtime_enabled! 
{ + fn pre_upgrade() -> Result<#frame_support::__private::Vec, #frame_support::sp_runtime::TryRuntimeError> { + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::pre_upgrade() + } - #[cfg(feature = "try-runtime")] - fn post_upgrade(state: #frame_support::__private::sp_std::vec::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { - #post_storage_version_check + fn post_upgrade(state: #frame_support::__private::Vec) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #post_storage_version_check - < - Self - as - #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> - >::post_upgrade(state) + < + Self + as + #frame_support::traits::Hooks<#frame_system::pallet_prelude::BlockNumberFor::> + >::post_upgrade(state) + } } } @@ -310,34 +310,35 @@ pub fn expand_hooks(def: &mut Def) -> proc_macro2::TokenStream { } } - #[cfg(feature = "try-runtime")] - impl<#type_impl_gen> - #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> - for #pallet_ident<#type_use_gen> #where_clause - { - fn try_state( - n: #frame_system::pallet_prelude::BlockNumberFor::, - _s: #frame_support::traits::TryStateSelect - ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { - #frame_support::__private::log::info!( - target: #frame_support::LOG_TARGET, - "🩺 Running {:?} try-state checks", - #pallet_name, - ); - < - Self as #frame_support::traits::Hooks< - #frame_system::pallet_prelude::BlockNumberFor:: - > - >::try_state(n).map_err(|err| { - #frame_support::__private::log::error!( + #frame_support::try_runtime_enabled! 
{ + impl<#type_impl_gen> + #frame_support::traits::TryState<#frame_system::pallet_prelude::BlockNumberFor::> + for #pallet_ident<#type_use_gen> #where_clause + { + fn try_state( + n: #frame_system::pallet_prelude::BlockNumberFor::, + _s: #frame_support::traits::TryStateSelect + ) -> Result<(), #frame_support::sp_runtime::TryRuntimeError> { + #frame_support::__private::log::info!( target: #frame_support::LOG_TARGET, - "❌ {:?} try_state checks failed: {:?}", + "🩺 Running {:?} try-state checks", #pallet_name, - err ); + < + Self as #frame_support::traits::Hooks< + #frame_system::pallet_prelude::BlockNumberFor:: + > + >::try_state(n).map_err(|err| { + #frame_support::__private::log::error!( + target: #frame_support::LOG_TARGET, + "❌ {:?} try_state checks failed: {:?}", + #pallet_name, + err + ); - err - }) + err + }) + } } } ) diff --git a/support/procedural-fork/src/pallet/expand/pallet_struct.rs b/support/procedural-fork/src/pallet/expand/pallet_struct.rs index c5def65ed..64e5d533c 100644 --- a/support/procedural-fork/src/pallet/expand/pallet_struct.rs +++ b/support/procedural-fork/src/pallet/expand/pallet_struct.rs @@ -54,7 +54,7 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { if let Some(field) = pallet_item.fields.iter_mut().next() { if field.ty == syn::parse_quote!(_) { field.ty = syn::parse_quote!( - #frame_support::__private::sp_std::marker::PhantomData<(#type_use_gen)> + core::marker::PhantomData<(#type_use_gen)> ); } } @@ -148,10 +148,10 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { #storages_where_clauses { fn storage_info() - -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::StorageInfo> + -> #frame_support::__private::Vec<#frame_support::traits::StorageInfo> { #[allow(unused_mut)] - let mut res = #frame_support::__private::sp_std::vec![]; + let mut res = #frame_support::__private::vec![]; #( #(#storage_cfg_attrs)* @@ -191,8 +191,8 @@ pub fn expand_pallet_struct(def: &mut Def) -> 
proc_macro2::TokenStream { let whitelisted_storage_keys_impl = quote::quote![ use #frame_support::traits::{StorageInfoTrait, TrackedStorageKey, WhitelistedStorageKeys}; impl<#type_impl_gen> WhitelistedStorageKeys for #pallet_ident<#type_use_gen> #storages_where_clauses { - fn whitelisted_storage_keys() -> #frame_support::__private::sp_std::vec::Vec { - use #frame_support::__private::sp_std::vec; + fn whitelisted_storage_keys() -> #frame_support::__private::Vec { + use #frame_support::__private::vec; vec![#( TrackedStorageKey::new(#whitelisted_storage_idents::<#type_use_gen>::hashed_key().to_vec()) ),*] @@ -284,7 +284,7 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { #config_where_clause { fn count() -> usize { 1 } - fn infos() -> #frame_support::__private::sp_std::vec::Vec<#frame_support::traits::PalletInfoData> { + fn infos() -> #frame_support::__private::Vec<#frame_support::traits::PalletInfoData> { use #frame_support::traits::PalletInfoAccess; let item = #frame_support::traits::PalletInfoData { index: Self::index(), @@ -292,7 +292,7 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { module_name: Self::module_name(), crate_version: Self::crate_version(), }; - #frame_support::__private::sp_std::vec![item] + #frame_support::__private::vec![item] } } diff --git a/support/procedural-fork/src/pallet/expand/storage.rs b/support/procedural-fork/src/pallet/expand/storage.rs index b77e9846b..95b046670 100644 --- a/support/procedural-fork/src/pallet/expand/storage.rs +++ b/support/procedural-fork/src/pallet/expand/storage.rs @@ -454,7 +454,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { #(#cfg_attrs)* { <#full_ident as #frame_support::storage::StorageEntryMetadataBuilder>::build_metadata( - #frame_support::__private::sp_std::vec![ + #frame_support::__private::vec![ #( #docs, )* ], &mut entries, @@ -886,39 +886,40 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { 
storage_names.sort_by_cached_key(|ident| ident.to_string()); quote::quote!( - #[cfg(feature = "try-runtime")] - impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage - for #pallet_ident<#type_use_gen> #completed_where_clause - { - fn try_decode_entire_state() -> Result> { - let pallet_name = <::PalletInfo as frame_support::traits::PalletInfo> - ::name::<#pallet_ident<#type_use_gen>>() - .expect("Every active pallet has a name in the runtime; qed"); - - #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); - - // NOTE: for now, we have to exclude storage items that are feature gated. - let mut errors = #frame_support::__private::sp_std::vec::Vec::new(); - let mut decoded = 0usize; - - #( - #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ - {pallet_name}::{}", stringify!(#storage_names)); + #frame_support::try_runtime_enabled! { + impl<#type_impl_gen> #frame_support::traits::TryDecodeEntireStorage + for #pallet_ident<#type_use_gen> #completed_where_clause + { + fn try_decode_entire_state() -> Result> { + let pallet_name = <::PalletInfo as #frame_support::traits::PalletInfo> + ::name::<#pallet_ident<#type_use_gen>>() + .expect("Every active pallet has a name in the runtime; qed"); + + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode pallet: {pallet_name}"); + + // NOTE: for now, we have to exclude storage items that are feature gated. 
+ let mut errors = #frame_support::__private::Vec::new(); + let mut decoded = 0usize; + + #( + #frame_support::__private::log::debug!(target: "runtime::try-decode-state", "trying to decode storage: \ + {pallet_name}::{}", stringify!(#storage_names)); + + match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { + Ok(count) => { + decoded += count; + }, + Err(err) => { + errors.extend(err); + }, + } + )* - match <#storage_names as #frame_support::traits::TryDecodeEntireStorage>::try_decode_entire_state() { - Ok(count) => { - decoded += count; - }, - Err(err) => { - errors.extend(err); - }, + if errors.is_empty() { + Ok(decoded) + } else { + Err(errors) } - )* - - if errors.is_empty() { - Ok(decoded) - } else { - Err(errors) } } } @@ -939,7 +940,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { .expect("No name found for the pallet in the runtime! This usually means that the pallet wasn't added to `construct_runtime!`."), entries: { #[allow(unused_mut)] - let mut entries = #frame_support::__private::sp_std::vec![]; + let mut entries = #frame_support::__private::vec![]; #( #entries_builder )* entries }, diff --git a/support/procedural-fork/src/pallet/expand/tasks.rs b/support/procedural-fork/src/pallet/expand/tasks.rs index 8c4dfb54f..8a0bd2252 100644 --- a/support/procedural-fork/src/pallet/expand/tasks.rs +++ b/support/procedural-fork/src/pallet/expand/tasks.rs @@ -163,7 +163,6 @@ impl ToTokens for TasksDef { .map(|task| &task.arg_names) .collect::>(); - let sp_std = quote!(#scrate::__private::sp_std); let impl_generics = &self.item_impl.generics; tokens.extend(quote! 
{ impl #impl_generics #enum_use @@ -173,13 +172,13 @@ impl ToTokens for TasksDef { impl #impl_generics #scrate::traits::Task for #enum_use { - type Enumeration = #sp_std::vec::IntoIter<#enum_use>; + type Enumeration = #scrate::__private::IntoIter<#enum_use>; fn iter() -> Self::Enumeration { - let mut all_tasks = #sp_std::vec![]; + let mut all_tasks = #scrate::__private::vec![]; #(all_tasks .extend(#task_iters.map(|(#(#task_arg_names),*)| #enum_ident::#task_fn_idents { #(#task_arg_names: #task_arg_names.clone()),* }) - .collect::<#sp_std::vec::Vec<_>>()); + .collect::<#scrate::__private::Vec<_>>()); )* all_tasks.into_iter() } diff --git a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs index 57b78339a..8e7dc39d8 100644 --- a/support/procedural-fork/src/pallet/expand/tt_default_parts.rs +++ b/support/procedural-fork/src/pallet/expand/tt_default_parts.rs @@ -208,9 +208,9 @@ pub fn expand_tt_default_parts(def: &mut Def) -> proc_macro2::TokenStream { macro_rules! #default_parts_unique_id_v2 { { $caller:tt - frame_support = [{ $($frame_support:ident)::* }] + your_tt_return = [{ $my_tt_return:path }] } => { - $($frame_support)*::__private::tt_return! { + $my_tt_return! { $caller tokens = [{ + Pallet #call_part_v2 #storage_part_v2 #event_part_v2 #error_part_v2 #origin_part_v2 #config_part_v2 diff --git a/support/procedural-fork/src/pallet/parse/composite.rs b/support/procedural-fork/src/pallet/parse/composite.rs index 38da1f205..239b4fd4b 100644 --- a/support/procedural-fork/src/pallet/parse/composite.rs +++ b/support/procedural-fork/src/pallet/parse/composite.rs @@ -87,8 +87,6 @@ pub mod keyword { } pub struct CompositeDef { - /// The index of the CompositeDef item in the pallet module. - pub index: usize, /// The composite keyword used (contains span). pub composite_keyword: keyword::CompositeKeyword, /// Name of the associated type. 
@@ -104,7 +102,6 @@ pub struct CompositeDef { impl CompositeDef { pub fn try_from( attr_span: proc_macro2::Span, - index: usize, scrate: &syn::Path, item: &mut syn::Item, ) -> syn::Result { @@ -186,7 +183,6 @@ impl CompositeDef { syn::parse2::(item.ident.to_token_stream())?; Ok(CompositeDef { - index, composite_keyword, attr_span, generics: item.generics.clone(), diff --git a/support/procedural-fork/src/pallet/parse/config.rs b/support/procedural-fork/src/pallet/parse/config.rs index cde565245..95b4143b6 100644 --- a/support/procedural-fork/src/pallet/parse/config.rs +++ b/support/procedural-fork/src/pallet/parse/config.rs @@ -62,8 +62,6 @@ pub struct ConfigDef { pub has_event_type: bool, /// The where clause on trait definition but modified so `Self` is `T`. pub where_clause: Option, - /// The span of the pallet::config attribute. - pub attr_span: proc_macro2::Span, /// Whether a default sub-trait should be generated. /// /// Contains default sub-trait items (instantiated by `#[pallet::config(with_default)]`). 
@@ -97,30 +95,32 @@ impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { let bound = trait_ty .bounds .iter() - .find_map(|b| { - if let syn::TypeParamBound::Trait(tb) = b { - tb.path - .segments - .last() - .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) - } else { - None - } + .find_map(|param_bound| { + let syn::TypeParamBound::Trait(trait_bound) = param_bound else { + return None; + }; + + trait_bound + .path + .segments + .last() + .and_then(|s| (s.ident == "Get").then(|| s)) }) .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; - let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { - if ab.args.len() == 1 { - if let syn::GenericArgument::Type(ref ty) = ab.args[0] { - Ok(ty) - } else { - Err(err(ab.args[0].span(), "Expected a type argument")) - } - } else { - Err(err(bound.span(), "Expected a single type argument")) - } - } else { - Err(err(bound.span(), "Expected trait generic args")) - }?; + + let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments else { + return Err(err(bound.span(), "Expected trait generic args")); + }; + + // Only one type argument is expected. 
+ if ab.args.len() != 1 { + return Err(err(bound.span(), "Expected a single type argument")); + } + + let syn::GenericArgument::Type(ref type_arg) = ab.args[0] else { + return Err(err(ab.args[0].span(), "Expected a type argument")); + }; + let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) .expect("Internal error: replacing `Self` by `T` should result in valid type"); @@ -229,59 +229,62 @@ fn check_event_type( trait_item: &syn::TraitItem, trait_has_instance: bool, ) -> syn::Result { - if let syn::TraitItem::Type(type_) = trait_item { - if type_.ident == "RuntimeEvent" { - // Check event has no generics - if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ - no generics nor where_clause"; - return Err(syn::Error::new(trait_item.span(), msg)); - } + let syn::TraitItem::Type(type_) = trait_item else { + return Ok(false); + }; - // Check bound contains IsType and From - let has_is_type_bound = type_.bounds.iter().any(|s| { - syn::parse2::(s.to_token_stream()) - .map_or(false, |b| has_expected_system_config(b.0, frame_system)) - }); + if type_.ident != "RuntimeEvent" { + return Ok(false); + } - if !has_is_type_bound { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ - bound: `IsType<::RuntimeEvent>`".to_string(); - return Err(syn::Error::new(type_.span(), msg)); - } + // Check event has no generics + if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { + let msg = + "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must have\ + no generics nor where_clause"; + return Err(syn::Error::new(trait_item.span(), msg)); + } - let from_event_bound = type_ - .bounds - .iter() - .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); + // Check bound contains IsType and From + let has_is_type_bound = type_.bounds.iter().any(|s| { + 
syn::parse2::(s.to_token_stream()) + .map_or(false, |b| has_expected_system_config(b.0, frame_system)) + }); + + if !has_is_type_bound { + let msg = + "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ + bound: `IsType<::RuntimeEvent>`" + .to_string(); + return Err(syn::Error::new(type_.span(), msg)); + } - let from_event_bound = if let Some(b) = from_event_bound { - b - } else { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ - bound: `From` or `From>` or `From>`"; - return Err(syn::Error::new(type_.span(), msg)); - }; + let from_event_bound = type_ + .bounds + .iter() + .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); - if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) - { - let msg = "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ + let Some(from_event_bound) = from_event_bound else { + let msg = + "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` is reserved and must \ + bound: `From` or `From>` or `From>`"; + return Err(syn::Error::new(type_.span(), msg)); + }; + + if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) { + let msg = + "Invalid `type RuntimeEvent`, associated type `RuntimeEvent` bounds inconsistent \ `From`. Config and generic Event must be both with instance or \ without instance"; - return Err(syn::Error::new(type_.span(), msg)); - } - - Ok(true) - } else { - Ok(false) - } - } else { - Ok(false) + return Err(syn::Error::new(type_.span(), msg)); } + + Ok(true) } /// Check that the path to `frame_system::Config` is valid, this is that the path is just -/// `frame_system::Config` or when using the `frame` crate it is `frame::xyz::frame_system::Config`. +/// `frame_system::Config` or when using the `frame` crate it is +/// `polkadot_sdk_frame::xyz::frame_system::Config`. 
fn has_expected_system_config(path: syn::Path, frame_system: &syn::Path) -> bool { // Check if `frame_system` is actually 'frame_system'. if path.segments.iter().all(|s| s.ident != "frame_system") { @@ -305,7 +308,7 @@ fn has_expected_system_config(path: syn::Path, frame_system: &syn::Path) -> bool syn::parse2::(quote::quote!(frame_system)).expect("is a valid path; qed") } (_, _) => - // They are either both `frame_system` or both `frame::xyz::frame_system`. + // They are either both `frame_system` or both `polkadot_sdk_frame::xyz::frame_system`. { frame_system.clone() } @@ -351,14 +354,11 @@ pub fn replace_self_by_t(input: proc_macro2::TokenStream) -> proc_macro2::TokenS impl ConfigDef { pub fn try_from( frame_system: &syn::Path, - attr_span: proc_macro2::Span, index: usize, item: &mut syn::Item, enable_default: bool, ) -> syn::Result { - let item = if let syn::Item::Trait(item) = item { - item - } else { + let syn::Item::Trait(item) = item else { let msg = "Invalid pallet::config, expected trait definition"; return Err(syn::Error::new(item.span(), msg)); }; @@ -512,7 +512,6 @@ impl ConfigDef { consts_metadata, has_event_type, where_clause, - attr_span, default_sub_trait, }) } @@ -539,14 +538,29 @@ mod tests { #[test] fn has_expected_system_config_works_with_frame() { + let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); + + let frame_system = + syn::parse2::(quote::quote!(polkadot_sdk_frame::deps::frame_system)) + .unwrap(); + assert!(has_expected_system_config(path.clone(), &frame_system)); + let frame_system = syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); - let path = syn::parse2::(quote::quote!(frame_system::Config)).unwrap(); assert!(has_expected_system_config(path, &frame_system)); } #[test] fn has_expected_system_config_works_with_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(polkadot_sdk_frame::deps::frame_system)) + .unwrap(); + let path = syn::parse2::(quote::quote!( + 
polkadot_sdk_frame::deps::frame_system::Config + )) + .unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + let frame_system = syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); let path = @@ -556,6 +570,13 @@ mod tests { #[test] fn has_expected_system_config_works_with_other_frame_full_path() { + let frame_system = + syn::parse2::(quote::quote!(polkadot_sdk_frame::xyz::frame_system)).unwrap(); + let path = + syn::parse2::(quote::quote!(polkadot_sdk_frame::xyz::frame_system::Config)) + .unwrap(); + assert!(has_expected_system_config(path, &frame_system)); + let frame_system = syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); let path = @@ -566,26 +587,32 @@ mod tests { #[test] fn has_expected_system_config_does_not_works_with_mixed_frame_full_path() { let frame_system = - syn::parse2::(quote::quote!(frame::xyz::frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + syn::parse2::(quote::quote!(polkadot_sdk_frame::xyz::frame_system)).unwrap(); + let path = syn::parse2::(quote::quote!( + polkadot_sdk_frame::deps::frame_system::Config + )) + .unwrap(); assert!(!has_expected_system_config(path, &frame_system)); } #[test] fn has_expected_system_config_does_not_works_with_other_mixed_frame_full_path() { let frame_system = - syn::parse2::(quote::quote!(frame::deps::frame_system)).unwrap(); + syn::parse2::(quote::quote!(polkadot_sdk_frame::deps::frame_system)) + .unwrap(); let path = - syn::parse2::(quote::quote!(frame::xyz::frame_system::Config)).unwrap(); + syn::parse2::(quote::quote!(polkadot_sdk_frame::xyz::frame_system::Config)) + .unwrap(); assert!(!has_expected_system_config(path, &frame_system)); } #[test] fn has_expected_system_config_does_not_work_with_frame_full_path_if_not_frame_crate() { let frame_system = syn::parse2::(quote::quote!(frame_system)).unwrap(); - let path = - syn::parse2::(quote::quote!(frame::deps::frame_system::Config)).unwrap(); + let 
path = syn::parse2::(quote::quote!( + polkadot_sdk_frame::deps::frame_system::Config + )) + .unwrap(); assert!(!has_expected_system_config(path, &frame_system)); } diff --git a/support/procedural-fork/src/pallet/parse/error.rs b/support/procedural-fork/src/pallet/parse/error.rs index e93e2113f..7aab5732b 100644 --- a/support/procedural-fork/src/pallet/parse/error.rs +++ b/support/procedural-fork/src/pallet/parse/error.rs @@ -16,7 +16,6 @@ // limitations under the License. use super::helper; -use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; use syn::{spanned::Spanned, Fields}; @@ -37,8 +36,6 @@ pub struct VariantDef { pub ident: syn::Ident, /// The variant field, if any. pub field: Option, - /// The variant doc literals. - pub docs: Vec, /// The `cfg` attributes. pub cfg_attrs: Vec, } @@ -112,7 +109,6 @@ impl ErrorDef { Ok(VariantDef { ident: variant.ident.clone(), field: field_ty, - docs: get_doc_literals(&variant.attrs), cfg_attrs, }) }) diff --git a/support/procedural-fork/src/pallet/parse/extra_constants.rs b/support/procedural-fork/src/pallet/parse/extra_constants.rs index 38acea21a..431fcf677 100644 --- a/support/procedural-fork/src/pallet/parse/extra_constants.rs +++ b/support/procedural-fork/src/pallet/parse/extra_constants.rs @@ -37,8 +37,6 @@ pub struct ExtraConstantsDef { pub where_clause: Option, /// A set of usage of instance, must be check for consistency with trait. pub instances: Vec, - /// The index of call item in pallet module. - pub index: usize, /// The extra constant defined. 
pub extra_constants: Vec, } @@ -79,7 +77,7 @@ impl syn::parse::Parse for ExtraConstAttr { } impl ExtraConstantsDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + pub fn try_from(item: &mut syn::Item) -> syn::Result { let item = if let syn::Item::Impl(item) = item { item } else { @@ -159,7 +157,6 @@ impl ExtraConstantsDef { } Ok(Self { - index, instances, where_clause: item.generics.where_clause.clone(), extra_constants, diff --git a/support/procedural-fork/src/pallet/parse/genesis_build.rs b/support/procedural-fork/src/pallet/parse/genesis_build.rs index 670d4d5ef..936c929af 100644 --- a/support/procedural-fork/src/pallet/parse/genesis_build.rs +++ b/support/procedural-fork/src/pallet/parse/genesis_build.rs @@ -20,8 +20,6 @@ use syn::spanned::Spanned; /// Definition for pallet genesis build implementation. pub struct GenesisBuildDef { - /// The index of item in pallet module. - pub index: usize, /// A set of usage of instance, must be check for consistency with trait. pub instances: Option>, /// The where_clause used. 
@@ -31,11 +29,7 @@ pub struct GenesisBuildDef { } impl GenesisBuildDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { + pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { let item = if let syn::Item::Impl(item) = item { item } else { @@ -58,7 +52,6 @@ impl GenesisBuildDef { Ok(Self { attr_span, - index, instances, where_clause: item.generics.where_clause.clone(), }) diff --git a/support/procedural-fork/src/pallet/parse/helper.rs b/support/procedural-fork/src/pallet/parse/helper.rs index f58c8d81c..1105046c2 100644 --- a/support/procedural-fork/src/pallet/parse/helper.rs +++ b/support/procedural-fork/src/pallet/parse/helper.rs @@ -55,23 +55,21 @@ pub(crate) fn take_first_item_pallet_attr( where Attr: syn::parse::Parse, { - let attrs = if let Some(attrs) = item.mut_item_attrs() { - attrs - } else { + let Some(attrs) = item.mut_item_attrs() else { return Ok(None); }; - if let Some(index) = attrs.iter().position(|attr| { + let Some(index) = attrs.iter().position(|attr| { attr.path() .segments .first() .map_or(false, |segment| segment.ident == "pallet") - }) { - let pallet_attr = attrs.remove(index); - Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) - } else { - Ok(None) - } + }) else { + return Ok(None); + }; + + let pallet_attr = attrs.remove(index); + Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) } /// Take all the pallet attributes (e.g. attribute like `#[pallet..]`) and decode them to `Attr` diff --git a/support/procedural-fork/src/pallet/parse/hooks.rs b/support/procedural-fork/src/pallet/parse/hooks.rs index 1cf5c72cc..bca1a3383 100644 --- a/support/procedural-fork/src/pallet/parse/hooks.rs +++ b/support/procedural-fork/src/pallet/parse/hooks.rs @@ -20,8 +20,6 @@ use syn::spanned::Spanned; /// Implementation of the pallet hooks. pub struct HooksDef { - /// The index of item in pallet. 
- pub index: usize, /// A set of usage of instance, must be check for consistency with trait. pub instances: Vec, /// The where_clause used. @@ -33,11 +31,7 @@ pub struct HooksDef { } impl HooksDef { - pub fn try_from( - attr_span: proc_macro2::Span, - index: usize, - item: &mut syn::Item, - ) -> syn::Result { + pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { let item = if let syn::Item::Impl(item) = item { item } else { @@ -77,7 +71,6 @@ impl HooksDef { Ok(Self { attr_span, - index, instances, has_runtime_upgrade, where_clause: item.generics.where_clause.clone(), diff --git a/support/procedural-fork/src/pallet/parse/inherent.rs b/support/procedural-fork/src/pallet/parse/inherent.rs index 4eb04e914..911de2ffe 100644 --- a/support/procedural-fork/src/pallet/parse/inherent.rs +++ b/support/procedural-fork/src/pallet/parse/inherent.rs @@ -20,14 +20,12 @@ use syn::spanned::Spanned; /// The definition of the pallet inherent implementation. pub struct InherentDef { - /// The index of inherent item in pallet module. - pub index: usize, /// A set of usage of instance, must be check for consistency with trait. 
pub instances: Vec, } impl InherentDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + pub fn try_from(item: &mut syn::Item) -> syn::Result { let item = if let syn::Item::Impl(item) = item { item } else { @@ -55,6 +53,6 @@ impl InherentDef { helper::check_impl_gen(&item.generics, item.impl_token.span())?, ]; - Ok(InherentDef { index, instances }) + Ok(InherentDef { instances }) } } diff --git a/support/procedural-fork/src/pallet/parse/mod.rs b/support/procedural-fork/src/pallet/parse/mod.rs index 57c252473..69f921733 100644 --- a/support/procedural-fork/src/pallet/parse/mod.rs +++ b/support/procedural-fork/src/pallet/parse/mod.rs @@ -109,10 +109,9 @@ impl Def { let pallet_attr: Option = helper::take_first_item_pallet_attr(item)?; match pallet_attr { - Some(PalletAttr::Config(span, with_default)) if config.is_none() => + Some(PalletAttr::Config(_, with_default)) if config.is_none() => config = Some(config::ConfigDef::try_from( &frame_system, - span, index, item, with_default, @@ -122,7 +121,7 @@ impl Def { pallet_struct = Some(p); }, Some(PalletAttr::Hooks(span)) if hooks.is_none() => { - let m = hooks::HooksDef::try_from(span, index, item)?; + let m = hooks::HooksDef::try_from(span, item)?; hooks = Some(m); }, Some(PalletAttr::RuntimeCall(cw, span)) if call.is_none() => @@ -162,27 +161,27 @@ impl Def { genesis_config = Some(g); }, Some(PalletAttr::GenesisBuild(span)) if genesis_build.is_none() => { - let g = genesis_build::GenesisBuildDef::try_from(span, index, item)?; + let g = genesis_build::GenesisBuildDef::try_from(span, item)?; genesis_build = Some(g); }, Some(PalletAttr::RuntimeOrigin(_)) if origin.is_none() => - origin = Some(origin::OriginDef::try_from(index, item)?), + origin = Some(origin::OriginDef::try_from(item)?), Some(PalletAttr::Inherent(_)) if inherent.is_none() => - inherent = Some(inherent::InherentDef::try_from(index, item)?), + inherent = Some(inherent::InherentDef::try_from(item)?), Some(PalletAttr::Storage(span)) => 
storages.push(storage::StorageDef::try_from(span, index, item, dev_mode)?), Some(PalletAttr::ValidateUnsigned(_)) if validate_unsigned.is_none() => { - let v = validate_unsigned::ValidateUnsignedDef::try_from(index, item)?; + let v = validate_unsigned::ValidateUnsignedDef::try_from(item)?; validate_unsigned = Some(v); }, Some(PalletAttr::TypeValue(span)) => type_values.push(type_value::TypeValueDef::try_from(span, index, item)?), Some(PalletAttr::ExtraConstants(_)) => extra_constants = - Some(extra_constants::ExtraConstantsDef::try_from(index, item)?), + Some(extra_constants::ExtraConstantsDef::try_from(item)?), Some(PalletAttr::Composite(span)) => { let composite = - composite::CompositeDef::try_from(span, index, &frame_support, item)?; + composite::CompositeDef::try_from(span, &frame_support, item)?; if composites.iter().any(|def| { match (&def.composite_keyword, &composite.composite_keyword) { ( @@ -777,7 +776,6 @@ impl syn::parse::Parse for PalletAttr { #[derive(Clone)] pub struct InheritedCallWeightAttr { pub typename: syn::Type, - pub span: proc_macro2::Span, } impl syn::parse::Parse for InheritedCallWeightAttr { @@ -801,7 +799,6 @@ impl syn::parse::Parse for InheritedCallWeightAttr { Ok(Self { typename: buffer.parse()?, - span: input.span(), }) } } diff --git a/support/procedural-fork/src/pallet/parse/origin.rs b/support/procedural-fork/src/pallet/parse/origin.rs index 2dd84c40d..8232719d6 100644 --- a/support/procedural-fork/src/pallet/parse/origin.rs +++ b/support/procedural-fork/src/pallet/parse/origin.rs @@ -25,16 +25,13 @@ use syn::spanned::Spanned; /// * `struct Origin` /// * `enum Origin` pub struct OriginDef { - /// The index of item in pallet module. - pub index: usize, - pub has_instance: bool, pub is_generic: bool, /// A set of usage of instance, must be check for consistency with trait. 
pub instances: Vec, } impl OriginDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + pub fn try_from(item: &mut syn::Item) -> syn::Result { let item_span = item.span(); let (vis, ident, generics) = match &item { syn::Item::Enum(item) => (&item.vis, &item.ident, &item.generics), @@ -46,7 +43,6 @@ impl OriginDef { } }; - let has_instance = generics.params.len() == 2; let is_generic = !generics.params.is_empty(); let mut instances = vec![]; @@ -71,8 +67,6 @@ impl OriginDef { } Ok(OriginDef { - index, - has_instance, is_generic, instances, }) diff --git a/support/procedural-fork/src/pallet/parse/storage.rs b/support/procedural-fork/src/pallet/parse/storage.rs index 64a5e685b..811832427 100644 --- a/support/procedural-fork/src/pallet/parse/storage.rs +++ b/support/procedural-fork/src/pallet/parse/storage.rs @@ -718,11 +718,11 @@ fn process_generics( "CountedStorageNMap" => StorageKind::CountedNMap, found => { let msg = format!( - "Invalid pallet::storage, expected ident: `StorageValue` or \ + "Invalid pallet::storage, expected ident: `StorageValue` or \ `StorageMap` or `CountedStorageMap` or `StorageDoubleMap` or `StorageNMap` or `CountedStorageNMap` \ in order to expand metadata, found `{}`.", - found, - ); + found, + ); return Err(syn::Error::new(segment.ident.span(), msg)); } }; diff --git a/support/procedural-fork/src/pallet/parse/tasks.rs b/support/procedural-fork/src/pallet/parse/tasks.rs index 50633fbd0..2a8d14826 100644 --- a/support/procedural-fork/src/pallet/parse/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tasks.rs @@ -34,8 +34,8 @@ use syn::{ parse2, spanned::Spanned, token::{Bracket, Paren, PathSep, Pound}, - Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, - PathArguments, Result, TypePath, + Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, PathArguments, + Result, TypePath, }; pub mod keywords { @@ -192,7 +192,6 @@ pub struct TaskDef { pub 
condition_attr: TaskConditionAttr, pub list_attr: TaskListAttr, pub weight_attr: TaskWeightAttr, - pub normal_attrs: Vec, pub item: ImplItemFn, pub arg_names: Vec, } @@ -202,7 +201,7 @@ impl syn::parse::Parse for TaskDef { let item = input.parse::()?; // we only want to activate TaskAttrType parsing errors for tasks-related attributes, // so we filter them here - let (task_attrs, normal_attrs) = partition_task_attrs(&item); + let task_attrs = partition_task_attrs(&item).0; let task_attrs: Vec = task_attrs .into_iter() @@ -319,7 +318,6 @@ impl syn::parse::Parse for TaskDef { condition_attr, list_attr, weight_attr, - normal_attrs, item, arg_names, }) diff --git a/support/procedural-fork/src/pallet/parse/tests/tasks.rs b/support/procedural-fork/src/pallet/parse/tests/tasks.rs index 6cd4d13bb..0097ed047 100644 --- a/support/procedural-fork/src/pallet/parse/tests/tasks.rs +++ b/support/procedural-fork/src/pallet/parse/tests/tasks.rs @@ -124,10 +124,10 @@ fn test_parse_pallet_manual_tasks_impl_without_manual_tasks_enum() { where T: TypeInfo, { - type Enumeration = sp_std::vec::IntoIter>; + type Enumeration = alloc::vec::IntoIter>; fn iter() -> Self::Enumeration { - sp_std::vec![Task::increment, Task::decrement].into_iter() + alloc::vec![Task::increment, Task::decrement].into_iter() } } diff --git a/support/procedural-fork/src/pallet/parse/type_value.rs b/support/procedural-fork/src/pallet/parse/type_value.rs index d5c85248f..1054fd74c 100644 --- a/support/procedural-fork/src/pallet/parse/type_value.rs +++ b/support/procedural-fork/src/pallet/parse/type_value.rs @@ -28,12 +28,8 @@ pub struct TypeValueDef { pub ident: syn::Ident, /// The type return by Get. pub type_: Box, - /// The block returning the value to get - pub block: Box, /// If type value is generic over `T` (or `T` and `I` for instantiable pallet) pub is_generic: bool, - /// A set of usage of instance, must be check for consistency with config. - pub instances: Vec, /// The where clause of the function. 
pub where_clause: Option, /// The span of the pallet::type_value attribute. @@ -90,7 +86,6 @@ impl TypeValueDef { let vis = item.vis.clone(); let ident = item.sig.ident.clone(); - let block = item.block.clone(); let type_ = match item.sig.output.clone() { syn::ReturnType::Type(_, type_) => type_, syn::ReturnType::Default => { @@ -99,10 +94,7 @@ impl TypeValueDef { } }; - let mut instances = vec![]; - if let Some(usage) = helper::check_type_value_gen(&item.sig.generics, item.sig.span())? { - instances.push(usage); - } + helper::check_type_value_gen(&item.sig.generics, item.sig.span())?; let is_generic = item.sig.generics.type_params().count() > 0; let where_clause = item.sig.generics.where_clause.clone(); @@ -113,9 +105,7 @@ impl TypeValueDef { is_generic, vis, ident, - block, type_, - instances, where_clause, docs, }) diff --git a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs index 6e5109a74..3fcbe09e8 100644 --- a/support/procedural-fork/src/pallet/parse/validate_unsigned.rs +++ b/support/procedural-fork/src/pallet/parse/validate_unsigned.rs @@ -19,15 +19,10 @@ use super::helper; use syn::spanned::Spanned; /// The definition of the pallet validate unsigned implementation. -pub struct ValidateUnsignedDef { - /// The index of validate unsigned item in pallet module. - pub index: usize, - /// A set of usage of instance, must be check for consistency with config. 
- pub instances: Vec, -} +pub struct ValidateUnsignedDef {} impl ValidateUnsignedDef { - pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { + pub fn try_from(item: &mut syn::Item) -> syn::Result { let item = if let syn::Item::Impl(item) = item { item } else { @@ -52,11 +47,9 @@ impl ValidateUnsignedDef { return Err(syn::Error::new(item.span(), msg)); } - let instances = vec![ - helper::check_pallet_struct_usage(&item.self_ty)?, - helper::check_impl_gen(&item.generics, item.impl_token.span())?, - ]; + helper::check_pallet_struct_usage(&item.self_ty)?; + helper::check_impl_gen(&item.generics, item.impl_token.span())?; - Ok(ValidateUnsignedDef { index, instances }) + Ok(ValidateUnsignedDef {}) } } diff --git a/support/procedural-fork/src/runtime/expand/mod.rs b/support/procedural-fork/src/runtime/expand/mod.rs index c26cbccb7..a1a6d4d07 100644 --- a/support/procedural-fork/src/runtime/expand/mod.rs +++ b/support/procedural-fork/src/runtime/expand/mod.rs @@ -97,23 +97,26 @@ fn construct_runtime_implicit_to_explicit( quote!() }; let mut expansion = quote::quote!( - #[frame_support::runtime #attr] + #[#frame_support::runtime #attr] #input ); for pallet in definition.pallet_decls.iter() { let pallet_path = &pallet.path; let pallet_name = &pallet.name; - let pallet_instance = pallet - .instance - .as_ref() - .map(|instance| quote::quote!(<#instance>)); + let runtime_param = &pallet.runtime_param; + let pallet_segment_and_instance = match (&pallet.pallet_segment, &pallet.instance) { + (Some(segment), Some(instance)) => quote::quote!(::#segment<#runtime_param, #instance>), + (Some(segment), None) => quote::quote!(::#segment<#runtime_param>), + (None, Some(instance)) => quote::quote!(<#instance>), + (None, None) => quote::quote!(), + }; expansion = quote::quote!( #frame_support::__private::tt_call! 
{ macro = [{ #pallet_path::tt_default_parts_v2 }] - frame_support = [{ #frame_support }] + your_tt_return = [{ #frame_support::__private::tt_return }] ~~> #frame_support::match_and_insert! { target = [{ #expansion }] - pattern = [{ #pallet_name = #pallet_path #pallet_instance }] + pattern = [{ #pallet_name = #pallet_path #pallet_segment_and_instance }] } } ); @@ -264,7 +267,7 @@ fn construct_runtime_final_expansion( // Prevent UncheckedExtrinsic to print unused warning. const _: () = { #[allow(unused)] - type __hidden_use_of_unchecked_extrinsic = #unchecked_extrinsic; + type __HiddenUseOfUncheckedExtrinsic = #unchecked_extrinsic; }; #[derive( @@ -294,7 +297,7 @@ fn construct_runtime_final_expansion( #[doc(hidden)] trait InternalConstructRuntime { #[inline(always)] - fn runtime_metadata(&self) -> #scrate::__private::sp_std::vec::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { + fn runtime_metadata(&self) -> #scrate::__private::Vec<#scrate::__private::metadata_ir::RuntimeApiMetadataIR> { Default::default() } } diff --git a/support/procedural-fork/src/runtime/mod.rs b/support/procedural-fork/src/runtime/mod.rs index 589acff6c..a96b21cd1 100644 --- a/support/procedural-fork/src/runtime/mod.rs +++ b/support/procedural-fork/src/runtime/mod.rs @@ -200,8 +200,6 @@ //! +----------------------+ //! ``` -#![cfg(feature = "experimental")] - pub use parse::Def; use proc_macro::TokenStream; use syn::spanned::Spanned; diff --git a/support/procedural-fork/src/runtime/parse/mod.rs b/support/procedural-fork/src/runtime/parse/mod.rs index 79cf894e8..a6a49e814 100644 --- a/support/procedural-fork/src/runtime/parse/mod.rs +++ b/support/procedural-fork/src/runtime/parse/mod.rs @@ -118,7 +118,6 @@ pub enum AllPalletsDeclaration { /// Declaration of a runtime with some pallet with implicit declaration of parts. 
#[derive(Debug, Clone)] pub struct ImplicitAllPalletsDeclaration { - pub name: Ident, pub pallet_decls: Vec, pub pallet_count: usize, } @@ -132,7 +131,6 @@ pub struct ExplicitAllPalletsDeclaration { pub struct Def { pub input: TokenStream2, - pub item: syn::ItemMod, pub runtime_struct: runtime_struct::RuntimeStructDef, pub pallets: AllPalletsDeclaration, pub runtime_types: Vec, @@ -161,8 +159,7 @@ impl Def { let mut pallets = vec![]; for item in items.iter_mut() { - let mut pallet_item = None; - let mut pallet_index = 0; + let mut pallet_index_and_item = None; let mut disable_call = false; let mut disable_unsigned = false; @@ -171,17 +168,16 @@ impl Def { helper::take_first_item_runtime_attr::(item)? { match runtime_attr { - RuntimeAttr::Runtime(span) if runtime_struct.is_none() => { - let p = runtime_struct::RuntimeStructDef::try_from(span, item)?; + RuntimeAttr::Runtime(_) if runtime_struct.is_none() => { + let p = runtime_struct::RuntimeStructDef::try_from(item)?; runtime_struct = Some(p); } RuntimeAttr::Derive(_, types) if runtime_types.is_none() => { runtime_types = Some(types); } RuntimeAttr::PalletIndex(span, index) => { - pallet_index = index; - pallet_item = if let syn::Item::Type(item) = item { - Some(item.clone()) + pallet_index_and_item = if let syn::Item::Type(item) = item { + Some((index, item.clone())) } else { let msg = "Invalid runtime::pallet_index, expected type definition"; return Err(syn::Error::new(span, msg)); @@ -196,11 +192,11 @@ impl Def { } } - if let Some(pallet_item) = pallet_item { + if let Some((pallet_index, pallet_item)) = pallet_index_and_item { match *pallet_item.ty.clone() { syn::Type::Path(ref path) => { let pallet_decl = - PalletDeclaration::try_from(item.span(), &pallet_item, path)?; + PalletDeclaration::try_from(item.span(), &pallet_item, &path.path)?; if let Some(used_pallet) = names.insert(pallet_decl.name.clone(), pallet_decl.name.span()) @@ -239,6 +235,11 @@ impl Def { } _ => continue, } + } else { + if let 
syn::Item::Type(item) = item { + let msg = "Missing pallet index for pallet declaration. Please add `#[runtime::pallet_index(...)]`"; + return Err(syn::Error::new(item.span(), &msg)); + } } } @@ -246,7 +247,6 @@ impl Def { let decl_count = pallet_decls.len(); let pallets = if decl_count > 0 { AllPalletsDeclaration::Implicit(ImplicitAllPalletsDeclaration { - name, pallet_decls, pallet_count: decl_count.saturating_add(pallets.len()), }) @@ -255,21 +255,41 @@ impl Def { }; let def = Def { - input, - item, - runtime_struct: runtime_struct.ok_or_else(|| { - syn::Error::new(item_span, + input, + runtime_struct: runtime_struct.ok_or_else(|| { + syn::Error::new(item_span, "Missing Runtime. Please add a struct inside the module and annotate it with `#[runtime::runtime]`" ) - })?, - pallets, - runtime_types: runtime_types.ok_or_else(|| { - syn::Error::new(item_span, + })?, + pallets, + runtime_types: runtime_types.ok_or_else(|| { + syn::Error::new(item_span, "Missing Runtime Types. Please annotate the runtime struct with `#[runtime::derive]`" ) - })?, - }; + })?, + }; Ok(def) } } + +#[test] +fn runtime_parsing_works() { + let def = Def::try_from(syn::parse_quote! { + #[runtime::runtime] + mod runtime { + #[runtime::derive(RuntimeCall, RuntimeEvent)] + #[runtime::runtime] + pub struct Runtime; + + #[runtime::pallet_index(0)] + pub type System = frame_system::Pallet; + + #[runtime::pallet_index(1)] + pub type Pallet1 = pallet1; + } + }) + .expect("Failed to parse runtime definition"); + + assert_eq!(def.runtime_struct.ident, "Runtime"); +} diff --git a/support/procedural-fork/src/runtime/parse/pallet.rs b/support/procedural-fork/src/runtime/parse/pallet.rs index 039e2631b..591c05930 100644 --- a/support/procedural-fork/src/runtime/parse/pallet.rs +++ b/support/procedural-fork/src/runtime/parse/pallet.rs @@ -15,9 +15,13 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::construct_runtime::parse::{Pallet, PalletPart, PalletPartKeyword, PalletPath}; +use crate::{ + construct_runtime::parse::{Pallet, PalletPart, PalletPartKeyword, PalletPath}, + runtime::parse::PalletDeclaration, +}; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; -use syn::{punctuated::Punctuated, spanned::Spanned, token, Error, Ident, PathArguments}; +use syn::{punctuated::Punctuated, token, Error}; impl Pallet { pub fn try_from( @@ -56,26 +60,13 @@ impl Pallet { "Invalid pallet declaration, expected a path or a trait object", ))?; - let mut instance = None; - if let Some(segment) = path - .inner - .segments - .iter_mut() - .find(|seg| !seg.arguments.is_empty()) - { - if let PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments { - args, .. - }) = segment.arguments.clone() - { - if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { - instance = Some(Ident::new( - &arg_path.to_token_stream().to_string(), - arg_path.span(), - )); - segment.arguments = PathArguments::None; - } - } - } + let PalletDeclaration { + path: inner, + instance, + .. + } = PalletDeclaration::try_from(attr_span, item, &path.inner)?; + + path = PalletPath { inner }; pallet_parts = pallet_parts .into_iter() @@ -94,6 +85,8 @@ impl Pallet { let cfg_pattern = vec![]; + let docs = get_doc_literals(&item.attrs); + Ok(Pallet { is_expanded: true, name, @@ -102,6 +95,123 @@ impl Pallet { instance, cfg_pattern, pallet_parts, + docs, }) } } + +#[test] +fn pallet_parsing_works() { + use syn::{parse_quote, ItemType}; + + let item: ItemType = parse_quote! { + pub type System = frame_system + Call; + }; + let ItemType { ty, .. } = item.clone(); + let syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. 
}) = *ty else { + panic!("Expected a trait object"); + }; + + let index = 0; + let pallet = Pallet::try_from( + proc_macro2::Span::call_site(), + &item, + index, + false, + false, + &bounds, + ) + .unwrap(); + + assert_eq!(pallet.name.to_string(), "System"); + assert_eq!(pallet.index, index); + assert_eq!(pallet.path.to_token_stream().to_string(), "frame_system"); + assert_eq!(pallet.instance, None); +} + +#[test] +fn pallet_parsing_works_with_instance() { + use syn::{parse_quote, ItemType}; + + let item: ItemType = parse_quote! { + pub type System = frame_system + Call; + }; + let ItemType { ty, .. } = item.clone(); + let syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) = *ty else { + panic!("Expected a trait object"); + }; + + let index = 0; + let pallet = Pallet::try_from( + proc_macro2::Span::call_site(), + &item, + index, + false, + false, + &bounds, + ) + .unwrap(); + + assert_eq!(pallet.name.to_string(), "System"); + assert_eq!(pallet.index, index); + assert_eq!(pallet.path.to_token_stream().to_string(), "frame_system"); + assert_eq!(pallet.instance, Some(parse_quote! { Instance1 })); +} + +#[test] +fn pallet_parsing_works_with_pallet() { + use syn::{parse_quote, ItemType}; + + let item: ItemType = parse_quote! { + pub type System = frame_system::Pallet + Call; + }; + let ItemType { ty, .. } = item.clone(); + let syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) = *ty else { + panic!("Expected a trait object"); + }; + + let index = 0; + let pallet = Pallet::try_from( + proc_macro2::Span::call_site(), + &item, + index, + false, + false, + &bounds, + ) + .unwrap(); + + assert_eq!(pallet.name.to_string(), "System"); + assert_eq!(pallet.index, index); + assert_eq!(pallet.path.to_token_stream().to_string(), "frame_system"); + assert_eq!(pallet.instance, None); +} + +#[test] +fn pallet_parsing_works_with_instance_and_pallet() { + use syn::{parse_quote, ItemType}; + + let item: ItemType = parse_quote! 
{ + pub type System = frame_system::Pallet + Call; + }; + let ItemType { ty, .. } = item.clone(); + let syn::Type::TraitObject(syn::TypeTraitObject { bounds, .. }) = *ty else { + panic!("Expected a trait object"); + }; + + let index = 0; + let pallet = Pallet::try_from( + proc_macro2::Span::call_site(), + &item, + index, + false, + false, + &bounds, + ) + .unwrap(); + + assert_eq!(pallet.name.to_string(), "System"); + assert_eq!(pallet.index, index); + assert_eq!(pallet.path.to_token_stream().to_string(), "frame_system"); + assert_eq!(pallet.instance, Some(parse_quote! { Instance1 })); +} diff --git a/support/procedural-fork/src/runtime/parse/pallet_decl.rs b/support/procedural-fork/src/runtime/parse/pallet_decl.rs index bb1246606..fab826eee 100644 --- a/support/procedural-fork/src/runtime/parse/pallet_decl.rs +++ b/support/procedural-fork/src/runtime/parse/pallet_decl.rs @@ -15,19 +15,22 @@ // See the License for the specific language governing permissions and // limitations under the License. -use quote::ToTokens; -use syn::{spanned::Spanned, Attribute, Ident, PathArguments}; +use syn::{Ident, PathArguments}; /// The declaration of a pallet. #[derive(Debug, Clone)] pub struct PalletDeclaration { - /// The name of the pallet, e.g.`System` in `System: frame_system`. + /// The name of the pallet, e.g.`System` in `pub type System = frame_system`. pub name: Ident, - /// Optional attributes tagged right above a pallet declaration. - pub attrs: Vec, - /// The path of the pallet, e.g. `frame_system` in `System: frame_system`. + /// The path of the pallet, e.g. `frame_system` in `pub type System = frame_system`. pub path: syn::Path, - /// The instance of the pallet, e.g. `Instance1` in `Council: pallet_collective::`. + /// The segment of the pallet, e.g. `Pallet` in `pub type System = frame_system::Pallet`. + pub pallet_segment: Option, + /// The runtime parameter of the pallet, e.g. `Runtime` in + /// `pub type System = frame_system::Pallet`. 
+ pub runtime_param: Option, + /// The instance of the pallet, e.g. `Instance1` in `pub type Council = + /// pallet_collective`. pub instance: Option, } @@ -35,12 +38,14 @@ impl PalletDeclaration { pub fn try_from( _attr_span: proc_macro2::Span, item: &syn::ItemType, - path: &syn::TypePath, + path: &syn::Path, ) -> syn::Result { let name = item.ident.clone(); - let mut path = path.path.clone(); + let mut path = path.clone(); + let mut pallet_segment = None; + let mut runtime_param = None; let mut instance = None; if let Some(segment) = path .segments @@ -51,21 +56,131 @@ impl PalletDeclaration { args, .. }) = segment.arguments.clone() { - if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = args.first() { - instance = Some(Ident::new( - &arg_path.to_token_stream().to_string(), - arg_path.span(), - )); + if segment.ident == "Pallet" { + let mut segment = segment.clone(); segment.arguments = PathArguments::None; + pallet_segment = Some(segment.clone()); + } + let mut args_iter = args.iter(); + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = + args_iter.next() + { + let ident = arg_path.path.require_ident()?.clone(); + if segment.ident == "Pallet" { + runtime_param = Some(ident); + if let Some(syn::GenericArgument::Type(syn::Type::Path(arg_path))) = + args_iter.next() + { + instance = Some(arg_path.path.require_ident()?.clone()); + } + } else { + instance = Some(ident); + segment.arguments = PathArguments::None; + } } } } + if pallet_segment.is_some() { + path = syn::Path { + leading_colon: None, + segments: path + .segments + .iter() + .filter(|seg| seg.arguments.is_empty()) + .cloned() + .collect(), + }; + } + Ok(Self { name, path, + pallet_segment, + runtime_param, instance, - attrs: item.attrs.clone(), }) } } + +#[test] +fn declaration_works() { + use syn::parse_quote; + + let decl: PalletDeclaration = PalletDeclaration::try_from( + proc_macro2::Span::call_site(), + &parse_quote! 
{ pub type System = frame_system; }, + &parse_quote! { frame_system }, + ) + .expect("Failed to parse pallet declaration"); + + assert_eq!(decl.name, "System"); + assert_eq!(decl.path, parse_quote! { frame_system }); + assert_eq!(decl.pallet_segment, None); + assert_eq!(decl.runtime_param, None); + assert_eq!(decl.instance, None); +} + +#[test] +fn declaration_works_with_instance() { + use syn::parse_quote; + + let decl: PalletDeclaration = PalletDeclaration::try_from( + proc_macro2::Span::call_site(), + &parse_quote! { pub type System = frame_system; }, + &parse_quote! { frame_system }, + ) + .expect("Failed to parse pallet declaration"); + + assert_eq!(decl.name, "System"); + assert_eq!(decl.path, parse_quote! { frame_system }); + assert_eq!(decl.pallet_segment, None); + assert_eq!(decl.runtime_param, None); + assert_eq!(decl.instance, Some(parse_quote! { Instance1 })); +} + +#[test] +fn declaration_works_with_pallet() { + use syn::parse_quote; + + let decl: PalletDeclaration = PalletDeclaration::try_from( + proc_macro2::Span::call_site(), + &parse_quote! { pub type System = frame_system::Pallet; }, + &parse_quote! { frame_system::Pallet }, + ) + .expect("Failed to parse pallet declaration"); + + assert_eq!(decl.name, "System"); + assert_eq!(decl.path, parse_quote! { frame_system }); + + let segment: syn::PathSegment = syn::PathSegment { + ident: parse_quote! { Pallet }, + arguments: PathArguments::None, + }; + assert_eq!(decl.pallet_segment, Some(segment)); + assert_eq!(decl.runtime_param, Some(parse_quote! { Runtime })); + assert_eq!(decl.instance, None); +} + +#[test] +fn declaration_works_with_pallet_and_instance() { + use syn::parse_quote; + + let decl: PalletDeclaration = PalletDeclaration::try_from( + proc_macro2::Span::call_site(), + &parse_quote! { pub type System = frame_system::Pallet; }, + &parse_quote! 
{ frame_system::Pallet }, + ) + .expect("Failed to parse pallet declaration"); + + assert_eq!(decl.name, "System"); + assert_eq!(decl.path, parse_quote! { frame_system }); + + let segment: syn::PathSegment = syn::PathSegment { + ident: parse_quote! { Pallet }, + arguments: PathArguments::None, + }; + assert_eq!(decl.pallet_segment, Some(segment)); + assert_eq!(decl.runtime_param, Some(parse_quote! { Runtime })); + assert_eq!(decl.instance, Some(parse_quote! { Instance1 })); +} diff --git a/support/procedural-fork/src/runtime/parse/runtime_struct.rs b/support/procedural-fork/src/runtime/parse/runtime_struct.rs index 7ddbdcfeb..82c6470d7 100644 --- a/support/procedural-fork/src/runtime/parse/runtime_struct.rs +++ b/support/procedural-fork/src/runtime/parse/runtime_struct.rs @@ -18,11 +18,10 @@ use syn::spanned::Spanned; pub struct RuntimeStructDef { pub ident: syn::Ident, - pub attr_span: proc_macro2::Span, } impl RuntimeStructDef { - pub fn try_from(attr_span: proc_macro2::Span, item: &mut syn::Item) -> syn::Result { + pub fn try_from(item: &mut syn::Item) -> syn::Result { let item = if let syn::Item::Struct(item) = item { item } else { @@ -32,7 +31,6 @@ impl RuntimeStructDef { Ok(Self { ident: item.ident.clone(), - attr_span, }) } } From fb0f5452df76cfac1f681f6ade33af06c4a0da00 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Fri, 4 Oct 2024 16:41:21 -0700 Subject: [PATCH 154/213] make commit-reveal based on subnet tempo --- pallets/admin-utils/src/benchmarking.rs | 11 - pallets/admin-utils/src/lib.rs | 26 - pallets/admin-utils/tests/tests.rs | 23 - pallets/subtensor/src/benchmarks.rs | 1 - pallets/subtensor/src/lib.rs | 9 - pallets/subtensor/src/macros/errors.rs | 2 + pallets/subtensor/src/rpc_info/subnet_info.rs | 5 +- pallets/subtensor/src/subnets/weights.rs | 51 +- pallets/subtensor/src/utils/misc.rs | 7 - pallets/subtensor/tests/weights.rs | 447 +++++++----------- 10 files changed, 192 insertions(+), 390 deletions(-) diff --git 
a/pallets/admin-utils/src/benchmarking.rs b/pallets/admin-utils/src/benchmarking.rs index 7515525f0..3d8b962f6 100644 --- a/pallets/admin-utils/src/benchmarking.rs +++ b/pallets/admin-utils/src/benchmarking.rs @@ -227,17 +227,6 @@ mod benchmarks { _(RawOrigin::Root, 1u16/*netuid*/, 1u16/*tempo*/)/*sudo_set_tempo*/; } - #[benchmark] - fn sudo_set_commit_reveal_weights_interval() { - pallet_subtensor::Pallet::::init_new_network( - 1u16, /*netuid*/ - 1u16, /*sudo_tempo*/ - ); - - #[extrinsic_call] - _(RawOrigin::Root, 1u16/*netuid*/, 3u64/*interval*/)/*set_commit_reveal_weights_interval()*/; - } - #[benchmark] fn sudo_set_commit_reveal_weights_enabled() { pallet_subtensor::Pallet::::init_new_network( diff --git a/pallets/admin-utils/src/lib.rs b/pallets/admin-utils/src/lib.rs index 3e06b822e..501122a8e 100644 --- a/pallets/admin-utils/src/lib.rs +++ b/pallets/admin-utils/src/lib.rs @@ -960,32 +960,6 @@ pub mod pallet { Ok(()) } - /// The extrinsic sets the commit/reveal interval for a subnet. - /// It is only callable by the root account or subnet owner. - /// The extrinsic will call the Subtensor pallet to set the interval. - #[pallet::call_index(48)] - #[pallet::weight(T::WeightInfo::sudo_set_commit_reveal_weights_interval())] - pub fn sudo_set_commit_reveal_weights_interval( - origin: OriginFor, - netuid: u16, - interval: u64, - ) -> DispatchResult { - pallet_subtensor::Pallet::::ensure_subnet_owner_or_root(origin, netuid)?; - - ensure!( - pallet_subtensor::Pallet::::if_subnet_exist(netuid), - Error::::SubnetDoesNotExist - ); - - pallet_subtensor::Pallet::::set_commit_reveal_weights_interval(netuid, interval); - log::debug!( - "SetWeightCommitInterval( netuid: {:?}, interval: {:?} ) ", - netuid, - interval - ); - Ok(()) - } - /// The extrinsic enabled/disables commit/reaveal for a given subnet. /// It is only callable by the root account or subnet owner. /// The extrinsic will call the Subtensor pallet to set the value. 
diff --git a/pallets/admin-utils/tests/tests.rs b/pallets/admin-utils/tests/tests.rs index 8ab85f177..746dfa6f5 100644 --- a/pallets/admin-utils/tests/tests.rs +++ b/pallets/admin-utils/tests/tests.rs @@ -1113,29 +1113,6 @@ fn test_sudo_set_min_delegate_take() { }); } -#[test] -fn test_sudo_set_weight_commit_interval() { - new_test_ext().execute_with(|| { - let netuid: u16 = 1; - add_network(netuid, 10); - - let to_be_set = 55; - let init_value = SubtensorModule::get_commit_reveal_weights_interval(netuid); - - assert_ok!(AdminUtils::sudo_set_commit_reveal_weights_interval( - <::RuntimeOrigin>::root(), - netuid, - to_be_set - )); - - assert!(init_value != to_be_set); - assert_eq!( - SubtensorModule::get_commit_reveal_weights_interval(netuid), - to_be_set - ); - }); -} - #[test] fn test_sudo_set_commit_reveal_weights_enabled() { new_test_ext().execute_with(|| { diff --git a/pallets/subtensor/src/benchmarks.rs b/pallets/subtensor/src/benchmarks.rs index 4915bb3ac..bd48676b6 100644 --- a/pallets/subtensor/src/benchmarks.rs +++ b/pallets/subtensor/src/benchmarks.rs @@ -416,7 +416,6 @@ reveal_weights { ); Subtensor::::set_validator_permit_for_uid(netuid, 0, true); - Subtensor::::set_commit_reveal_weights_interval(netuid, 0); let commit_hash: H256 = BlakeTwo256::hash_of(&( hotkey.clone(), diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index cc3d7d025..92156be94 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -629,11 +629,6 @@ pub mod pallet { T::InitialServingRateLimit::get() } #[pallet::type_value] - /// Default value for weight commit reveal interval. - pub fn DefaultWeightCommitRevealInterval() -> u64 { - 1000 - } - #[pallet::type_value] /// Default value for weight commit/reveal enabled. 
pub fn DefaultCommitRevealWeightsEnabled() -> bool { false @@ -1032,10 +1027,6 @@ pub mod pallet { StorageMap<_, Identity, u16, u64, ValueQuery, DefaultAdjustmentAlpha>; #[pallet::storage] /// --- MAP ( netuid ) --> interval - pub type WeightCommitRevealInterval = - StorageMap<_, Identity, u16, u64, ValueQuery, DefaultWeightCommitRevealInterval>; - #[pallet::storage] - /// --- MAP ( netuid ) --> interval pub type CommitRevealWeightsEnabled = StorageMap<_, Identity, u16, bool, ValueQuery, DefaultCommitRevealWeightsEnabled>; #[pallet::storage] diff --git a/pallets/subtensor/src/macros/errors.rs b/pallets/subtensor/src/macros/errors.rs index 22a0a6f89..9fd7c5fea 100644 --- a/pallets/subtensor/src/macros/errors.rs +++ b/pallets/subtensor/src/macros/errors.rs @@ -184,5 +184,7 @@ mod errors { TxChildkeyTakeRateLimitExceeded, /// Invalid identity. InvalidIdentity, + /// Invalid reveal epoch. + InvalidRevealEpoch, } } diff --git a/pallets/subtensor/src/rpc_info/subnet_info.rs b/pallets/subtensor/src/rpc_info/subnet_info.rs index 9b22e0401..312a20723 100644 --- a/pallets/subtensor/src/rpc_info/subnet_info.rs +++ b/pallets/subtensor/src/rpc_info/subnet_info.rs @@ -51,7 +51,7 @@ pub struct SubnetInfov2 { identity: Option, } -#[freeze_struct("55b472510f10e76a")] +#[freeze_struct("e8abe48842dcc8c4")] #[derive(Decode, Encode, PartialEq, Eq, Clone, Debug)] pub struct SubnetHyperparams { rho: Compact, @@ -76,7 +76,6 @@ pub struct SubnetHyperparams { max_validators: Compact, adjustment_alpha: Compact, difficulty: Compact, - commit_reveal_weights_interval: Compact, commit_reveal_weights_enabled: bool, alpha_high: Compact, alpha_low: Compact, @@ -252,7 +251,6 @@ impl Pallet { let max_validators = Self::get_max_allowed_validators(netuid); let adjustment_alpha = Self::get_adjustment_alpha(netuid); let difficulty = Self::get_difficulty_as_u64(netuid); - let commit_reveal_weights_interval = Self::get_commit_reveal_weights_interval(netuid); let commit_reveal_weights_enabled =
Self::get_commit_reveal_weights_enabled(netuid); let liquid_alpha_enabled = Self::get_liquid_alpha_enabled(netuid); let (alpha_low, alpha_high): (u16, u16) = Self::get_alpha_values(netuid); @@ -280,7 +278,6 @@ impl Pallet { max_validators: max_validators.into(), adjustment_alpha: adjustment_alpha.into(), difficulty: difficulty.into(), - commit_reveal_weights_interval: commit_reveal_weights_interval.into(), commit_reveal_weights_enabled, alpha_high: alpha_high.into(), alpha_low: alpha_low.into(), diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 1a53e44cc..85f560f68 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -119,6 +119,8 @@ impl Pallet { Error::::InvalidRevealCommitHashNotMatch ); + *maybe_commit = None; + Self::do_set_weights(origin, netuid, uids, values, version_key) }) } @@ -452,50 +454,43 @@ impl Pallet { uids.len() <= subnetwork_n as usize } - #[allow(clippy::arithmetic_side_effects)] pub fn can_commit(netuid: u16, who: &T::AccountId) -> bool { if let Some((_hash, commit_block)) = WeightCommits::::get(netuid, who) { - let interval: u64 = Self::get_commit_reveal_weights_interval(netuid); - if interval == 0 { - return true; //prevent division by 0 - } - let current_block: u64 = Self::get_current_block_as_u64(); - let interval_start: u64 = current_block.saturating_sub(current_block % interval); - let last_commit_interval_start: u64 = - commit_block.saturating_sub(commit_block % interval); - - // Allow commit if we're within the interval bounds - if current_block <= interval_start.saturating_add(interval) - && interval_start > last_commit_interval_start - { + let current_epoch: u64 = Self::get_epoch_index(netuid, current_block); + let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); + + // Allow commit if we're in a new epoch + if current_epoch > commit_epoch { return true; } - false } else { true } } - 
#[allow(clippy::arithmetic_side_effects)] pub fn is_reveal_block_range(netuid: u16, commit_block: u64) -> bool { - let interval: u64 = Self::get_commit_reveal_weights_interval(netuid); - if interval == 0 { - return true; //prevent division by 0 - } - - let commit_interval_start: u64 = commit_block.saturating_sub(commit_block % interval); // Find the start of the interval in which the commit occurred - let reveal_interval_start: u64 = commit_interval_start.saturating_add(interval); // Start of the next interval after the commit interval let current_block: u64 = Self::get_current_block_as_u64(); + let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); + let current_epoch: u64 = Self::get_epoch_index(netuid, current_block); - // Allow reveal if the current block is within the interval following the commit's interval - if current_block >= reveal_interval_start - && current_block < reveal_interval_start.saturating_add(interval) - { + // Allow reveal if the current epoch is immediately after the commit's epoch + if current_epoch == commit_epoch + 1 { return true; } - false } + + pub fn get_epoch_index(netuid: u16, block_number: u64) -> u64 { + let tempo = Self::get_tempo(netuid); + if tempo == 0 { + return 0; + } + let tempo_plus_one = (tempo as u64).saturating_add(1); + let netuid_plus_one = (netuid as u64).saturating_add(1); + let epoch_index = + (block_number.saturating_add(netuid_plus_one)).saturating_div(tempo_plus_one); + epoch_index + } } diff --git a/pallets/subtensor/src/utils/misc.rs b/pallets/subtensor/src/utils/misc.rs index 76546a1a2..57cc38786 100644 --- a/pallets/subtensor/src/utils/misc.rs +++ b/pallets/subtensor/src/utils/misc.rs @@ -486,13 +486,6 @@ impl Pallet { Kappa::::insert(netuid, kappa); Self::deposit_event(Event::KappaSet(netuid, kappa)); } - - pub fn get_commit_reveal_weights_interval(netuid: u16) -> u64 { - WeightCommitRevealInterval::::get(netuid) - } - pub fn set_commit_reveal_weights_interval(netuid: u16, interval: u64) { - 
WeightCommitRevealInterval::::set(netuid, interval); - } pub fn get_commit_reveal_weights_enabled(netuid: u16) -> bool { CommitRevealWeightsEnabled::::get(netuid) } diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 214e3add0..27a43de4c 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -402,12 +402,17 @@ fn test_set_weights_is_root_error() { let uids = vec![0]; let weights = vec![1]; - let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; let version_key: u64 = 0; let hotkey = U256::from(1); assert_err!( - commit_reveal_set_weights(hotkey, root_netuid, uids, weights, salt, version_key), + SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), + root_netuid, + uids.clone(), + weights.clone(), + version_key, + ), Error::::CanNotSetRootNetworkWeights ); }); @@ -430,14 +435,12 @@ fn test_weights_err_no_validator_permit() { let weights_keys: Vec = vec![1, 2]; let weight_values: Vec = vec![1, 2]; - let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; - let result = commit_reveal_set_weights( - hotkey_account_id, + let result = SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey_account_id), netuid, weights_keys, weight_values, - salt.clone(), 0, ); assert_eq!(result, Err(Error::::NeuronNoValidatorPermit.into())); @@ -448,12 +451,11 @@ fn test_weights_err_no_validator_permit() { SubtensorModule::get_uid_for_net_and_hotkey(netuid, &hotkey_account_id) .expect("Not registered."); SubtensorModule::set_validator_permit_for_uid(netuid, neuron_uid, true); - let result = commit_reveal_set_weights( - hotkey_account_id, + let result = SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey_account_id), netuid, weights_keys, weight_values, - salt, 0, ); assert_ok!(result); @@ -470,7 +472,7 @@ fn test_set_weights_min_stake_failed() { let version_key: u64 = 0; let hotkey = U256::from(0); let coldkey = U256::from(0); - let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; + add_network(netuid, 0, 0); 
register_ok_neuron(netuid, hotkey, coldkey, 2143124); SubtensorModule::set_weights_min_stake(20_000_000_000_000); @@ -486,24 +488,22 @@ fn test_set_weights_min_stake_failed() { // Check that it fails at the pallet level. SubtensorModule::set_weights_min_stake(100_000_000_000_000); assert_eq!( - commit_reveal_set_weights( - hotkey, + SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), netuid, dests.clone(), weights.clone(), - salt.clone(), - version_key + version_key, ), Err(Error::::NotEnoughStakeToSetWeights.into()) ); // Now passes SubtensorModule::increase_stake_on_hotkey_account(&hotkey, 100_000_000_000_000); - assert_ok!(commit_reveal_set_weights( - hotkey, + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), netuid, dests.clone(), weights.clone(), - salt.clone(), version_key )); }); @@ -517,7 +517,7 @@ fn test_weights_version_key() { let coldkey = U256::from(66); let netuid0: u16 = 1; let netuid1: u16 = 2; - let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; + add_network(netuid0, 0, 0); add_network(netuid1, 0, 0); register_ok_neuron(netuid0, hotkey, coldkey, 2143124); @@ -525,20 +525,18 @@ fn test_weights_version_key() { let weights_keys: Vec = vec![0]; let weight_values: Vec = vec![1]; - assert_ok!(commit_reveal_set_weights( - hotkey, + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), netuid0, weights_keys.clone(), weight_values.clone(), - salt.clone(), 0 )); - assert_ok!(commit_reveal_set_weights( - hotkey, + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), netuid1, weights_keys.clone(), weight_values.clone(), - salt.clone(), 0 )); @@ -549,42 +547,38 @@ fn test_weights_version_key() { SubtensorModule::set_weights_version_key(netuid1, key1); // Setting works with version key. 
- assert_ok!(commit_reveal_set_weights( - hotkey, + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), netuid0, weights_keys.clone(), weight_values.clone(), - salt.clone(), key0 )); - assert_ok!(commit_reveal_set_weights( - hotkey, + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), netuid1, weights_keys.clone(), weight_values.clone(), - salt.clone(), key1 )); // validator:20313 >= network:12312 (accepted: validator newer) - assert_ok!(commit_reveal_set_weights( - hotkey, + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), netuid0, weights_keys.clone(), weight_values.clone(), - salt.clone(), key1 )); // Setting fails with incorrect keys. // validator:12312 < network:20313 (rejected: validator not updated) assert_eq!( - commit_reveal_set_weights( - hotkey, + SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), netuid1, weights_keys.clone(), weight_values.clone(), - salt.clone(), key0 ), Err(Error::::IncorrectWeightVersionKey.into()) @@ -893,19 +887,23 @@ fn test_set_weight_not_enough_values() { // Should fail because we are only setting a single value and its not the self weight. let weight_keys: Vec = vec![1]; // not weight. let weight_values: Vec = vec![88]; // random value. - let result = - commit_reveal_set_weights(account_id, 1, weight_keys, weight_values, salt.clone(), 0); + let result = SubtensorModule::set_weights( + RuntimeOrigin::signed(account_id), + 1, + weight_keys, + weight_values, + 0, + ); assert_eq!(result, Err(Error::::WeightVecLengthIsLow.into())); // Shouldnt fail because we setting a single value but it is the self weight. let weight_keys: Vec = vec![0]; // self weight. let weight_values: Vec = vec![88]; // random value. 
- assert_ok!(commit_reveal_set_weights( - account_id, + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(account_id), 1, weight_keys, weight_values, - salt.clone(), 0 )); @@ -1390,24 +1388,39 @@ fn test_commit_reveal_weights_ok() { version_key, )); - add_network(netuid, 0, 0); - register_ok_neuron(netuid, U256::from(3), U256::from(4), 300000); - register_ok_neuron(netuid, U256::from(1), U256::from(2), 100000); + // Set block number to 0 + System::set_block_number(0); + + // Add network with tempo 5 + let tempo: u16 = 5; + add_network(netuid, tempo, 0); + + // Register neurons and set up configurations + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); SubtensorModule::set_weights_set_rate_limit(netuid, 5); SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - SubtensorModule::set_commit_reveal_weights_interval(netuid, 5); + // Enable commit/reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + // Commit at block 0 assert_ok!(SubtensorModule::commit_weights( RuntimeOrigin::signed(hotkey), netuid, commit_hash )); - step_block(5); + // Calculate blocks to next epoch and advance + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + // Reveal in the next epoch assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, @@ -1420,7 +1433,7 @@ fn test_commit_reveal_weights_ok() { } #[test] -fn test_commit_reveal_interval() { +fn test_commit_reveal_tempo_interval() { new_test_ext(1).execute_with(|| { let netuid: u16 = 1; let uids: Vec = vec![0, 1]; @@ -1438,26 +1451,34 @@ fn test_commit_reveal_interval() { version_key, )); - add_network(netuid, 0, 0); - register_ok_neuron(netuid, U256::from(3), 
U256::from(4), 300000); - register_ok_neuron(netuid, U256::from(1), U256::from(2), 100000); + System::set_block_number(0); + + let tempo: u16 = 100; + add_network(netuid, tempo, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); SubtensorModule::set_weights_set_rate_limit(netuid, 5); SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - SubtensorModule::set_commit_reveal_weights_interval(netuid, 100); + // Enable commit/reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); - System::set_block_number(0); + // Commit at block 0 assert_ok!(SubtensorModule::commit_weights( RuntimeOrigin::signed(hotkey), netuid, commit_hash )); + + // Attempt to commit again in the same epoch, should fail assert_err!( SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash), Error::::WeightsCommitNotAllowed ); + + // Attempt to reveal in the same epoch, should fail assert_err!( SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -1469,48 +1490,15 @@ fn test_commit_reveal_interval() { ), Error::::InvalidRevealCommitTempo ); - step_block(99); - assert_err!( - SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash), - Error::::WeightsCommitNotAllowed - ); - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt.clone(), - version_key, - ), - Error::::InvalidRevealCommitTempo - ); - step_block(1); - assert_ok!(SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt.clone(), - version_key, - )); - assert_ok!(SubtensorModule::commit_weights( - RuntimeOrigin::signed(hotkey), + + // Calculate blocks to next epoch and advance + let blocks_to_next_epoch: u64 = 
SubtensorModule::blocks_until_next_epoch( netuid, - commit_hash - )); - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt.clone(), - version_key, - ), - Error::::InvalidRevealCommitTempo + tempo, + SubtensorModule::get_current_block_as_u64(), ); - step_block(100); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, @@ -1520,13 +1508,8 @@ fn test_commit_reveal_interval() { version_key, )); - // Testing that if you miss the next tempo you cannot reveal it. - assert_ok!(SubtensorModule::commit_weights( - RuntimeOrigin::signed(hotkey), - netuid, - commit_hash - )); - step_block(205); + step_block(6); + assert_err!( SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -1536,49 +1519,41 @@ fn test_commit_reveal_interval() { salt.clone(), version_key, ), - Error::::InvalidRevealCommitTempo + Error::::NoWeightsCommitFound ); - // Testing when you commit but do not reveal until later intervals assert_ok!(SubtensorModule::commit_weights( RuntimeOrigin::signed(hotkey), netuid, commit_hash )); - step_block(425); - let commit_hash_2: H256 = BlakeTwo256::hash_of(&( - hotkey, + + // step two epochs + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( netuid, - uids.clone(), - weight_values.clone(), - salt.clone(), - version_key + 1, - )); - assert_ok!(SubtensorModule::commit_weights( - RuntimeOrigin::signed(hotkey), + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( netuid, - commit_hash_2 - )); - step_block(100); + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + // Attempt to reveal previous commit in the new epoch, should fail with 
`InvalidRevealCommitTempo` assert_err!( SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, - uids.clone(), - weight_values.clone(), - salt.clone(), + uids, + weight_values, + salt, version_key, ), - Error::::InvalidRevealCommitHashNotMatch + Error::::InvalidRevealCommitTempo ); - assert_ok!(SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt.clone(), - version_key + 1, - )); }); } @@ -1589,17 +1564,19 @@ fn test_commit_reveal_hash() { let uids: Vec = vec![0, 1]; let weight_values: Vec = vec![10, 10]; let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; + let bad_salt: Vec = vec![0, 2, 3, 4, 5, 6, 7, 8]; let version_key: u64 = 0; let hotkey: U256 = U256::from(1); - add_network(netuid, 0, 0); - register_ok_neuron(netuid, U256::from(3), U256::from(4), 300000); - register_ok_neuron(netuid, U256::from(1), U256::from(2), 100000); + add_network(netuid, 5, 0); + System::set_block_number(0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); SubtensorModule::set_weights_set_rate_limit(netuid, 5); SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - SubtensorModule::set_commit_reveal_weights_interval(netuid, 5); SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); let commit_hash: H256 = BlakeTwo256::hash_of(&( @@ -1617,8 +1594,15 @@ fn test_commit_reveal_hash() { commit_hash )); - step_block(5); + // Calculate blocks to next epoch and advance + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + // Attempt to reveal with incorrect data, should fail assert_err!( SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ 
-1630,46 +1614,26 @@ fn test_commit_reveal_hash() { ), Error::::InvalidRevealCommitHashNotMatch ); + assert_err!( SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, uids.clone(), weight_values.clone(), - salt.clone(), - 7, - ), - Error::::InvalidRevealCommitHashNotMatch - ); - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - vec![10, 9], - salt.clone(), + bad_salt.clone(), version_key, ), Error::::InvalidRevealCommitHashNotMatch ); - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - vec![0, 1, 2], - vec![10, 10, 33], - salt.clone(), - 9, - ), - Error::::InvalidRevealCommitHashNotMatch - ); + // Correct reveal, should succeed assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, uids, weight_values, - salt.clone(), + salt, version_key, )); }); @@ -1694,74 +1658,49 @@ fn test_commit_reveal_disabled_or_enabled() { version_key, )); - add_network(netuid, 0, 0); - register_ok_neuron(netuid, U256::from(3), U256::from(4), 300000); - register_ok_neuron(netuid, U256::from(1), U256::from(2), 100000); + add_network(netuid, 5, 0); + System::set_block_number(0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); SubtensorModule::set_weights_set_rate_limit(netuid, 5); SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - SubtensorModule::set_commit_reveal_weights_interval(netuid, 5); + // Disable commit/reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, false); + // Attempt to commit, should fail assert_err!( SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash), Error::::CommitRevealDisabled ); - step_block(5); - - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - 
weight_values.clone(), - salt.clone(), - version_key, - ), - Error::::CommitRevealDisabled - ); - - SubtensorModule::set_commit_reveal_weights_enabled(netuid + 1, true); - - //Should still fail because bad netuid - assert_err!( - SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash), - Error::::CommitRevealDisabled - ); - - step_block(5); - - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt.clone(), - version_key, - ), - Error::::CommitRevealDisabled - ); - - // Enable and should pass + // Enable commit/reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + // Commit should now succeed assert_ok!(SubtensorModule::commit_weights( RuntimeOrigin::signed(hotkey), netuid, commit_hash )); - step_block(5); + // Calculate blocks to next epoch and advance + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + // Reveal should succeed assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, uids, weight_values, - salt.clone(), + salt, version_key, )); }); @@ -1786,40 +1725,34 @@ fn test_toggle_commit_reveal_weights_and_set_weights() { version_key, )); - add_network(netuid, 0, 0); - register_ok_neuron(netuid, U256::from(3), U256::from(4), 300000); - register_ok_neuron(netuid, U256::from(1), U256::from(2), 100000); + add_network(netuid, 5, 0); + System::set_block_number(0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - SubtensorModule::set_weights_set_rate_limit(netuid, 5); - 
SubtensorModule::set_commit_reveal_weights_interval(netuid, 5); - - step_block(5); - // Set weights OK - let result = SubtensorModule::set_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - 0, - ); - assert_ok!(result); - - // Enable Commit/Reveal + // Enable commit/reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); - // Commit is enabled the same block + // Commit at block 0 assert_ok!(SubtensorModule::commit_weights( RuntimeOrigin::signed(hotkey), netuid, commit_hash )); - step_block(5); //Step to the next commit/reveal tempo + // Calculate blocks to next epoch and advance + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); - // Reveal OK + // Reveal in the next epoch assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, @@ -1829,72 +1762,20 @@ fn test_toggle_commit_reveal_weights_and_set_weights() { version_key, )); - // Disable Commit/Reveal + // Disable commit/reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, false); - // Cannot set weights the same block due to WeightsRateLimit - step_block(5); //step to avoid settingweightstofast + // Advance to allow setting weights (due to rate limit) + step_block(5); - let result = SubtensorModule::set_weights( + // Set weights directly + assert_ok!(SubtensorModule::set_weights( RuntimeOrigin::signed(hotkey), netuid, - uids.clone(), - weight_values.clone(), - 0, - ); - assert_ok!(result); - }); -} - -#[test] -fn test_commit_reveal_bad_salt_fail() { - new_test_ext(1).execute_with(|| { - let netuid: u16 = 1; - let uids: Vec = vec![0, 1]; - let weight_values: Vec = vec![10, 10]; - let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; - let bad_salt: Vec = vec![0, 2, 3, 4, 5, 6, 7, 8]; - let version_key: u64 = 0; - let hotkey: U256 = 
U256::from(1); - - let commit_hash: H256 = BlakeTwo256::hash_of(&( - hotkey, - netuid, - uids.clone(), - weight_values.clone(), - salt.clone(), + uids, + weight_values, version_key, )); - - add_network(netuid, 0, 0); - register_ok_neuron(netuid, U256::from(3), U256::from(4), 300000); - register_ok_neuron(netuid, U256::from(1), U256::from(2), 100000); - SubtensorModule::set_weights_set_rate_limit(netuid, 5); - SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); - SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - - SubtensorModule::set_commit_reveal_weights_interval(netuid, 5); - SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); - - assert_ok!(SubtensorModule::commit_weights( - RuntimeOrigin::signed(hotkey), - netuid, - commit_hash - )); - - step_block(5); - - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - bad_salt.clone(), - version_key, - ), - Error::::InvalidRevealCommitHashNotMatch - ); }); } @@ -1906,8 +1787,6 @@ fn commit_reveal_set_weights( salt: Vec, version_key: u64, ) -> DispatchResult { - SubtensorModule::set_commit_reveal_weights_interval(netuid, 5); - SubtensorModule::set_weights_set_rate_limit(netuid, 5); SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); let commit_hash: H256 = BlakeTwo256::hash_of(&( @@ -1921,7 +1800,13 @@ fn commit_reveal_set_weights( SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash)?; - step_block(5); + // Calculate blocks to next epoch and advance + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), From 1719e4b3df1c86054eb134f82d062a9b244f5c2c Mon Sep 17 00:00:00 2001 From: johnreedv Date: Sat, 5 Oct 2024 
09:57:11 -0700 Subject: [PATCH 155/213] expand test & clippy --- pallets/subtensor/src/subnets/weights.rs | 22 ++++------- pallets/subtensor/tests/weights.rs | 49 +++++++++++++++++++----- 2 files changed, 47 insertions(+), 24 deletions(-) diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 85f560f68..d2d6cf2a0 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -475,22 +475,16 @@ impl Pallet { let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); let current_epoch: u64 = Self::get_epoch_index(netuid, current_block); - // Allow reveal if the current epoch is immediately after the commit's epoch - if current_epoch == commit_epoch + 1 { - return true; - } - false + // Reveal is allowed only in the epoch immediately after the commit's epoch + current_epoch == commit_epoch.saturating_add(1) } pub fn get_epoch_index(netuid: u16, block_number: u64) -> u64 { - let tempo = Self::get_tempo(netuid); - if tempo == 0 { - return 0; - } - let tempo_plus_one = (tempo as u64).saturating_add(1); - let netuid_plus_one = (netuid as u64).saturating_add(1); - let epoch_index = - (block_number.saturating_add(netuid_plus_one)).saturating_div(tempo_plus_one); - epoch_index + let tempo: u64 = Self::get_tempo(netuid) as u64; + let tempo_plus_one: u64 = tempo.saturating_add(1); + let netuid_plus_one: u64 = (netuid as u64).saturating_add(1); + let block_with_offset: u64 = block_number.saturating_add(netuid_plus_one); + + block_with_offset.checked_div(tempo_plus_one).unwrap_or(0) } } diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 27a43de4c..e752c28d2 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -1388,10 +1388,8 @@ fn test_commit_reveal_weights_ok() { version_key, )); - // Set block number to 0 System::set_block_number(0); - // Add network with tempo 5 let tempo: u16 = 5; 
add_network(netuid, tempo, 0); @@ -1401,8 +1399,6 @@ fn test_commit_reveal_weights_ok() { SubtensorModule::set_weights_set_rate_limit(netuid, 5); SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - - // Enable commit/reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); // Commit at block 0 @@ -1461,8 +1457,6 @@ fn test_commit_reveal_tempo_interval() { SubtensorModule::set_weights_set_rate_limit(netuid, 5); SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - - // Enable commit/reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); // Commit at block 0 @@ -1542,18 +1536,53 @@ fn test_commit_reveal_tempo_interval() { ); step_block(blocks_to_next_epoch.saturating_add(1) as u16); - // Attempt to reveal previous commit in the new epoch, should fail with `InvalidRevealCommitTempo` assert_err!( SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, - uids, - weight_values, - salt, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + ), + Error::::InvalidRevealCommitTempo + ); + + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + + step_block(50); + + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), version_key, ), Error::::InvalidRevealCommitTempo ); + + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids, + weight_values, + salt, + version_key, + )); }); } From 911ffec3294088205a04781fd3dd3464ea1fa000 Mon Sep 17 00:00:00 2001 From: johnreedv 
Date: Sat, 5 Oct 2024 12:51:35 -0700 Subject: [PATCH 156/213] add test_tempo_change --- pallets/subtensor/tests/weights.rs | 162 +++++++++++++++++++++++++++++ 1 file changed, 162 insertions(+) diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index e752c28d2..513421017 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -1808,6 +1808,168 @@ fn test_toggle_commit_reveal_weights_and_set_weights() { }); } +#[test] +fn test_tempo_change_during_commit_reveal_process() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; + let version_key: u64 = 0; + let hotkey: U256 = U256::from(1); + + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + + System::set_block_number(1); + + let tempo: u16 = 100; + add_network(netuid, tempo, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + SubtensorModule::set_weights_set_rate_limit(netuid, 5); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + log::info!( + "Commit successful at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + step_block(9); + log::info!( + "Advanced to block {}", + SubtensorModule::get_current_block_as_u64() + ); + + let tempo_before_next_reveal: u16 = 200; + log::info!("Changing tempo to {}", tempo_before_next_reveal); + SubtensorModule::set_tempo(netuid, tempo_before_next_reveal); + + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( 
+ netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch as u16); + log::info!( + "Advanced to block {}", + SubtensorModule::get_current_block_as_u64() + 1 + ); + assert!(SubtensorModule::should_run_epoch(netuid, SubtensorModule::get_current_block_as_u64())); + step_block(1); + + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + log::info!( + "Revealed at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + log::info!( + "Commit successful at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + let tempo: u16 = 150; + log::info!("Changing tempo to {}", tempo); + SubtensorModule::set_tempo(netuid, tempo); + + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch as u16); + log::info!( + "Advanced to block {}", + SubtensorModule::get_current_block_as_u64() + 1 + ); + assert!(SubtensorModule::should_run_epoch(netuid, SubtensorModule::get_current_block_as_u64())); + step_block(1); + + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + log::info!( + "Revealed at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + + let tempo: u16 = 1050; + log::info!("Changing tempo to {}", tempo); + SubtensorModule::set_tempo(netuid, tempo); + + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + log::info!( + "Commit successful at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + let tempo: u16 = 805; + 
log::info!("Changing tempo to {}", tempo); + SubtensorModule::set_tempo(netuid, tempo); + + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + log::info!( + "Advanced to block {}", + SubtensorModule::get_current_block_as_u64() + ); + + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + log::info!( + "Revealed at block {}", + SubtensorModule::get_current_block_as_u64() + ); + }); +} + fn commit_reveal_set_weights( hotkey: U256, netuid: u16, From c2674bf94309398e2e523f39a95f03dda507d3a7 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Sun, 6 Oct 2024 11:37:36 -0700 Subject: [PATCH 157/213] support multiple commits --- pallets/subtensor/src/lib.rs | 5 +- pallets/subtensor/src/macros/errors.rs | 8 +- pallets/subtensor/src/subnets/weights.rs | 153 +++++-- pallets/subtensor/tests/swap_hotkey.rs | 7 +- pallets/subtensor/tests/weights.rs | 510 ++++++++++++++++++++++- 5 files changed, 610 insertions(+), 73 deletions(-) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 92156be94..39ad90895 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -72,6 +72,7 @@ pub mod pallet { use frame_system::pallet_prelude::*; use sp_core::H256; use sp_runtime::traits::{Dispatchable, TrailingZeroInput}; + use sp_std::collections::vec_deque::VecDeque; use sp_std::vec; use sp_std::vec::Vec; use subtensor_macros::freeze_struct; @@ -1245,14 +1246,14 @@ pub mod pallet { /// ITEM( weights_min_stake ) pub type WeightsMinStake = StorageValue<_, u64, ValueQuery, DefaultWeightsMinStake>; #[pallet::storage] - /// --- MAP (netuid, who) --> (hash, weight) | Returns the hash and weight committed by an account for a given netuid. 
+ /// --- MAP (netuid, who) --> VecDeque<(hash, commit_block)> | Stores a queue of commits for an account on a given netuid. pub type WeightCommits = StorageDoubleMap< _, Twox64Concat, u16, Twox64Concat, T::AccountId, - (H256, u64), + VecDeque<(H256, u64)>, OptionQuery, >; diff --git a/pallets/subtensor/src/macros/errors.rs b/pallets/subtensor/src/macros/errors.rs index 9fd7c5fea..7ef1ec999 100644 --- a/pallets/subtensor/src/macros/errors.rs +++ b/pallets/subtensor/src/macros/errors.rs @@ -184,7 +184,11 @@ mod errors { TxChildkeyTakeRateLimitExceeded, /// Invalid identity. InvalidIdentity, - /// invalid reaveal epoch - InvalidRevealEpoch, + /// Maximum commit limit reached + TooManyUnrevealedCommits, + /// Reveal is out of order + RevealOutOfOrder, + /// Attempted to reveal weights that are expired + AttemptedToRevealExpiredWeightCommit, } } diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index d2d6cf2a0..a4730f4ea 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -2,7 +2,7 @@ use super::*; use crate::epoch::math::*; use sp_core::H256; use sp_runtime::traits::{BlakeTwo256, Hash}; -use sp_std::vec; +use sp_std::{collections::vec_deque::VecDeque, vec}; impl Pallet { /// ---- The implementation for committing weight hashes. @@ -18,34 +18,45 @@ impl Pallet { /// - The hash representing the committed weights. /// /// # Raises: - /// * `WeightsCommitNotAllowed`: - /// - Attempting to commit when it is not allowed. + /// * `CommitRevealDisabled`: + /// - Attempting to commit when the commit-reveal mechanism is disabled. /// + /// * `TooManyUnrevealedCommits`: + /// - Attempting to commit when the user has more than the allowed limit of unrevealed commits. pub fn do_commit_weights( origin: T::RuntimeOrigin, netuid: u16, commit_hash: H256, ) -> DispatchResult { + // --- 1. Check the caller's signature (hotkey). 
let who = ensure_signed(origin)?; log::debug!("do_commit_weights( hotkey:{:?} netuid:{:?})", who, netuid); + // --- 2. Ensure commit-reveal is enabled for the network. ensure!( Self::get_commit_reveal_weights_enabled(netuid), Error::::CommitRevealDisabled ); - ensure!( - Self::can_commit(netuid, &who), - Error::::WeightsCommitNotAllowed - ); + // --- 3. Check if the current number of unrevealed commits is within the allowed limit. + WeightCommits::::try_mutate(netuid, &who, |maybe_commits| -> DispatchResult { + if let Some(ref commits) = maybe_commits { + ensure!(commits.len() < 10, Error::::TooManyUnrevealedCommits); + } - WeightCommits::::insert( - netuid, - &who, - (commit_hash, Self::get_current_block_as_u64()), - ); - Ok(()) + // --- 4. Take the existing commits or create a new VecDeque. + let mut commits: VecDeque<(H256, u64)> = maybe_commits.take().unwrap_or_default(); + + // --- 5. Append the new commit to the queue. + commits.push_back((commit_hash, Self::get_current_block_as_u64())); + + // --- 6. Store the updated queue back to storage. + *maybe_commits = Some(commits); + + // --- 7. Return ok. + Ok(()) + }) } /// ---- The implementation for revealing committed weights. @@ -63,22 +74,30 @@ impl Pallet { /// * `values` (`Vec`): /// - The values of the weights being revealed. /// - /// * `salt` (`Vec`): + /// * `salt` (`Vec`): /// - The values of the weights being revealed. /// /// * `version_key` (`u64`): /// - The network version key. /// /// # Raises: + /// * `CommitRevealDisabled`: + /// - Attempting to reveal weights when the commit-reveal mechanism is disabled. + /// /// * `NoWeightsCommitFound`: /// - Attempting to reveal weights without an existing commit. /// - /// * `InvalidRevealCommitHashNotMatchTempo`: - /// - Attempting to reveal weights outside the valid tempo. + /// * `InvalidRevealCommitTempo`: + /// - Attempting to reveal weights outside the valid reveal period. 
/// - /// * `InvalidRevealCommitHashNotMatch`: - /// - The revealed hash does not match the committed hash. + /// * `AttemptedToRevealExpiredWeightCommit`: + /// - Attempting to reveal a weight commit that has expired. + /// + /// * `RevealOutOfOrder`: + /// - Attempting to reveal a commit out of the expected order. /// + /// * `InvalidRevealCommitHashNotMatch`: + /// - The revealed hash does not match any committed hash. pub fn do_reveal_weights( origin: T::RuntimeOrigin, netuid: u16, @@ -87,25 +106,36 @@ impl Pallet { salt: Vec, version_key: u64, ) -> DispatchResult { + // --- 1. Check the caller's signature (hotkey). let who = ensure_signed(origin.clone())?; log::debug!("do_reveal_weights( hotkey:{:?} netuid:{:?})", who, netuid); + // --- 2. Ensure commit-reveal is enabled for the network. ensure!( Self::get_commit_reveal_weights_enabled(netuid), Error::::CommitRevealDisabled ); - WeightCommits::::try_mutate_exists(netuid, &who, |maybe_commit| -> DispatchResult { - let (commit_hash, commit_block) = maybe_commit - .as_ref() + // --- 3. Mutate the WeightCommits to retrieve existing commits for the user. + WeightCommits::::try_mutate_exists(netuid, &who, |maybe_commits| -> DispatchResult { + let commits = maybe_commits + .as_mut() .ok_or(Error::::NoWeightsCommitFound)?; - ensure!( - Self::is_reveal_block_range(netuid, *commit_block), - Error::::InvalidRevealCommitTempo - ); + // --- 4. Remove any expired commits from the front of the queue, collecting their hashes. + let mut expired_hashes = Vec::new(); + while let Some((hash, commit_block)) = commits.front() { + if Self::is_commit_expired(netuid, *commit_block) { + // Collect the expired commit hash + expired_hashes.push(*hash); + commits.pop_front(); + } else { + break; + } + } + // --- 5. Hash the provided data. let provided_hash: H256 = BlakeTwo256::hash_of(&( who.clone(), netuid, @@ -114,13 +144,56 @@ impl Pallet { salt.clone(), version_key, )); + + // --- 6. 
After removing expired commits, check if any commits are left. + if commits.is_empty() { + // No non-expired commits + // Check if provided_hash matches any expired commits + if expired_hashes.contains(&provided_hash) { + return Err(Error::::AttemptedToRevealExpiredWeightCommit.into()); + } else { + return Err(Error::::NoWeightsCommitFound.into()); + } + } + + // --- 7. Collect the hashes of the remaining (non-expired) commits. + let non_expired_hashes: Vec = commits.iter().map(|(hash, _)| *hash).collect(); + + // --- 8. Get the first commit from the VecDeque. + let (commit_hash, commit_block) = + commits.front().ok_or(Error::::NoWeightsCommitFound)?; + + // --- 9. Ensure the commit is ready to be revealed in the current block range. ensure!( - provided_hash == *commit_hash, - Error::::InvalidRevealCommitHashNotMatch + Self::is_reveal_block_range(netuid, *commit_block), + Error::::InvalidRevealCommitTempo ); - *maybe_commit = None; + // --- 10. Check if the provided hash matches the first commit's hash. + if provided_hash != *commit_hash { + // Check if the provided hash matches any other non-expired commit in the queue + if non_expired_hashes + .iter() + .skip(1) + .any(|hash| *hash == provided_hash) + { + return Err(Error::::RevealOutOfOrder.into()); + } else if expired_hashes.contains(&provided_hash) { + return Err(Error::::AttemptedToRevealExpiredWeightCommit.into()); + } else { + return Err(Error::::InvalidRevealCommitHashNotMatch.into()); + } + } + + // --- 11. Remove the first commit from the queue after passing all checks. + commits.pop_front(); + + // --- 12. If the queue is now empty, remove the storage entry for the user. + if commits.is_empty() { + *maybe_commits = None; + } + // --- 13. Proceed to set the revealed weights. 
Self::do_set_weights(origin, netuid, uids, values, version_key) }) } @@ -454,22 +527,6 @@ impl Pallet { uids.len() <= subnetwork_n as usize } - pub fn can_commit(netuid: u16, who: &T::AccountId) -> bool { - if let Some((_hash, commit_block)) = WeightCommits::::get(netuid, who) { - let current_block: u64 = Self::get_current_block_as_u64(); - let current_epoch: u64 = Self::get_epoch_index(netuid, current_block); - let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); - - // Allow commit if we're in a new epoch - if current_epoch > commit_epoch { - return true; - } - false - } else { - true - } - } - pub fn is_reveal_block_range(netuid: u16, commit_block: u64) -> bool { let current_block: u64 = Self::get_current_block_as_u64(); let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); @@ -487,4 +544,12 @@ impl Pallet { block_with_offset.checked_div(tempo_plus_one).unwrap_or(0) } + + pub fn is_commit_expired(netuid: u16, commit_block: u64) -> bool { + let current_block = Self::get_current_block_as_u64(); + let current_epoch = Self::get_epoch_index(netuid, current_block); + let commit_epoch = Self::get_epoch_index(netuid, commit_block); + + current_epoch > commit_epoch.saturating_add(1) + } } diff --git a/pallets/subtensor/tests/swap_hotkey.rs b/pallets/subtensor/tests/swap_hotkey.rs index 89938e3eb..bf5ecb301 100644 --- a/pallets/subtensor/tests/swap_hotkey.rs +++ b/pallets/subtensor/tests/swap_hotkey.rs @@ -342,7 +342,7 @@ fn test_swap_certificates() { ); }); } - +use sp_std::collections::vec_deque::VecDeque; // SKIP_WASM_BUILD=1 RUST_LOG=debug cargo test --test swap_hotkey -- test_swap_weight_commits --exact --nocapture #[test] fn test_swap_weight_commits() { @@ -351,12 +351,13 @@ fn test_swap_weight_commits() { let new_hotkey = U256::from(2); let coldkey = U256::from(3); let netuid = 0u16; - let weight_commits = (H256::from_low_u64_be(100), 200); + let mut weight_commits: VecDeque<(H256, u64)> = VecDeque::new(); + 
weight_commits.push_back((H256::from_low_u64_be(100), 200)); let mut weight = Weight::zero(); add_network(netuid, 0, 1); IsNetworkMember::::insert(old_hotkey, netuid, true); - WeightCommits::::insert(netuid, old_hotkey, weight_commits); + WeightCommits::::insert(netuid, old_hotkey, weight_commits.clone()); assert_ok!(SubtensorModule::perform_hotkey_swap( &old_hotkey, diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 513421017..2b3a5d305 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -728,7 +728,6 @@ fn test_weights_err_max_weight_limit() { // Add network. let netuid: u16 = 1; let tempo: u16 = 100; - let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; add_network(netuid, tempo, 0); // Set params. @@ -789,18 +788,18 @@ fn test_weights_err_max_weight_limit() { // Non self-weight fails. let uids: Vec = vec![1, 2, 3, 4]; let values: Vec = vec![u16::MAX / 4, u16::MAX / 4, u16::MAX / 54, u16::MAX / 4]; - let result = commit_reveal_set_weights(U256::from(0), 1, uids, values, salt.clone(), 0); + let result = + SubtensorModule::set_weights(RuntimeOrigin::signed(U256::from(0)), 1, uids, values, 0); assert_eq!(result, Err(Error::::MaxWeightExceeded.into())); // Self-weight is a success. let uids: Vec = vec![0]; // Self. let values: Vec = vec![u16::MAX]; // normalizes to u32::MAX - assert_ok!(commit_reveal_set_weights( - U256::from(0), + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(U256::from(0)), 1, uids, values, - salt.clone(), 0 )); }); @@ -928,7 +927,6 @@ fn test_set_weight_too_many_uids() { new_test_ext(0).execute_with(|| { let netuid: u16 = 1; let tempo: u16 = 13; - let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; add_network(netuid, tempo, 0); register_ok_neuron(1, U256::from(1), U256::from(2), 100_000); @@ -943,12 +941,11 @@ fn test_set_weight_too_many_uids() { // Should fail because we are setting more weights than there are neurons. 
let weight_keys: Vec = vec![0, 1, 2, 3, 4]; // more uids than neurons in subnet. let weight_values: Vec = vec![88, 102, 303, 1212, 11]; // random value. - let result = commit_reveal_set_weights( - U256::from(1), + let result = SubtensorModule::set_weights( + RuntimeOrigin::signed(U256::from(1)), 1, weight_keys, weight_values, - salt.clone(), 0, ); assert_eq!( @@ -959,12 +956,11 @@ fn test_set_weight_too_many_uids() { // Shouldnt fail because we are setting less weights than there are neurons. let weight_keys: Vec = vec![0, 1]; // Only on neurons that exist. let weight_values: Vec = vec![10, 10]; // random value. - assert_ok!(commit_reveal_set_weights( - U256::from(1), + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(U256::from(1)), 1, weight_keys, weight_values, - salt, 0 )); }); @@ -1369,6 +1365,71 @@ fn test_set_weights_commit_reveal_enabled_error() { }); } +#[test] +fn test_reveal_weights_when_commit_reveal_disabled() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; + let version_key: u64 = 0; + let hotkey: U256 = U256::from(1); + + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + + System::set_block_number(0); + + let tempo: u16 = 5; + add_network(netuid, tempo, 0); + + // Register neurons and set up configurations + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + SubtensorModule::set_weights_set_rate_limit(netuid, 5); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + // Enable commit-reveal and commit + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + assert_ok!(SubtensorModule::commit_weights( + 
RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + + // Advance to the next epoch + let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + // Disable commit-reveal before reveal + SubtensorModule::set_commit_reveal_weights_enabled(netuid, false); + + // Attempt to reveal, should fail with CommitRevealDisabled + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids, + weight_values, + salt, + version_key, + ), + Error::::CommitRevealDisabled + ); + }); +} + #[test] fn test_commit_reveal_weights_ok() { new_test_ext(1).execute_with(|| { @@ -1467,10 +1528,10 @@ fn test_commit_reveal_tempo_interval() { )); // Attempt to commit again in the same epoch, should fail - assert_err!( - SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash), - Error::::WeightsCommitNotAllowed - ); + // assert_err!( + // SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash), + // Error::::WeightsCommitNotAllowed + // ); // Attempt to reveal in the same epoch, should fail assert_err!( @@ -1545,7 +1606,7 @@ fn test_commit_reveal_tempo_interval() { salt.clone(), version_key, ), - Error::::InvalidRevealCommitTempo + Error::::AttemptedToRevealExpiredWeightCommit ); assert_ok!(SubtensorModule::commit_weights( @@ -1869,7 +1930,10 @@ fn test_tempo_change_during_commit_reveal_process() { "Advanced to block {}", SubtensorModule::get_current_block_as_u64() + 1 ); - assert!(SubtensorModule::should_run_epoch(netuid, SubtensorModule::get_current_block_as_u64())); + assert!(SubtensorModule::should_run_epoch( + netuid, + SubtensorModule::get_current_block_as_u64() + )); step_block(1); assert_ok!(SubtensorModule::reveal_weights( @@ -1909,7 +1973,10 @@ fn test_tempo_change_during_commit_reveal_process() { "Advanced to block {}", 
SubtensorModule::get_current_block_as_u64() + 1 ); - assert!(SubtensorModule::should_run_epoch(netuid, SubtensorModule::get_current_block_as_u64())); + assert!(SubtensorModule::should_run_epoch( + netuid, + SubtensorModule::get_current_block_as_u64() + )); step_block(1); assert_ok!(SubtensorModule::reveal_weights( @@ -1925,7 +1992,6 @@ fn test_tempo_change_during_commit_reveal_process() { SubtensorModule::get_current_block_as_u64() ); - let tempo: u16 = 1050; log::info!("Changing tempo to {}", tempo); SubtensorModule::set_tempo(netuid, tempo); @@ -1949,11 +2015,16 @@ fn test_tempo_change_during_commit_reveal_process() { SubtensorModule::get_tempo(netuid), SubtensorModule::get_current_block_as_u64(), ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_block(blocks_to_next_epoch as u16); log::info!( "Advanced to block {}", - SubtensorModule::get_current_block_as_u64() + SubtensorModule::get_current_block_as_u64() + 1 ); + assert!(SubtensorModule::should_run_epoch( + netuid, + SubtensorModule::get_current_block_as_u64() + )); + step_block(1); assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -1970,6 +2041,401 @@ fn test_tempo_change_during_commit_reveal_process() { }); } +#[test] +fn test_commit_reveal_multiple_commits() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let version_key: u64 = 0; + let hotkey: U256 = U256::from(1); + + System::set_block_number(0); + + let tempo: u16 = 7200; + add_network(netuid, tempo, 0); + + // Setup the network and neurons + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + SubtensorModule::set_commit_reveal_weights_enabled(netuid, 
true); + + // 1. Commit 10 times successfully + let mut commit_hashes = vec![]; + for i in 0..10 { + let salt_i: Vec = vec![i; 8]; // Unique salt for each commit + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_i.clone(), + version_key, + )); + commit_hashes.push((commit_hash, salt_i)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + } + + // 2. Attempt to commit an 11th time, should fail + let salt_11: Vec = vec![11; 8]; + let commit_hash_11: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_11.clone(), + version_key, + )); + assert_err!( + SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash_11), + Error::::TooManyUnrevealedCommits + ); + + // 3. Attempt to reveal out of order (reveal the second commit first), should fail + // Advance to the next epoch for reveals to be valid + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + // Try to reveal the second commit first + let (_commit_hash_2, salt_2) = &commit_hashes[1]; + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_2.clone(), + version_key, + ), + Error::::RevealOutOfOrder + ); + + // 4. Reveal commits in order, ensuring they succeed + for (_commit_hash_i, salt_i) in commit_hashes.iter() { + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_i.clone(), + version_key, + )); + } + + // After revealing all commits, attempt to commit again should now succeed + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_11 + )); + + // 5. 
Test expired commits are removed and do not block reveals + // Commit again and let the commit expire + let salt_12: Vec = vec![12; 8]; + let commit_hash_12: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_12.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_12 + )); + + // Advance two epochs so the commit expires + for _ in 0..2 { + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + } + + // Attempt to reveal the expired commit, should fail + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_12.clone(), + version_key, + ), + Error::::AttemptedToRevealExpiredWeightCommit + ); + + // Commit again and reveal after advancing to next epoch + let salt_13: Vec = vec![13; 8]; + let commit_hash_13: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_13.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_13 + )); + + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_13.clone(), + version_key, + )); + + // 6. 
Ensure that attempting to reveal after the valid reveal period fails + // Commit again + let salt_14: Vec = vec![14; 8]; + let commit_hash_14: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_14.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_14 + )); + + // Advance beyond the valid reveal period (more than one epoch) + for _ in 0..2 { + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + } + + // Attempt to reveal, should fail + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_14.clone(), + version_key, + ), + Error::::AttemptedToRevealExpiredWeightCommit + ); + + // 7. Attempt to reveal a commit that is not ready yet (before the reveal period) + // Commit again + let salt_15: Vec = vec![15; 8]; + let commit_hash_15: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_15.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_15 + )); + + // Attempt to reveal immediately, should fail + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_15.clone(), + version_key, + ), + Error::::InvalidRevealCommitTempo + ); + + // Advance to the next epoch + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + // Now reveal should succeed + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + 
weight_values.clone(), + salt_15.clone(), + version_key, + )); + + // 8. Test that revealing with incorrect data (salt) fails + // Commit again + let salt_16: Vec = vec![16; 8]; + let commit_hash_16: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_16.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_16 + )); + + // Advance to the next epoch + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + // Attempt to reveal with incorrect salt + let wrong_salt: Vec = vec![99; 8]; + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + wrong_salt.clone(), + version_key, + ), + Error::::InvalidRevealCommitHashNotMatch + ); + + // Reveal with correct data should succeed + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_16.clone(), + version_key, + )); + + // 9. Test that attempting to reveal when there are no commits fails + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_16.clone(), + version_key, + ), + Error::::NoWeightsCommitFound + ); + + // 10. 
Commit twice and attempt to reveal out of sequence + let salt_a: Vec = vec![21; 8]; + let commit_hash_a: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_a.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_a + )); + + let salt_b: Vec = vec![22; 8]; + let commit_hash_b: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_b.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_b + )); + + // Advance to next epoch + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + // Attempt to reveal the second commit first + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_b.clone(), + version_key, + ), + Error::::RevealOutOfOrder + ); + + // Reveal the first commit + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_a.clone(), + version_key, + )); + + // Now reveal the second commit + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids, + weight_values, + salt_b, + version_key, + )); + }); +} + fn commit_reveal_set_weights( hotkey: U256, netuid: u16, From a9f60eaa68b6b858e9d95f9553d27164c487e4df Mon Sep 17 00:00:00 2001 From: johnreedv Date: Mon, 7 Oct 2024 10:46:11 -0700 Subject: [PATCH 158/213] remove expired commits when committing --- pallets/subtensor/src/subnets/weights.rs | 25 ++- pallets/subtensor/tests/weights.rs | 205 +++++++++++++++++++++++ 2 files changed, 222 insertions(+), 8 deletions(-) diff --git a/pallets/subtensor/src/subnets/weights.rs 
b/pallets/subtensor/src/subnets/weights.rs index a4730f4ea..6be6e578d 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -39,22 +39,31 @@ impl Pallet { Error::::CommitRevealDisabled ); - // --- 3. Check if the current number of unrevealed commits is within the allowed limit. + // --- 3. Mutate the WeightCommits to retrieve existing commits for the user. WeightCommits::::try_mutate(netuid, &who, |maybe_commits| -> DispatchResult { - if let Some(ref commits) = maybe_commits { - ensure!(commits.len() < 10, Error::::TooManyUnrevealedCommits); - } - // --- 4. Take the existing commits or create a new VecDeque. let mut commits: VecDeque<(H256, u64)> = maybe_commits.take().unwrap_or_default(); - // --- 5. Append the new commit to the queue. + // --- 5. Remove any expired commits from the front of the queue. + while let Some((_, commit_block)) = commits.front() { + if Self::is_commit_expired(netuid, *commit_block) { + // Remove the expired commit + commits.pop_front(); + } else { + break; + } + } + + // --- 6. Check if the current number of unrevealed commits is within the allowed limit. + ensure!(commits.len() < 10, Error::::TooManyUnrevealedCommits); + + // --- 7. Append the new commit to the queue. commits.push_back((commit_hash, Self::get_current_block_as_u64())); - // --- 6. Store the updated queue back to storage. + // --- 8. Store the updated queue back to storage. *maybe_commits = Some(commits); - // --- 7. Return ok. + // --- 9. Return ok. 
Ok(()) }) } diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 2b3a5d305..786bc9b1d 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -13,6 +13,7 @@ use sp_runtime::{ traits::{BlakeTwo256, DispatchInfoOf, Hash, SignedExtension}, DispatchError, }; +use sp_std::collections::vec_deque::VecDeque; use substrate_fixed::types::I32F32; /*************************** @@ -2476,3 +2477,207 @@ fn commit_reveal_set_weights( Ok(()) } + +#[test] +fn test_expired_commits_handling_in_commit_and_reveal() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey: ::AccountId = U256::from(1); + let version_key: u64 = 0; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + + // Register neurons + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + // 1. Commit 5 times in epoch 0 + let mut commit_info = Vec::new(); + for i in 0..5 { + let salt: Vec = vec![i; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + commit_info.push((commit_hash, salt)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + } + + // Advance to epoch 1 + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + // 2. 
Commit another 5 times in epoch 1 + for i in 5..10 { + let salt: Vec = vec![i; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + commit_info.push((commit_hash, salt)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + } + + // 3. Attempt to commit an 11th time, should fail with TooManyUnrevealedCommits + let salt_11: Vec = vec![11; 8]; + let commit_hash_11: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_11.clone(), + version_key, + )); + assert_err!( + SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash_11), + Error::::TooManyUnrevealedCommits + ); + + // 4. Advance to epoch 2 to expire the commits from epoch 0 + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); // Now at epoch 2 + + // 5. Attempt to commit again; should succeed after expired commits are removed + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_11 + )); + + // 6. Verify that the number of unrevealed, non-expired commits is now 6 + let commits: VecDeque<(H256, u64)> = + pallet_subtensor::WeightCommits::::get(netuid, hotkey) + .expect("Expected a commit"); + assert_eq!(commits.len(), 6); // 5 non-expired commits from epoch 1 + new commit + + // 7. Attempt to reveal an expired commit (from epoch 0) + // Previous commit removed expired commits + let (_, expired_salt) = &commit_info[0]; + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + expired_salt.clone(), + version_key, + ), + Error::::InvalidRevealCommitHashNotMatch + ); + + // 8. 
Reveal commits from epoch 1 at current_epoch = 2 + for (_, salt) in commit_info.iter().skip(5).take(5) { + let salt = salt.clone(); + + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + } + + // 9. Advance to epoch 3 to reveal the new commit + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + // 10. Reveal the new commit from epoch 2 + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_11.clone(), + version_key, + )); + + // 10. Verify that all commits have been revealed and the queue is empty + let commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey); + assert!(commits.is_none()); + + // 11. Attempt to reveal again, should fail with NoWeightsCommitFound + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_11.clone(), + version_key, + ), + Error::::NoWeightsCommitFound + ); + + // 12. 
Commit again to ensure we can continue after previous commits + let salt_12: Vec = vec![12; 8]; + let commit_hash_12: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt_12.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash_12 + )); + + // Advance to next epoch (epoch 4) and reveal + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + tempo, + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids, + weight_values, + salt_12, + version_key, + )); + }); +} From c22c3c553437f56b4844c2dbb3f913ccba00fa06 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Mon, 7 Oct 2024 14:36:56 -0700 Subject: [PATCH 159/213] add RevealPeriodEpochs and improve errors --- pallets/subtensor/src/lib.rs | 9 + pallets/subtensor/src/macros/errors.rs | 10 +- pallets/subtensor/src/subnets/weights.rs | 34 ++- pallets/subtensor/tests/weights.rs | 351 ++++++++++++++++++++++- 4 files changed, 373 insertions(+), 31 deletions(-) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 39ad90895..02b164c0a 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -569,6 +569,11 @@ pub mod pallet { 0 } #[pallet::type_value] + /// Default minimum stake for weights. + pub fn DefaultRevealPeriodEpochs() -> u64 { + 1 + } + #[pallet::type_value] /// Value definition for vector of u16. 
pub fn EmptyU16Vec() -> Vec { vec![] @@ -1256,6 +1261,10 @@ pub mod pallet { VecDeque<(H256, u64)>, OptionQuery, >; + #[pallet::storage] + /// --- Map (netuid) --> Number of epochs allowed for commit reveal periods + pub type RevealPeriodEpochs = + StorageMap<_, Twox64Concat, u16, u64, ValueQuery, DefaultRevealPeriodEpochs>; /// ================== /// ==== Genesis ===== diff --git a/pallets/subtensor/src/macros/errors.rs b/pallets/subtensor/src/macros/errors.rs index 7ef1ec999..5f3712355 100644 --- a/pallets/subtensor/src/macros/errors.rs +++ b/pallets/subtensor/src/macros/errors.rs @@ -118,8 +118,6 @@ mod errors { WeightsCommitNotAllowed, /// No commit found for the provided hotkey+netuid combination when attempting to reveal the weights. NoWeightsCommitFound, - /// Not the correct block/range to reveal weights. - InvalidRevealCommitTempo, /// Committed hash does not equal the hashed reveal data. InvalidRevealCommitHashNotMatch, /// Attempting to call set_weights when commit/reveal is enabled @@ -186,9 +184,11 @@ mod errors { InvalidIdentity, /// Maximum commit limit reached TooManyUnrevealedCommits, - /// Reveal is out of order + /// Reveal is out of order. RevealOutOfOrder, - /// Attempted to reveal weights that are expired - AttemptedToRevealExpiredWeightCommit, + /// Attempted to reveal weights that are expired. + ExpiredWeightCommit, + /// Attempted to reveal weights too early. + RevealTooEarly, } } diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 6be6e578d..2cd97d5f7 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -84,7 +84,7 @@ impl Pallet { /// - The values of the weights being revealed. /// /// * `salt` (`Vec`): - /// - The values of the weights being revealed. + /// - The salt used to generate the commit hash. /// /// * `version_key` (`u64`): /// - The network version key. 
@@ -96,12 +96,12 @@ impl Pallet { /// * `NoWeightsCommitFound`: /// - Attempting to reveal weights without an existing commit. /// - /// * `InvalidRevealCommitTempo`: - /// - Attempting to reveal weights outside the valid reveal period. - /// - /// * `AttemptedToRevealExpiredWeightCommit`: + /// * `ExpiredWeightCommit`: /// - Attempting to reveal a weight commit that has expired. /// + /// * `RevealTooEarly`: + /// - Attempting to reveal weights outside the valid reveal period. + /// /// * `RevealOutOfOrder`: /// - Attempting to reveal a commit out of the expected order. /// @@ -159,7 +159,7 @@ impl Pallet { // No non-expired commits // Check if provided_hash matches any expired commits if expired_hashes.contains(&provided_hash) { - return Err(Error::::AttemptedToRevealExpiredWeightCommit.into()); + return Err(Error::::ExpiredWeightCommit.into()); } else { return Err(Error::::NoWeightsCommitFound.into()); } @@ -175,7 +175,7 @@ impl Pallet { // --- 9. Ensure the commit is ready to be revealed in the current block range. ensure!( Self::is_reveal_block_range(netuid, *commit_block), - Error::::InvalidRevealCommitTempo + Error::::RevealTooEarly ); // --- 10. Check if the provided hash matches the first commit's hash. 
@@ -188,7 +188,7 @@ impl Pallet { { return Err(Error::::RevealOutOfOrder.into()); } else if expired_hashes.contains(&provided_hash) { - return Err(Error::::AttemptedToRevealExpiredWeightCommit.into()); + return Err(Error::::ExpiredWeightCommit.into()); } else { return Err(Error::::InvalidRevealCommitHashNotMatch.into()); } @@ -540,9 +540,10 @@ impl Pallet { let current_block: u64 = Self::get_current_block_as_u64(); let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); let current_epoch: u64 = Self::get_epoch_index(netuid, current_block); + let reveal_period: u64 = RevealPeriodEpochs::::get(netuid); - // Reveal is allowed only in the epoch immediately after the commit's epoch - current_epoch == commit_epoch.saturating_add(1) + // Reveal is allowed only in the exact epoch `commit_epoch + reveal_period` + current_epoch == commit_epoch.saturating_add(reveal_period) } pub fn get_epoch_index(netuid: u16, block_number: u64) -> u64 { @@ -555,10 +556,15 @@ impl Pallet { } pub fn is_commit_expired(netuid: u16, commit_block: u64) -> bool { - let current_block = Self::get_current_block_as_u64(); - let current_epoch = Self::get_epoch_index(netuid, current_block); - let commit_epoch = Self::get_epoch_index(netuid, commit_block); + let current_block: u64 = Self::get_current_block_as_u64(); + let current_epoch: u64 = Self::get_epoch_index(netuid, current_block); + let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); + let reveal_period: u64 = RevealPeriodEpochs::::get(netuid); + + current_epoch > commit_epoch.saturating_add(reveal_period) + } - current_epoch > commit_epoch.saturating_add(1) + pub fn set_reveal_period(netuid: u16, reveal_period: u64) { + RevealPeriodEpochs::::insert(netuid, reveal_period); } } diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 786bc9b1d..c13fd0007 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -1528,12 +1528,6 @@ fn 
test_commit_reveal_tempo_interval() { commit_hash )); - // Attempt to commit again in the same epoch, should fail - // assert_err!( - // SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash), - // Error::::WeightsCommitNotAllowed - // ); - // Attempt to reveal in the same epoch, should fail assert_err!( SubtensorModule::reveal_weights( @@ -1544,7 +1538,7 @@ fn test_commit_reveal_tempo_interval() { salt.clone(), version_key, ), - Error::::InvalidRevealCommitTempo + Error::::RevealTooEarly ); // Calculate blocks to next epoch and advance @@ -1607,7 +1601,7 @@ fn test_commit_reveal_tempo_interval() { salt.clone(), version_key, ), - Error::::AttemptedToRevealExpiredWeightCommit + Error::::ExpiredWeightCommit ); assert_ok!(SubtensorModule::commit_weights( @@ -1627,7 +1621,7 @@ fn test_commit_reveal_tempo_interval() { salt.clone(), version_key, ), - Error::::InvalidRevealCommitTempo + Error::::RevealTooEarly ); let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( @@ -2178,7 +2172,7 @@ fn test_commit_reveal_multiple_commits() { salt_12.clone(), version_key, ), - Error::::AttemptedToRevealExpiredWeightCommit + Error::::ExpiredWeightCommit ); // Commit again and reveal after advancing to next epoch @@ -2250,7 +2244,7 @@ fn test_commit_reveal_multiple_commits() { salt_14.clone(), version_key, ), - Error::::AttemptedToRevealExpiredWeightCommit + Error::::ExpiredWeightCommit ); // 7. 
Attempt to reveal a commit that is not ready yet (before the reveal period) @@ -2280,7 +2274,7 @@ fn test_commit_reveal_multiple_commits() { salt_15.clone(), version_key, ), - Error::::InvalidRevealCommitTempo + Error::::RevealTooEarly ); // Advance to the next epoch @@ -2681,3 +2675,336 @@ fn test_expired_commits_handling_in_commit_and_reveal() { )); }); } + +#[test] +fn test_reveal_at_exact_epoch() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey: ::AccountId = U256::from(1); + let version_key: u64 = 0; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + let reveal_periods: Vec = vec![0, 1, 2, 7, 40, 86, 100]; + + for &reveal_period in &reveal_periods { + SubtensorModule::set_reveal_period(netuid, reveal_period); + + let salt: Vec = vec![42; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + + // Retrieve commit information + let commit_block = SubtensorModule::get_current_block_as_u64(); + let commit_epoch = SubtensorModule::get_epoch_index(netuid, commit_block); + let reveal_epoch = commit_epoch.saturating_add(reveal_period); + + // Attempt to reveal before the allowed epoch + if reveal_period > 0 { + // Advance to epoch before the reveal epoch + if reveal_period >= 1 { + step_epochs((reveal_period - 1) as u16, netuid); + } + 
+ // Attempt to reveal too early + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + ), + Error::::RevealTooEarly + ); + } + + // Advance to the exact reveal epoch + let current_epoch = SubtensorModule::get_epoch_index( + netuid, + SubtensorModule::get_current_block_as_u64(), + ); + if current_epoch < reveal_epoch { + step_epochs((reveal_epoch - current_epoch) as u16, netuid); + } + + // Reveal at the exact allowed epoch + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + ), + Error::::NoWeightsCommitFound + ); + + let new_salt: Vec = vec![43; 8]; + let new_commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + new_salt.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + new_commit_hash + )); + + // Advance past the reveal epoch to ensure commit expiration + step_epochs((reveal_period + 1) as u16, netuid); + + // Attempt to reveal after the allowed epoch + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + new_salt.clone(), + version_key, + ), + Error::::ExpiredWeightCommit + ); + + pallet_subtensor::WeightCommits::::remove(netuid, hotkey); + } + }); +} + +#[test] +fn test_tempo_and_reveal_period_change_during_commit_reveal_process() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let salt: Vec = vec![42; 8]; + let version_key: u64 = 0; + let hotkey: ::AccountId = U256::from(1); + + // 
Compute initial commit hash + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + + System::set_block_number(0); + + let initial_tempo: u16 = 100; + let initial_reveal_period: u64 = 1; + add_network(netuid, initial_tempo, 0); + SubtensorModule::set_reveal_period(netuid, initial_reveal_period); + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + // Step 1: Commit weights + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + log::info!( + "Commit successful at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + // Retrieve commit block and epoch + let commit_block = SubtensorModule::get_current_block_as_u64(); + let commit_epoch = SubtensorModule::get_epoch_index(netuid, commit_block); + + // Step 2: Change tempo and reveal period after commit + let new_tempo: u16 = 50; + let new_reveal_period: u64 = 2; + SubtensorModule::set_tempo(netuid, new_tempo); + SubtensorModule::set_reveal_period(netuid, new_reveal_period); + log::info!( + "Changed tempo to {} and reveal period to {}", + new_tempo, + new_reveal_period + ); + + // Step 3: Advance blocks to reach the reveal epoch according to new tempo and reveal period + let current_block = SubtensorModule::get_current_block_as_u64(); + let current_epoch = SubtensorModule::get_epoch_index(netuid, current_block); + let reveal_epoch = commit_epoch.saturating_add(new_reveal_period); + + // Advance to one epoch before reveal epoch + if current_epoch < reveal_epoch { + let epochs_to_advance = reveal_epoch - current_epoch - 
1; + step_epochs(epochs_to_advance as u16, netuid); + } + + // Attempt to reveal too early + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key + ), + Error::::RevealTooEarly + ); + log::info!( + "Attempted to reveal too early at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + // Advance to reveal epoch + step_epochs(1, netuid); + + // Attempt to reveal at the correct epoch + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key + )); + log::info!( + "Revealed weights at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + // Step 4: Change tempo and reveal period again after reveal + let new_tempo_after_reveal: u16 = 200; + let new_reveal_period_after_reveal: u64 = 1; + SubtensorModule::set_tempo(netuid, new_tempo_after_reveal); + SubtensorModule::set_reveal_period(netuid, new_reveal_period_after_reveal); + log::info!( + "Changed tempo to {} and reveal period to {} after reveal", + new_tempo_after_reveal, + new_reveal_period_after_reveal + ); + + // Step 5: Commit again + let new_salt: Vec = vec![43; 8]; + let new_commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + new_salt.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + new_commit_hash + )); + log::info!( + "Commit successful at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + // Retrieve new commit block and epoch + let new_commit_block = SubtensorModule::get_current_block_as_u64(); + let new_commit_epoch = SubtensorModule::get_epoch_index(netuid, new_commit_block); + let new_reveal_epoch = new_commit_epoch.saturating_add(new_reveal_period_after_reveal); + + // Advance to reveal epoch + let current_block = 
SubtensorModule::get_current_block_as_u64(); + let current_epoch = SubtensorModule::get_epoch_index(netuid, current_block); + if current_epoch < new_reveal_epoch { + let epochs_to_advance = new_reveal_epoch - current_epoch; + step_epochs(epochs_to_advance as u16, netuid); + } + + // Attempt to reveal at the correct epoch + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + new_salt.clone(), + version_key + )); + log::info!( + "Revealed weights at block {}", + SubtensorModule::get_current_block_as_u64() + ); + + // Step 6: Attempt to reveal after the allowed epoch (commit expires) + // Advance past the reveal epoch + let expiration_epochs = 1; + step_epochs(expiration_epochs as u16, netuid); + + // Attempt to reveal again (should fail due to expired commit) + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + new_salt.clone(), + version_key + ), + Error::::NoWeightsCommitFound + ); + log::info!( + "Attempted to reveal after expiration at block {}", + SubtensorModule::get_current_block_as_u64() + ); + }); +} + +pub fn step_epochs(count: u16, netuid: u16) { + for _ in 0..count { + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch.saturating_add(1) as u16); + } +} From b0b817a108f771a5b8bc55bbf7de567086f513ed Mon Sep 17 00:00:00 2001 From: johnreedv Date: Mon, 7 Oct 2024 15:05:03 -0700 Subject: [PATCH 160/213] use fn step_epochs in tests --- pallets/subtensor/tests/weights.rs | 210 +++++------------------------ 1 file changed, 33 insertions(+), 177 deletions(-) diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index c13fd0007..168f2bb64 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs 
@@ -1405,13 +1405,7 @@ fn test_reveal_weights_when_commit_reveal_disabled() { commit_hash )); - // Advance to the next epoch - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Disable commit-reveal before reveal SubtensorModule::set_commit_reveal_weights_enabled(netuid, false); @@ -1470,13 +1464,7 @@ fn test_commit_reveal_weights_ok() { commit_hash )); - // Calculate blocks to next epoch and advance - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Reveal in the next epoch assert_ok!(SubtensorModule::reveal_weights( @@ -1541,13 +1529,7 @@ fn test_commit_reveal_tempo_interval() { Error::::RevealTooEarly ); - // Calculate blocks to next epoch and advance - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -1579,18 +1561,7 @@ fn test_commit_reveal_tempo_interval() { )); // step two epochs - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(2, netuid); assert_err!( SubtensorModule::reveal_weights( @@ -1624,12 +1595,7 @@ fn test_commit_reveal_tempo_interval() { 
Error::::RevealTooEarly ); - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -1679,13 +1645,7 @@ fn test_commit_reveal_hash() { commit_hash )); - // Calculate blocks to next epoch and advance - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - SubtensorModule::get_tempo(netuid), - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Attempt to reveal with incorrect data, should fail assert_err!( @@ -1771,13 +1731,7 @@ fn test_commit_reveal_disabled_or_enabled() { commit_hash )); - // Calculate blocks to next epoch and advance - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - SubtensorModule::get_tempo(netuid), - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Reveal should succeed assert_ok!(SubtensorModule::reveal_weights( @@ -1829,13 +1783,7 @@ fn test_toggle_commit_reveal_weights_and_set_weights() { commit_hash )); - // Calculate blocks to next epoch and advance - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - SubtensorModule::get_tempo(netuid), - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Reveal in the next epoch assert_ok!(SubtensorModule::reveal_weights( @@ -1915,21 +1863,11 @@ fn test_tempo_change_during_commit_reveal_process() { log::info!("Changing tempo to {}", tempo_before_next_reveal); SubtensorModule::set_tempo(netuid, tempo_before_next_reveal); - let blocks_to_next_epoch: u64 = 
SubtensorModule::blocks_until_next_epoch( - netuid, - SubtensorModule::get_tempo(netuid), - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch as u16); + step_epochs(1, netuid); log::info!( "Advanced to block {}", - SubtensorModule::get_current_block_as_u64() + 1 - ); - assert!(SubtensorModule::should_run_epoch( - netuid, SubtensorModule::get_current_block_as_u64() - )); - step_block(1); + ); assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -1958,21 +1896,11 @@ fn test_tempo_change_during_commit_reveal_process() { log::info!("Changing tempo to {}", tempo); SubtensorModule::set_tempo(netuid, tempo); - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - SubtensorModule::get_tempo(netuid), - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch as u16); + step_epochs(1, netuid); log::info!( "Advanced to block {}", - SubtensorModule::get_current_block_as_u64() + 1 - ); - assert!(SubtensorModule::should_run_epoch( - netuid, SubtensorModule::get_current_block_as_u64() - )); - step_block(1); + ); assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -2005,21 +1933,11 @@ fn test_tempo_change_during_commit_reveal_process() { log::info!("Changing tempo to {}", tempo); SubtensorModule::set_tempo(netuid, tempo); - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - SubtensorModule::get_tempo(netuid), - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch as u16); + step_epochs(1, netuid); log::info!( "Advanced to block {}", - SubtensorModule::get_current_block_as_u64() + 1 - ); - assert!(SubtensorModule::should_run_epoch( - netuid, SubtensorModule::get_current_block_as_u64() - )); - step_block(1); + ); assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -2095,12 +2013,7 @@ fn test_commit_reveal_multiple_commits() { // 3. 
Attempt to reveal out of order (reveal the second commit first), should fail // Advance to the next epoch for reveals to be valid - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Try to reveal the second commit first let (_commit_hash_2, salt_2) = &commit_hashes[1]; @@ -2153,14 +2066,7 @@ fn test_commit_reveal_multiple_commits() { )); // Advance two epochs so the commit expires - for _ in 0..2 { - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); - } + step_epochs(2, netuid); // Attempt to reveal the expired commit, should fail assert_err!( @@ -2191,12 +2097,7 @@ fn test_commit_reveal_multiple_commits() { commit_hash_13 )); - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -2225,14 +2126,7 @@ fn test_commit_reveal_multiple_commits() { )); // Advance beyond the valid reveal period (more than one epoch) - for _ in 0..2 { - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); - } + step_epochs(2, netuid); // Attempt to reveal, should fail assert_err!( @@ -2277,13 +2171,7 @@ fn test_commit_reveal_multiple_commits() { Error::::RevealTooEarly ); - // Advance to the next epoch - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - 
step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Now reveal should succeed assert_ok!(SubtensorModule::reveal_weights( @@ -2312,13 +2200,7 @@ fn test_commit_reveal_multiple_commits() { commit_hash_16 )); - // Advance to the next epoch - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Attempt to reveal with incorrect salt let wrong_salt: Vec = vec![99; 8]; @@ -2388,13 +2270,7 @@ fn test_commit_reveal_multiple_commits() { commit_hash_b )); - // Advance to next epoch - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // Attempt to reveal the second commit first assert_err!( @@ -2452,13 +2328,7 @@ fn commit_reveal_set_weights( SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, commit_hash)?; - // Calculate blocks to next epoch and advance - let blocks_to_next_epoch: u64 = SubtensorModule::blocks_until_next_epoch( - netuid, - SubtensorModule::get_tempo(netuid), - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -2515,12 +2385,7 @@ fn test_expired_commits_handling_in_commit_and_reveal() { } // Advance to epoch 1 - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // 2. Commit another 5 times in epoch 1 for i in 5..10 { @@ -2557,12 +2422,7 @@ fn test_expired_commits_handling_in_commit_and_reveal() { ); // 4. 
Advance to epoch 2 to expire the commits from epoch 0 - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); // Now at epoch 2 + step_epochs(1, netuid); // Now at epoch 2 // 5. Attempt to commit again; should succeed after expired commits are removed assert_ok!(SubtensorModule::commit_weights( @@ -2607,12 +2467,7 @@ fn test_expired_commits_handling_in_commit_and_reveal() { } // 9. Advance to epoch 3 to reveal the new commit - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); // 10. Reveal the new commit from epoch 2 assert_ok!(SubtensorModule::reveal_weights( @@ -2658,12 +2513,7 @@ fn test_expired_commits_handling_in_commit_and_reveal() { )); // Advance to next epoch (epoch 4) and reveal - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - tempo, - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_epochs(1, netuid); assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -3005,6 +2855,12 @@ pub fn step_epochs(count: u16, netuid: u16) { SubtensorModule::get_tempo(netuid), SubtensorModule::get_current_block_as_u64(), ); - step_block(blocks_to_next_epoch.saturating_add(1) as u16); + step_block(blocks_to_next_epoch as u16); + + assert!(SubtensorModule::should_run_epoch( + netuid, + SubtensorModule::get_current_block_as_u64() + )); + step_block(1); } } From e3d9b012f60776f2122650d1a046ed5b552e3bd9 Mon Sep 17 00:00:00 2001 From: Keith Date: Tue, 8 Oct 2024 21:37:33 +0800 Subject: [PATCH 161/213] Modify publish script to accept all cargo publish parameters --- scripts/publish.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 
9 deletions(-) diff --git a/scripts/publish.sh b/scripts/publish.sh index 8b2671787..dd5b110f2 100644 --- a/scripts/publish.sh +++ b/scripts/publish.sh @@ -1,28 +1,28 @@ #!/bin/bash set -ex cd support/macros -cargo publish --token $1 +cargo publish $1 cd ../.. cd pallets/commitments -cargo publish --token $1 +cargo publish $1 cd .. cd collective -cargo publish --token $1 +cargo publish $1 cd .. cd registry -cargo publish --token $1 +cargo publish $1 cd .. cd subtensor -cargo publish --token $1 +cargo publish $1 cd runtime-api -cargo publish --token $1 +cargo publish $1 cd ../.. cd admin-utils -cargo publish --token $1 +cargo publish $1 cd ../.. cd runtime -cargo publish --token $1 +cargo publish $1 cd .. cd node -cargo publish --token $1 +cargo publish $1 echo "published successfully." From 625f2b390c3a05abc7dfa485ee5de22ec348c93d Mon Sep 17 00:00:00 2001 From: johnreedv Date: Tue, 8 Oct 2024 09:28:41 -0700 Subject: [PATCH 162/213] add test_commit_reveal_order_enforcement --- pallets/subtensor/tests/weights.rs | 116 +++++++++++++++++++++++++++++ 1 file changed, 116 insertions(+) diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 168f2bb64..764cfd040 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -2864,3 +2864,119 @@ pub fn step_epochs(count: u16, netuid: u16) { step_block(1); } } + +#[test] +fn test_commit_reveal_order_enforcement() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey: ::AccountId = U256::from(1); + let version_key: u64 = 0; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 
100_000); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + // Commit three times: A, B, C + let mut commit_info = Vec::new(); + for i in 0..3 { + let salt: Vec = vec![i; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + commit_info.push((commit_hash, salt)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + } + + step_epochs(1, netuid); + + // Attempt to reveal B first (index 1), should fail + let (_commit_hash_b, salt_b) = &commit_info[1]; + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_b.clone(), + version_key, + ), + Error::::RevealOutOfOrder + ); + + // Reveal A (index 0) + let (_commit_hash_a, salt_a) = &commit_info[0]; + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_a.clone(), + version_key, + )); + + // Attempt to reveal C (index 2) before B, should fail + let (_commit_hash_c, salt_c) = &commit_info[2]; + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_c.clone(), + version_key, + ), + Error::::RevealOutOfOrder + ); + + // Reveal B + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_b.clone(), + version_key, + )); + + // Reveal C + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_c.clone(), + version_key, + )); + + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids, + weight_values, + salt_a.clone(), + version_key, + ), + 
Error::::NoWeightsCommitFound + ); + }); +} From 9bd5fc9e022e3695fdc74fc1050d080350bc1208 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Tue, 8 Oct 2024 10:36:51 -0700 Subject: [PATCH 163/213] add hyperparam set_reveal_periods --- pallets/admin-utils/src/lib.rs | 39 ++++++++++++++++++++++++ pallets/admin-utils/src/weights.rs | 22 ------------- pallets/admin-utils/tests/tests.rs | 20 ++++++++++++ pallets/subtensor/src/subnets/weights.rs | 3 ++ 4 files changed, 62 insertions(+), 22 deletions(-) diff --git a/pallets/admin-utils/src/lib.rs b/pallets/admin-utils/src/lib.rs index 501122a8e..b2f59668e 100644 --- a/pallets/admin-utils/src/lib.rs +++ b/pallets/admin-utils/src/lib.rs @@ -1170,6 +1170,45 @@ pub mod pallet { Ok(()) } + + /// Sets the commit-reveal weights periods for a specific subnet. + /// + /// This extrinsic allows the subnet owner or root account to set the duration (in epochs) during which committed weights must be revealed. + /// The commit-reveal mechanism ensures that users commit weights in advance and reveal them only within a specified period. + /// + /// # Arguments + /// * `origin` - The origin of the call, which must be the subnet owner or the root account. + /// * `netuid` - The unique identifier of the subnet for which the periods are being set. + /// * `periods` - The number of epochs that define the commit-reveal period. + /// + /// # Errors + /// * `BadOrigin` - If the caller is neither the subnet owner nor the root account. + /// * `SubnetDoesNotExist` - If the specified subnet does not exist. + /// + /// # Weight + /// Weight is handled by the `#[pallet::weight]` attribute. 
+ #[pallet::call_index(56)] + #[pallet::weight((0, DispatchClass::Operational, Pays::No))] + pub fn sudo_set_commit_reveal_weights_periods( + origin: OriginFor, + netuid: u16, + periods: u64, + ) -> DispatchResult { + pallet_subtensor::Pallet::::ensure_subnet_owner_or_root(origin, netuid)?; + + ensure!( + pallet_subtensor::Pallet::::if_subnet_exist(netuid), + Error::::SubnetDoesNotExist + ); + + pallet_subtensor::Pallet::::set_reveal_period(netuid, periods); + log::debug!( + "SetWeightCommitPeriods( netuid: {:?}, periods: {:?} ) ", + netuid, + periods + ); + Ok(()) + } } } diff --git a/pallets/admin-utils/src/weights.rs b/pallets/admin-utils/src/weights.rs index 84fe058f8..ba2247dfd 100644 --- a/pallets/admin-utils/src/weights.rs +++ b/pallets/admin-utils/src/weights.rs @@ -60,7 +60,6 @@ pub trait WeightInfo { fn sudo_set_min_burn() -> Weight; fn sudo_set_network_registration_allowed() -> Weight; fn sudo_set_tempo() -> Weight; - fn sudo_set_commit_reveal_weights_interval() -> Weight; fn sudo_set_commit_reveal_weights_enabled() -> Weight; } @@ -413,15 +412,6 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1_u64)) .saturating_add(T::DbWeight::get().writes(1_u64)) } - fn sudo_set_commit_reveal_weights_interval() -> Weight { - // Proof Size summary in bytes: - // Measured: `1111` - // Estimated: `4697` - // Minimum execution time: 46_450_000 picoseconds. 
- Weight::from_parts(47_279_000, 4697) - .saturating_add(T::DbWeight::get().reads(1_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } fn sudo_set_commit_reveal_weights_enabled() -> Weight { // Proof Size summary in bytes: // Measured: `1111` @@ -781,18 +771,6 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1_u64)) .saturating_add(RocksDbWeight::get().writes(1_u64)) } - fn sudo_set_commit_reveal_weights_interval() -> Weight { - // -- Extrinsic Time -- - // Model: - // Time ~= 20.42 - // µs - // Reads = 1 - // Writes = 1 - // Recorded proof Size = 456 - Weight::from_parts(20_420_000, 456) - .saturating_add(RocksDbWeight::get().reads(1_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } fn sudo_set_commit_reveal_weights_enabled() -> Weight { // -- Extrinsic Time -- // Model: diff --git a/pallets/admin-utils/tests/tests.rs b/pallets/admin-utils/tests/tests.rs index 746dfa6f5..d2c36e29f 100644 --- a/pallets/admin-utils/tests/tests.rs +++ b/pallets/admin-utils/tests/tests.rs @@ -1412,3 +1412,23 @@ fn test_sudo_set_dissolve_network_schedule_duration() { System::assert_last_event(Event::DissolveNetworkScheduleDurationSet(new_duration).into()); }); } + +#[test] +fn sudo_set_commit_reveal_weights_periods() { + new_test_ext().execute_with(|| { + let netuid: u16 = 1; + add_network(netuid, 10); + + let to_be_set = 55; + let init_value = SubtensorModule::get_reveal_period(netuid); + + assert_ok!(AdminUtils::sudo_set_commit_reveal_weights_periods( + <::RuntimeOrigin>::root(), + netuid, + to_be_set + )); + + assert!(init_value != to_be_set); + assert_eq!(SubtensorModule::get_reveal_period(netuid), to_be_set); + }); +} diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 2cd97d5f7..c1ba1b1be 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -567,4 +567,7 @@ impl Pallet { pub fn set_reveal_period(netuid: u16, reveal_period: u64) { 
RevealPeriodEpochs::::insert(netuid, reveal_period); } + pub fn get_reveal_period(netuid: u16) -> u64 { + RevealPeriodEpochs::::get(netuid) + } } From 59ba20bd734fe80cae86049faaa17bd7ac16464b Mon Sep 17 00:00:00 2001 From: Julian Eager Date: Wed, 9 Oct 2024 09:57:24 +0800 Subject: [PATCH 164/213] add lint to forbid `as_*` calls --- build.rs | 1 + support/linting/src/forbid_as_primitive.rs | 78 ++++++++++++++++++++++ support/linting/src/lib.rs | 2 + 3 files changed, 81 insertions(+) create mode 100644 support/linting/src/forbid_as_primitive.rs diff --git a/build.rs b/build.rs index 85388fd6c..7261a28e1 100644 --- a/build.rs +++ b/build.rs @@ -59,6 +59,7 @@ fn main() { } }; + track_lint(ForbidAsPrimitiveConversion::lint(&parsed_file)); track_lint(RequireFreezeStruct::lint(&parsed_file)); track_lint(RequireExplicitPalletIndex::lint(&parsed_file)); }); diff --git a/support/linting/src/forbid_as_primitive.rs b/support/linting/src/forbid_as_primitive.rs new file mode 100644 index 000000000..fd1d2aa7a --- /dev/null +++ b/support/linting/src/forbid_as_primitive.rs @@ -0,0 +1,78 @@ +use super::*; +use syn::{visit::Visit, ExprMethodCall, File, Ident}; + +pub struct ForbidAsPrimitiveConversion; + +impl Lint for ForbidAsPrimitiveConversion { + fn lint(source: &File) -> Result { + let mut visitor = AsPrimitiveVisitor::default(); + + visitor.visit_file(source); + + if !visitor.errors.is_empty() { + return Err(visitor.errors); + } + + Ok(()) + } +} + +#[derive(Default)] +struct AsPrimitiveVisitor { + errors: Vec, +} + +impl<'ast> Visit<'ast> for AsPrimitiveVisitor { + fn visit_expr_method_call(&mut self, node: &'ast ExprMethodCall) { + if is_as_primitive(&node.method) { + self.errors.push(syn::Error::new( + node.method.span(), + "Using 'as_*()' methods is banned to avoid accidental panics. 
Use `try_into()` instead.", + )); + } + + syn::visit::visit_expr_method_call(self, node); + } +} + +fn is_as_primitive(ident: &Ident) -> bool { + match ident.to_string().as_str() { + "as_u32" | "as_u64" | "as_u128" | "as_usize" => true, + _ => false, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn lint(input: &str) -> Result { + let expr: ExprMethodCall = syn::parse_str(input).expect("should only use on a method call"); + let mut visitor = AsPrimitiveVisitor::default(); + visitor.visit_expr_method_call(&expr); + if !visitor.errors.is_empty() { + return Err(visitor.errors); + } + Ok(()) + } + + #[test] + fn test_as_primitives() { + let input = r#"x.as_u32()"#; + assert!(lint(input).is_err()); + let input = r#"x.as_u64()"#; + assert!(lint(input).is_err()); + let input = r#"x.as_u128()"#; + assert!(lint(input).is_err()); + let input = r#"x.as_usize()"#; + assert!(lint(input).is_err()); + } + + #[test] + fn test_non_as_primitives() { + let input = r#"x.as_ref()"#; + assert!(lint(input).is_ok()); + let input = r#"x.as_slice()"#; + assert!(lint(input).is_ok()); + } +} diff --git a/support/linting/src/lib.rs b/support/linting/src/lib.rs index e5416c1d5..7aaf471c7 100644 --- a/support/linting/src/lib.rs +++ b/support/linting/src/lib.rs @@ -1,8 +1,10 @@ pub mod lint; pub use lint::*; +mod forbid_as_primitive; mod pallet_index; mod require_freeze_struct; +pub use forbid_as_primitive::ForbidAsPrimitiveConversion; pub use pallet_index::RequireExplicitPalletIndex; pub use require_freeze_struct::RequireFreezeStruct; From 5ced8e202ce2656d019d2b7027381a2bf7161587 Mon Sep 17 00:00:00 2001 From: Julian Eager Date: Thu, 10 Oct 2024 03:18:50 +0800 Subject: [PATCH 165/213] clippy --- support/linting/src/forbid_as_primitive.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/support/linting/src/forbid_as_primitive.rs b/support/linting/src/forbid_as_primitive.rs index fd1d2aa7a..b6b305971 100644 --- a/support/linting/src/forbid_as_primitive.rs +++ 
b/support/linting/src/forbid_as_primitive.rs @@ -36,10 +36,7 @@ impl<'ast> Visit<'ast> for AsPrimitiveVisitor { } fn is_as_primitive(ident: &Ident) -> bool { - match ident.to_string().as_str() { - "as_u32" | "as_u64" | "as_u128" | "as_usize" => true, - _ => false, - } + matches!(ident.to_string().as_str(), "as_u32" | "as_u64" | "as_u128" | "as_usize") } #[cfg(test)] From 1e37e682f9cdcf9fc14450afa9af1f2e657b2d39 Mon Sep 17 00:00:00 2001 From: Julian Eager Date: Thu, 10 Oct 2024 12:10:27 +0800 Subject: [PATCH 166/213] fmt --- support/linting/src/forbid_as_primitive.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/support/linting/src/forbid_as_primitive.rs b/support/linting/src/forbid_as_primitive.rs index b6b305971..b60cf0a49 100644 --- a/support/linting/src/forbid_as_primitive.rs +++ b/support/linting/src/forbid_as_primitive.rs @@ -36,7 +36,10 @@ impl<'ast> Visit<'ast> for AsPrimitiveVisitor { } fn is_as_primitive(ident: &Ident) -> bool { - matches!(ident.to_string().as_str(), "as_u32" | "as_u64" | "as_u128" | "as_usize") + matches!( + ident.to_string().as_str(), + "as_u32" | "as_u64" | "as_u128" | "as_usize" + ) } #[cfg(test)] From 8d66f5f13f9e7eb07ca289b7bfe69fa0cc34bdc1 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 16 Oct 2024 15:57:12 -0700 Subject: [PATCH 167/213] tempo based & modify order enforcement --- pallets/subtensor/src/macros/errors.rs | 4 - pallets/subtensor/src/subnets/weights.rs | 69 +++-- pallets/subtensor/tests/mock.rs | 18 ++ pallets/subtensor/tests/weights.rs | 339 +++++++++++++++-------- 4 files changed, 281 insertions(+), 149 deletions(-) diff --git a/pallets/subtensor/src/macros/errors.rs b/pallets/subtensor/src/macros/errors.rs index 5f3712355..ac54cff1c 100644 --- a/pallets/subtensor/src/macros/errors.rs +++ b/pallets/subtensor/src/macros/errors.rs @@ -114,8 +114,6 @@ mod errors { DelegateTakeTooLow, /// Delegate take is too high. DelegateTakeTooHigh, - /// Not allowed to commit weights. 
- WeightsCommitNotAllowed, /// No commit found for the provided hotkey+netuid combination when attempting to reveal the weights. NoWeightsCommitFound, /// Committed hash does not equal the hashed reveal data. @@ -184,8 +182,6 @@ mod errors { InvalidIdentity, /// Maximum commit limit reached TooManyUnrevealedCommits, - /// Reveal is out of order. - RevealOutOfOrder, /// Attempted to reveal weights that are expired. ExpiredWeightCommit, /// Attempted to reveal weights too early. diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index c1ba1b1be..690fee200 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -165,45 +165,40 @@ impl Pallet { } } - // --- 7. Collect the hashes of the remaining (non-expired) commits. - let non_expired_hashes: Vec = commits.iter().map(|(hash, _)| *hash).collect(); - - // --- 8. Get the first commit from the VecDeque. - let (commit_hash, commit_block) = - commits.front().ok_or(Error::::NoWeightsCommitFound)?; - - // --- 9. Ensure the commit is ready to be revealed in the current block range. - ensure!( - Self::is_reveal_block_range(netuid, *commit_block), - Error::::RevealTooEarly - ); - - // --- 10. Check if the provided hash matches the first commit's hash. - if provided_hash != *commit_hash { - // Check if the provided hash matches any other non-expired commit in the queue - if non_expired_hashes - .iter() - .skip(1) - .any(|hash| *hash == provided_hash) - { - return Err(Error::::RevealOutOfOrder.into()); - } else if expired_hashes.contains(&provided_hash) { - return Err(Error::::ExpiredWeightCommit.into()); - } else { - return Err(Error::::InvalidRevealCommitHashNotMatch.into()); + // --- 7. Search for the provided_hash in the non-expired commits. + if let Some(position) = commits.iter().position(|(hash, _)| *hash == provided_hash) { + // --- 8. Get the commit block for the commit being revealed. 
+ let (_, commit_block) = commits + .get(position) + .ok_or(Error::::NoWeightsCommitFound)?; + + // --- 9. Ensure the commit is ready to be revealed in the current block range. + ensure!( + Self::is_reveal_block_range(netuid, *commit_block), + Error::::RevealTooEarly + ); + + // --- 10. Remove all commits up to and including the one being revealed. + for _ in 0..=position { + commits.pop_front(); } - } - // --- 11. Remove the first commit from the queue after passing all checks. - commits.pop_front(); + // --- 11. If the queue is now empty, remove the storage entry for the user. + if commits.is_empty() { + *maybe_commits = None; + } - // --- 12. If the queue is now empty, remove the storage entry for the user. - if commits.is_empty() { - *maybe_commits = None; + // --- 12. Proceed to set the revealed weights. + Self::do_set_weights(origin, netuid, uids, values, version_key) + } else { + // --- 13. The provided_hash does not match any non-expired commits. + // Check if provided_hash matches any expired commits + if expired_hashes.contains(&provided_hash) { + Err(Error::::ExpiredWeightCommit.into()) + } else { + Err(Error::::InvalidRevealCommitHashNotMatch.into()) + } } - - // --- 13. Proceed to set the revealed weights. 
- Self::do_set_weights(origin, netuid, uids, values, version_key) }) } @@ -540,7 +535,7 @@ impl Pallet { let current_block: u64 = Self::get_current_block_as_u64(); let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); let current_epoch: u64 = Self::get_epoch_index(netuid, current_block); - let reveal_period: u64 = RevealPeriodEpochs::::get(netuid); + let reveal_period: u64 = Self::get_reveal_period(netuid); // Reveal is allowed only in the exact epoch `commit_epoch + reveal_period` current_epoch == commit_epoch.saturating_add(reveal_period) @@ -559,7 +554,7 @@ impl Pallet { let current_block: u64 = Self::get_current_block_as_u64(); let current_epoch: u64 = Self::get_epoch_index(netuid, current_block); let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); - let reveal_period: u64 = RevealPeriodEpochs::::get(netuid); + let reveal_period: u64 = Self::get_reveal_period(netuid); current_epoch > commit_epoch.saturating_add(reveal_period) } diff --git a/pallets/subtensor/tests/mock.rs b/pallets/subtensor/tests/mock.rs index 6f3b44383..7a2967e8c 100644 --- a/pallets/subtensor/tests/mock.rs +++ b/pallets/subtensor/tests/mock.rs @@ -513,6 +513,24 @@ pub(crate) fn run_to_block(n: u64) { } } +#[allow(dead_code)] +pub(crate) fn step_epochs(count: u16, netuid: u16) { + for _ in 0..count { + let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( + netuid, + SubtensorModule::get_tempo(netuid), + SubtensorModule::get_current_block_as_u64(), + ); + step_block(blocks_to_next_epoch as u16); + + assert!(SubtensorModule::should_run_epoch( + netuid, + SubtensorModule::get_current_block_as_u64() + )); + step_block(1); + } +} + /// Increments current block by `1`, running all hooks associated with doing so, and asserts /// that the block number was in fact incremented. 
/// diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 764cfd040..3fe8bed82 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -1977,7 +1977,7 @@ fn test_commit_reveal_multiple_commits() { SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); // 1. Commit 10 times successfully - let mut commit_hashes = vec![]; + let mut commit_info = Vec::new(); for i in 0..10 { let salt_i: Vec = vec![i; 8]; // Unique salt for each commit let commit_hash: H256 = BlakeTwo256::hash_of(&( @@ -1988,7 +1988,7 @@ fn test_commit_reveal_multiple_commits() { salt_i.clone(), version_key, )); - commit_hashes.push((commit_hash, salt_i)); + commit_info.push((commit_hash, salt_i)); assert_ok!(SubtensorModule::commit_weights( RuntimeOrigin::signed(hotkey), netuid, @@ -2011,35 +2011,40 @@ fn test_commit_reveal_multiple_commits() { Error::::TooManyUnrevealedCommits ); - // 3. Attempt to reveal out of order (reveal the second commit first), should fail + // 3. Attempt to reveal out of order (reveal the second commit first) // Advance to the next epoch for reveals to be valid step_epochs(1, netuid); // Try to reveal the second commit first - let (_commit_hash_2, salt_2) = &commit_hashes[1]; - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt_2.clone(), - version_key, - ), - Error::::RevealOutOfOrder - ); + let (_commit_hash_2, salt_2) = &commit_info[1]; + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_2.clone(), + version_key, + )); - // 4. 
Reveal commits in order, ensuring they succeed - for (_commit_hash_i, salt_i) in commit_hashes.iter() { - assert_ok!(SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt_i.clone(), - version_key, - )); - } + // Check that commits before the revealed one are removed + let remaining_commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey) + .expect("expected 8 remaining commits"); + assert_eq!(remaining_commits.len(), 8); // 10 commits - 2 removed (index 0 and 1) + + // 4. Reveal the last commit next + let (_commit_hash_10, salt_10) = &commit_info[9]; + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt_10.clone(), + version_key, + )); + + // Remaining commits should have removed up to index 9 + let remaining_commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey); + assert!(remaining_commits.is_none()); // All commits removed // After revealing all commits, attempt to commit again should now succeed assert_ok!(SubtensorModule::commit_weights( @@ -2239,7 +2244,7 @@ fn test_commit_reveal_multiple_commits() { Error::::NoWeightsCommitFound ); - // 10. Commit twice and attempt to reveal out of sequence + // 10. 
Commit twice and attempt to reveal out of sequence (which is now allowed) let salt_a: Vec = vec![21; 8]; let commit_hash_a: H256 = BlakeTwo256::hash_of(&( hotkey, @@ -2272,38 +2277,32 @@ fn test_commit_reveal_multiple_commits() { step_epochs(1, netuid); - // Attempt to reveal the second commit first - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt_b.clone(), - version_key, - ), - Error::::RevealOutOfOrder - ); - - // Reveal the first commit + // Reveal the second commit first, should now succeed assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, uids.clone(), weight_values.clone(), - salt_a.clone(), + salt_b.clone(), version_key, )); - // Now reveal the second commit - assert_ok!(SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids, - weight_values, - salt_b, - version_key, - )); + // Check that the first commit has been removed + let remaining_commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey); + assert!(remaining_commits.is_none()); + + // Attempting to reveal the first commit should fail as it was removed + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids, + weight_values, + salt_a, + version_key, + ), + Error::::NoWeightsCommitFound + ); }); } @@ -2848,23 +2847,6 @@ fn test_tempo_and_reveal_period_change_during_commit_reveal_process() { }); } -pub fn step_epochs(count: u16, netuid: u16) { - for _ in 0..count { - let blocks_to_next_epoch = SubtensorModule::blocks_until_next_epoch( - netuid, - SubtensorModule::get_tempo(netuid), - SubtensorModule::get_current_block_as_u64(), - ); - step_block(blocks_to_next_epoch as u16); - - assert!(SubtensorModule::should_run_epoch( - netuid, - SubtensorModule::get_current_block_as_u64() - )); - step_block(1); - } -} - #[test] fn test_commit_reveal_order_enforcement() { new_test_ext(1).execute_with(|| { @@ -2908,46 
+2890,8 @@ fn test_commit_reveal_order_enforcement() { step_epochs(1, netuid); - // Attempt to reveal B first (index 1), should fail + // Attempt to reveal B first (index 1), should now succeed let (_commit_hash_b, salt_b) = &commit_info[1]; - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt_b.clone(), - version_key, - ), - Error::::RevealOutOfOrder - ); - - // Reveal A (index 0) - let (_commit_hash_a, salt_a) = &commit_info[0]; - assert_ok!(SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt_a.clone(), - version_key, - )); - - // Attempt to reveal C (index 2) before B, should fail - let (_commit_hash_c, salt_c) = &commit_info[2]; - assert_err!( - SubtensorModule::reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - uids.clone(), - weight_values.clone(), - salt_c.clone(), - version_key, - ), - Error::::RevealOutOfOrder - ); - - // Reveal B assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, @@ -2957,7 +2901,13 @@ fn test_commit_reveal_order_enforcement() { version_key, )); - // Reveal C + // Check that commits A and B are removed + let remaining_commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey) + .expect("expected 1 remaining commit"); + assert_eq!(remaining_commits.len(), 1); // Only commit C should remain + + // Attempt to reveal C (index 2), should succeed + let (_commit_hash_c, salt_c) = &commit_info[2]; assert_ok!(SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), netuid, @@ -2967,6 +2917,8 @@ fn test_commit_reveal_order_enforcement() { version_key, )); + // Attempting to reveal A (index 0) should fail as it's been removed + let (_commit_hash_a, salt_a) = &commit_info[0]; assert_err!( SubtensorModule::reveal_weights( RuntimeOrigin::signed(hotkey), @@ -2980,3 +2932,174 @@ fn test_commit_reveal_order_enforcement() { ); }); } + +#[test] 
+fn test_reveal_at_exact_block() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey: ::AccountId = U256::from(1); + let version_key: u64 = 0; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let tempo: u16 = 360; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + let reveal_periods: Vec = vec![ + 0, + 1, + 2, + 5, + 19, + 21, + 30, + 77, + 104, + 833, + 1999, + 36398, + u32::MAX as u64, + ]; + + for &reveal_period in &reveal_periods { + SubtensorModule::set_reveal_period(netuid, reveal_period); + + // Step 1: Commit weights + let salt: Vec = vec![42 + (reveal_period % 100) as u16; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + + let commit_block = SubtensorModule::get_current_block_as_u64(); + let commit_epoch = SubtensorModule::get_epoch_index(netuid, commit_block); + let reveal_epoch = commit_epoch.saturating_add(reveal_period); + + // Calculate the block number where the reveal epoch starts + let tempo_plus_one = (tempo as u64).saturating_add(1); + let netuid_plus_one = (netuid as u64).saturating_add(1); + let reveal_epoch_start_block = reveal_epoch + .saturating_mul(tempo_plus_one) + .saturating_sub(netuid_plus_one); + + // Attempt to reveal before the reveal epoch starts + let current_block = SubtensorModule::get_current_block_as_u64(); + if current_block < reveal_epoch_start_block { + // 
Advance to one block before the reveal epoch starts + let blocks_to_advance = reveal_epoch_start_block.saturating_sub(current_block); + if blocks_to_advance > 1 { + // Advance to one block before the reveal epoch + let new_block_number = current_block + blocks_to_advance - 1; + System::set_block_number(new_block_number); + } + + // Attempt to reveal too early + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key + ), + Error::::RevealTooEarly + ); + + // Advance one more block to reach the exact reveal epoch start block + System::set_block_number(reveal_epoch_start_block); + } else { + // If we're already at or past the reveal epoch start block + System::set_block_number(reveal_epoch_start_block); + } + + // Reveal at the exact allowed block + assert_ok!(SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key + )); + + // Attempt to reveal again; should fail with NoWeightsCommitFound + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key + ), + Error::::NoWeightsCommitFound + ); + + // Commit again with new salt + let new_salt: Vec = vec![43 + (reveal_period % 100) as u16; 8]; + let new_commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + new_salt.clone(), + version_key, + )); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + new_commit_hash + )); + + // Advance blocks to after the commit expires + let commit_block = SubtensorModule::get_current_block_as_u64(); + let commit_epoch = SubtensorModule::get_epoch_index(netuid, commit_block); + let reveal_epoch = commit_epoch.saturating_add(reveal_period); + let expiration_epoch = reveal_epoch.saturating_add(1); + let 
expiration_epoch_start_block = expiration_epoch + .saturating_mul(tempo_plus_one) + .saturating_sub(netuid_plus_one); + + let current_block = SubtensorModule::get_current_block_as_u64(); + if current_block < expiration_epoch_start_block { + // Advance to the block where the commit expires + System::set_block_number(expiration_epoch_start_block); + } + + // Attempt to reveal after the commit has expired + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + new_salt.clone(), + version_key + ), + Error::::ExpiredWeightCommit + ); + + // Clean up for next iteration + pallet_subtensor::WeightCommits::::remove(netuid, hotkey); + } + }); +} From 240e17ae415dd05e402875686a831e3e90ea527b Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 17 Oct 2024 08:16:02 -0700 Subject: [PATCH 168/213] add commit-reveal-periods to SubnetHyperparams --- pallets/subtensor/src/rpc_info/subnet_info.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pallets/subtensor/src/rpc_info/subnet_info.rs b/pallets/subtensor/src/rpc_info/subnet_info.rs index 312a20723..8c79db03a 100644 --- a/pallets/subtensor/src/rpc_info/subnet_info.rs +++ b/pallets/subtensor/src/rpc_info/subnet_info.rs @@ -51,7 +51,7 @@ pub struct SubnetInfov2 { identity: Option, } -#[freeze_struct("e8abe48842dcc8c4")] +#[freeze_struct("4ceb81dfe8a8f96d")] #[derive(Decode, Encode, PartialEq, Eq, Clone, Debug)] pub struct SubnetHyperparams { rho: Compact, @@ -76,6 +76,7 @@ pub struct SubnetHyperparams { max_validators: Compact, adjustment_alpha: Compact, difficulty: Compact, + commit_reveal_periods: Compact, commit_reveal_weights_enabled: bool, alpha_high: Compact, alpha_low: Compact, @@ -251,6 +252,7 @@ impl Pallet { let max_validators = Self::get_max_allowed_validators(netuid); let adjustment_alpha = Self::get_adjustment_alpha(netuid); let difficulty = Self::get_difficulty_as_u64(netuid); + let commit_reveal_periods = 
Self::get_reveal_period(netuid); let commit_reveal_weights_enabled = Self::get_commit_reveal_weights_enabled(netuid); let liquid_alpha_enabled = Self::get_liquid_alpha_enabled(netuid); let (alpha_low, alpha_high): (u16, u16) = Self::get_alpha_values(netuid); @@ -278,6 +280,7 @@ impl Pallet { max_validators: max_validators.into(), adjustment_alpha: adjustment_alpha.into(), difficulty: difficulty.into(), + commit_reveal_periods: commit_reveal_periods.into(), commit_reveal_weights_enabled, alpha_high: alpha_high.into(), alpha_low: alpha_low.into(), From 1e273a78551837017716d0a438326158670725a9 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 17 Oct 2024 09:16:43 -0700 Subject: [PATCH 169/213] Update dispatch doc comments --- pallets/subtensor/src/macros/dispatches.rs | 26 ++++++++++++++++------ 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/pallets/subtensor/src/macros/dispatches.rs b/pallets/subtensor/src/macros/dispatches.rs index c4b985a49..bdfd3e6db 100644 --- a/pallets/subtensor/src/macros/dispatches.rs +++ b/pallets/subtensor/src/macros/dispatches.rs @@ -102,8 +102,11 @@ mod dispatches { /// - The hash representing the committed weights. /// /// # Raises: - /// * `WeightsCommitNotAllowed`: - /// - Attempting to commit when it is not allowed. + /// * `CommitRevealDisabled`: + /// - Attempting to commit when the commit-reveal mechanism is disabled. + /// + /// * `TooManyUnrevealedCommits`: + /// - Attempting to commit when the user has more than the allowed limit of unrevealed commits. /// #[pallet::call_index(96)] #[pallet::weight((Weight::from_parts(46_000_000, 0) @@ -132,21 +135,30 @@ mod dispatches { /// * `values` (`Vec`): /// - The values of the weights being revealed. /// - /// * `salt` (`Vec`): - /// - The random salt to protect from brute-force guessing attack in case of small weight changes bit-wise. + /// * `salt` (`Vec`): + /// - The salt used to generate the commit hash. /// /// * `version_key` (`u64`): /// - The network version key. 
/// /// # Raises: + /// * `CommitRevealDisabled`: + /// - Attempting to reveal weights when the commit-reveal mechanism is disabled. + /// /// * `NoWeightsCommitFound`: /// - Attempting to reveal weights without an existing commit. /// - /// * `InvalidRevealCommitHashNotMatchTempo`: - /// - Attempting to reveal weights outside the valid tempo. + /// * `ExpiredWeightCommit`: + /// - Attempting to reveal a weight commit that has expired. + /// + /// * `RevealTooEarly`: + /// - Attempting to reveal weights outside the valid reveal period. + /// + /// * `RevealOutOfOrder`: + /// - Attempting to reveal a commit out of the expected order. /// /// * `InvalidRevealCommitHashNotMatch`: - /// - The revealed hash does not match the committed hash. + /// - The revealed hash does not match any committed hash. /// #[pallet::call_index(97)] #[pallet::weight((Weight::from_parts(103_000_000, 0) From 36810269b0e7fa66b5995626cb7dc6c74139dc55 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 17 Oct 2024 14:14:54 -0700 Subject: [PATCH 170/213] add extrinsic batch_reveal_weights --- pallets/subtensor/src/macros/dispatches.rs | 61 +++ pallets/subtensor/src/macros/errors.rs | 2 + pallets/subtensor/src/subnets/weights.rs | 147 ++++++ pallets/subtensor/tests/weights.rs | 572 +++++++++++++++++++++ 4 files changed, 782 insertions(+) diff --git a/pallets/subtensor/src/macros/dispatches.rs b/pallets/subtensor/src/macros/dispatches.rs index bdfd3e6db..19baa2a4b 100644 --- a/pallets/subtensor/src/macros/dispatches.rs +++ b/pallets/subtensor/src/macros/dispatches.rs @@ -175,6 +175,67 @@ mod dispatches { Self::do_reveal_weights(origin, netuid, uids, values, salt, version_key) } + /// ---- The implementation for batch revealing committed weights. + /// + /// # Args: + /// * `origin`: (`::RuntimeOrigin`): + /// - The signature of the revealing hotkey. + /// + /// * `netuid` (`u16`): + /// - The u16 network identifier. 
+ /// + /// * `uids_list` (`Vec>`): + /// - A list of uids for each set of weights being revealed. + /// + /// * `values_list` (`Vec>`): + /// - A list of values for each set of weights being revealed. + /// + /// * `salts_list` (`Vec>`): + /// - A list of salts used to generate the commit hashes. + /// + /// * `version_keys` (`Vec`): + /// - A list of network version keys. + /// + /// # Raises: + /// * `CommitRevealDisabled`: + /// - Attempting to reveal weights when the commit-reveal mechanism is disabled. + /// + /// * `NoWeightsCommitFound`: + /// - Attempting to reveal weights without an existing commit. + /// + /// * `ExpiredWeightCommit`: + /// - Attempting to reveal a weight commit that has expired. + /// + /// * `RevealTooEarly`: + /// - Attempting to reveal weights outside the valid reveal period. + /// + /// * `InvalidRevealCommitHashNotMatch`: + /// - The revealed hash does not match any committed hash. + /// + /// * `InputLengthsUnequal`: + /// - The input vectors are of mismatched lengths. + #[pallet::call_index(98)] + #[pallet::weight((Weight::from_parts(103_000_000, 0) + .saturating_add(T::DbWeight::get().reads(11)) + .saturating_add(T::DbWeight::get().writes(3)), DispatchClass::Normal, Pays::No))] + pub fn batch_reveal_weights( + origin: T::RuntimeOrigin, + netuid: u16, + uids_list: Vec>, + values_list: Vec>, + salts_list: Vec>, + version_keys: Vec, + ) -> DispatchResult { + Self::do_batch_reveal_weights( + origin, + netuid, + uids_list, + values_list, + salts_list, + version_keys, + ) + } + /// # Args: /// * `origin`: (Origin): /// - The caller, a hotkey who wishes to set their weights. diff --git a/pallets/subtensor/src/macros/errors.rs b/pallets/subtensor/src/macros/errors.rs index ac54cff1c..1e4bf9ae0 100644 --- a/pallets/subtensor/src/macros/errors.rs +++ b/pallets/subtensor/src/macros/errors.rs @@ -186,5 +186,7 @@ mod errors { ExpiredWeightCommit, /// Attempted to reveal weights too early. 
RevealTooEarly, + /// Attempted to batch reveal weights with mismatched vector input lengths. + InputLengthsUnequal, } } diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 690fee200..61130d18a 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -202,6 +202,153 @@ impl Pallet { }) } + /// ---- The implementation for batch revealing committed weights. + /// + /// # Args: + /// * `origin`: (`::RuntimeOrigin`): + /// - The signature of the revealing hotkey. + /// + /// * `netuid` (`u16`): + /// - The u16 network identifier. + /// + /// * `uids_list` (`Vec>`): + /// - A list of uids for each set of weights being revealed. + /// + /// * `values_list` (`Vec>`): + /// - A list of values for each set of weights being revealed. + /// + /// * `salts_list` (`Vec>`): + /// - A list of salts used to generate the commit hashes. + /// + /// * `version_keys` (`Vec`): + /// - A list of network version keys. + /// + /// # Raises: + /// * `CommitRevealDisabled`: + /// - Attempting to reveal weights when the commit-reveal mechanism is disabled. + /// + /// * `NoWeightsCommitFound`: + /// - Attempting to reveal weights without an existing commit. + /// + /// * `ExpiredWeightCommit`: + /// - Attempting to reveal a weight commit that has expired. + /// + /// * `RevealTooEarly`: + /// - Attempting to reveal weights outside the valid reveal period. + /// + /// * `InvalidRevealCommitHashNotMatch`: + /// - The revealed hash does not match any committed hash. + /// + /// * `InputLengthsUnequal`: + /// - The input vectors are of mismatched lengths. + pub fn do_batch_reveal_weights( + origin: T::RuntimeOrigin, + netuid: u16, + uids_list: Vec>, + values_list: Vec>, + salts_list: Vec>, + version_keys: Vec, + ) -> DispatchResult { + // --- 1. Check that the input lists are of the same length. 
+ let num_reveals = uids_list.len(); + ensure!( + num_reveals == values_list.len() + && num_reveals == salts_list.len() + && num_reveals == version_keys.len(), + Error::::InputLengthsUnequal + ); + + // --- 2. Check the caller's signature (hotkey). + let who = ensure_signed(origin.clone())?; + + log::debug!( + "do_batch_reveal_weights( hotkey:{:?} netuid:{:?})", + who, + netuid + ); + + // --- 3. Ensure commit-reveal is enabled for the network. + ensure!( + Self::get_commit_reveal_weights_enabled(netuid), + Error::::CommitRevealDisabled + ); + + // --- 4. Mutate the WeightCommits to retrieve existing commits for the user. + WeightCommits::::try_mutate_exists(netuid, &who, |maybe_commits| -> DispatchResult { + let commits = maybe_commits + .as_mut() + .ok_or(Error::::NoWeightsCommitFound)?; + + // --- 5. Remove any expired commits from the front of the queue, collecting their hashes. + let mut expired_hashes = Vec::new(); + while let Some((hash, commit_block)) = commits.front() { + if Self::is_commit_expired(netuid, *commit_block) { + // Collect the expired commit hash + expired_hashes.push(*hash); + commits.pop_front(); + } else { + break; + } + } + + // --- 6. Process each reveal. + for ((uids, values), (salt, version_key)) in uids_list + .into_iter() + .zip(values_list) + .zip(salts_list.into_iter().zip(version_keys)) + { + // --- 6a. Hash the provided data. + let provided_hash: H256 = BlakeTwo256::hash_of(&( + who.clone(), + netuid, + uids.clone(), + values.clone(), + salt.clone(), + version_key, + )); + + // --- 6b. Search for the provided_hash in the non-expired commits. + if let Some(position) = commits.iter().position(|(hash, _)| *hash == provided_hash) + { + // --- 6c. Get the commit block for the commit being revealed. + let (_, commit_block) = commits + .get(position) + .ok_or(Error::::NoWeightsCommitFound)?; + + // --- 6d. Ensure the commit is ready to be revealed in the current block range. 
+ ensure!( + Self::is_reveal_block_range(netuid, *commit_block), + Error::::RevealTooEarly + ); + + // --- 6e. Remove all commits up to and including the one being revealed. + for _ in 0..=position { + commits.pop_front(); + } + + // --- 6f. Proceed to set the revealed weights. + Self::do_set_weights(origin.clone(), netuid, uids, values, version_key)?; + } else { + // The provided_hash does not match any non-expired commits. + // Check if it matches any expired commits + if expired_hashes.contains(&provided_hash) { + return Err(Error::::ExpiredWeightCommit.into()); + } else { + return Err(Error::::InvalidRevealCommitHashNotMatch.into()); + } + } + } + + // --- 7. If the queue is now empty, remove the storage entry for the user. + if commits.is_empty() { + *maybe_commits = None; + } + + // --- 8. Return ok. + Ok(()) + }) + } + /// ---- The implementation for the extrinsic set_weights. /// /// # Args: diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 3fe8bed82..c52739482 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -3103,3 +3103,575 @@ fn test_reveal_at_exact_block() { } }); } + +#[test] +fn test_successful_batch_reveal() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey = U256::from(1); + let version_keys: Vec = vec![0, 0, 0]; + let uids_list: Vec> = vec![vec![0, 1], vec![1, 0], vec![0, 1]]; + let weight_values_list: Vec> = vec![vec![10, 20], vec![30, 40], vec![50, 60]]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, hotkey, U256::from(2), 100_000); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + // 1. 
Commit multiple times + let mut commit_info = Vec::new(); + for i in 0..3 { + let salt: Vec = vec![i as u16; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids_list[i].clone(), + weight_values_list[i].clone(), + salt.clone(), + version_keys[i], + )); + commit_info.push((commit_hash, salt)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + } + + step_epochs(1, netuid); + + // 2. Prepare batch reveal parameters + let salts_list: Vec> = commit_info.iter().map(|(_, salt)| salt.clone()).collect(); + + // 3. Perform batch reveal + assert_ok!(SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list.clone(), + weight_values_list.clone(), + salts_list.clone(), + version_keys.clone(), + )); + + // 4. Ensure all commits are removed + let commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey); + assert!(commits.is_none()); + }); +} + +#[test] +fn test_batch_reveal_with_expired_commits() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey = U256::from(1); + let version_keys: Vec = vec![0, 0, 0]; + let uids_list: Vec> = vec![vec![0, 1], vec![1, 0], vec![0, 1]]; + let weight_values_list: Vec> = vec![vec![10, 20], vec![30, 40], vec![50, 60]]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, hotkey, U256::from(2), 100_000); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + let mut commit_info = Vec::new(); + + // 1. 
Commit the first weight in epoch 0 + let salt0: Vec = vec![0u16; 8]; + let commit_hash0: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids_list[0].clone(), + weight_values_list[0].clone(), + salt0.clone(), + version_keys[0], + )); + commit_info.push((commit_hash0, salt0)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash0 + )); + + // Advance to epoch 1 + step_epochs(1, netuid); + + // 2. Commit the next two weights in epoch 1 + for i in 1..3 { + let salt: Vec = vec![i as u16; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids_list[i].clone(), + weight_values_list[i].clone(), + salt.clone(), + version_keys[i], + )); + commit_info.push((commit_hash, salt)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + } + + // Advance to epoch 2 (after reveal period for first commit) + step_epochs(1, netuid); + + // 3. Prepare batch reveal parameters + let salts_list: Vec> = commit_info.iter().map(|(_, salt)| salt.clone()).collect(); + + // 4. Perform batch reveal + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list.clone(), + weight_values_list.clone(), + salts_list.clone(), + version_keys.clone(), + ); + assert_err!(result, Error::::ExpiredWeightCommit); + + // 5. Expired commit is not removed until a successful call + let commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey) + .expect("Expected remaining commits"); + assert_eq!(commits.len(), 3); + + // 6. 
Try revealing the remaining commits + let valid_uids_list = uids_list[1..].to_vec(); + let valid_weight_values_list = weight_values_list[1..].to_vec(); + let valid_salts_list = salts_list[1..].to_vec(); + let valid_version_keys = version_keys[1..].to_vec(); + + assert_ok!(SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + valid_uids_list, + valid_weight_values_list, + valid_salts_list, + valid_version_keys, + )); + + // 7. Ensure all commits are removed + let commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey); + assert!(commits.is_none()); + }); +} + +#[test] +fn test_batch_reveal_with_invalid_input_lengths() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey = U256::from(1); + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + + // Base data for valid inputs + let uids_list: Vec> = vec![vec![0, 1], vec![1, 0]]; + let weight_values_list: Vec> = vec![vec![10, 20], vec![30, 40]]; + let salts_list: Vec> = vec![vec![0u16; 8], vec![1u16; 8]]; + let version_keys: Vec = vec![0, 0]; + + // Test cases with mismatched input lengths + + // Case 1: uids_list has an extra element + let uids_list_case = vec![vec![0, 1], vec![1, 0], vec![2, 3]]; + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list_case.clone(), + weight_values_list.clone(), + salts_list.clone(), + version_keys.clone(), + ); + assert_err!(result, Error::::InputLengthsUnequal); + + // Case 2: weight_values_list has an extra element + let weight_values_list_case = vec![vec![10, 20], vec![30, 40], vec![50, 60]]; + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list.clone(), + weight_values_list_case.clone(), + salts_list.clone(), + version_keys.clone(), + ); + assert_err!(result, Error::::InputLengthsUnequal); + + // Case 
3: salts_list has an extra element + let salts_list_case = vec![vec![0u16; 8], vec![1u16; 8], vec![2u16; 8]]; + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list.clone(), + weight_values_list.clone(), + salts_list_case.clone(), + version_keys.clone(), + ); + assert_err!(result, Error::::InputLengthsUnequal); + + // Case 4: version_keys has an extra element + let version_keys_case = vec![0, 0, 0]; + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list.clone(), + weight_values_list.clone(), + salts_list.clone(), + version_keys_case.clone(), + ); + assert_err!(result, Error::::InputLengthsUnequal); + + // Case 5: All input vectors have mismatched lengths + let uids_list_case = vec![vec![0, 1]]; + let weight_values_list_case = vec![vec![10, 20], vec![30, 40]]; + let salts_list_case = vec![vec![0u16; 8]]; + let version_keys_case = vec![0, 0, 0]; + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list_case, + weight_values_list_case, + salts_list_case, + version_keys_case, + ); + assert_err!(result, Error::::InputLengthsUnequal); + + // Case 6: Valid input lengths (should not return an error) + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list.clone(), + weight_values_list.clone(), + salts_list.clone(), + version_keys.clone(), + ); + // We expect an error because no commits have been made, but it should not be InputLengthsUnequal + assert_err!(result, Error::::NoWeightsCommitFound); + }); +} + +#[test] +fn test_batch_reveal_with_no_commits() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey = U256::from(1); + let version_keys: Vec = vec![0]; + let uids_list: Vec> = vec![vec![0, 1]]; + let weight_values_list: Vec> = vec![vec![10, 20]]; + let salts_list: Vec> = vec![vec![0u16; 8]]; + let tempo: u16 = 100; + + 
System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + + // 1. Attempt to perform batch reveal without any commits + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list, + weight_values_list, + salts_list, + version_keys, + ); + assert_err!(result, Error::::NoWeightsCommitFound); + }); +} + +#[test] +fn test_batch_reveal_before_reveal_period() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey = U256::from(1); + let version_keys: Vec = vec![0, 0]; + let uids_list: Vec> = vec![vec![0, 1], vec![1, 0]]; + let weight_values_list: Vec> = vec![vec![10, 20], vec![30, 40]]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, hotkey, U256::from(2), 100_000); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + // 1. Commit multiple times in the same epoch + let mut commit_info = Vec::new(); + for i in 0..2 { + let salt: Vec = vec![i as u16; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids_list[i].clone(), + weight_values_list[i].clone(), + salt.clone(), + version_keys[i], + )); + commit_info.push((commit_hash, salt)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + } + + // 2. Prepare batch reveal parameters + let salts_list: Vec> = commit_info.iter().map(|(_, salt)| salt.clone()).collect(); + + // 3. 
Attempt to reveal before reveal period + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list.clone(), + weight_values_list.clone(), + salts_list.clone(), + version_keys.clone(), + ); + assert_err!(result, Error::::RevealTooEarly); + }); +} + +#[test] +fn test_batch_reveal_after_commits_expired() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey = U256::from(1); + let version_keys: Vec = vec![0, 0]; + let uids_list: Vec> = vec![vec![0, 1], vec![1, 0]]; + let weight_values_list: Vec> = vec![vec![10, 20], vec![30, 40]]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, hotkey, U256::from(2), 100_000); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + let mut commit_info = Vec::new(); + + // 1. Commit the first weight in epoch 0 + let salt0: Vec = vec![0u16; 8]; + let commit_hash0: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids_list[0].clone(), + weight_values_list[0].clone(), + salt0.clone(), + version_keys[0], + )); + commit_info.push((commit_hash0, salt0)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash0 + )); + + // Advance to epoch 1 + step_epochs(1, netuid); + + // 2. 
Commit the second weight in epoch 1 + let salt1: Vec = vec![1u16; 8]; + let commit_hash1: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids_list[1].clone(), + weight_values_list[1].clone(), + salt1.clone(), + version_keys[1], + )); + commit_info.push((commit_hash1, salt1)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash1 + )); + + // Advance to epoch 4 to ensure both commits have expired (assuming reveal_period is 1) + step_epochs(3, netuid); + + // 3. Prepare batch reveal parameters + let salts_list: Vec> = commit_info.iter().map(|(_, salt)| salt.clone()).collect(); + + // 4. Attempt to reveal after commits have expired + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list.clone(), + weight_values_list.clone(), + salts_list, + version_keys.clone(), + ); + assert_err!(result, Error::::ExpiredWeightCommit); + }); +} + +#[test] +fn test_batch_reveal_when_commit_reveal_disabled() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey = U256::from(1); + let version_keys: Vec = vec![0]; + let uids_list: Vec> = vec![vec![0, 1]]; + let weight_values_list: Vec> = vec![vec![10, 20]]; + let salts_list: Vec> = vec![vec![0u16; 8]]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, false); + + // 1. 
Attempt to perform batch reveal when commit-reveal is disabled + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list, + weight_values_list, + salts_list, + version_keys, + ); + assert_err!(result, Error::::CommitRevealDisabled); + }); +} + +#[test] +fn test_batch_reveal_with_out_of_order_commits() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let hotkey = U256::from(1); + let version_keys: Vec = vec![0, 0, 0]; + let uids_list: Vec> = vec![vec![0, 1], vec![1, 0], vec![0, 1]]; + let weight_values_list: Vec> = vec![vec![10, 20], vec![30, 40], vec![50, 60]]; + let tempo: u16 = 100; + + System::set_block_number(0); + add_network(netuid, tempo, 0); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, hotkey, U256::from(2), 100_000); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + + // 1. Commit multiple times + let mut commit_info = Vec::new(); + for i in 0..3 { + let salt: Vec = vec![i as u16; 8]; + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids_list[i].clone(), + weight_values_list[i].clone(), + salt.clone(), + version_keys[i], + )); + commit_info.push((commit_hash, salt)); + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + } + + step_epochs(1, netuid); + + // 2. 
Prepare batch reveal parameters out of order + let salts_list: Vec> = vec![ + commit_info[2].1.clone(), // Third commit + commit_info[0].1.clone(), // First commit + commit_info[1].1.clone(), // Second commit + ]; + let uids_list_out_of_order = vec![ + uids_list[2].clone(), + uids_list[0].clone(), + uids_list[1].clone(), + ]; + let weight_values_list_out_of_order = vec![ + weight_values_list[2].clone(), + weight_values_list[0].clone(), + weight_values_list[1].clone(), + ]; + let version_keys_out_of_order = vec![version_keys[2], version_keys[0], version_keys[1]]; + + // 3. Attempt batch reveal out of order + let result = SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids_list_out_of_order, + weight_values_list_out_of_order, + salts_list, + version_keys_out_of_order, + ); + + // 4. Ensure the batch reveal fails with InvalidRevealCommitHashNotMatch + assert_err!(result, Error::::InvalidRevealCommitHashNotMatch); + + // 5. Reveal the first commit to proceed correctly + let first_salt = commit_info[0].1.clone(); + let first_uids = uids_list[0].clone(); + let first_weights = weight_values_list[0].clone(); + let first_version_key = version_keys[0]; + + assert_ok!(SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + vec![first_uids], + vec![first_weights], + vec![first_salt], + vec![first_version_key], + )); + + // 6. 
Now attempt to reveal the remaining commits in order + let remaining_salts = vec![ + commit_info[1].1.clone(), // Second commit + commit_info[2].1.clone(), // Third commit + ]; + let remaining_uids_list = vec![uids_list[1].clone(), uids_list[2].clone()]; + let remaining_weight_values_list = + vec![weight_values_list[1].clone(), weight_values_list[2].clone()]; + let remaining_version_keys = vec![version_keys[1], version_keys[2]]; + + assert_ok!(SubtensorModule::do_batch_reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + remaining_uids_list, + remaining_weight_values_list, + remaining_salts, + remaining_version_keys, + )); + + // 7. Ensure all commits are removed + let commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey); + assert!(commits.is_none()); + }); +} From adeac4cfd508282c8e89273f83cbbeef98dc5bb2 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Fri, 18 Oct 2024 18:42:08 -0700 Subject: [PATCH 171/213] fix doc comment --- pallets/subtensor/src/macros/dispatches.rs | 3 --- pallets/subtensor/src/subnets/weights.rs | 3 --- 2 files changed, 6 deletions(-) diff --git a/pallets/subtensor/src/macros/dispatches.rs b/pallets/subtensor/src/macros/dispatches.rs index 19baa2a4b..42c8e3c1a 100644 --- a/pallets/subtensor/src/macros/dispatches.rs +++ b/pallets/subtensor/src/macros/dispatches.rs @@ -154,9 +154,6 @@ mod dispatches { /// * `RevealTooEarly`: /// - Attempting to reveal weights outside the valid reveal period. /// - /// * `RevealOutOfOrder`: - /// - Attempting to reveal a commit out of the expected order. - /// /// * `InvalidRevealCommitHashNotMatch`: /// - The revealed hash does not match any committed hash. 
/// diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 61130d18a..ba673afa4 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -102,9 +102,6 @@ impl Pallet { /// * `RevealTooEarly`: /// - Attempting to reveal weights outside the valid reveal period. /// - /// * `RevealOutOfOrder`: - /// - Attempting to reveal a commit out of the expected order. - /// /// * `InvalidRevealCommitHashNotMatch`: /// - The revealed hash does not match any committed hash. pub fn do_reveal_weights( From d7a64ba809af85cbfecbea1ccab1a61edc916af6 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Mon, 21 Oct 2024 09:53:14 -0700 Subject: [PATCH 172/213] update benchmarks --- pallets/admin-utils/src/benchmarking.rs | 11 +++ pallets/admin-utils/src/lib.rs | 2 +- pallets/admin-utils/src/weights.rs | 22 ++++++ pallets/subtensor/src/benchmarks.rs | 78 ++++++++++++++++++++++ pallets/subtensor/src/macros/dispatches.rs | 4 +- 5 files changed, 114 insertions(+), 3 deletions(-) diff --git a/pallets/admin-utils/src/benchmarking.rs b/pallets/admin-utils/src/benchmarking.rs index 3d8b962f6..606814ff0 100644 --- a/pallets/admin-utils/src/benchmarking.rs +++ b/pallets/admin-utils/src/benchmarking.rs @@ -227,6 +227,17 @@ mod benchmarks { _(RawOrigin::Root, 1u16/*netuid*/, 1u16/*tempo*/)/*sudo_set_tempo*/; } + #[benchmark] + fn sudo_set_commit_reveal_weights_periods() { + pallet_subtensor::Pallet::::init_new_network( + 1u16, /*netuid*/ + 1u16, /*sudo_tempo*/ + ); + + #[extrinsic_call] + _(RawOrigin::Root, 1u16/*netuid*/, 3u64/*interval*/)/*set_commit_reveal_weights_periods()*/; + } + #[benchmark] fn sudo_set_commit_reveal_weights_enabled() { pallet_subtensor::Pallet::::init_new_network( diff --git a/pallets/admin-utils/src/lib.rs b/pallets/admin-utils/src/lib.rs index b2f59668e..a287bb1b4 100644 --- a/pallets/admin-utils/src/lib.rs +++ b/pallets/admin-utils/src/lib.rs @@ -1188,7 +1188,7 @@ pub mod pallet { /// 
# Weight /// Weight is handled by the `#[pallet::weight]` attribute. #[pallet::call_index(56)] - #[pallet::weight((0, DispatchClass::Operational, Pays::No))] + #[pallet::weight(T::WeightInfo::sudo_set_commit_reveal_weights_periods())] pub fn sudo_set_commit_reveal_weights_periods( origin: OriginFor, netuid: u16, diff --git a/pallets/admin-utils/src/weights.rs b/pallets/admin-utils/src/weights.rs index ba2247dfd..db8752ff7 100644 --- a/pallets/admin-utils/src/weights.rs +++ b/pallets/admin-utils/src/weights.rs @@ -60,6 +60,7 @@ pub trait WeightInfo { fn sudo_set_min_burn() -> Weight; fn sudo_set_network_registration_allowed() -> Weight; fn sudo_set_tempo() -> Weight; + fn sudo_set_commit_reveal_weights_periods() -> Weight; fn sudo_set_commit_reveal_weights_enabled() -> Weight; } @@ -412,6 +413,15 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1_u64)) .saturating_add(T::DbWeight::get().writes(1_u64)) } + fn sudo_set_commit_reveal_weights_periods() -> Weight { + // Proof Size summary in bytes: + // Measured: `456` + // Estimated: `3921` + // Minimum execution time: 19_070_000 picoseconds. 
+ Weight::from_parts(19_380_000, 456) + .saturating_add(T::DbWeight::get().reads(1_u64)) + .saturating_add(T::DbWeight::get().writes(1_u64)) + } fn sudo_set_commit_reveal_weights_enabled() -> Weight { // Proof Size summary in bytes: // Measured: `1111` @@ -771,6 +781,18 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1_u64)) .saturating_add(RocksDbWeight::get().writes(1_u64)) } + fn sudo_set_commit_reveal_weights_periods() -> Weight { + // -- Extrinsic Time -- + // Model: + // Time ~= 19.38 + // µs + // Reads = 1 + // Writes = 1 + // Recorded proof Size = 456 + Weight::from_parts(19_380_000, 456) + .saturating_add(RocksDbWeight::get().reads(1)) + .saturating_add(RocksDbWeight::get().writes(1)) + } fn sudo_set_commit_reveal_weights_enabled() -> Weight { // -- Extrinsic Time -- // Model: diff --git a/pallets/subtensor/src/benchmarks.rs b/pallets/subtensor/src/benchmarks.rs index bd48676b6..6fd1cbf8b 100644 --- a/pallets/subtensor/src/benchmarks.rs +++ b/pallets/subtensor/src/benchmarks.rs @@ -520,4 +520,82 @@ reveal_weights { // Benchmark setup complete, now execute the extrinsic }: swap_coldkey(RawOrigin::Root, old_coldkey.clone(), new_coldkey.clone()) +batch_reveal_weights { + let tempo: u16 = 0; + let netuid: u16 = 1; + let num_commits: usize = 10; + + let hotkey: T::AccountId = account("hot", 0, 1); + let coldkey: T::AccountId = account("cold", 0, 2); + + Subtensor::::init_new_network(netuid, tempo); + Subtensor::::set_network_registration_allowed(netuid, true); + Subtensor::::set_network_pow_registration_allowed(netuid, true); + Subtensor::::set_commit_reveal_weights_enabled(netuid, true); + Subtensor::::set_weights_set_rate_limit(netuid, 0); // Disable rate limiting for benchmarking + + let block_number: u64 = Subtensor::::get_current_block_as_u64(); + let (nonce, work): (u64, Vec) = Subtensor::::create_work_for_block_number( + netuid, + block_number, + 3, + &hotkey, + ); + + let origin = 
T::RuntimeOrigin::from(RawOrigin::Signed(hotkey.clone())); + assert_ok!(Subtensor::::register( + origin.clone(), + netuid, + block_number, + nonce, + work.clone(), + hotkey.clone(), + coldkey.clone(), + )); + + let uid: u16 = 0; + + Subtensor::::set_validator_permit_for_uid(netuid, uid, true); + + let mut uids_list = Vec::new(); + let mut values_list = Vec::new(); + let mut salts_list = Vec::new(); + let mut version_keys = Vec::new(); + + for i in 0..num_commits { + let uids: Vec = vec![uid]; + let values: Vec = vec![i as u16]; + let salt: Vec = vec![i as u16]; + let version_key_i: u64 = i as u64; + + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey.clone(), + netuid, + uids.clone(), + values.clone(), + salt.clone(), + version_key_i, + )); + + assert_ok!(Subtensor::::commit_weights( + T::RuntimeOrigin::from(RawOrigin::Signed(hotkey.clone())), + netuid, + commit_hash, + )); + + uids_list.push(uids); + values_list.push(values); + salts_list.push(salt); + version_keys.push(version_key_i); + } +}: batch_reveal_weights( + RawOrigin::Signed(hotkey.clone()), + netuid, + uids_list, + values_list, + salts_list, + version_keys +) + + } diff --git a/pallets/subtensor/src/macros/dispatches.rs b/pallets/subtensor/src/macros/dispatches.rs index 42c8e3c1a..e98ecbd6a 100644 --- a/pallets/subtensor/src/macros/dispatches.rs +++ b/pallets/subtensor/src/macros/dispatches.rs @@ -212,8 +212,8 @@ mod dispatches { /// * `InvalidInputLengths`: /// - The input vectors are of mismatched lengths. 
#[pallet::call_index(98)] - #[pallet::weight((Weight::from_parts(103_000_000, 0) - .saturating_add(T::DbWeight::get().reads(11)) + #[pallet::weight((Weight::from_parts(367_612_000, 0) + .saturating_add(T::DbWeight::get().reads(14)) .saturating_add(T::DbWeight::get().writes(3)), DispatchClass::Normal, Pays::No))] pub fn batch_reveal_weights( origin: T::RuntimeOrigin, From 75ccbf167bf1f8732b7c053283213c9d39cba820 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 23 Oct 2024 09:25:16 -0700 Subject: [PATCH 173/213] set priority for batch reveals --- pallets/subtensor/src/lib.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 02b164c0a..5081d69d3 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -1461,6 +1461,18 @@ where Err(InvalidTransaction::Custom(2).into()) } } + Some(Call::batch_reveal_weights { netuid, .. }) => { + if Self::check_weights_min_stake(who) { + let priority: u64 = Self::get_priority_set_weights(who, *netuid); + Ok(ValidTransaction { + priority, + longevity: 1, + ..Default::default() + }) + } else { + Err(InvalidTransaction::Custom(6).into()) + } + } Some(Call::set_weights { netuid, .. 
}) => { if Self::check_weights_min_stake(who) { let priority: u64 = Self::get_priority_set_weights(who, *netuid); From 7385f10a86fd6f1d30ba0a0f6d4c1f1b8fcef432 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 23 Oct 2024 14:42:38 -0700 Subject: [PATCH 174/213] add migrate_commit_reveal_2 --- pallets/subtensor/src/macros/hooks.rs | 4 +- .../migrations/migrate_commit_reveal_v2.rs | 82 +++++++++++++++++ pallets/subtensor/src/migrations/mod.rs | 1 + pallets/subtensor/tests/migration.rs | 88 ++++++++++++++++++- 4 files changed, 172 insertions(+), 3 deletions(-) create mode 100644 pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs diff --git a/pallets/subtensor/src/macros/hooks.rs b/pallets/subtensor/src/macros/hooks.rs index 76f140002..1077acb76 100644 --- a/pallets/subtensor/src/macros/hooks.rs +++ b/pallets/subtensor/src/macros/hooks.rs @@ -70,7 +70,9 @@ mod hooks { // Storage version v8 -> v9 .saturating_add(migrations::migrate_fix_total_coldkey_stake::migrate_fix_total_coldkey_stake::()) // Migrate Delegate Ids on chain - .saturating_add(migrations::migrate_chain_identity::migrate_set_hotkey_identities::()); + .saturating_add(migrations::migrate_chain_identity::migrate_set_hotkey_identities::()) + // Migrate Commit-Reval 2.0 + .saturating_add(migrations::migrate_commit_reveal_v2::migrate_commit_reveal_2::()); weight } diff --git a/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs b/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs new file mode 100644 index 000000000..6d0ef09f0 --- /dev/null +++ b/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs @@ -0,0 +1,82 @@ +use super::*; +use crate::HasMigrationRun; +use frame_support::{traits::Get, weights::Weight}; +use scale_info::prelude::string::String; +use sp_io::{storage::clear_prefix, KillStorageResult}; + +pub fn migrate_commit_reveal_2() -> Weight { + let migration_name = b"migrate_commit_reveal_2".to_vec(); + let mut weight = T::DbWeight::get().reads(1); + + if 
HasMigrationRun::::get(&migration_name) { + log::info!( + "Migration '{:?}' has already run. Skipping.", + migration_name + ); + return weight; + } + + log::info!( + "Running migration '{}'", + String::from_utf8_lossy(&migration_name) + ); + + // ------------------------------ + // Step 1: Remove WeightCommitRevealInterval entries + // ------------------------------ + + const WEIGHT_COMMIT_REVEAL_INTERVAL_PREFIX: &[u8] = + b"pallet_subtensor::WeightCommitRevealInterval"; + let removal_results = clear_prefix(WEIGHT_COMMIT_REVEAL_INTERVAL_PREFIX, Some(u32::MAX)); + + let removed_entries_count = match removal_results { + KillStorageResult::AllRemoved(removed) => removed as u64, + KillStorageResult::SomeRemaining(removed) => { + log::info!("Failed To Remove Some Items During migrate_commit_reveal_v2",); + removed as u64 + } + }; + + weight = weight.saturating_add(T::DbWeight::get().writes(removed_entries_count)); + + log::info!( + "Removed {:?} entries from WeightCommitRevealInterval.", + removed_entries_count + ); + + // ------------------------------ + // Step 2: Remove WeightCommits entries + // ------------------------------ + + const WEIGHT_COMMITS_PREFIX: &[u8] = b"pallet_subtensor::WeightCommits"; + let removal_results_commits = clear_prefix(WEIGHT_COMMITS_PREFIX, Some(u32::MAX)); + + let removed_commits_entries = match removal_results_commits { + KillStorageResult::AllRemoved(removed) => removed as u64, + KillStorageResult::SomeRemaining(removed) => { + log::info!("Failed To Remove Some Items During migrate_commit_reveal_v2",); + removed as u64 + } + }; + + weight = weight.saturating_add(T::DbWeight::get().writes(removed_commits_entries)); + + log::info!( + "Removed {} entries from WeightCommits.", + removed_commits_entries + ); + + // ------------------------------ + // Step 3: Mark Migration as Completed + // ------------------------------ + + HasMigrationRun::::insert(&migration_name, true); + weight = weight.saturating_add(T::DbWeight::get().writes(1)); + + 
log::info!( + "Migration '{:?}' completed successfully.", + String::from_utf8_lossy(&migration_name) + ); + + weight +} diff --git a/pallets/subtensor/src/migrations/mod.rs b/pallets/subtensor/src/migrations/mod.rs index 6036b23e0..a0ee65998 100644 --- a/pallets/subtensor/src/migrations/mod.rs +++ b/pallets/subtensor/src/migrations/mod.rs @@ -1,5 +1,6 @@ use super::*; pub mod migrate_chain_identity; +pub mod migrate_commit_reveal_v2; pub mod migrate_create_root_network; pub mod migrate_delete_subnet_21; pub mod migrate_delete_subnet_3; diff --git a/pallets/subtensor/tests/migration.rs b/pallets/subtensor/tests/migration.rs index 6c40d7d78..4ce21dcf6 100644 --- a/pallets/subtensor/tests/migration.rs +++ b/pallets/subtensor/tests/migration.rs @@ -1,10 +1,18 @@ #![allow(unused, clippy::indexing_slicing, clippy::panic, clippy::unwrap_used)] mod mock; -use frame_support::{assert_ok, weights::Weight}; +use codec::{Decode, Encode}; +use frame_support::{ + assert_ok, + storage::unhashed::{get_raw, put_raw}, + traits::{StorageInstance, StoredMap}, + weights::Weight, + StorageHasher, Twox64Concat, +}; use frame_system::Config; use mock::*; use pallet_subtensor::*; -use sp_core::U256; +use sp_core::{H256, U256}; +use sp_runtime::traits::Zero; #[test] fn test_initialise_ti() { @@ -430,3 +438,79 @@ fn run_migration_and_check(migration_name: &'static str) -> frame_support::weigh // Return the weight of the executed migration weight } + +#[test] +fn test_migrate_commit_reveal_2() { + new_test_ext(1).execute_with(|| { + // ------------------------------ + // Step 1: Simulate Old Storage Entries + // ------------------------------ + const MIGRATION_NAME: &str = "migrate_commit_reveal_2"; + const WEIGHT_COMMIT_REVEAL_INTERVAL_PREFIX: &[u8] = + b"pallet_subtensor::WeightCommitRevealInterval"; + const WEIGHT_COMMITS_PREFIX: &[u8] = b"pallet_subtensor::WeightCommits"; + + let netuid: u16 = 1; + let interval_value: u64 = 50u64; + + let mut interval_key = 
WEIGHT_COMMIT_REVEAL_INTERVAL_PREFIX.to_vec(); + interval_key.extend_from_slice(&netuid.encode()); + + put_raw(&interval_key, &interval_value.encode()); + + let test_account: U256 = U256::from(1); + + let mut commit_key = WEIGHT_COMMITS_PREFIX.to_vec(); + commit_key.extend_from_slice(&Twox64Concat::hash(&netuid.encode())); + commit_key.extend_from_slice(&Twox64Concat::hash(&test_account.encode())); + + let commit_value: (H256, u64) = (H256::from_low_u64_be(42), 100); + put_raw(&commit_key, &commit_value.encode()); + + let stored_interval = get_raw(&interval_key).expect("Expected to get a value"); + assert_eq!( + u64::decode(&mut &stored_interval[..]).expect("Failed to decode interval value"), + interval_value + ); + + let stored_commit = get_raw(&commit_key).expect("Expected to get a value"); + assert_eq!( + <(H256, u64)>::decode(&mut &stored_commit[..]).expect("Failed to decode commit value"), + commit_value + ); + + assert!( + !HasMigrationRun::::get(MIGRATION_NAME.as_bytes().to_vec()), + "Migration should not have run yet" + ); + + // ------------------------------ + // Step 2: Run the Migration + // ------------------------------ + let weight = + pallet_subtensor::migrations::migrate_commit_reveal_v2::migrate_commit_reveal_2::( + ); + + assert!( + HasMigrationRun::::get(MIGRATION_NAME.as_bytes().to_vec()), + "Migration should be marked as run" + ); + + // ------------------------------ + // Step 3: Verify Migration Effects + // ------------------------------ + let stored_interval_after = get_raw(&interval_key); + assert!( + stored_interval_after.is_none(), + "WeightCommitRevealInterval should be cleared" + ); + + let stored_commit_after = get_raw(&commit_key); + assert!( + stored_commit_after.is_none(), + "WeightCommits entry should be cleared" + ); + + assert!(!weight.is_zero(), "Migration weight should be non-zero"); + }); +} From 8d5de806df593b5831f8bedae9702928b0ea9e3d Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 23 Oct 2024 15:46:06 -0700 Subject: 
[PATCH 175/213] fix storage item migration keys --- .../migrations/migrate_commit_reveal_v2.rs | 22 +++++++++------ pallets/subtensor/tests/migration.rs | 28 ++++++++++++++----- 2 files changed, 35 insertions(+), 15 deletions(-) diff --git a/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs b/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs index 6d0ef09f0..cd93842c9 100644 --- a/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs +++ b/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs @@ -2,7 +2,7 @@ use super::*; use crate::HasMigrationRun; use frame_support::{traits::Get, weights::Weight}; use scale_info::prelude::string::String; -use sp_io::{storage::clear_prefix, KillStorageResult}; +use sp_io::{hashing::twox_128, storage::clear_prefix, KillStorageResult}; pub fn migrate_commit_reveal_2() -> Weight { let migration_name = b"migrate_commit_reveal_2".to_vec(); @@ -25,14 +25,17 @@ pub fn migrate_commit_reveal_2() -> Weight { // Step 1: Remove WeightCommitRevealInterval entries // ------------------------------ - const WEIGHT_COMMIT_REVEAL_INTERVAL_PREFIX: &[u8] = - b"pallet_subtensor::WeightCommitRevealInterval"; - let removal_results = clear_prefix(WEIGHT_COMMIT_REVEAL_INTERVAL_PREFIX, Some(u32::MAX)); + let mut weight_commit_reveal_interval_prefix = Vec::new(); + weight_commit_reveal_interval_prefix.extend_from_slice(&twox_128("SubtensorModule".as_bytes())); + weight_commit_reveal_interval_prefix + .extend_from_slice(&twox_128("WeightCommitRevealInterval".as_bytes())); + + let removal_results = clear_prefix(&weight_commit_reveal_interval_prefix, Some(u32::MAX)); let removed_entries_count = match removal_results { KillStorageResult::AllRemoved(removed) => removed as u64, KillStorageResult::SomeRemaining(removed) => { - log::info!("Failed To Remove Some Items During migrate_commit_reveal_v2",); + log::info!("Failed To Remove Some Items During migrate_commit_reveal_v2"); removed as u64 } }; @@ -48,13 +51,16 @@ pub fn 
migrate_commit_reveal_2() -> Weight { // Step 2: Remove WeightCommits entries // ------------------------------ - const WEIGHT_COMMITS_PREFIX: &[u8] = b"pallet_subtensor::WeightCommits"; - let removal_results_commits = clear_prefix(WEIGHT_COMMITS_PREFIX, Some(u32::MAX)); + let mut weight_commits_prefix = Vec::new(); + weight_commits_prefix.extend_from_slice(&twox_128("SubtensorModule".as_bytes())); + weight_commits_prefix.extend_from_slice(&twox_128("WeightCommits".as_bytes())); + + let removal_results_commits = clear_prefix(&weight_commits_prefix, Some(u32::MAX)); let removed_commits_entries = match removal_results_commits { KillStorageResult::AllRemoved(removed) => removed as u64, KillStorageResult::SomeRemaining(removed) => { - log::info!("Failed To Remove Some Items During migrate_commit_reveal_v2",); + log::info!("Failed To Remove Some Items During migrate_commit_reveal_v2"); removed as u64 } }; diff --git a/pallets/subtensor/tests/migration.rs b/pallets/subtensor/tests/migration.rs index 4ce21dcf6..1317bfb0f 100644 --- a/pallets/subtensor/tests/migration.rs +++ b/pallets/subtensor/tests/migration.rs @@ -12,6 +12,7 @@ use frame_system::Config; use mock::*; use pallet_subtensor::*; use sp_core::{H256, U256}; +use sp_io::hashing::twox_128; use sp_runtime::traits::Zero; #[test] @@ -446,23 +447,36 @@ fn test_migrate_commit_reveal_2() { // Step 1: Simulate Old Storage Entries // ------------------------------ const MIGRATION_NAME: &str = "migrate_commit_reveal_2"; - const WEIGHT_COMMIT_REVEAL_INTERVAL_PREFIX: &[u8] = - b"pallet_subtensor::WeightCommitRevealInterval"; - const WEIGHT_COMMITS_PREFIX: &[u8] = b"pallet_subtensor::WeightCommits"; + + let pallet_prefix = twox_128("SubtensorModule".as_bytes()); + let storage_prefix_interval = twox_128("WeightCommitRevealInterval".as_bytes()); + let storage_prefix_commits = twox_128("WeightCommits".as_bytes()); let netuid: u16 = 1; let interval_value: u64 = 50u64; - let mut interval_key = 
WEIGHT_COMMIT_REVEAL_INTERVAL_PREFIX.to_vec(); + // Construct the full key for WeightCommitRevealInterval + let mut interval_key = Vec::new(); + interval_key.extend_from_slice(&pallet_prefix); + interval_key.extend_from_slice(&storage_prefix_interval); interval_key.extend_from_slice(&netuid.encode()); put_raw(&interval_key, &interval_value.encode()); let test_account: U256 = U256::from(1); - let mut commit_key = WEIGHT_COMMITS_PREFIX.to_vec(); - commit_key.extend_from_slice(&Twox64Concat::hash(&netuid.encode())); - commit_key.extend_from_slice(&Twox64Concat::hash(&test_account.encode())); + // Construct the full key for WeightCommits (DoubleMap) + let mut commit_key = Vec::new(); + commit_key.extend_from_slice(&pallet_prefix); + commit_key.extend_from_slice(&storage_prefix_commits); + + // First key (netuid) hashed with Twox64Concat + let netuid_hashed = Twox64Concat::hash(&netuid.encode()); + commit_key.extend_from_slice(&netuid_hashed); + + // Second key (account) hashed with Twox64Concat + let account_hashed = Twox64Concat::hash(&test_account.encode()); + commit_key.extend_from_slice(&account_hashed); let commit_value: (H256, u64) = (H256::from_low_u64_be(42), 100); put_raw(&commit_key, &commit_value.encode()); From ecd3612d2af95111a077a2ebd1bbbdeb743aa8aa Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 24 Oct 2024 07:54:37 -0700 Subject: [PATCH 176/213] add highly_concurrent test --- pallets/subtensor/tests/weights.rs | 279 +++++++++++++++++++++++++++++ 1 file changed, 279 insertions(+) diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index c52739482..4f925a80d 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -15,6 +15,7 @@ use sp_runtime::{ }; use sp_std::collections::vec_deque::VecDeque; use substrate_fixed::types::I32F32; +use scale_info::prelude::collections::HashMap; /*************************** pub fn set_weights() tests @@ -3675,3 +3676,281 @@ fn 
test_batch_reveal_with_out_of_order_commits() { assert!(commits.is_none()); }); } + +#[test] +fn test_highly_concurrent_commits_and_reveals_with_multiple_hotkeys() { + new_test_ext(1).execute_with(|| { + // ==== Test Configuration ==== + let netuid: u16 = 1; + let num_hotkeys: usize = 10; + let max_unrevealed_commits: usize = 10; + let commits_per_hotkey: usize = 20; + let initial_reveal_period: u64 = 5; + let initial_tempo: u16 = 100; + + // ==== Setup Network ==== + add_network(netuid, initial_tempo, 0); + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + SubtensorModule::set_weights_set_rate_limit(netuid, 0); + SubtensorModule::set_reveal_period(netuid, initial_reveal_period); + SubtensorModule::set_max_registrations_per_block(netuid, u16::MAX); + SubtensorModule::set_target_registrations_per_interval(netuid, u16::MAX); + + // ==== Register Validators ==== + for uid in 0..5 { + let validator_id = U256::from(100 + uid as u64); + register_ok_neuron(netuid, validator_id, U256::from(200 + uid as u64), 300_000); + SubtensorModule::set_validator_permit_for_uid(netuid, uid, true); + } + + // ==== Register Hotkeys ==== + let mut hotkeys: Vec<::AccountId> = Vec::new(); + for i in 0..num_hotkeys { + let hotkey_id = U256::from(1000 + i as u64); + register_ok_neuron(netuid, hotkey_id, U256::from(2000 + i as u64), 100_000); + hotkeys.push(hotkey_id); + } + + // ==== Initialize Commit Information ==== + let mut commit_info_map: HashMap< + ::AccountId, + Vec<(H256, Vec, Vec, Vec, u64)>, + > = HashMap::new(); + + // Initialize the map + for hotkey in &hotkeys { + commit_info_map.insert(*hotkey, Vec::new()); + } + + // ==== Function to Generate Unique Data ==== + fn generate_unique_data(index: usize) -> (Vec, Vec, Vec, u64) { + let uids = vec![index as u16, (index + 1) as u16]; + let values = vec![(index * 10) as u16, ((index + 1) * 10) as u16]; + let salt = vec![(index % 100) as u16; 8]; + let version_key = index as u64; + (uids, values, salt, version_key) + } 
+ + // ==== Simulate Concurrent Commits and Reveals ==== + for i in 0..commits_per_hotkey { + for hotkey in &hotkeys { + + let current_commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey) + .unwrap_or_default(); + if current_commits.len() >= max_unrevealed_commits { + continue; + } + + let (uids, values, salt, version_key) = generate_unique_data(i); + let commit_hash: H256 = BlakeTwo256::hash_of(&( + *hotkey, + netuid, + uids.clone(), + values.clone(), + salt.clone(), + version_key, + )); + + if let Some(commits) = commit_info_map.get_mut(hotkey) { + commits.push((commit_hash, salt.clone(), uids.clone(), values.clone(), version_key)); + } + + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(*hotkey), + netuid, + commit_hash + )); + } + + // ==== Reveal Phase ==== + for hotkey in &hotkeys { + if let Some(commits) = commit_info_map.get_mut(hotkey) { + if commits.is_empty() { + continue; // No commits to reveal + } + + let (_commit_hash, salt, uids, values, version_key) = commits.first().expect("expected a value"); + + let reveal_result = SubtensorModule::reveal_weights( + RuntimeOrigin::signed(*hotkey), + netuid, + uids.clone(), + values.clone(), + salt.clone(), + *version_key, + ); + + match reveal_result { + Ok(_) => { + commits.remove(0); + } + Err(e) => { + if e == Error::::RevealTooEarly.into() + || e == Error::::ExpiredWeightCommit.into() + || e == Error::::InvalidRevealCommitHashNotMatch.into() + { + log::info!("Expected error during reveal after epoch advancement: {:?}", e); + } else { + panic!( + "Unexpected error during reveal: {:?}, expected RevealTooEarly, ExpiredWeightCommit, or InvalidRevealCommitHashNotMatch", + e + ); + } + } + } + } + } + } + + // ==== Modify Network Parameters During Commits ==== + SubtensorModule::set_tempo(netuid, 150); + SubtensorModule::set_reveal_period(netuid, 7); + log::info!("Changed tempo to 150 and reveal_period to 7 during commits."); + + step_epochs(3, netuid); + + // ==== Continue Reveals 
After Epoch Advancement ==== + for hotkey in &hotkeys { + if let Some(commits) = commit_info_map.get_mut(hotkey) { + while !commits.is_empty() { + let (_commit_hash, salt, uids, values, version_key) = &commits[0]; + + // Attempt to reveal + let reveal_result = SubtensorModule::reveal_weights( + RuntimeOrigin::signed(*hotkey), + netuid, + uids.clone(), + values.clone(), + salt.clone(), + *version_key, + ); + + match reveal_result { + Ok(_) => { + commits.remove(0); + } + Err(e) => { + // Check if the error is due to reveal being too early or commit expired + if e == Error::::RevealTooEarly.into() + || e == Error::::ExpiredWeightCommit.into() + || e == Error::::InvalidRevealCommitHashNotMatch.into() + { + log::info!("Expected error during reveal after epoch advancement: {:?}", e); + break; + } else { + panic!( + "Unexpected error during reveal after epoch advancement: {:?}, expected RevealTooEarly, ExpiredWeightCommit, or InvalidRevealCommitHashNotMatch", + e + ); + } + } + } + } + } + } + + // ==== Change Network Parameters Again ==== + SubtensorModule::set_tempo(netuid, 200); + SubtensorModule::set_reveal_period(netuid, 10); + log::info!("Changed tempo to 200 and reveal_period to 10 after initial reveals."); + + step_epochs(10, netuid); + + // ==== Final Reveal Attempts ==== + for (hotkey, commits) in commit_info_map.iter_mut() { + for (_commit_hash, salt, uids, values, version_key) in commits.iter() { + let reveal_result = SubtensorModule::reveal_weights( + RuntimeOrigin::signed(*hotkey), + netuid, + uids.clone(), + values.clone(), + salt.clone(), + *version_key, + ); + + assert_eq!( + reveal_result, + Err(Error::::ExpiredWeightCommit.into()), + "Expected ExpiredWeightCommit error, got {:?}", + reveal_result + ); + } + } + + for hotkey in &hotkeys { + commit_info_map.insert(*hotkey, Vec::new()); + + for i in 0..max_unrevealed_commits { + let (uids, values, salt, version_key) = generate_unique_data(i + commits_per_hotkey); + let commit_hash: H256 = 
BlakeTwo256::hash_of(&( + *hotkey, + netuid, + uids.clone(), + values.clone(), + salt.clone(), + version_key, + )); + + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(*hotkey), + netuid, + commit_hash + )); + } + + let (uids, values, salt, version_key) = generate_unique_data(max_unrevealed_commits + commits_per_hotkey); + let commit_hash: H256 = BlakeTwo256::hash_of(&( + *hotkey, + netuid, + uids.clone(), + values.clone(), + salt.clone(), + version_key, + )); + + assert_err!( + SubtensorModule::commit_weights( + RuntimeOrigin::signed(*hotkey), + netuid, + commit_hash + ), + Error::::TooManyUnrevealedCommits + ); + } + + // Attempt unauthorized reveal + let unauthorized_hotkey = hotkeys[0]; + let target_hotkey = hotkeys[1]; + if let Some(commits) = commit_info_map.get(&target_hotkey) { + if let Some((_commit_hash, salt, uids, values, version_key)) = commits.first() { + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(unauthorized_hotkey), + netuid, + uids.clone(), + values.clone(), + salt.clone(), + *version_key, + ), + Error::::InvalidRevealCommitHashNotMatch + ); + } + } + + let non_committing_hotkey: ::AccountId = U256::from(9999); + assert_err!( + SubtensorModule::reveal_weights( + RuntimeOrigin::signed(non_committing_hotkey), + netuid, + vec![0, 1], + vec![10, 20], + vec![0; 8], + 0, + ), + Error::::NoWeightsCommitFound + ); + + assert_eq!(SubtensorModule::get_reveal_period(netuid), 10); + assert_eq!(SubtensorModule::get_tempo(netuid), 200); + }) +} From 6e21d6acb36837690a9f9f919f55f3725379fac1 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 24 Oct 2024 07:57:54 -0700 Subject: [PATCH 177/213] cargo fmt --- pallets/subtensor/tests/weights.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 4f925a80d..ddc2ccd77 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -8,6 +8,7 @@ use 
frame_support::{ }; use mock::*; use pallet_subtensor::{Error, Owner}; +use scale_info::prelude::collections::HashMap; use sp_core::{H256, U256}; use sp_runtime::{ traits::{BlakeTwo256, DispatchInfoOf, Hash, SignedExtension}, @@ -15,7 +16,6 @@ use sp_runtime::{ }; use sp_std::collections::vec_deque::VecDeque; use substrate_fixed::types::I32F32; -use scale_info::prelude::collections::HashMap; /*************************** pub fn set_weights() tests From 8b1f71ea55421ae124835dee41d03a06d7e1e467 Mon Sep 17 00:00:00 2001 From: Liam Date: Thu, 24 Oct 2024 21:36:04 +0400 Subject: [PATCH 178/213] better try-runtime ci --- .github/workflows/check-devnet.yml | 15 -------- .github/workflows/check-finney.yml | 14 -------- .github/workflows/check-testnet.yml | 15 -------- .github/workflows/try-runtime.yml | 54 +++++++++++++++++++++++++++++ 4 files changed, 54 insertions(+), 44 deletions(-) create mode 100644 .github/workflows/try-runtime.yml diff --git a/.github/workflows/check-devnet.yml b/.github/workflows/check-devnet.yml index 2cb586348..8f04d78cf 100644 --- a/.github/workflows/check-devnet.yml +++ b/.github/workflows/check-devnet.yml @@ -39,18 +39,3 @@ jobs: echo "network spec_version: $spec_version" if (( $(echo "$local_spec_version <= $spec_version" | bc -l) )); then echo "$local_spec_version ≯ $spec_version ❌"; exit 1; fi echo "$local_spec_version > $spec_version ✅" - - check-devnet-migrations: - name: check devnet migrations - runs-on: ubuntu-22.04 - steps: - - name: Checkout sources - uses: actions/checkout@v3 - - - name: Run Try Runtime Checks - uses: "paritytech/try-runtime-gha@v0.1.0" - with: - runtime-package: "node-subtensor-runtime" - node-uri: "wss://dev.chain.opentensor.ai:443" - checks: "pre-and-post" - extra-args: "--disable-spec-version-check --no-weight-warnings" diff --git a/.github/workflows/check-finney.yml b/.github/workflows/check-finney.yml index 665c9c8a9..947b9a902 100644 --- a/.github/workflows/check-finney.yml +++ 
b/.github/workflows/check-finney.yml @@ -39,17 +39,3 @@ jobs: echo "network spec_version: $spec_version" if (( $(echo "$local_spec_version <= $spec_version" | bc -l) )); then echo "$local_spec_version ≯ $spec_version ❌"; exit 1; fi echo "$local_spec_version > $spec_version ✅" - - check-finney-migrations: - name: check finney migrations - runs-on: SubtensorCI - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - name: Run Try Runtime Checks - uses: "paritytech/try-runtime-gha@v0.1.0" - with: - runtime-package: "node-subtensor-runtime" - node-uri: "wss://entrypoint-finney.opentensor.ai:443" - checks: "pre-and-post" - extra-args: "--disable-spec-version-check --no-weight-warnings" diff --git a/.github/workflows/check-testnet.yml b/.github/workflows/check-testnet.yml index 95277c94a..a869129ab 100644 --- a/.github/workflows/check-testnet.yml +++ b/.github/workflows/check-testnet.yml @@ -39,18 +39,3 @@ jobs: echo "network spec_version: $spec_version" if (( $(echo "$local_spec_version <= $spec_version" | bc -l) )); then echo "$local_spec_version ≯ $spec_version ❌"; exit 1; fi echo "$local_spec_version > $spec_version ✅" - - check-testnet-migrations: - name: check testnet migrations - runs-on: ubuntu-22.04 - steps: - - name: Checkout sources - uses: actions/checkout@v3 - - - name: Run Try Runtime Checks - uses: "paritytech/try-runtime-gha@v0.1.0" - with: - runtime-package: "node-subtensor-runtime" - node-uri: "wss://test.chain.opentensor.ai:443" - checks: "pre-and-post" - extra-args: "--disable-spec-version-check --no-weight-warnings" diff --git a/.github/workflows/try-runtime.yml b/.github/workflows/try-runtime.yml new file mode 100644 index 000000000..174e6db37 --- /dev/null +++ b/.github/workflows/try-runtime.yml @@ -0,0 +1,54 @@ +name: Try Runtime + +on: + pull_request: + branches: [main, devnet-ready, devnet, testnet, finney] + types: [labeled, unlabeled, synchronize] + +env: + CARGO_TERM_COLOR: always + +jobs: + check-devnet: + name: check devnet + 
runs-on: SubtensorCI + steps: + - name: Checkout sources + uses: actions/checkout@v3 + + - name: Run Try Runtime Checks + uses: "paritytech/try-runtime-gha@v0.1.0" + with: + runtime-package: "node-subtensor-runtime" + node-uri: "wss://dev.chain.opentensor.ai:443" + checks: "all" + extra-args: "--disable-spec-version-check --no-weight-warnings" + + check-testnet: + name: check testnet + runs-on: SubtensorCI + steps: + - name: Checkout sources + uses: actions/checkout@v3 + + - name: Run Try Runtime Checks + uses: "paritytech/try-runtime-gha@v0.1.0" + with: + runtime-package: "node-subtensor-runtime" + node-uri: "wss://test.chain.opentensor.ai:443" + checks: "all" + extra-args: "--disable-spec-version-check --no-weight-warnings" + + check-finney: + name: check finney + runs-on: SubtensorCI + steps: + - name: Checkout sources + uses: actions/checkout@v4 + - name: Run Try Runtime Checks + uses: "paritytech/try-runtime-gha@v0.1.0" + with: + runtime-package: "node-subtensor-runtime" + node-uri: "wss://archive.chain.opentensor.ai:443" + checks: "all" + extra-args: "--disable-spec-version-check --no-weight-warnings" From 8810f4bb2868650f642bf081a7a887c7ed842806 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Fri, 25 Oct 2024 16:13:13 -0700 Subject: [PATCH 179/213] store more blocks & batch reveals out of order --- pallets/subtensor/src/lib.rs | 2 +- pallets/subtensor/src/subnets/weights.rs | 348 +++++++++++++---------- pallets/subtensor/tests/swap_hotkey.rs | 4 +- pallets/subtensor/tests/weights.rs | 2 +- 4 files changed, 204 insertions(+), 152 deletions(-) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 5081d69d3..9a6d60f8f 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -1258,7 +1258,7 @@ pub mod pallet { u16, Twox64Concat, T::AccountId, - VecDeque<(H256, u64)>, + VecDeque<(H256, u64, u64, u64)>, OptionQuery, >; #[pallet::storage] diff --git a/pallets/subtensor/src/subnets/weights.rs 
b/pallets/subtensor/src/subnets/weights.rs index ba673afa4..8cbf7695b 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -39,14 +39,19 @@ impl Pallet { Error::::CommitRevealDisabled ); - // --- 3. Mutate the WeightCommits to retrieve existing commits for the user. + // --- 3. Calculate the reveal blocks based on tempo and reveal period. + let commit_block: u64 = Self::get_current_block_as_u64(); + let (first_reveal_block, last_reveal_block) = Self::get_reveal_blocks(netuid, commit_block); + + // --- 4. Mutate the WeightCommits to retrieve existing commits for the user. WeightCommits::::try_mutate(netuid, &who, |maybe_commits| -> DispatchResult { - // --- 4. Take the existing commits or create a new VecDeque. - let mut commits: VecDeque<(H256, u64)> = maybe_commits.take().unwrap_or_default(); + // --- 5. Take the existing commits or create a new VecDeque. + let mut commits: VecDeque<(H256, u64, u64, u64)> = + maybe_commits.take().unwrap_or_default(); - // --- 5. Remove any expired commits from the front of the queue. - while let Some((_, commit_block)) = commits.front() { - if Self::is_commit_expired(netuid, *commit_block) { + // --- 6. Remove any expired commits from the front of the queue. + while let Some((_, commit_block_existing, _, _)) = commits.front() { + if Self::is_commit_expired(netuid, *commit_block_existing) { // Remove the expired commit commits.pop_front(); } else { @@ -54,16 +59,21 @@ impl Pallet { } } - // --- 6. Check if the current number of unrevealed commits is within the allowed limit. + // --- 7. Check if the current number of unrevealed commits is within the allowed limit. ensure!(commits.len() < 10, Error::::TooManyUnrevealedCommits); - // --- 7. Append the new commit to the queue. - commits.push_back((commit_hash, Self::get_current_block_as_u64())); + // --- 8. Append the new commit to the queue. 
+ commits.push_back(( + commit_hash, + commit_block, + first_reveal_block, + last_reveal_block, + )); - // --- 8. Store the updated queue back to storage. + // --- 9. Store the updated queue back to storage. *maybe_commits = Some(commits); - // --- 9. Return ok. + // --- 10. Return ok. Ok(()) }) } @@ -131,7 +141,7 @@ impl Pallet { // --- 4. Remove any expired commits from the front of the queue, collecting their hashes. let mut expired_hashes = Vec::new(); - while let Some((hash, commit_block)) = commits.front() { + while let Some((hash, commit_block, _, _)) = commits.front() { if Self::is_commit_expired(netuid, *commit_block) { // Collect the expired commit hash expired_hashes.push(*hash); @@ -163,9 +173,12 @@ impl Pallet { } // --- 7. Search for the provided_hash in the non-expired commits. - if let Some(position) = commits.iter().position(|(hash, _)| *hash == provided_hash) { + if let Some(position) = commits + .iter() + .position(|(hash, _, _, _)| *hash == provided_hash) + { // --- 8. Get the commit block for the commit being revealed. - let (_, commit_block) = commits + let (_, commit_block, _, _) = commits .get(position) .ok_or(Error::::NoWeightsCommitFound)?; @@ -199,152 +212,174 @@ impl Pallet { }) } - /// ---- The implementation for batch revealing committed weights. - /// - /// # Args: - /// * `origin`: (`::RuntimeOrigin`): - /// - The signature of the revealing hotkey. - /// - /// * `netuid` (`u16`): - /// - The u16 network identifier. - /// - /// * `uids_list` (`Vec>`): - /// - A list of uids for each set of weights being revealed. - /// - /// * `values_list` (`Vec>`): - /// - A list of values for each set of weights being revealed. - /// - /// * `salts_list` (`Vec>`): - /// - A list of salts used to generate the commit hashes. - /// - /// * `version_keys` (`Vec`): - /// - A list of network version keys. - /// - /// # Raises: - /// * `CommitRevealDisabled`: - /// - Attempting to reveal weights when the commit-reveal mechanism is disabled. 
- /// - /// * `NoWeightsCommitFound`: - /// - Attempting to reveal weights without an existing commit. - /// - /// * `ExpiredWeightCommit`: - /// - Attempting to reveal a weight commit that has expired. - /// - /// * `RevealTooEarly`: - /// - Attempting to reveal weights outside the valid reveal period. - /// - /// * `InvalidRevealCommitHashNotMatch`: - /// - The revealed hash does not match any committed hash. - /// - /// * `InvalidInputLengths`: - /// - The input vectors are of mismatched lengths. - pub fn do_batch_reveal_weights( - origin: T::RuntimeOrigin, - netuid: u16, - uids_list: Vec>, - values_list: Vec>, - salts_list: Vec>, - version_keys: Vec, - ) -> DispatchResult { - // --- 1. Check that the input lists are of the same length. - let num_reveals = uids_list.len(); - ensure!( - num_reveals == values_list.len() - && num_reveals == salts_list.len() - && num_reveals == version_keys.len(), - Error::::InputLengthsUnequal - ); - - // --- 2. Check the caller's signature (hotkey). - let who = ensure_signed(origin.clone())?; - - log::debug!( - "do_batch_reveal_weights( hotkey:{:?} netuid:{:?})", - who, - netuid - ); +/// ---- The implementation for batch revealing committed weights. +/// +/// # Args: +/// * `origin`: (`::RuntimeOrigin`): +/// - The signature of the revealing hotkey. +/// +/// * `netuid` (`u16`): +/// - The u16 network identifier. +/// +/// * `uids_list` (`Vec>`): +/// - A list of uids for each set of weights being revealed. +/// +/// * `values_list` (`Vec>`): +/// - A list of values for each set of weights being revealed. +/// +/// * `salts_list` (`Vec>`): +/// - A list of salts used to generate the commit hashes. +/// +/// * `version_keys` (`Vec`): +/// - A list of network version keys. +/// +/// # Raises: +/// * `CommitRevealDisabled`: +/// - Attempting to reveal weights when the commit-reveal mechanism is disabled. +/// +/// * `NoWeightsCommitFound`: +/// - Attempting to reveal weights without an existing commit. 
+/// +/// * `ExpiredWeightCommit`: +/// - Attempting to reveal a weight commit that has expired. +/// +/// * `RevealTooEarly`: +/// - Attempting to reveal weights outside the valid reveal period. +/// +/// * `InvalidRevealCommitHashNotMatch`: +/// - The revealed hash does not match any committed hash. +/// +/// * `InputLengthsUnequal`: +/// - The input vectors are of mismatched lengths. +pub fn do_batch_reveal_weights( + origin: T::RuntimeOrigin, + netuid: u16, + uids_list: Vec>, + values_list: Vec>, + salts_list: Vec>, + version_keys: Vec, +) -> DispatchResult { + // --- 1. Check that the input lists are of the same length. + let num_reveals = uids_list.len(); + ensure!( + num_reveals == values_list.len() + && num_reveals == salts_list.len() + && num_reveals == version_keys.len(), + Error::::InputLengthsUnequal + ); + + // --- 2. Check the caller's signature (hotkey). + let who = ensure_signed(origin.clone())?; + + log::debug!( + "do_batch_reveal_weights( hotkey:{:?} netuid:{:?})", + who, + netuid + ); + + // --- 3. Ensure commit-reveal is enabled for the network. + ensure!( + Self::get_commit_reveal_weights_enabled(netuid), + Error::::CommitRevealDisabled + ); + + // --- 4. Mutate the WeightCommits to retrieve existing commits for the user. + WeightCommits::::try_mutate_exists(netuid, &who, |maybe_commits| -> DispatchResult { + let commits = maybe_commits + .as_mut() + .ok_or(Error::::NoWeightsCommitFound)?; + + // --- 5. Remove any expired commits from the front of the queue, collecting their hashes. + let mut expired_hashes = Vec::new(); + while let Some((hash, commit_block, _, _)) = commits.front() { + if Self::is_commit_expired(netuid, *commit_block) { + // Collect the expired commit hash + expired_hashes.push(*hash); + commits.pop_front(); + } else { + break; + } + } - // --- 3. Ensure commit-reveal is enabled for the network. - ensure!( - Self::get_commit_reveal_weights_enabled(netuid), - Error::::CommitRevealDisabled - ); + // --- 6. 
Prepare to collect all provided hashes and their corresponding reveals. + let mut provided_hashes = Vec::new(); + let mut reveals = Vec::new(); - // --- 4. Mutate the WeightCommits to retrieve existing commits for the user. - WeightCommits::::try_mutate_exists(netuid, &who, |maybe_commits| -> DispatchResult { - let commits = maybe_commits - .as_mut() - .ok_or(Error::::NoWeightsCommitFound)?; + for ((uids, values), (salt, version_key)) in uids_list + .into_iter() + .zip(values_list) + .zip(salts_list.into_iter().zip(version_keys)) + { + // --- 6a. Hash the provided data. + let provided_hash: H256 = BlakeTwo256::hash_of(&( + who.clone(), + netuid, + uids.clone(), + values.clone(), + salt.clone(), + version_key, + )); + provided_hashes.push(provided_hash); + reveals.push((uids, values, version_key, provided_hash)); + } - // --- 5. Remove any expired commits from the front of the queue, collecting their hashes. - let mut expired_hashes = Vec::new(); - while let Some((hash, commit_block)) = commits.front() { - if Self::is_commit_expired(netuid, *commit_block) { - // Collect the expired commit hash - expired_hashes.push(*hash); - commits.pop_front(); + // --- 7. Validate all reveals first to ensure atomicity. + // This prevents partial updates if any reveal fails. + for (_uids, _values, _version_key, provided_hash) in &reveals { + // --- 7a. Check if the provided_hash is in the non-expired commits. + if !commits.iter().any(|(hash, _, _, _)| *hash == *provided_hash) { + // --- 7b. If not found, check if it matches any expired commits. + if expired_hashes.contains(provided_hash) { + return Err(Error::::ExpiredWeightCommit.into()); } else { - break; + return Err(Error::::InvalidRevealCommitHashNotMatch.into()); } } - // --- 6. Process each reveal. - for ((uids, values), (salt, version_key)) in uids_list - .into_iter() - .zip(values_list) - .zip(salts_list.into_iter().zip(version_keys)) - { - // --- 6a. Hash the provided data. 
- let provided_hash: H256 = BlakeTwo256::hash_of(&( - who.clone(), - netuid, - uids.clone(), - values.clone(), - salt.clone(), - version_key, - )); - - // --- 6b. Search for the provided_hash in the non-expired commits. - if let Some(position) = commits.iter().position(|(hash, _)| *hash == provided_hash) - { - // --- 6c. Get the commit block for the commit being revealed. - let (_, commit_block) = commits - .get(position) - .ok_or(Error::::NoWeightsCommitFound)?; - - // --- 6d. Ensure the commit is ready to be revealed in the current block range. - ensure!( - Self::is_reveal_block_range(netuid, *commit_block), - Error::::RevealTooEarly - ); - - // --- 6e. Remove all commits up to and including the one being revealed. - for _ in 0..=position { - commits.pop_front(); - } - - // --- 6f. Proceed to set the revealed weights. - Self::do_set_weights(origin.clone(), netuid, uids, values, version_key)?; + // --- 7c. Find the commit corresponding to the provided_hash. + let commit = commits + .iter() + .find(|(hash, _, _, _)| *hash == *provided_hash) + .ok_or(Error::::NoWeightsCommitFound)?; + + // --- 7d. Check if the commit is within the reveal window. + let current_block: u64 = Self::get_current_block_as_u64(); + let (_, _, first_reveal_block, last_reveal_block) = commit; + ensure!( + current_block >= *first_reveal_block && current_block <= *last_reveal_block, + Error::::RevealTooEarly + ); + } + + // --- 8. All reveals are valid. Proceed to remove and process each reveal. + for (uids, values, version_key, provided_hash) in reveals { + // --- 8a. Find the position of the provided_hash. + if let Some(position) = commits.iter().position(|(hash, _, _, _)| *hash == provided_hash) { + // --- 8b. Remove the commit from the queue. + commits.remove(position); + + // --- 8c. Proceed to set the revealed weights. + Self::do_set_weights(origin.clone(), netuid, uids, values, version_key)?; + } else { + // This case should not occur as we've already validated the existence of the hash. 
+ // However, to ensure safety, we handle it. + if expired_hashes.contains(&provided_hash) { + return Err(Error::::ExpiredWeightCommit.into()); } else { - // The provided_hash does not match any non-expired commits. - // Check if it matches any expired commits - if expired_hashes.contains(&provided_hash) { - return Err(Error::::ExpiredWeightCommit.into()); - } else { - return Err(Error::::InvalidRevealCommitHashNotMatch.into()); - } + return Err(Error::::InvalidRevealCommitHashNotMatch.into()); } } + } - // --- 7. If the queue is now empty, remove the storage entry for the user. - if commits.is_empty() { - *maybe_commits = None; - } + // --- 9. If the queue is now empty, remove the storage entry for the user. + if commits.is_empty() { + *maybe_commits = None; + } - // --- 8. Return ok. - Ok(()) - }) - } + // --- 10. Return ok. + Ok(()) + }) +} /// ---- The implementation for the extrinsic set_weights. /// @@ -703,6 +738,23 @@ impl Pallet { current_epoch > commit_epoch.saturating_add(reveal_period) } + pub fn get_reveal_blocks(netuid: u16, commit_block: u64) -> (u64, u64) { + let reveal_period: u64 = Self::get_reveal_period(netuid); + let tempo: u64 = Self::get_tempo(netuid) as u64; + let tempo_plus_one: u64 = tempo.saturating_add(1); + let netuid_plus_one: u64 = (netuid as u64).saturating_add(1); + + let commit_epoch: u64 = Self::get_epoch_index(netuid, commit_block); + let reveal_epoch: u64 = commit_epoch.saturating_add(reveal_period); + + let first_reveal_block = reveal_epoch + .saturating_mul(tempo_plus_one) + .saturating_sub(netuid_plus_one); + let last_reveal_block = first_reveal_block.saturating_add(tempo); + + (first_reveal_block, last_reveal_block) + } + pub fn set_reveal_period(netuid: u16, reveal_period: u64) { RevealPeriodEpochs::::insert(netuid, reveal_period); } diff --git a/pallets/subtensor/tests/swap_hotkey.rs b/pallets/subtensor/tests/swap_hotkey.rs index bf5ecb301..ad4d0414c 100644 --- a/pallets/subtensor/tests/swap_hotkey.rs +++ 
b/pallets/subtensor/tests/swap_hotkey.rs @@ -351,8 +351,8 @@ fn test_swap_weight_commits() { let new_hotkey = U256::from(2); let coldkey = U256::from(3); let netuid = 0u16; - let mut weight_commits: VecDeque<(H256, u64)> = VecDeque::new(); - weight_commits.push_back((H256::from_low_u64_be(100), 200)); + let mut weight_commits: VecDeque<(H256, u64, u64, u64)> = VecDeque::new(); + weight_commits.push_back((H256::from_low_u64_be(100), 200, 1, 1)); let mut weight = Weight::zero(); add_network(netuid, 0, 1); diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index ddc2ccd77..7f6592f23 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -2432,7 +2432,7 @@ fn test_expired_commits_handling_in_commit_and_reveal() { )); // 6. Verify that the number of unrevealed, non-expired commits is now 6 - let commits: VecDeque<(H256, u64)> = + let commits: VecDeque<(H256, u64, u64, u64)> = pallet_subtensor::WeightCommits::::get(netuid, hotkey) .expect("Expected a commit"); assert_eq!(commits.len(), 6); // 5 non-expired commits from epoch 1 + new commit From 6886a6d5d5992d8d03b2319057ad2b3b483d869f Mon Sep 17 00:00:00 2001 From: johnreedv Date: Mon, 28 Oct 2024 14:49:58 -0700 Subject: [PATCH 180/213] update test --- pallets/subtensor/tests/weights.rs | 69 +++++++++++------------------- 1 file changed, 25 insertions(+), 44 deletions(-) diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 7f6592f23..c21c754af 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -3584,7 +3584,7 @@ fn test_batch_reveal_with_out_of_order_commits() { SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); - // 1. Commit multiple times + // 1. 
Commit multiple times (A, B, C) let mut commit_info = Vec::new(); for i in 0..3 { let salt: Vec = vec![i as u16; 8]; @@ -3606,25 +3606,25 @@ fn test_batch_reveal_with_out_of_order_commits() { step_epochs(1, netuid); - // 2. Prepare batch reveal parameters out of order + // 2. Prepare batch reveal parameters for commits A and C (out of order) let salts_list: Vec> = vec![ - commit_info[2].1.clone(), // Third commit - commit_info[0].1.clone(), // First commit - commit_info[1].1.clone(), // Second commit + commit_info[2].1.clone(), // Third commit (C) + commit_info[0].1.clone(), // First commit (A) ]; let uids_list_out_of_order = vec![ - uids_list[2].clone(), - uids_list[0].clone(), - uids_list[1].clone(), + uids_list[2].clone(), // C + uids_list[0].clone(), // A ]; let weight_values_list_out_of_order = vec![ - weight_values_list[2].clone(), - weight_values_list[0].clone(), - weight_values_list[1].clone(), + weight_values_list[2].clone(), // C + weight_values_list[0].clone(), // A + ]; + let version_keys_out_of_order = vec![ + version_keys[2], // C + version_keys[0], // A ]; - let version_keys_out_of_order = vec![version_keys[2], version_keys[0], version_keys[1]]; - // 3. Attempt batch reveal out of order + // 3. Attempt batch reveal of A and C out of order let result = SubtensorModule::do_batch_reveal_weights( RuntimeOrigin::signed(hotkey), netuid, @@ -3634,44 +3634,25 @@ fn test_batch_reveal_with_out_of_order_commits() { version_keys_out_of_order, ); - // 4. Ensure the batch reveal fails with InvalidRevealCommitHashNotMatch - assert_err!(result, Error::::InvalidRevealCommitHashNotMatch); - - // 5. 
Reveal the first commit to proceed correctly - let first_salt = commit_info[0].1.clone(); - let first_uids = uids_list[0].clone(); - let first_weights = weight_values_list[0].clone(); - let first_version_key = version_keys[0]; - - assert_ok!(SubtensorModule::do_batch_reveal_weights( - RuntimeOrigin::signed(hotkey), - netuid, - vec![first_uids], - vec![first_weights], - vec![first_salt], - vec![first_version_key], - )); + // 4. Ensure the batch reveal succeeds + assert_ok!(result); - // 6. Now attempt to reveal the remaining commits in order - let remaining_salts = vec![ - commit_info[1].1.clone(), // Second commit - commit_info[2].1.clone(), // Third commit - ]; - let remaining_uids_list = vec![uids_list[1].clone(), uids_list[2].clone()]; - let remaining_weight_values_list = - vec![weight_values_list[1].clone(), weight_values_list[2].clone()]; - let remaining_version_keys = vec![version_keys[1], version_keys[2]]; + // 5. Prepare and reveal the remaining commit (B) + let remaining_salt = commit_info[1].1.clone(); + let remaining_uids = uids_list[1].clone(); + let remaining_weights = weight_values_list[1].clone(); + let remaining_version_key = version_keys[1]; assert_ok!(SubtensorModule::do_batch_reveal_weights( RuntimeOrigin::signed(hotkey), netuid, - remaining_uids_list, - remaining_weight_values_list, - remaining_salts, - remaining_version_keys, + vec![remaining_uids], + vec![remaining_weights], + vec![remaining_salt], + vec![remaining_version_key], )); - // 7. Ensure all commits are removed + // 6. 
Ensure all commits are removed let commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey); assert!(commits.is_none()); }); From 59426cd4bb264a137071bd9a477be03e4e884a95 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Mon, 28 Oct 2024 17:06:59 -0700 Subject: [PATCH 181/213] add test_get_reveal_blocks & fmt --- pallets/subtensor/src/subnets/weights.rs | 312 ++++++++++++----------- pallets/subtensor/tests/weights.rs | 124 +++++++++ 2 files changed, 283 insertions(+), 153 deletions(-) diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 8cbf7695b..44c343d69 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -212,174 +212,180 @@ impl Pallet { }) } -/// ---- The implementation for batch revealing committed weights. -/// -/// # Args: -/// * `origin`: (`::RuntimeOrigin`): -/// - The signature of the revealing hotkey. -/// -/// * `netuid` (`u16`): -/// - The u16 network identifier. -/// -/// * `uids_list` (`Vec>`): -/// - A list of uids for each set of weights being revealed. -/// -/// * `values_list` (`Vec>`): -/// - A list of values for each set of weights being revealed. -/// -/// * `salts_list` (`Vec>`): -/// - A list of salts used to generate the commit hashes. -/// -/// * `version_keys` (`Vec`): -/// - A list of network version keys. -/// -/// # Raises: -/// * `CommitRevealDisabled`: -/// - Attempting to reveal weights when the commit-reveal mechanism is disabled. -/// -/// * `NoWeightsCommitFound`: -/// - Attempting to reveal weights without an existing commit. -/// -/// * `ExpiredWeightCommit`: -/// - Attempting to reveal a weight commit that has expired. -/// -/// * `RevealTooEarly`: -/// - Attempting to reveal weights outside the valid reveal period. -/// -/// * `InvalidRevealCommitHashNotMatch`: -/// - The revealed hash does not match any committed hash. -/// -/// * `InputLengthsUnequal`: -/// - The input vectors are of mismatched lengths. 
-pub fn do_batch_reveal_weights( - origin: T::RuntimeOrigin, - netuid: u16, - uids_list: Vec>, - values_list: Vec>, - salts_list: Vec>, - version_keys: Vec, -) -> DispatchResult { - // --- 1. Check that the input lists are of the same length. - let num_reveals = uids_list.len(); - ensure!( - num_reveals == values_list.len() - && num_reveals == salts_list.len() - && num_reveals == version_keys.len(), - Error::::InputLengthsUnequal - ); - - // --- 2. Check the caller's signature (hotkey). - let who = ensure_signed(origin.clone())?; - - log::debug!( - "do_batch_reveal_weights( hotkey:{:?} netuid:{:?})", - who, - netuid - ); - - // --- 3. Ensure commit-reveal is enabled for the network. - ensure!( - Self::get_commit_reveal_weights_enabled(netuid), - Error::::CommitRevealDisabled - ); - - // --- 4. Mutate the WeightCommits to retrieve existing commits for the user. - WeightCommits::::try_mutate_exists(netuid, &who, |maybe_commits| -> DispatchResult { - let commits = maybe_commits - .as_mut() - .ok_or(Error::::NoWeightsCommitFound)?; - - // --- 5. Remove any expired commits from the front of the queue, collecting their hashes. - let mut expired_hashes = Vec::new(); - while let Some((hash, commit_block, _, _)) = commits.front() { - if Self::is_commit_expired(netuid, *commit_block) { - // Collect the expired commit hash - expired_hashes.push(*hash); - commits.pop_front(); - } else { - break; - } - } + /// ---- The implementation for batch revealing committed weights. + /// + /// # Args: + /// * `origin`: (`::RuntimeOrigin`): + /// - The signature of the revealing hotkey. + /// + /// * `netuid` (`u16`): + /// - The u16 network identifier. + /// + /// * `uids_list` (`Vec>`): + /// - A list of uids for each set of weights being revealed. + /// + /// * `values_list` (`Vec>`): + /// - A list of values for each set of weights being revealed. + /// + /// * `salts_list` (`Vec>`): + /// - A list of salts used to generate the commit hashes. 
+ /// + /// * `version_keys` (`Vec`): + /// - A list of network version keys. + /// + /// # Raises: + /// * `CommitRevealDisabled`: + /// - Attempting to reveal weights when the commit-reveal mechanism is disabled. + /// + /// * `NoWeightsCommitFound`: + /// - Attempting to reveal weights without an existing commit. + /// + /// * `ExpiredWeightCommit`: + /// - Attempting to reveal a weight commit that has expired. + /// + /// * `RevealTooEarly`: + /// - Attempting to reveal weights outside the valid reveal period. + /// + /// * `InvalidRevealCommitHashNotMatch`: + /// - The revealed hash does not match any committed hash. + /// + /// * `InputLengthsUnequal`: + /// - The input vectors are of mismatched lengths. + pub fn do_batch_reveal_weights( + origin: T::RuntimeOrigin, + netuid: u16, + uids_list: Vec>, + values_list: Vec>, + salts_list: Vec>, + version_keys: Vec, + ) -> DispatchResult { + // --- 1. Check that the input lists are of the same length. + let num_reveals = uids_list.len(); + ensure!( + num_reveals == values_list.len() + && num_reveals == salts_list.len() + && num_reveals == version_keys.len(), + Error::::InputLengthsUnequal + ); - // --- 6. Prepare to collect all provided hashes and their corresponding reveals. - let mut provided_hashes = Vec::new(); - let mut reveals = Vec::new(); + // --- 2. Check the caller's signature (hotkey). + let who = ensure_signed(origin.clone())?; - for ((uids, values), (salt, version_key)) in uids_list - .into_iter() - .zip(values_list) - .zip(salts_list.into_iter().zip(version_keys)) - { - // --- 6a. Hash the provided data. - let provided_hash: H256 = BlakeTwo256::hash_of(&( - who.clone(), - netuid, - uids.clone(), - values.clone(), - salt.clone(), - version_key, - )); - provided_hashes.push(provided_hash); - reveals.push((uids, values, version_key, provided_hash)); - } + log::debug!( + "do_batch_reveal_weights( hotkey:{:?} netuid:{:?})", + who, + netuid + ); - // --- 7. Validate all reveals first to ensure atomicity. 
- // This prevents partial updates if any reveal fails. - for (_uids, _values, _version_key, provided_hash) in &reveals { - // --- 7a. Check if the provided_hash is in the non-expired commits. - if !commits.iter().any(|(hash, _, _, _)| *hash == *provided_hash) { - // --- 7b. If not found, check if it matches any expired commits. - if expired_hashes.contains(provided_hash) { - return Err(Error::::ExpiredWeightCommit.into()); + // --- 3. Ensure commit-reveal is enabled for the network. + ensure!( + Self::get_commit_reveal_weights_enabled(netuid), + Error::::CommitRevealDisabled + ); + + // --- 4. Mutate the WeightCommits to retrieve existing commits for the user. + WeightCommits::::try_mutate_exists(netuid, &who, |maybe_commits| -> DispatchResult { + let commits = maybe_commits + .as_mut() + .ok_or(Error::::NoWeightsCommitFound)?; + + // --- 5. Remove any expired commits from the front of the queue, collecting their hashes. + let mut expired_hashes = Vec::new(); + while let Some((hash, commit_block, _, _)) = commits.front() { + if Self::is_commit_expired(netuid, *commit_block) { + // Collect the expired commit hash + expired_hashes.push(*hash); + commits.pop_front(); } else { - return Err(Error::::InvalidRevealCommitHashNotMatch.into()); + break; } } - // --- 7c. Find the commit corresponding to the provided_hash. - let commit = commits - .iter() - .find(|(hash, _, _, _)| *hash == *provided_hash) - .ok_or(Error::::NoWeightsCommitFound)?; + // --- 6. Prepare to collect all provided hashes and their corresponding reveals. + let mut provided_hashes = Vec::new(); + let mut reveals = Vec::new(); - // --- 7d. Check if the commit is within the reveal window. 
- let current_block: u64 = Self::get_current_block_as_u64(); - let (_, _, first_reveal_block, last_reveal_block) = commit; - ensure!( - current_block >= *first_reveal_block && current_block <= *last_reveal_block, - Error::::RevealTooEarly - ); - } + for ((uids, values), (salt, version_key)) in uids_list + .into_iter() + .zip(values_list) + .zip(salts_list.into_iter().zip(version_keys)) + { + // --- 6a. Hash the provided data. + let provided_hash: H256 = BlakeTwo256::hash_of(&( + who.clone(), + netuid, + uids.clone(), + values.clone(), + salt.clone(), + version_key, + )); + provided_hashes.push(provided_hash); + reveals.push((uids, values, version_key, provided_hash)); + } - // --- 8. All reveals are valid. Proceed to remove and process each reveal. - for (uids, values, version_key, provided_hash) in reveals { - // --- 8a. Find the position of the provided_hash. - if let Some(position) = commits.iter().position(|(hash, _, _, _)| *hash == provided_hash) { - // --- 8b. Remove the commit from the queue. - commits.remove(position); + // --- 7. Validate all reveals first to ensure atomicity. + // This prevents partial updates if any reveal fails. + for (_uids, _values, _version_key, provided_hash) in &reveals { + // --- 7a. Check if the provided_hash is in the non-expired commits. + if !commits + .iter() + .any(|(hash, _, _, _)| *hash == *provided_hash) + { + // --- 7b. If not found, check if it matches any expired commits. + if expired_hashes.contains(provided_hash) { + return Err(Error::::ExpiredWeightCommit.into()); + } else { + return Err(Error::::InvalidRevealCommitHashNotMatch.into()); + } + } - // --- 8c. Proceed to set the revealed weights. - Self::do_set_weights(origin.clone(), netuid, uids, values, version_key)?; - } else { - // This case should not occur as we've already validated the existence of the hash. - // However, to ensure safety, we handle it. 
- if expired_hashes.contains(&provided_hash) { - return Err(Error::::ExpiredWeightCommit.into()); + // --- 7c. Find the commit corresponding to the provided_hash. + let commit = commits + .iter() + .find(|(hash, _, _, _)| *hash == *provided_hash) + .ok_or(Error::::NoWeightsCommitFound)?; + + // --- 7d. Check if the commit is within the reveal window. + let current_block: u64 = Self::get_current_block_as_u64(); + let (_, _, first_reveal_block, last_reveal_block) = commit; + ensure!( + current_block >= *first_reveal_block && current_block <= *last_reveal_block, + Error::::RevealTooEarly + ); + } + + // --- 8. All reveals are valid. Proceed to remove and process each reveal. + for (uids, values, version_key, provided_hash) in reveals { + // --- 8a. Find the position of the provided_hash. + if let Some(position) = commits + .iter() + .position(|(hash, _, _, _)| *hash == provided_hash) + { + // --- 8b. Remove the commit from the queue. + commits.remove(position); + + // --- 8c. Proceed to set the revealed weights. + Self::do_set_weights(origin.clone(), netuid, uids, values, version_key)?; } else { - return Err(Error::::InvalidRevealCommitHashNotMatch.into()); + // This case should not occur as we've already validated the existence of the hash. + // However, to ensure safety, we handle it. + if expired_hashes.contains(&provided_hash) { + return Err(Error::::ExpiredWeightCommit.into()); + } else { + return Err(Error::::InvalidRevealCommitHashNotMatch.into()); + } } } - } - // --- 9. If the queue is now empty, remove the storage entry for the user. - if commits.is_empty() { - *maybe_commits = None; - } + // --- 9. If the queue is now empty, remove the storage entry for the user. + if commits.is_empty() { + *maybe_commits = None; + } - // --- 10. Return ok. - Ok(()) - }) -} + // --- 10. Return ok. + Ok(()) + }) + } /// ---- The implementation for the extrinsic set_weights. 
/// diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index c21c754af..0625e1e20 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -3935,3 +3935,127 @@ fn test_highly_concurrent_commits_and_reveals_with_multiple_hotkeys() { assert_eq!(SubtensorModule::get_tempo(netuid), 200); }) } + +#[test] +fn test_get_reveal_blocks() { + new_test_ext(1).execute_with(|| { + // **1. Define Test Parameters** + let netuid: u16 = 1; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; + let version_key: u64 = 0; + let hotkey: U256 = U256::from(1); + + // **2. Generate the Commit Hash** + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + + // **3. Initialize the Block Number to 0** + System::set_block_number(0); + + // **4. Define Network Parameters** + let tempo: u16 = 5; + add_network(netuid, tempo, 0); + + // **5. Register Neurons and Configure the Network** + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + SubtensorModule::set_weights_set_rate_limit(netuid, 5); + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + + // **6. Commit Weights at Block 0** + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + + // **7. Retrieve the Reveal Blocks Using `get_reveal_blocks`** + let (first_reveal_block, last_reveal_block) = SubtensorModule::get_reveal_blocks(netuid, 0); + + // **8. 
Assert Correct Calculation of Reveal Blocks** + // With tempo=5, netuid=1, reveal_period=1: + // commit_epoch = (0 + 2) / 6 = 0 + // reveal_epoch = 0 + 1 = 1 + // first_reveal_block = 1 * 6 - 2 = 4 + // last_reveal_block = 4 + 5 = 9 + assert_eq!(first_reveal_block, 4); + assert_eq!(last_reveal_block, 9); + + // **9. Attempt to Reveal Before `first_reveal_block` (Block 3)** + step_block(3); // Advance to block 3 + let result = SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + ); + assert_err!(result, Error::::RevealTooEarly); + + // **10. Advance to `first_reveal_block` (Block 4)** + step_block(1); // Advance to block 4 + let result = SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + ); + assert_ok!(result); + + // **11. Attempt to Reveal Again at Block 4 (Should Fail)** + let result = SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + ); + assert_err!(result, Error::::NoWeightsCommitFound); + + // **12. Advance to After `last_reveal_block` (Block 10)** + step_block(6); // Advance from block 4 to block 10 + + // **13. Attempt to Reveal at Block 10 (Should Fail)** + let result = SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + ); + assert_err!(result, Error::::NoWeightsCommitFound); + + // **14. Attempt to Reveal Outside of Any Reveal Window (No Commit)** + let result = SubtensorModule::reveal_weights( + RuntimeOrigin::signed(hotkey), + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + ); + assert_err!(result, Error::::NoWeightsCommitFound); + + // **15. 
Verify that All Commits Have Been Removed from Storage** + let commits = pallet_subtensor::WeightCommits::::get(netuid, hotkey); + assert!( + commits.is_none(), + "Commits should be cleared after successful reveal" + ); + }) +} From 25a26e88e528d5225c22113f184303a9c86516a0 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Tue, 29 Oct 2024 03:28:07 -0700 Subject: [PATCH 182/213] update doc --- pallets/subtensor/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 9a6d60f8f..328a59f7e 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -1251,7 +1251,7 @@ pub mod pallet { /// ITEM( weights_min_stake ) pub type WeightsMinStake = StorageValue<_, u64, ValueQuery, DefaultWeightsMinStake>; #[pallet::storage] - /// --- MAP (netuid, who) --> VecDeque<(hash, commit_block)> | Stores a queue of commits for an account on a given netuid. + /// --- MAP (netuid, who) --> VecDeque<(hash, commit_block, first_reveal_block, last_reveal_block)> | Stores a queue of commits for an account on a given netuid. 
pub type WeightCommits = StorageDoubleMap< _, Twox64Concat, From 71d7c7095be6023432e3cec3f13a657b73deb5d2 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Tue, 29 Oct 2024 11:58:49 -0400 Subject: [PATCH 183/213] Resolve merge conflicts around stake delta feature --- .../subtensor/src/coinbase/run_coinbase.rs | 14 ++++++++++ pallets/subtensor/src/lib.rs | 10 +++---- pallets/subtensor/src/swap/swap_hotkey.rs | 28 +++++++++++++++++-- 3 files changed, 44 insertions(+), 8 deletions(-) diff --git a/pallets/subtensor/src/coinbase/run_coinbase.rs b/pallets/subtensor/src/coinbase/run_coinbase.rs index 723edc423..db86c7c58 100644 --- a/pallets/subtensor/src/coinbase/run_coinbase.rs +++ b/pallets/subtensor/src/coinbase/run_coinbase.rs @@ -382,4 +382,18 @@ impl Pallet { let remainder = block_plus_netuid.rem_euclid(tempo_plus_one); (tempo as u64).saturating_sub(remainder) } + + /// Calculates the nonviable stake for a nominator. + /// The nonviable stake is the stake that was added by the nominator since the last emission drain. + /// This stake will not receive emission until the next emission drain. + /// Note: if the stake delta is below zero, we return zero. We don't allow more stake than the nominator has. + pub fn get_nonviable_stake(hotkey: &T::AccountId, nominator: &T::AccountId) -> u64 { + let stake_delta = StakeDeltaSinceLastEmissionDrain::::get(hotkey, nominator); + if stake_delta.is_negative() { + 0 + } else { + // Should never fail the into, but we handle it anyway. + stake_delta.try_into().unwrap_or(u64::MAX) + } + } } diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 5081d69d3..9fb2adeb7 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -791,17 +791,17 @@ pub mod pallet { DefaultAccumulatedEmission, >; #[pallet::storage] - /// Map ( hot, cold ) --> block_number | Last add stake increase. 
- pub type LastAddStakeIncrease = StorageDoubleMap< + /// Map ( hot, cold ) --> stake: i128 | Stake added/removed since last emission drain. + pub type StakeDeltaSinceLastEmissionDrain = StorageDoubleMap< _, Blake2_128Concat, T::AccountId, Identity, T::AccountId, - u64, + i128, ValueQuery, - DefaultAccountTake, - >; + DefaultStakeDelta, + >; #[pallet::storage] /// DMAP ( parent, netuid ) --> Vec<(proportion,child)> pub type ChildKeys = StorageDoubleMap< diff --git a/pallets/subtensor/src/swap/swap_hotkey.rs b/pallets/subtensor/src/swap/swap_hotkey.rs index ca3d0b5a7..cf2ace997 100644 --- a/pallets/subtensor/src/swap/swap_hotkey.rs +++ b/pallets/subtensor/src/swap/swap_hotkey.rs @@ -206,7 +206,16 @@ impl Pallet { Delegates::::insert(new_hotkey, old_delegate_take); weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); } - // 9. Swap all subnet specific info. + + // 9. swap PendingdHotkeyEmission + if PendingdHotkeyEmission::::contains_key(old_hotkey) { + let old_pending_hotkey_emission = PendingdHotkeyEmission::::get(old_hotkey); + PendingdHotkeyEmission::::remove(old_hotkey); + PendingdHotkeyEmission::::insert(new_hotkey, old_pending_hotkey_emission); + weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); + } + + // 10. Swap all subnet specific info. let all_netuids: Vec = Self::get_all_subnet_netuids(); for netuid in all_netuids { // 9.1 Remove the previous hotkey and insert the new hotkey from membership. @@ -226,7 +235,7 @@ impl Pallet { Uids::::insert(netuid, new_hotkey, old_uid); weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 2)); - // 9.2.2 Swap the keys. + // 10.2.2 Swap the keys. Keys::::insert(netuid, old_uid, new_hotkey.clone()); weight.saturating_accrue(T::DbWeight::get().reads_writes(0, 1)); } @@ -277,7 +286,7 @@ impl Pallet { } } - // 9.7. Swap neuron TLS certificates. + // 10.7. Swap neuron TLS certificates. // NeuronCertificates( netuid, hotkey ) -> Vec -- the neuron certificate for the hotkey. 
if is_network_member { if let Ok(old_neuron_certificates) = @@ -355,6 +364,19 @@ impl Pallet { } } + // 14. Swap Stake Delta for all coldkeys. + for (coldkey, stake_delta) in StakeDeltaSinceLastEmissionDrain::::iter_prefix(old_hotkey) + { + let new_stake_delta = StakeDeltaSinceLastEmissionDrain::::get(new_hotkey, &coldkey); + StakeDeltaSinceLastEmissionDrain::::insert( + new_hotkey, + &coldkey, + new_stake_delta.saturating_add(stake_delta), + ); + StakeDeltaSinceLastEmissionDrain::::remove(old_hotkey, &coldkey); + weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); + } + // Return successful after swapping all the relevant terms. Ok(()) } From 0b117650bcaf4c69dd628abd299d69afc0eb5ba9 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Tue, 29 Oct 2024 12:10:27 -0400 Subject: [PATCH 184/213] Make testnet compatible with mainnet --- Cargo.lock | 93 ++++++++++--------- .../subtensor/src/coinbase/run_coinbase.rs | 50 +++++----- pallets/subtensor/src/lib.rs | 5 + pallets/subtensor/src/staking/add_stake.rs | 6 +- 4 files changed, 81 insertions(+), 73 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0902ad6de..ade99b423 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2357,6 +2357,12 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2" + [[package]] name = "foreign-types" version = "0.3.2" @@ -3071,6 +3077,11 @@ name = "hashbrown" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] [[package]] name = "hashlink" @@ -3282,9 +3293,9 @@ dependencies = [ [[package]] name = "hyper" 
-version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" dependencies = [ "bytes", "futures-channel", @@ -3318,15 +3329,15 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-util", "http 1.1.0", "http-body 1.0.1", - "hyper 1.4.1", + "hyper 1.5.0", "pin-project-lite", "tokio", "tower-service", @@ -3650,9 +3661,9 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.24.5" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126b48a5acc3c52fbd5381a77898cb60e145123179588a29e7ac48f9c06e401b" +checksum = "c5c71d8c1a731cc4227c2f698d377e7848ca12c8a48866fc5e6951c43a4db843" dependencies = [ "jsonrpsee-core", "jsonrpsee-proc-macros", @@ -3664,9 +3675,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.5" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0e503369a76e195b65af35058add0e6900b794a4e9a9316900ddd3a87a80477" +checksum = "f2882f6f8acb9fdaec7cefc4fd607119a9bd709831df7d7672a1d3b644628280" dependencies = [ "async-trait", "bytes", @@ -3687,9 +3698,9 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.24.5" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc660a9389e2748e794a40673a4155d501f32db667757cdb80edeff0306b489b" +checksum = "c06c01ae0007548e73412c08e2285ffe5d723195bf268bce67b1b77c3bb2a14d" dependencies = [ "heck 0.5.0", "proc-macro-crate 3.2.0", @@ -3700,15 +3711,15 @@ 
dependencies = [ [[package]] name = "jsonrpsee-server" -version = "0.24.5" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af6e6c9b6d975edcb443565d648b605f3e85a04ec63aa6941811a8894cc9cded" +checksum = "82ad8ddc14be1d4290cd68046e7d1d37acd408efed6d3ca08aefcc3ad6da069c" dependencies = [ "futures-util", "http 1.1.0", "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.0", "hyper-util", "jsonrpsee-core", "jsonrpsee-types", @@ -3727,9 +3738,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.24.5" +version = "0.24.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8fb16314327cbc94fdf7965ef7e4422509cd5597f76d137bd104eb34aeede67" +checksum = "a178c60086f24cc35bb82f57c651d0d25d99c4742b4d335de04e97fa1f08a8a1" dependencies = [ "http 1.1.0", "serde", @@ -3908,7 +3919,7 @@ dependencies = [ "libp2p-identity", "log", "multiaddr 0.18.2", - "multihash 0.19.1", + "multihash 0.19.2", "multistream-select", "once_cell", "parking_lot 0.12.3", @@ -3953,7 +3964,7 @@ dependencies = [ "libp2p-identity", "libp2p-swarm", "log", - "lru 0.12.4", + "lru 0.12.5", "quick-protobuf", "quick-protobuf-codec", "smallvec", @@ -3970,7 +3981,7 @@ dependencies = [ "bs58 0.5.1", "ed25519-dalek", "hkdf", - "multihash 0.19.1", + "multihash 0.19.2", "quick-protobuf", "rand", "sha2 0.10.8", @@ -4059,7 +4070,7 @@ dependencies = [ "libp2p-identity", "log", "multiaddr 0.18.2", - "multihash 0.19.1", + "multihash 0.19.2", "once_cell", "quick-protobuf", "rand", @@ -4491,11 +4502,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.14.5", + "hashbrown 0.15.0", ] [[package]] @@ -4827,7 +4838,7 @@ dependencies = [ 
"data-encoding", "libp2p-identity", "multibase", - "multihash 0.19.1", + "multihash 0.19.2", "percent-encoding", "serde", "static_assertions", @@ -4882,12 +4893,12 @@ dependencies = [ [[package]] name = "multihash" -version = "0.19.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "076d548d76a0e2a0d4ab471d0b1c36c577786dfc4471242035d97a12a735c492" +checksum = "cc41f430805af9d1cf4adae4ed2149c759b877b01d909a1f40256188d09345d2" dependencies = [ "core2", - "unsigned-varint 0.7.2", + "unsigned-varint 0.8.0", ] [[package]] @@ -4910,12 +4921,6 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" -[[package]] -name = "multimap" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" - [[package]] name = "multistream-select" version = "0.13.0" @@ -5377,9 +5382,9 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssl" -version = "0.10.66" +version = "0.10.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" dependencies = [ "bitflags 2.6.0", "cfg-if", @@ -5409,18 +5414,18 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "300.3.2+3.3.2" +version = "300.4.0+3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a211a18d945ef7e648cc6e0058f4c548ee46aab922ea203e0d30e966ea23647b" +checksum = "a709e02f2b4aca747929cca5ed248880847c650233cf8b8cdc48f40aaf4898a6" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.103" +version = "0.9.104" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" dependencies = [ "cc", "libc", @@ -6527,7 +6532,7 @@ dependencies = [ "itertools 0.10.5", "lazy_static", "log", - "multimap 0.8.3", + "multimap", "petgraph", "prettyplease 0.1.25", "prost 0.11.9", @@ -6548,7 +6553,7 @@ dependencies = [ "heck 0.5.0", "itertools 0.12.1", "log", - "multimap 0.10.0", + "multimap", "once_cell", "petgraph", "prettyplease 0.2.22", @@ -7913,7 +7918,7 @@ dependencies = [ "litep2p", "log", "multiaddr 0.18.2", - "multihash 0.19.1", + "multihash 0.19.2", "rand", "thiserror", "zeroize", @@ -8025,7 +8030,7 @@ dependencies = [ "governor", "http 1.1.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.0", "ip_network", "jsonrpsee", "log", @@ -9830,7 +9835,7 @@ version = "0.17.0" source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=v1.16.0-rc1#a427d8fb677b62635dfb78a6e530facdd2c362ec" dependencies = [ "http-body-util", - "hyper 1.4.1", + "hyper 1.5.0", "hyper-util", "log", "prometheus", diff --git a/pallets/subtensor/src/coinbase/run_coinbase.rs b/pallets/subtensor/src/coinbase/run_coinbase.rs index db86c7c58..badb811fa 100644 --- a/pallets/subtensor/src/coinbase/run_coinbase.rs +++ b/pallets/subtensor/src/coinbase/run_coinbase.rs @@ -265,69 +265,65 @@ impl Pallet { // --- 1.0 Drain the hotkey emission. PendingdHotkeyEmission::::insert(hotkey, 0); - // --- 2 Retrieve the last time this hotkey's emissions were drained. - let last_emission_drain: u64 = LastHotkeyEmissionDrain::::get(hotkey); - - // --- 3 Update the block value to the current block number. + // --- 2 Update the block value to the current block number. LastHotkeyEmissionDrain::::insert(hotkey, block_number); - // --- 4 Retrieve the total stake for the hotkey from all nominations. + // --- 3 Retrieve the total stake for the hotkey from all nominations. 
let total_hotkey_stake: u64 = Self::get_total_stake_for_hotkey(hotkey); - // --- 5 Calculate the emission take for the hotkey. + // --- 4 Calculate the emission take for the hotkey. let take_proportion: I64F64 = I64F64::from_num(Delegates::::get(hotkey)) .saturating_div(I64F64::from_num(u16::MAX)); let hotkey_take: u64 = (take_proportion.saturating_mul(I64F64::from_num(emission))).to_num::(); - // --- 6 Compute the remaining emission after deducting the hotkey's take. + // --- 5 Compute the remaining emission after deducting the hotkey's take. let emission_minus_take: u64 = emission.saturating_sub(hotkey_take); - // --- 7 Calculate the remaining emission after the hotkey's take. + // --- 6 Calculate the remaining emission after the hotkey's take. let mut remainder: u64 = emission_minus_take; - // --- 8 Iterate over each nominator and get all viable stake. + // --- 7 Iterate over each nominator and get all viable stake. let mut total_viable_nominator_stake: u64 = total_hotkey_stake; - for (nominator, nominator_stake) in Stake::::iter_prefix(hotkey) { - if LastAddStakeIncrease::::get(hotkey, nominator) > last_emission_drain { - total_viable_nominator_stake = - total_viable_nominator_stake.saturating_sub(nominator_stake); - } + for (nominator, _) in Stake::::iter_prefix(hotkey) { + let nonviable_nomintaor_stake = Self::get_nonviable_stake(hotkey, &nominator); + + total_viable_nominator_stake = + total_viable_nominator_stake.saturating_sub(nonviable_nomintaor_stake); } - // --- 9 Iterate over each nominator. + // --- 8 Iterate over each nominator. if total_viable_nominator_stake != 0 { for (nominator, nominator_stake) in Stake::::iter_prefix(hotkey) { - // --- 10 Check if the stake was manually increased by the user since the last emission drain for this hotkey. + // --- 9 Check if the stake was manually increased by the user since the last emission drain for this hotkey. // If it was, skip this nominator as they will not receive their proportion of the emission. 
- if LastAddStakeIncrease::::get(hotkey, nominator.clone()) > last_emission_drain { - continue; - } + let viable_nominator_stake = + nominator_stake.saturating_sub(Self::get_nonviable_stake(hotkey, &nominator)); - // --- 11 Calculate this nominator's share of the emission. - let nominator_emission: I64F64 = I64F64::from_num(emission_minus_take) - .saturating_mul(I64F64::from_num(nominator_stake)) + // --- 10 Calculate this nominator's share of the emission. + let nominator_emission: I64F64 = I64F64::from_num(viable_nominator_stake) .checked_div(I64F64::from_num(total_viable_nominator_stake)) - .unwrap_or(I64F64::from_num(0)); + .unwrap_or(I64F64::from_num(0)) + .saturating_mul(I64F64::from_num(emission_minus_take)); - // --- 12 Increase the stake for the nominator. + // --- 11 Increase the stake for the nominator. Self::increase_stake_on_coldkey_hotkey_account( &nominator, hotkey, nominator_emission.to_num::(), ); - // --- 13* Record event and Subtract the nominator's emission from the remainder. + // --- 12* Record event and Subtract the nominator's emission from the remainder. total_new_tao = total_new_tao.saturating_add(nominator_emission.to_num::()); remainder = remainder.saturating_sub(nominator_emission.to_num::()); } } - // --- 14 Finally, add the stake to the hotkey itself, including its take and the remaining emission. + // --- 13 Finally, add the stake to the hotkey itself, including its take and the remaining emission. let hotkey_new_tao: u64 = hotkey_take.saturating_add(remainder); Self::increase_stake_on_hotkey_account(hotkey, hotkey_new_tao); - // --- 15 Record new tao creation event and return the amount created. + // --- 14 Record new tao creation event and return the amount created. 
total_new_tao = total_new_tao.saturating_add(hotkey_new_tao); total_new_tao } diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 9fb2adeb7..fa24ccc8d 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -259,6 +259,11 @@ pub mod pallet { 0 } #[pallet::type_value] + /// Default stake delta. + pub fn DefaultStakeDelta() -> i128 { + 0 + } + #[pallet::type_value] /// Default stakes per interval. pub fn DefaultStakesPerInterval() -> (u64, u64) { (0, 0) diff --git a/pallets/subtensor/src/staking/add_stake.rs b/pallets/subtensor/src/staking/add_stake.rs index c9cbd7e04..72d8374bc 100644 --- a/pallets/subtensor/src/staking/add_stake.rs +++ b/pallets/subtensor/src/staking/add_stake.rs @@ -70,8 +70,10 @@ impl Pallet { Error::::StakeRateLimitExceeded ); - // Set the last time the stake increased for nominator drain protection. - LastAddStakeIncrease::::insert(&hotkey, &coldkey, Self::get_current_block_as_u64()); + // Track this addition in the stake delta. + StakeDeltaSinceLastEmissionDrain::::mutate(&hotkey, &coldkey, |stake_delta| { + *stake_delta = stake_delta.saturating_add_unsigned(stake_to_be_added as u128); + }); // If coldkey is not owner of the hotkey, it's a nomination stake. if !Self::coldkey_owns_hotkey(&coldkey, &hotkey) { From 2488fd23d3336e8691f8cdbb5aa212573db8c217 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Tue, 29 Oct 2024 12:11:45 -0400 Subject: [PATCH 185/213] Format --- pallets/subtensor/src/lib.rs | 4 ++-- pallets/subtensor/src/swap/swap_hotkey.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index fa24ccc8d..8f2910222 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -262,7 +262,7 @@ pub mod pallet { /// Default stake delta. pub fn DefaultStakeDelta() -> i128 { 0 - } + } #[pallet::type_value] /// Default stakes per interval. 
pub fn DefaultStakesPerInterval() -> (u64, u64) { @@ -806,7 +806,7 @@ pub mod pallet { i128, ValueQuery, DefaultStakeDelta, - >; + >; #[pallet::storage] /// DMAP ( parent, netuid ) --> Vec<(proportion,child)> pub type ChildKeys = StorageDoubleMap< diff --git a/pallets/subtensor/src/swap/swap_hotkey.rs b/pallets/subtensor/src/swap/swap_hotkey.rs index cf2ace997..2feff6289 100644 --- a/pallets/subtensor/src/swap/swap_hotkey.rs +++ b/pallets/subtensor/src/swap/swap_hotkey.rs @@ -214,7 +214,7 @@ impl Pallet { PendingdHotkeyEmission::::insert(new_hotkey, old_pending_hotkey_emission); weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); } - + // 10. Swap all subnet specific info. let all_netuids: Vec = Self::get_all_subnet_netuids(); for netuid in all_netuids { From d81cbaa2b044ab1c8ce571def7015fa018b22267 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Tue, 29 Oct 2024 15:32:06 -0400 Subject: [PATCH 186/213] Make devnet-ready mergable with main --- .../subtensor/src/coinbase/run_coinbase.rs | 64 +++++++++++-------- pallets/subtensor/src/lib.rs | 13 ++-- pallets/subtensor/src/staking/add_stake.rs | 6 +- pallets/subtensor/src/swap/swap_hotkey.rs | 48 ++++++++++---- 4 files changed, 85 insertions(+), 46 deletions(-) diff --git a/pallets/subtensor/src/coinbase/run_coinbase.rs b/pallets/subtensor/src/coinbase/run_coinbase.rs index 723edc423..badb811fa 100644 --- a/pallets/subtensor/src/coinbase/run_coinbase.rs +++ b/pallets/subtensor/src/coinbase/run_coinbase.rs @@ -265,69 +265,65 @@ impl Pallet { // --- 1.0 Drain the hotkey emission. PendingdHotkeyEmission::::insert(hotkey, 0); - // --- 2 Retrieve the last time this hotkey's emissions were drained. - let last_emission_drain: u64 = LastHotkeyEmissionDrain::::get(hotkey); - - // --- 3 Update the block value to the current block number. + // --- 2 Update the block value to the current block number. 
LastHotkeyEmissionDrain::::insert(hotkey, block_number); - // --- 4 Retrieve the total stake for the hotkey from all nominations. + // --- 3 Retrieve the total stake for the hotkey from all nominations. let total_hotkey_stake: u64 = Self::get_total_stake_for_hotkey(hotkey); - // --- 5 Calculate the emission take for the hotkey. + // --- 4 Calculate the emission take for the hotkey. let take_proportion: I64F64 = I64F64::from_num(Delegates::::get(hotkey)) .saturating_div(I64F64::from_num(u16::MAX)); let hotkey_take: u64 = (take_proportion.saturating_mul(I64F64::from_num(emission))).to_num::(); - // --- 6 Compute the remaining emission after deducting the hotkey's take. + // --- 5 Compute the remaining emission after deducting the hotkey's take. let emission_minus_take: u64 = emission.saturating_sub(hotkey_take); - // --- 7 Calculate the remaining emission after the hotkey's take. + // --- 6 Calculate the remaining emission after the hotkey's take. let mut remainder: u64 = emission_minus_take; - // --- 8 Iterate over each nominator and get all viable stake. + // --- 7 Iterate over each nominator and get all viable stake. let mut total_viable_nominator_stake: u64 = total_hotkey_stake; - for (nominator, nominator_stake) in Stake::::iter_prefix(hotkey) { - if LastAddStakeIncrease::::get(hotkey, nominator) > last_emission_drain { - total_viable_nominator_stake = - total_viable_nominator_stake.saturating_sub(nominator_stake); - } + for (nominator, _) in Stake::::iter_prefix(hotkey) { + let nonviable_nomintaor_stake = Self::get_nonviable_stake(hotkey, &nominator); + + total_viable_nominator_stake = + total_viable_nominator_stake.saturating_sub(nonviable_nomintaor_stake); } - // --- 9 Iterate over each nominator. + // --- 8 Iterate over each nominator. if total_viable_nominator_stake != 0 { for (nominator, nominator_stake) in Stake::::iter_prefix(hotkey) { - // --- 10 Check if the stake was manually increased by the user since the last emission drain for this hotkey. 
+ // --- 9 Check if the stake was manually increased by the user since the last emission drain for this hotkey. // If it was, skip this nominator as they will not receive their proportion of the emission. - if LastAddStakeIncrease::::get(hotkey, nominator.clone()) > last_emission_drain { - continue; - } + let viable_nominator_stake = + nominator_stake.saturating_sub(Self::get_nonviable_stake(hotkey, &nominator)); - // --- 11 Calculate this nominator's share of the emission. - let nominator_emission: I64F64 = I64F64::from_num(emission_minus_take) - .saturating_mul(I64F64::from_num(nominator_stake)) + // --- 10 Calculate this nominator's share of the emission. + let nominator_emission: I64F64 = I64F64::from_num(viable_nominator_stake) .checked_div(I64F64::from_num(total_viable_nominator_stake)) - .unwrap_or(I64F64::from_num(0)); + .unwrap_or(I64F64::from_num(0)) + .saturating_mul(I64F64::from_num(emission_minus_take)); - // --- 12 Increase the stake for the nominator. + // --- 11 Increase the stake for the nominator. Self::increase_stake_on_coldkey_hotkey_account( &nominator, hotkey, nominator_emission.to_num::(), ); - // --- 13* Record event and Subtract the nominator's emission from the remainder. + // --- 12* Record event and Subtract the nominator's emission from the remainder. total_new_tao = total_new_tao.saturating_add(nominator_emission.to_num::()); remainder = remainder.saturating_sub(nominator_emission.to_num::()); } } - // --- 14 Finally, add the stake to the hotkey itself, including its take and the remaining emission. + // --- 13 Finally, add the stake to the hotkey itself, including its take and the remaining emission. let hotkey_new_tao: u64 = hotkey_take.saturating_add(remainder); Self::increase_stake_on_hotkey_account(hotkey, hotkey_new_tao); - // --- 15 Record new tao creation event and return the amount created. + // --- 14 Record new tao creation event and return the amount created. 
total_new_tao = total_new_tao.saturating_add(hotkey_new_tao); total_new_tao } @@ -382,4 +378,18 @@ impl Pallet { let remainder = block_plus_netuid.rem_euclid(tempo_plus_one); (tempo as u64).saturating_sub(remainder) } + + /// Calculates the nonviable stake for a nominator. + /// The nonviable stake is the stake that was added by the nominator since the last emission drain. + /// This stake will not receive emission until the next emission drain. + /// Note: if the stake delta is below zero, we return zero. We don't allow more stake than the nominator has. + pub fn get_nonviable_stake(hotkey: &T::AccountId, nominator: &T::AccountId) -> u64 { + let stake_delta = StakeDeltaSinceLastEmissionDrain::::get(hotkey, nominator); + if stake_delta.is_negative() { + 0 + } else { + // Should never fail the into, but we handle it anyway. + stake_delta.try_into().unwrap_or(u64::MAX) + } + } } diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 5081d69d3..8f2910222 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -259,6 +259,11 @@ pub mod pallet { 0 } #[pallet::type_value] + /// Default stake delta. + pub fn DefaultStakeDelta() -> i128 { + 0 + } + #[pallet::type_value] /// Default stakes per interval. pub fn DefaultStakesPerInterval() -> (u64, u64) { (0, 0) @@ -791,16 +796,16 @@ pub mod pallet { DefaultAccumulatedEmission, >; #[pallet::storage] - /// Map ( hot, cold ) --> block_number | Last add stake increase. - pub type LastAddStakeIncrease = StorageDoubleMap< + /// Map ( hot, cold ) --> stake: i128 | Stake added/removed since last emission drain. 
+ pub type StakeDeltaSinceLastEmissionDrain = StorageDoubleMap< _, Blake2_128Concat, T::AccountId, Identity, T::AccountId, - u64, + i128, ValueQuery, - DefaultAccountTake, + DefaultStakeDelta, >; #[pallet::storage] /// DMAP ( parent, netuid ) --> Vec<(proportion,child)> diff --git a/pallets/subtensor/src/staking/add_stake.rs b/pallets/subtensor/src/staking/add_stake.rs index c9cbd7e04..72d8374bc 100644 --- a/pallets/subtensor/src/staking/add_stake.rs +++ b/pallets/subtensor/src/staking/add_stake.rs @@ -70,8 +70,10 @@ impl Pallet { Error::::StakeRateLimitExceeded ); - // Set the last time the stake increased for nominator drain protection. - LastAddStakeIncrease::::insert(&hotkey, &coldkey, Self::get_current_block_as_u64()); + // Track this addition in the stake delta. + StakeDeltaSinceLastEmissionDrain::::mutate(&hotkey, &coldkey, |stake_delta| { + *stake_delta = stake_delta.saturating_add_unsigned(stake_to_be_added as u128); + }); // If coldkey is not owner of the hotkey, it's a nomination stake. if !Self::coldkey_owns_hotkey(&coldkey, &hotkey) { diff --git a/pallets/subtensor/src/swap/swap_hotkey.rs b/pallets/subtensor/src/swap/swap_hotkey.rs index ca3d0b5a7..efa6c53f8 100644 --- a/pallets/subtensor/src/swap/swap_hotkey.rs +++ b/pallets/subtensor/src/swap/swap_hotkey.rs @@ -206,33 +206,42 @@ impl Pallet { Delegates::::insert(new_hotkey, old_delegate_take); weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); } - // 9. Swap all subnet specific info. + + // 9. swap PendingdHotkeyEmission + if PendingdHotkeyEmission::::contains_key(old_hotkey) { + let old_pending_hotkey_emission = PendingdHotkeyEmission::::get(old_hotkey); + PendingdHotkeyEmission::::remove(old_hotkey); + PendingdHotkeyEmission::::insert(new_hotkey, old_pending_hotkey_emission); + weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); + } + + // 10. Swap all subnet specific info. 
let all_netuids: Vec = Self::get_all_subnet_netuids(); for netuid in all_netuids { - // 9.1 Remove the previous hotkey and insert the new hotkey from membership. + // 10.1 Remove the previous hotkey and insert the new hotkey from membership. // IsNetworkMember( hotkey, netuid ) -> bool -- is the hotkey a subnet member. let is_network_member: bool = IsNetworkMember::::get(old_hotkey, netuid); IsNetworkMember::::remove(old_hotkey, netuid); IsNetworkMember::::insert(new_hotkey, netuid, is_network_member); weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 2)); - // 9.2 Swap Uids + Keys. + // 10.2 Swap Uids + Keys. // Keys( netuid, hotkey ) -> uid -- the uid the hotkey has in the network if it is a member. // Uids( netuid, hotkey ) -> uid -- the uids that the hotkey has. if is_network_member { - // 9.2.1 Swap the UIDS + // 10.2.1 Swap the UIDS if let Ok(old_uid) = Uids::::try_get(netuid, old_hotkey) { Uids::::remove(netuid, old_hotkey); Uids::::insert(netuid, new_hotkey, old_uid); weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 2)); - // 9.2.2 Swap the keys. + // 10.2.2 Swap the keys. Keys::::insert(netuid, old_uid, new_hotkey.clone()); weight.saturating_accrue(T::DbWeight::get().reads_writes(0, 1)); } } - // 9.3 Swap Prometheus. + // 10.3 Swap Prometheus. // Prometheus( netuid, hotkey ) -> prometheus -- the prometheus data that a hotkey has in the network. if is_network_member { if let Ok(old_prometheus_info) = Prometheus::::try_get(netuid, old_hotkey) { @@ -242,7 +251,7 @@ impl Pallet { } } - // 9.4. Swap axons. + // 10.4. Swap axons. // Axons( netuid, hotkey ) -> axon -- the axon that the hotkey has. if is_network_member { if let Ok(old_axon_info) = Axons::::try_get(netuid, old_hotkey) { @@ -252,7 +261,7 @@ impl Pallet { } } - // 9.5 Swap WeightCommits + // 10.5 Swap WeightCommits // WeightCommits( hotkey ) --> Vec -- the weight commits for the hotkey. 
if is_network_member { if let Ok(old_weight_commits) = WeightCommits::::try_get(netuid, old_hotkey) { @@ -262,7 +271,7 @@ impl Pallet { } } - // 9.6. Swap the subnet loaded emission. + // 10.6. Swap the subnet loaded emission. // LoadedEmission( netuid ) --> Vec<(hotkey, u64)> -- the loaded emission for the subnet. if is_network_member { if let Some(mut old_loaded_emission) = LoadedEmission::::get(netuid) { @@ -277,7 +286,7 @@ impl Pallet { } } - // 9.7. Swap neuron TLS certificates. + // 10.7. Swap neuron TLS certificates. // NeuronCertificates( netuid, hotkey ) -> Vec -- the neuron certificate for the hotkey. if is_network_member { if let Ok(old_neuron_certificates) = @@ -290,7 +299,7 @@ impl Pallet { } } - // 10. Swap Stake. + // 11. Swap Stake. // Stake( hotkey, coldkey ) -> stake -- the stake that the hotkey controls on behalf of the coldkey. let stakes: Vec<(T::AccountId, u64)> = Stake::::iter_prefix(old_hotkey).collect(); // Clear the entire old prefix here. @@ -320,7 +329,7 @@ impl Pallet { weight.saturating_accrue(T::DbWeight::get().reads_writes(1, 1)); } - // 11. Swap ChildKeys. + // 12. Swap ChildKeys. // ChildKeys( parent, netuid ) --> Vec<(proportion,child)> -- the child keys of the parent. for netuid in Self::get_all_subnet_netuids() { // Get the children of the old hotkey for this subnet @@ -331,7 +340,7 @@ impl Pallet { ChildKeys::::insert(new_hotkey, netuid, my_children); } - // 12. Swap ParentKeys. + // 13. Swap ParentKeys. // ParentKeys( child, netuid ) --> Vec<(proportion,parent)> -- the parent keys of the child. for netuid in Self::get_all_subnet_netuids() { // Get the parents of the old hotkey for this subnet @@ -355,6 +364,19 @@ impl Pallet { } } + // 14. Swap Stake Delta for all coldkeys. 
+ for (coldkey, stake_delta) in StakeDeltaSinceLastEmissionDrain::::iter_prefix(old_hotkey) + { + let new_stake_delta = StakeDeltaSinceLastEmissionDrain::::get(new_hotkey, &coldkey); + StakeDeltaSinceLastEmissionDrain::::insert( + new_hotkey, + &coldkey, + new_stake_delta.saturating_add(stake_delta), + ); + StakeDeltaSinceLastEmissionDrain::::remove(old_hotkey, &coldkey); + weight.saturating_accrue(T::DbWeight::get().reads_writes(2, 2)); + } + // Return successful after swapping all the relevant terms. Ok(()) } From 653ba75dde85712f140956eeefb2a2fec29e9451 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 29 Oct 2024 16:14:22 -0400 Subject: [PATCH 187/213] fix cargo audit label --- .github/workflows/check-rust.yml | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index e42e369cf..4ebdf6d56 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -212,16 +212,7 @@ jobs: cargo-audit: name: cargo audit runs-on: SubtensorCI - strategy: - matrix: - rust-branch: - - stable - rust-target: - - x86_64-unknown-linux-gnu - # - x86_64-apple-darwin - os: - - ubuntu-latest - # - macos-latest + if: ${{ github.event_name == 'push' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'audit')) }} steps: - name: Check-out repositoroy under $GITHUB_WORKSPACE uses: actions/checkout@v4 From f94b4be77997045e9755d123cbef8d905fa606f4 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 29 Oct 2024 16:20:54 -0400 Subject: [PATCH 188/213] fix --- .github/workflows/check-rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 4ebdf6d56..6723a8d73 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -212,7 +212,7 @@ jobs: cargo-audit: name: cargo audit runs-on: SubtensorCI - if: ${{ 
github.event_name == 'push' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'audit')) }} + if: ${{ github.event_name == 'push' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'skip-cargo-audit')) }} steps: - name: Check-out repositoroy under $GITHUB_WORKSPACE uses: actions/checkout@v4 From c64caf1a556356c3ab94474219de29bddad916e3 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 29 Oct 2024 16:29:45 -0400 Subject: [PATCH 189/213] bump spec version --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 6990cb907..54de0e56c 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -146,7 +146,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. 
- spec_version: 197, + spec_version: 203, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, From 05de4a511edee9ca8a2c63474db1777bb8b58f68 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 29 Oct 2024 16:51:48 -0400 Subject: [PATCH 190/213] fix toolchain --- .github/workflows/check-rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 6723a8d73..5fe92f607 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -225,7 +225,7 @@ jobs: - name: Install Rust ${{ matrix.rust-branch }} uses: actions-rs/toolchain@v1.0.6 with: - toolchain: ${{ matrix.rust-branch }} + toolchain: stable components: rustfmt, clippy profile: minimal From beca39b4721c6cb9210d4f62b923c9684e746124 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 29 Oct 2024 16:53:38 -0400 Subject: [PATCH 191/213] bump CI From 50f8a9dbcfea2257a6ae210db59f1f9b5cbaf7e4 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 29 Oct 2024 16:56:12 -0400 Subject: [PATCH 192/213] fix --- .github/workflows/check-rust.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 5fe92f607..116ffe282 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -222,7 +222,7 @@ jobs: sudo apt-get update && sudo apt-get install -y clang curl libssl-dev llvm libudev-dev protobuf-compiler - - name: Install Rust ${{ matrix.rust-branch }} + - name: Install Rust Stable uses: actions-rs/toolchain@v1.0.6 with: toolchain: stable @@ -232,7 +232,7 @@ jobs: - name: Utilize Shared Rust Cache uses: Swatinem/rust-cache@v2.2.1 with: - key: ${{ matrix.os }}-${{ env.RUST_BIN_DIR }} + key: ubuntu-latest-${{ env.RUST_BIN_DIR }} - name: Install cargo-audit run: cargo install cargo-audit From f37dd7a83be6f34786e012f16694e673db6d90d9 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 
29 Oct 2024 17:44:41 -0400 Subject: [PATCH 193/213] hotfix --- .github/workflows/check-rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 116ffe282..03dc22496 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -212,7 +212,7 @@ jobs: cargo-audit: name: cargo audit runs-on: SubtensorCI - if: ${{ github.event_name == 'push' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'skip-cargo-audit')) }} + if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-cargo-audit') }} steps: - name: Check-out repositoroy under $GITHUB_WORKSPACE uses: actions/checkout@v4 From a389bcde2956bf578e6fc33b104b2b7c7abe8789 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 29 Oct 2024 17:52:02 -0400 Subject: [PATCH 194/213] bump CI From 69b7ac85e78fc3fc313bbb6bf320c5876b6aeda1 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 29 Oct 2024 17:56:13 -0400 Subject: [PATCH 195/213] fix again --- .github/workflows/check-rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-rust.yml b/.github/workflows/check-rust.yml index 03dc22496..b6a627314 100644 --- a/.github/workflows/check-rust.yml +++ b/.github/workflows/check-rust.yml @@ -212,7 +212,7 @@ jobs: cargo-audit: name: cargo audit runs-on: SubtensorCI - if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-cargo-audit') }} + if: ${{ github.event_name != 'push' && !contains(github.event.pull_request.labels.*.name, 'skip-cargo-audit') }} steps: - name: Check-out repositoroy under $GITHUB_WORKSPACE uses: actions/checkout@v4 From a82a1f89ff81e9a412e4654974c8dd8b9434f738 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Tue, 29 Oct 2024 17:19:14 -0700 Subject: [PATCH 196/213] disable set weights rate limit for commit-reveal --- pallets/subtensor/src/subnets/weights.rs | 10 ++++++---- 1 file changed, 6 
insertions(+), 4 deletions(-) diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 44c343d69..a449b8316 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -505,10 +505,12 @@ impl Pallet { // --- 9. Ensure the uid is not setting weights faster than the weights_set_rate_limit. let neuron_uid = Self::get_uid_for_net_and_hotkey(netuid, &hotkey)?; let current_block: u64 = Self::get_current_block_as_u64(); - ensure!( - Self::check_rate_limit(netuid, neuron_uid, current_block), - Error::::SettingWeightsTooFast - ); + if !Self::get_commit_reveal_weights_enabled(netuid) { + ensure!( + Self::check_rate_limit(netuid, neuron_uid, current_block), + Error::::SettingWeightsTooFast + ); + } // --- 10. Check that the neuron uid is an allowed validator permitted to set non-self weights. ensure!( From f5df0ba71921dc4dad47404480a6178379875906 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 30 Oct 2024 09:27:44 -0700 Subject: [PATCH 197/213] doc comments and lints --- pallets/subtensor/src/lib.rs | 2 +- pallets/subtensor/src/subnets/weights.rs | 17 ++++------------- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index 328a59f7e..e500b7e84 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -569,7 +569,7 @@ pub mod pallet { 0 } #[pallet::type_value] - /// Default minimum stake for weights. + /// Default Reveal Period Epochs pub fn DefaultRevealPeriodEpochs() -> u64 { 1 } diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index a449b8316..e56c36335 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -163,7 +163,6 @@ impl Pallet { // --- 6. After removing expired commits, check if any commits are left. 
if commits.is_empty() { - // No non-expired commits // Check if provided_hash matches any expired commits if expired_hashes.contains(&provided_hash) { return Err(Error::::ExpiredWeightCommit.into()); @@ -202,7 +201,6 @@ impl Pallet { Self::do_set_weights(origin, netuid, uids, values, version_key) } else { // --- 13. The provided_hash does not match any non-expired commits. - // Check if provided_hash matches any expired commits if expired_hashes.contains(&provided_hash) { Err(Error::::ExpiredWeightCommit.into()) } else { @@ -324,7 +322,6 @@ impl Pallet { } // --- 7. Validate all reveals first to ensure atomicity. - // This prevents partial updates if any reveal fails. for (_uids, _values, _version_key, provided_hash) in &reveals { // --- 7a. Check if the provided_hash is in the non-expired commits. if !commits @@ -346,10 +343,8 @@ impl Pallet { .ok_or(Error::::NoWeightsCommitFound)?; // --- 7d. Check if the commit is within the reveal window. - let current_block: u64 = Self::get_current_block_as_u64(); - let (_, _, first_reveal_block, last_reveal_block) = commit; ensure!( - current_block >= *first_reveal_block && current_block <= *last_reveal_block, + Self::is_reveal_block_range(netuid, commit.1), Error::::RevealTooEarly ); } @@ -366,14 +361,10 @@ impl Pallet { // --- 8c. Proceed to set the revealed weights. Self::do_set_weights(origin.clone(), netuid, uids, values, version_key)?; + } else if expired_hashes.contains(&provided_hash) { + return Err(Error::::ExpiredWeightCommit.into()); } else { - // This case should not occur as we've already validated the existence of the hash. - // However, to ensure safety, we handle it. 
- if expired_hashes.contains(&provided_hash) { - return Err(Error::::ExpiredWeightCommit.into()); - } else { - return Err(Error::::InvalidRevealCommitHashNotMatch.into()); - } + return Err(Error::::InvalidRevealCommitHashNotMatch.into()); } } From b07c1ffa923f68d5b1c21a30ea6c15d06a70af4c Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 30 Oct 2024 11:54:12 -0700 Subject: [PATCH 198/213] add events --- pallets/subtensor/src/macros/events.rs | 20 +++++++++++++++++ pallets/subtensor/src/subnets/weights.rs | 28 ++++++++++++++++++++---- 2 files changed, 44 insertions(+), 4 deletions(-) diff --git a/pallets/subtensor/src/macros/events.rs b/pallets/subtensor/src/macros/events.rs index ac6b69012..f3b03684d 100644 --- a/pallets/subtensor/src/macros/events.rs +++ b/pallets/subtensor/src/macros/events.rs @@ -204,5 +204,25 @@ mod events { ColdkeySwapScheduleDurationSet(BlockNumberFor), /// The duration of dissolve network has been set DissolveNetworkScheduleDurationSet(BlockNumberFor), + /// Weights have been successfully committed. + /// + /// - **who**: The account ID of the user committing the weights. + /// - **netuid**: The network identifier. + /// - **commit_hash**: The hash representing the committed weights. + WeightsCommitted(T::AccountId, u16, H256), + + /// Weights have been successfully revealed. + /// + /// - **who**: The account ID of the user revealing the weights. + /// - **netuid**: The network identifier. + /// - **commit_hash**: The hash of the revealed weights. + WeightsRevealed(T::AccountId, u16, H256), + + /// Weights have been successfully batch revealed. + /// + /// - **who**: The account ID of the user revealing the weights. + /// - **netuid**: The network identifier. + /// - **revealed_hashes**: A vector of hashes representing each revealed weight set. 
+ WeightsBatchRevealed(T::AccountId, u16, Vec), } } diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index e56c36335..082ef1638 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -73,7 +73,10 @@ impl Pallet { // --- 9. Store the updated queue back to storage. *maybe_commits = Some(commits); - // --- 10. Return ok. + // --- 10. Emit the WeightsCommitted event. + Self::deposit_event(Event::WeightsCommitted(who.clone(), netuid, commit_hash)); + + // --- 11. Return ok. Ok(()) }) } @@ -198,9 +201,15 @@ impl Pallet { } // --- 12. Proceed to set the revealed weights. - Self::do_set_weights(origin, netuid, uids, values, version_key) + Self::do_set_weights(origin, netuid, uids.clone(), values.clone(), version_key)?; + + // --- 13. Emit the WeightsRevealed event. + Self::deposit_event(Event::WeightsRevealed(who.clone(), netuid, provided_hash)); + + // --- 14. Return ok. + Ok(()) } else { - // --- 13. The provided_hash does not match any non-expired commits. + // --- 15. The provided_hash does not match any non-expired commits. if expired_hashes.contains(&provided_hash) { Err(Error::::ExpiredWeightCommit.into()) } else { @@ -302,6 +311,7 @@ impl Pallet { // --- 6. Prepare to collect all provided hashes and their corresponding reveals. let mut provided_hashes = Vec::new(); let mut reveals = Vec::new(); + let mut revealed_hashes: Vec = Vec::with_capacity(num_reveals); for ((uids, values), (salt, version_key)) in uids_list .into_iter() @@ -361,6 +371,9 @@ impl Pallet { // --- 8c. Proceed to set the revealed weights. Self::do_set_weights(origin.clone(), netuid, uids, values, version_key)?; + + // --- 8d. Collect the revealed hash. + revealed_hashes.push(provided_hash); } else if expired_hashes.contains(&provided_hash) { return Err(Error::::ExpiredWeightCommit.into()); } else { @@ -373,7 +386,14 @@ impl Pallet { *maybe_commits = None; } - // --- 10. Return ok. + // --- 10. 
Emit the WeightsBatchRevealed event with all revealed hashes. + Self::deposit_event(Event::WeightsBatchRevealed( + who.clone(), + netuid, + revealed_hashes, + )); + + // --- 11. Return ok. Ok(()) }) } From 96911e654d326e1469c7d2b744579025bb20f251 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 30 Oct 2024 12:42:16 -0700 Subject: [PATCH 199/213] revert hyperparam name change --- pallets/admin-utils/src/benchmarking.rs | 4 ++-- pallets/admin-utils/src/lib.rs | 12 ++++++------ pallets/admin-utils/src/weights.rs | 6 +++--- pallets/admin-utils/tests/tests.rs | 4 ++-- pallets/subtensor/src/rpc_info/subnet_info.rs | 6 +++--- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/pallets/admin-utils/src/benchmarking.rs b/pallets/admin-utils/src/benchmarking.rs index 606814ff0..65ccf629e 100644 --- a/pallets/admin-utils/src/benchmarking.rs +++ b/pallets/admin-utils/src/benchmarking.rs @@ -228,14 +228,14 @@ mod benchmarks { } #[benchmark] - fn sudo_set_commit_reveal_weights_periods() { + fn sudo_set_commit_reveal_weights_interval() { pallet_subtensor::Pallet::::init_new_network( 1u16, /*netuid*/ 1u16, /*sudo_tempo*/ ); #[extrinsic_call] - _(RawOrigin::Root, 1u16/*netuid*/, 3u64/*interval*/)/*set_commit_reveal_weights_periods()*/; + _(RawOrigin::Root, 1u16/*netuid*/, 3u64/*interval*/)/*sudo_set_commit_reveal_weights_interval()*/; } #[benchmark] diff --git a/pallets/admin-utils/src/lib.rs b/pallets/admin-utils/src/lib.rs index a287bb1b4..23121792a 100644 --- a/pallets/admin-utils/src/lib.rs +++ b/pallets/admin-utils/src/lib.rs @@ -1188,11 +1188,11 @@ pub mod pallet { /// # Weight /// Weight is handled by the `#[pallet::weight]` attribute. 
#[pallet::call_index(56)] - #[pallet::weight(T::WeightInfo::sudo_set_commit_reveal_weights_periods())] - pub fn sudo_set_commit_reveal_weights_periods( + #[pallet::weight(T::WeightInfo::sudo_set_commit_reveal_weights_interval())] + pub fn sudo_set_commit_reveal_weights_interval( origin: OriginFor, netuid: u16, - periods: u64, + interval: u64, ) -> DispatchResult { pallet_subtensor::Pallet::::ensure_subnet_owner_or_root(origin, netuid)?; @@ -1201,11 +1201,11 @@ pub mod pallet { Error::::SubnetDoesNotExist ); - pallet_subtensor::Pallet::::set_reveal_period(netuid, periods); + pallet_subtensor::Pallet::::set_reveal_period(netuid, interval); log::debug!( - "SetWeightCommitPeriods( netuid: {:?}, periods: {:?} ) ", + "SetWeightCommitInterval( netuid: {:?}, interval: {:?} ) ", netuid, - periods + interval ); Ok(()) } diff --git a/pallets/admin-utils/src/weights.rs b/pallets/admin-utils/src/weights.rs index db8752ff7..bda9c7916 100644 --- a/pallets/admin-utils/src/weights.rs +++ b/pallets/admin-utils/src/weights.rs @@ -60,7 +60,7 @@ pub trait WeightInfo { fn sudo_set_min_burn() -> Weight; fn sudo_set_network_registration_allowed() -> Weight; fn sudo_set_tempo() -> Weight; - fn sudo_set_commit_reveal_weights_periods() -> Weight; + fn sudo_set_commit_reveal_weights_interval() -> Weight; fn sudo_set_commit_reveal_weights_enabled() -> Weight; } @@ -413,7 +413,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1_u64)) .saturating_add(T::DbWeight::get().writes(1_u64)) } - fn sudo_set_commit_reveal_weights_periods() -> Weight { + fn sudo_set_commit_reveal_weights_interval() -> Weight { // Proof Size summary in bytes: // Measured: `456` // Estimated: `3921` @@ -781,7 +781,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1_u64)) .saturating_add(RocksDbWeight::get().writes(1_u64)) } - fn sudo_set_commit_reveal_weights_periods() -> Weight { + fn sudo_set_commit_reveal_weights_interval() -> Weight { // -- Extrinsic Time -- // 
Model: // Time ~= 19.38 diff --git a/pallets/admin-utils/tests/tests.rs b/pallets/admin-utils/tests/tests.rs index d2c36e29f..442275052 100644 --- a/pallets/admin-utils/tests/tests.rs +++ b/pallets/admin-utils/tests/tests.rs @@ -1414,7 +1414,7 @@ fn test_sudo_set_dissolve_network_schedule_duration() { } #[test] -fn sudo_set_commit_reveal_weights_periods() { +fn sudo_set_commit_reveal_weights_interval() { new_test_ext().execute_with(|| { let netuid: u16 = 1; add_network(netuid, 10); @@ -1422,7 +1422,7 @@ fn sudo_set_commit_reveal_weights_periods() { let to_be_set = 55; let init_value = SubtensorModule::get_reveal_period(netuid); - assert_ok!(AdminUtils::sudo_set_commit_reveal_weights_periods( + assert_ok!(AdminUtils::sudo_set_commit_reveal_weights_interval( <::RuntimeOrigin>::root(), netuid, to_be_set diff --git a/pallets/subtensor/src/rpc_info/subnet_info.rs b/pallets/subtensor/src/rpc_info/subnet_info.rs index 8c79db03a..bdd420821 100644 --- a/pallets/subtensor/src/rpc_info/subnet_info.rs +++ b/pallets/subtensor/src/rpc_info/subnet_info.rs @@ -51,7 +51,7 @@ pub struct SubnetInfov2 { identity: Option, } -#[freeze_struct("4ceb81dfe8a8f96d")] +#[freeze_struct("55b472510f10e76a")] #[derive(Decode, Encode, PartialEq, Eq, Clone, Debug)] pub struct SubnetHyperparams { rho: Compact, @@ -76,7 +76,7 @@ pub struct SubnetHyperparams { max_validators: Compact, adjustment_alpha: Compact, difficulty: Compact, - commit_reveal_periods: Compact, + commit_reveal_weights_interval: Compact, commit_reveal_weights_enabled: bool, alpha_high: Compact, alpha_low: Compact, @@ -280,7 +280,7 @@ impl Pallet { max_validators: max_validators.into(), adjustment_alpha: adjustment_alpha.into(), difficulty: difficulty.into(), - commit_reveal_periods: commit_reveal_periods.into(), + commit_reveal_weights_interval: commit_reveal_periods.into(), commit_reveal_weights_enabled, alpha_high: alpha_high.into(), alpha_low: alpha_low.into(), From 498d30ae0f419c1467172e60ce894a01cddbb611 Mon Sep 17 00:00:00 
2001 From: johnreedv Date: Wed, 30 Oct 2024 14:31:16 -0700 Subject: [PATCH 200/213] add set weights rate limit to commit-reveal --- pallets/subtensor/src/macros/errors.rs | 2 + pallets/subtensor/src/subnets/weights.rs | 22 ++++++- pallets/subtensor/tests/weights.rs | 73 +++++++++++++++++++++++- 3 files changed, 92 insertions(+), 5 deletions(-) diff --git a/pallets/subtensor/src/macros/errors.rs b/pallets/subtensor/src/macros/errors.rs index 1e4bf9ae0..aab849994 100644 --- a/pallets/subtensor/src/macros/errors.rs +++ b/pallets/subtensor/src/macros/errors.rs @@ -188,5 +188,7 @@ mod errors { RevealTooEarly, /// Attempted to batch reveal weights with mismatched vector input lenghts. InputLengthsUnequal, + /// A transactor exceeded the rate limit for setting weights. + CommittingWeightsTooFast, } } diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index 082ef1638..e252089bc 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -39,8 +39,19 @@ impl Pallet { Error::::CommitRevealDisabled ); - // --- 3. Calculate the reveal blocks based on tempo and reveal period. + ensure!( + Self::is_hotkey_registered_on_network(netuid, &who), + Error::::HotKeyNotRegisteredInSubNet + ); + let commit_block: u64 = Self::get_current_block_as_u64(); + let neuron_uid: u16 = Self::get_uid_for_net_and_hotkey(netuid, &who)?; + ensure!( + Self::check_rate_limit(netuid, neuron_uid, commit_block), + Error::::CommittingWeightsTooFast + ); + + // --- 3. Calculate the reveal blocks based on tempo and reveal period. let (first_reveal_block, last_reveal_block) = Self::get_reveal_blocks(netuid, commit_block); // --- 4. Mutate the WeightCommits to retrieve existing commits for the user. @@ -76,7 +87,10 @@ impl Pallet { // --- 10. Emit the WeightsCommitted event. Self::deposit_event(Event::WeightsCommitted(who.clone(), netuid, commit_hash)); - // --- 11. Return ok. + // --- 11. 
Set last update for the UID + Self::set_last_update_for_uid(netuid, neuron_uid, commit_block); + + // --- 12. Return ok. Ok(()) }) } @@ -563,7 +577,9 @@ impl Pallet { Weights::::insert(netuid, neuron_uid, zipped_weights); // --- 18. Set the activity for the weights on this network. - Self::set_last_update_for_uid(netuid, neuron_uid, current_block); + if !Self::get_commit_reveal_weights_enabled(netuid) { + Self::set_last_update_for_uid(netuid, neuron_uid, current_block); + } // --- 19. Emit the tracking event. log::debug!( diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 0625e1e20..0f2f1a2d6 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -1815,7 +1815,7 @@ fn test_toggle_commit_reveal_weights_and_set_weights() { #[test] fn test_tempo_change_during_commit_reveal_process() { - new_test_ext(1).execute_with(|| { + new_test_ext(0).execute_with(|| { let netuid: u16 = 1; let uids: Vec = vec![0, 1]; let weight_values: Vec = vec![10, 10]; @@ -1832,7 +1832,7 @@ fn test_tempo_change_during_commit_reveal_process() { version_key, )); - System::set_block_number(1); + System::set_block_number(0); let tempo: u16 = 100; add_network(netuid, tempo, 0); @@ -4059,3 +4059,72 @@ fn test_get_reveal_blocks() { ); }) } + +#[test] +fn test_commit_weights_rate_limit() { + new_test_ext(1).execute_with(|| { + let netuid: u16 = 1; + let uids: Vec = vec![0, 1]; + let weight_values: Vec = vec![10, 10]; + let salt: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8]; + let version_key: u64 = 0; + let hotkey: U256 = U256::from(1); + + let commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + salt.clone(), + version_key, + )); + System::set_block_number(11); + + let tempo: u16 = 5; + add_network(netuid, tempo, 0); + + register_ok_neuron(netuid, U256::from(3), U256::from(4), 300_000); + register_ok_neuron(netuid, U256::from(1), U256::from(2), 100_000); + 
SubtensorModule::set_weights_set_rate_limit(netuid, 10); // Rate limit is 10 blocks + SubtensorModule::set_validator_permit_for_uid(netuid, 0, true); + SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); + SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); + + let neuron_uid = SubtensorModule::get_uid_for_net_and_hotkey(netuid, &hotkey).unwrap(); + SubtensorModule::set_last_update_for_uid(netuid, neuron_uid, 0); + + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + commit_hash + )); + + let new_salt: Vec = vec![9; 8]; + let new_commit_hash: H256 = BlakeTwo256::hash_of(&( + hotkey, + netuid, + uids.clone(), + weight_values.clone(), + new_salt.clone(), + version_key, + )); + assert_err!( + SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, new_commit_hash), + Error::::CommittingWeightsTooFast + ); + + step_block(5); + assert_err!( + SubtensorModule::commit_weights(RuntimeOrigin::signed(hotkey), netuid, new_commit_hash), + Error::::CommittingWeightsTooFast + ); + + step_block(5); // Current block is now 21 + + assert_ok!(SubtensorModule::commit_weights( + RuntimeOrigin::signed(hotkey), + netuid, + new_commit_hash + )); + }); +} From f95fb5b3c955d711cdb862e07fc9eb803a3b528d Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 30 Oct 2024 15:19:47 -0700 Subject: [PATCH 201/213] clippy --- pallets/subtensor/tests/weights.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 0f2f1a2d6..30269a498 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -4090,7 +4090,8 @@ fn test_commit_weights_rate_limit() { SubtensorModule::set_validator_permit_for_uid(netuid, 1, true); SubtensorModule::set_commit_reveal_weights_enabled(netuid, true); - let neuron_uid = SubtensorModule::get_uid_for_net_and_hotkey(netuid, &hotkey).unwrap(); + let neuron_uid = + 
SubtensorModule::get_uid_for_net_and_hotkey(netuid, &hotkey).expect("expected uid"); SubtensorModule::set_last_update_for_uid(netuid, neuron_uid, 0); assert_ok!(SubtensorModule::commit_weights( From 8a97a128a103eb27231cdafbaa093537e62c278d Mon Sep 17 00:00:00 2001 From: johnreedv Date: Wed, 30 Oct 2024 15:25:23 -0700 Subject: [PATCH 202/213] use new index previously deployed to testnet with a different name for that index --- pallets/admin-utils/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pallets/admin-utils/src/lib.rs b/pallets/admin-utils/src/lib.rs index 23121792a..85c7ef62c 100644 --- a/pallets/admin-utils/src/lib.rs +++ b/pallets/admin-utils/src/lib.rs @@ -1187,7 +1187,7 @@ pub mod pallet { /// /// # Weight /// Weight is handled by the `#[pallet::weight]` attribute. - #[pallet::call_index(56)] + #[pallet::call_index(57)] #[pallet::weight(T::WeightInfo::sudo_set_commit_reveal_weights_interval())] pub fn sudo_set_commit_reveal_weights_interval( origin: OriginFor, From ba7e34c42458050ba6e71028f35597fb4a794c02 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 31 Oct 2024 08:32:49 -0700 Subject: [PATCH 203/213] update comments --- pallets/subtensor/src/subnets/weights.rs | 46 ++++++++++++++---------- 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/pallets/subtensor/src/subnets/weights.rs b/pallets/subtensor/src/subnets/weights.rs index e252089bc..87042f456 100644 --- a/pallets/subtensor/src/subnets/weights.rs +++ b/pallets/subtensor/src/subnets/weights.rs @@ -19,61 +19,71 @@ impl Pallet { /// /// # Raises: /// * `CommitRevealDisabled`: - /// - Attempting to commit when the commit-reveal mechanism is disabled. + /// - Raised if commit-reveal is disabled for the specified network. + /// + /// * `HotKeyNotRegisteredInSubNet`: + /// - Raised if the hotkey is not registered on the specified network. + /// + /// * `CommittingWeightsTooFast`: + /// - Raised if the hotkey's commit rate exceeds the permitted limit. 
/// /// * `TooManyUnrevealedCommits`: - /// - Attempting to commit when the user has more than the allowed limit of unrevealed commits. + /// - Raised if the hotkey has reached the maximum number of unrevealed commits. + /// + /// # Events: + /// * `WeightsCommitted`: + /// - Emitted upon successfully storing the weight hash. pub fn do_commit_weights( origin: T::RuntimeOrigin, netuid: u16, commit_hash: H256, ) -> DispatchResult { - // --- 1. Check the caller's signature (hotkey). + // 1. Verify the caller's signature (hotkey). let who = ensure_signed(origin)?; - log::debug!("do_commit_weights( hotkey:{:?} netuid:{:?})", who, netuid); + log::debug!("do_commit_weights(hotkey: {:?}, netuid: {:?})", who, netuid); - // --- 2. Ensure commit-reveal is enabled for the network. + // 2. Ensure commit-reveal is enabled. ensure!( Self::get_commit_reveal_weights_enabled(netuid), Error::::CommitRevealDisabled ); + // 3. Ensure the hotkey is registered on the network. ensure!( Self::is_hotkey_registered_on_network(netuid, &who), Error::::HotKeyNotRegisteredInSubNet ); - let commit_block: u64 = Self::get_current_block_as_u64(); - let neuron_uid: u16 = Self::get_uid_for_net_and_hotkey(netuid, &who)?; + // 4. Check that the commit rate does not exceed the allowed frequency. + let commit_block = Self::get_current_block_as_u64(); + let neuron_uid = Self::get_uid_for_net_and_hotkey(netuid, &who)?; ensure!( Self::check_rate_limit(netuid, neuron_uid, commit_block), Error::::CommittingWeightsTooFast ); - // --- 3. Calculate the reveal blocks based on tempo and reveal period. + // 5. Calculate the reveal blocks based on network tempo and reveal period. let (first_reveal_block, last_reveal_block) = Self::get_reveal_blocks(netuid, commit_block); - // --- 4. Mutate the WeightCommits to retrieve existing commits for the user. + // 6. Retrieve or initialize the VecDeque of commits for the hotkey. WeightCommits::::try_mutate(netuid, &who, |maybe_commits| -> DispatchResult { - // --- 5. 
Take the existing commits or create a new VecDeque. let mut commits: VecDeque<(H256, u64, u64, u64)> = maybe_commits.take().unwrap_or_default(); - // --- 6. Remove any expired commits from the front of the queue. + // 7. Remove any expired commits from the front of the queue. while let Some((_, commit_block_existing, _, _)) = commits.front() { if Self::is_commit_expired(netuid, *commit_block_existing) { - // Remove the expired commit commits.pop_front(); } else { break; } } - // --- 7. Check if the current number of unrevealed commits is within the allowed limit. + // 8. Verify that the number of unrevealed commits is within the allowed limit. ensure!(commits.len() < 10, Error::::TooManyUnrevealedCommits); - // --- 8. Append the new commit to the queue. + // 9. Append the new commit with calculated reveal blocks. commits.push_back(( commit_hash, commit_block, @@ -81,16 +91,16 @@ impl Pallet { last_reveal_block, )); - // --- 9. Store the updated queue back to storage. + // 10. Store the updated commits queue back to storage. *maybe_commits = Some(commits); - // --- 10. Emit the WeightsCommitted event. + // 11. Emit the WeightsCommitted event Self::deposit_event(Event::WeightsCommitted(who.clone(), netuid, commit_hash)); - // --- 11. Set last update for the UID + // 12. Update the last commit block for the hotkey's UID. Self::set_last_update_for_uid(netuid, neuron_uid, commit_block); - // --- 12. Return ok. + // 13. Return success. 
Ok(()) }) } From 93c3f9c0730926cce9cde265838fd907111e489b Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 31 Oct 2024 08:58:16 -0700 Subject: [PATCH 204/213] expand test --- pallets/subtensor/tests/weights.rs | 59 ++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/pallets/subtensor/tests/weights.rs b/pallets/subtensor/tests/weights.rs index 30269a498..7dbeba288 100644 --- a/pallets/subtensor/tests/weights.rs +++ b/pallets/subtensor/tests/weights.rs @@ -4127,5 +4127,64 @@ fn test_commit_weights_rate_limit() { netuid, new_commit_hash )); + + SubtensorModule::set_commit_reveal_weights_enabled(netuid, false); + let weights_keys: Vec = vec![0]; + let weight_values: Vec = vec![1]; + + assert_err!( + SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), + netuid, + weights_keys.clone(), + weight_values.clone(), + 0 + ), + Error::::SettingWeightsTooFast + ); + + step_block(10); + + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), + netuid, + weights_keys.clone(), + weight_values.clone(), + 0 + )); + + assert_err!( + SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), + netuid, + weights_keys.clone(), + weight_values.clone(), + 0 + ), + Error::::SettingWeightsTooFast + ); + + step_block(5); + + assert_err!( + SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), + netuid, + weights_keys.clone(), + weight_values.clone(), + 0 + ), + Error::::SettingWeightsTooFast + ); + + step_block(5); + + assert_ok!(SubtensorModule::set_weights( + RuntimeOrigin::signed(hotkey), + netuid, + weights_keys.clone(), + weight_values.clone(), + 0 + )); }); } From 08e62d0b025fae278bdd6bef0e0182ad35937c40 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 31 Oct 2024 09:57:22 -0700 Subject: [PATCH 205/213] bump migration version --- pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs b/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs index cd93842c9..b8b831b61 100644 --- a/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs +++ b/pallets/subtensor/src/migrations/migrate_commit_reveal_v2.rs @@ -5,7 +5,7 @@ use scale_info::prelude::string::String; use sp_io::{hashing::twox_128, storage::clear_prefix, KillStorageResult}; pub fn migrate_commit_reveal_2() -> Weight { - let migration_name = b"migrate_commit_reveal_2".to_vec(); + let migration_name = b"migrate_commit_reveal_2_v2".to_vec(); let mut weight = T::DbWeight::get().reads(1); if HasMigrationRun::::get(&migration_name) { From bc08dc855ff071da4e07ed7d960b69a906f88ab7 Mon Sep 17 00:00:00 2001 From: johnreedv Date: Thu, 31 Oct 2024 09:58:50 -0700 Subject: [PATCH 206/213] update migration test --- pallets/subtensor/tests/migration.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pallets/subtensor/tests/migration.rs b/pallets/subtensor/tests/migration.rs index 1317bfb0f..4ddef882c 100644 --- a/pallets/subtensor/tests/migration.rs +++ b/pallets/subtensor/tests/migration.rs @@ -446,7 +446,7 @@ fn test_migrate_commit_reveal_2() { // ------------------------------ // Step 1: Simulate Old Storage Entries // ------------------------------ - const MIGRATION_NAME: &str = "migrate_commit_reveal_2"; + const MIGRATION_NAME: &str = "migrate_commit_reveal_2_v2"; let pallet_prefix = twox_128("SubtensorModule".as_bytes()); let storage_prefix_interval = twox_128("WeightCommitRevealInterval".as_bytes()); From 61336480f3b1a35495f8f3880ef3f780acde614b Mon Sep 17 00:00:00 2001 From: Cameron Fairchild Date: Thu, 31 Oct 2024 13:02:53 -0400 Subject: [PATCH 207/213] reduce key swap cost to 0.1 TAO --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 6990cb907..727e28079 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -956,7 
+956,7 @@ parameter_types! { pub const SubtensorInitialNetworkLockReductionInterval: u64 = 14 * 7200; pub const SubtensorInitialNetworkRateLimit: u64 = 7200; pub const SubtensorInitialTargetStakesPerInterval: u16 = 1; - pub const SubtensorInitialKeySwapCost: u64 = 1_000_000_000; + pub const SubtensorInitialKeySwapCost: u64 = 100_000_000; // 0.1 TAO pub const InitialAlphaHigh: u16 = 58982; // Represents 0.9 as per the production default pub const InitialAlphaLow: u16 = 45875; // Represents 0.7 as per the production default pub const InitialLiquidAlphaOn: bool = false; // Default value for LiquidAlphaOn From 8e7e1403cadd5071f935ea8cc7cfe63752454d06 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 31 Oct 2024 14:25:13 -0400 Subject: [PATCH 208/213] bump spec version to 204 --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 58787e705..67d2db64c 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -146,7 +146,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. 
- spec_version: 203, + spec_version: 204, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, From 362d0b826367fc1c71ca68d6a12b859777f49ee6 Mon Sep 17 00:00:00 2001 From: Roman Date: Thu, 31 Oct 2024 12:45:53 -0700 Subject: [PATCH 209/213] alice default port from 9946 to 9944 --- scripts/localnet.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/localnet.sh b/scripts/localnet.sh index 51e3d05a8..793506597 100755 --- a/scripts/localnet.sh +++ b/scripts/localnet.sh @@ -72,7 +72,7 @@ alice_start=( --chain="$FULL_PATH" --alice --port 30334 - --rpc-port 9946 + --rpc-port 9944 --validator --rpc-cors=all --allow-private-ipv4 From 04766ecac1904f3fa3429d9199d934119ca4089e Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Thu, 31 Oct 2024 16:20:21 -0400 Subject: [PATCH 210/213] bump spec version to 205 --- runtime/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 67d2db64c..7d33bff03 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -146,7 +146,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. 
- spec_version: 204, + spec_version: 205, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, From 90fdbb1b0c527e724cfe8531c93b87ecaf7c1537 Mon Sep 17 00:00:00 2001 From: Greg Zaitsev Date: Mon, 4 Nov 2024 17:09:50 -0500 Subject: [PATCH 211/213] Remove manual_inspect from Cargo.toml clippy config --- Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 42b5c473b..ba65ac182 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,6 @@ indexing-slicing = "deny" arithmetic-side-effects = "deny" type_complexity = "allow" unwrap-used = "deny" -# manual_inspect = "allow" [workspace.dependencies] cargo-husky = { version = "1", default-features = false } From 87d21f896578d3784b46c96a3754a4ae7f6e35ff Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Mon, 4 Nov 2024 17:43:18 -0500 Subject: [PATCH 212/213] try re-adding --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index ba65ac182..54de1b0a0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,6 +43,7 @@ indexing-slicing = "deny" arithmetic-side-effects = "deny" type_complexity = "allow" unwrap-used = "deny" +manual_inspect = "allow" [workspace.dependencies] cargo-husky = { version = "1", default-features = false } From 9b1f243e004023225260f272d512adb592f84bc7 Mon Sep 17 00:00:00 2001 From: Sam Johnson Date: Tue, 5 Nov 2024 13:58:35 -0500 Subject: [PATCH 213/213] bump CI