From b6ac01ec95ff22b643e838b688b56adfcf5ac100 Mon Sep 17 00:00:00 2001
From: Magnus Kulke
Date: Thu, 24 Oct 2024 15:56:39 +0200
Subject: [PATCH] Handle variable report_data index sizes

fixes #61

A caller of `get_report_with_report_data()` might provide report_data of
varying sizes. We therefore verify the size of the allocated NV index that
holds the report data and recreate the index if necessary.

Integration tests have been added to exercise this on CVM hardware.

Signed-off-by: Magnus Kulke
---
 .github/workflows/rust.yml                      |  13 +-
 az-cvm-vtpm/Cargo.toml                          |   3 +-
 az-cvm-vtpm/az-snp-vtpm/Cargo.toml              |   9 +-
 az-cvm-vtpm/az-snp-vtpm/README.md               |   8 ++
 .../az-snp-vtpm/tests/integration_tests.rs      |  50 ++++++++
 az-cvm-vtpm/az-tdx-vtpm/Cargo.toml              |   6 +-
 az-cvm-vtpm/az-tdx-vtpm/README.md               |   9 ++
 .../az-tdx-vtpm/tests/integration_tests.rs      |  50 ++++++++
 az-cvm-vtpm/src/vtpm/mod.rs                     | 112 ++++++++++++------
 9 files changed, 212 insertions(+), 48 deletions(-)
 create mode 100644 az-cvm-vtpm/az-snp-vtpm/tests/integration_tests.rs
 create mode 100644 az-cvm-vtpm/az-tdx-vtpm/tests/integration_tests.rs

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 1fa9f77..4c3cdf2 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -22,18 +22,12 @@ jobs:
       - name: Install deps
         run: sudo apt-get update && sudo apt-get install -y libtss2-dev
 
-      - uses: actions-rs/toolchain@v1
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
-          profile: minimal
           toolchain: stable
+          components: rustfmt, clippy
           override: true
 
-      - name: Install additional components
-        shell: bash
-        run: |
-          rustup component add rustfmt
-          rustup component add clippy
-
       - name: Build
         run: cargo build --verbose --all
 
@@ -46,6 +40,9 @@ jobs:
       - name: Run tests
         run: cargo test --verbose --all
 
+      - name: Compile integration tests
+        run: cargo test --verbose --all --features integration_test --no-run
+
       - name: Format
         run: cargo fmt --all -- --check
 
diff --git a/az-cvm-vtpm/Cargo.toml b/az-cvm-vtpm/Cargo.toml
index a216902..5ce1dcd 100644
--- a/az-cvm-vtpm/Cargo.toml
+++ b/az-cvm-vtpm/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "az-cvm-vtpm"
-version = "0.7.0"
+version = "0.7.1"
 edition = "2021"
 repository = "https://github.com/kinvolk/azure-cvm-tooling/"
 license = "MIT"
@@ -48,3 +48,4 @@ thiserror = "1.0.38"
 sev = "4.0.0"
 ureq = { version = "2.6.2", default-features = false, features = ["json"] }
 zerocopy = { version = "0.7.26", features = ["derive"] }
+hex = "0.4"
diff --git a/az-cvm-vtpm/az-snp-vtpm/Cargo.toml b/az-cvm-vtpm/az-snp-vtpm/Cargo.toml
index 153687c..46249bb 100644
--- a/az-cvm-vtpm/az-snp-vtpm/Cargo.toml
+++ b/az-cvm-vtpm/az-snp-vtpm/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "az-snp-vtpm"
-version = "0.7.0"
+version = "0.7.1"
 edition = "2021"
 repository = "https://github.com/kinvolk/azure-cvm-tooling/"
 license = "MIT"
@@ -17,7 +17,7 @@ path = "src/main.rs"
 required-features = ["attester", "verifier"]
 
 [dependencies]
-az-cvm-vtpm = { path = "..", version = "0.7.0" }
+az-cvm-vtpm = { path = "..", version = "0.7.1" }
 bincode.workspace = true
 clap.workspace = true
 openssl = { workspace = true, optional = true }
@@ -26,7 +26,12 @@ sev.workspace = true
 thiserror.workspace = true
 ureq.workspace = true
 
+[dev-dependencies]
+serde_json.workspace = true
+hex.workspace = true
+
 [features]
 default = ["attester", "verifier"]
 attester = []
 verifier = ["az-cvm-vtpm/openssl", "openssl", "ureq/tls"]
+integration_test = []
diff --git a/az-cvm-vtpm/az-snp-vtpm/README.md b/az-cvm-vtpm/az-snp-vtpm/README.md
index 4ab38c9..7da756a 100644
--- a/az-cvm-vtpm/az-snp-vtpm/README.md
+++ b/az-cvm-vtpm/az-snp-vtpm/README.md
@@ -71,3 +71,11 @@ signs ┌─ ┌─┴────────────┐ │ │ │
 └─ └─┬────────────┘ │
 └──────────────┘
 ```
+
+## Integration Tests
+
+The integration test suite can run on an SNP CVM. It has to be executed as root, and the tests must run sequentially.
+
+```bash
+sudo -E env "PATH=$PATH" cargo t --features integration_test -- --test-threads 1
+```
diff --git a/az-cvm-vtpm/az-snp-vtpm/tests/integration_tests.rs b/az-cvm-vtpm/az-snp-vtpm/tests/integration_tests.rs
new file mode 100644
index 0000000..aae6fe2
--- /dev/null
+++ b/az-cvm-vtpm/az-snp-vtpm/tests/integration_tests.rs
@@ -0,0 +1,50 @@
+use az_snp_vtpm::{hcl, report, vtpm};
+use serde::Deserialize;
+
+#[cfg(feature = "integration_test")]
+#[test]
+fn get_report_with_varying_report_data_len() {
+    let mut report_data = "test".as_bytes();
+    vtpm::get_report_with_report_data(report_data).unwrap();
+    report_data = "test_test".as_bytes();
+    vtpm::get_report_with_report_data(report_data).unwrap();
+}
+
+#[derive(Deserialize, Debug)]
+struct VarDataUserData {
+    #[serde(rename = "user-data")]
+    user_data: String,
+}
+
+#[cfg(feature = "integration_test")]
+#[test]
+fn get_report_with_report_data() {
+    let mut report_data: [u8; 64] = [0; 64];
+    report_data[42] = 42;
+    let bytes = vtpm::get_report_with_report_data(&report_data).unwrap();
+    let hcl_report = hcl::HclReport::new(bytes).unwrap();
+    let var_data = hcl_report.var_data();
+    let VarDataUserData { user_data } = serde_json::from_slice(var_data).unwrap();
+    assert_eq!(user_data.to_lowercase(), hex::encode(report_data));
+
+    let var_data_hash = hcl_report.var_data_sha256();
+    let snp_report: report::AttestationReport = hcl_report.try_into().unwrap();
+    assert_eq!(var_data_hash, snp_report.report_data[..32]);
+}
+
+#[cfg(feature = "integration_test")]
+#[test]
+fn get_report() {
+    let bytes = vtpm::get_report().unwrap();
+    let hcl_report = hcl::HclReport::new(bytes).unwrap();
+
+    let var_data_hash = hcl_report.var_data_sha256();
+    let snp_report: report::AttestationReport = hcl_report.try_into().unwrap();
+    assert_eq!(var_data_hash, snp_report.report_data[..32]);
+}
+
+#[cfg(feature = "integration_test")]
+#[test]
+fn ak_pub() {
+    let _ = vtpm::get_ak_pub().unwrap();
+}
diff --git a/az-cvm-vtpm/az-tdx-vtpm/Cargo.toml b/az-cvm-vtpm/az-tdx-vtpm/Cargo.toml
index 5521a5d..50efc1c 100644
--- a/az-cvm-vtpm/az-tdx-vtpm/Cargo.toml
+++ b/az-cvm-vtpm/az-tdx-vtpm/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "az-tdx-vtpm"
-version = "0.7.0"
+version = "0.7.1"
 edition = "2021"
 repository = "https://github.com/kinvolk/azure-cvm-tooling/"
 license = "MIT"
@@ -16,7 +16,7 @@ name = "tdx-vtpm"
 path = "src/main.rs"
 
 [dependencies]
-az-cvm-vtpm = { path = "..", version = "0.7.0" }
+az-cvm-vtpm = { path = "..", version = "0.7.1" }
 base64-url = "3.0.0"
 bincode.workspace = true
 serde.workspace = true
@@ -27,8 +27,10 @@ zerocopy.workspace = true
 
 [dev-dependencies]
 openssl.workspace = true
+hex.workspace = true
 
 [features]
 default = ["attester", "verifier"]
 attester = []
 verifier = ["az-cvm-vtpm/verifier"]
+integration_test = []
diff --git a/az-cvm-vtpm/az-tdx-vtpm/README.md b/az-cvm-vtpm/az-tdx-vtpm/README.md
index 67ebe3a..82647c0 100644
--- a/az-cvm-vtpm/az-tdx-vtpm/README.md
+++ b/az-cvm-vtpm/az-tdx-vtpm/README.md
@@ -20,3 +20,12 @@ On the TDX CVM, retrieve a TD Quote and write it to disk:
 ```bash
 sudo ./tdx-vtpm
 ```
+
+## Integration Tests
+
+The integration test suite can run on a TDX CVM. It has to be executed as root, and the tests must run sequentially.
+
+```bash
+sudo -E env "PATH=$PATH" cargo t --features integration_test -- --test-threads 1
+```
+
diff --git a/az-cvm-vtpm/az-tdx-vtpm/tests/integration_tests.rs b/az-cvm-vtpm/az-tdx-vtpm/tests/integration_tests.rs
new file mode 100644
index 0000000..6b7ff24
--- /dev/null
+++ b/az-cvm-vtpm/az-tdx-vtpm/tests/integration_tests.rs
@@ -0,0 +1,50 @@
+use az_tdx_vtpm::{hcl, tdx, vtpm};
+use serde::Deserialize;
+
+#[cfg(feature = "integration_test")]
+#[test]
+fn get_report_with_varying_report_data_len() {
+    let mut report_data = "test".as_bytes();
+    vtpm::get_report_with_report_data(report_data).unwrap();
+    report_data = "test_test".as_bytes();
+    vtpm::get_report_with_report_data(report_data).unwrap();
+}
+
+#[derive(Deserialize, Debug)]
+struct VarDataUserData {
+    #[serde(rename = "user-data")]
+    user_data: String,
+}
+
+#[cfg(feature = "integration_test")]
+#[test]
+fn get_report_with_report_data() {
+    let mut report_data: [u8; 64] = [0; 64];
+    report_data[42] = 42;
+    let bytes = vtpm::get_report_with_report_data(&report_data).unwrap();
+    let hcl_report = hcl::HclReport::new(bytes).unwrap();
+    let var_data = hcl_report.var_data();
+    let VarDataUserData { user_data } = serde_json::from_slice(var_data).unwrap();
+    assert_eq!(user_data.to_lowercase(), hex::encode(report_data));
+
+    let var_data_hash = hcl_report.var_data_sha256();
+    let td_report: tdx::TdReport = hcl_report.try_into().unwrap();
+    assert_eq!(var_data_hash, td_report.report_mac.reportdata[..32]);
+}
+
+#[cfg(feature = "integration_test")]
+#[test]
+fn get_report() {
+    let bytes = vtpm::get_report().unwrap();
+    let hcl_report = hcl::HclReport::new(bytes).unwrap();
+
+    let var_data_hash = hcl_report.var_data_sha256();
+    let td_report: tdx::TdReport = hcl_report.try_into().unwrap();
+    assert_eq!(var_data_hash, td_report.report_mac.reportdata[..32]);
+}
+
+#[cfg(feature = "integration_test")]
+#[test]
+fn ak_pub() {
+    let _ = vtpm::get_ak_pub().unwrap();
+}
diff --git a/az-cvm-vtpm/src/vtpm/mod.rs b/az-cvm-vtpm/src/vtpm/mod.rs
index 49a8ad4..48a0616 100644
--- a/az-cvm-vtpm/src/vtpm/mod.rs
+++ b/az-cvm-vtpm/src/vtpm/mod.rs
@@ -3,19 +3,19 @@
 use core::time::Duration;
 use serde::{Deserialize, Serialize};
-use std::io::Write;
 use std::thread;
 use thiserror::Error;
 use tss_esapi::abstraction::{nv, pcr, public::DecodedKey};
 use tss_esapi::attributes::NvIndexAttributesBuilder;
-use tss_esapi::handles::{NvIndexTpmHandle, PcrHandle, TpmHandle};
+use tss_esapi::handles::{NvIndexHandle, NvIndexTpmHandle, PcrHandle, TpmHandle};
 use tss_esapi::interface_types::algorithm::HashingAlgorithm;
-use tss_esapi::interface_types::resource_handles::NvAuth;
+use tss_esapi::interface_types::resource_handles::{NvAuth, Provision};
 use tss_esapi::interface_types::session_handles::AuthSession;
 use tss_esapi::structures::pcr_selection_list::PcrSelectionListBuilder;
 use tss_esapi::structures::pcr_slot::PcrSlot;
 use tss_esapi::structures::{
-    Attest, AttestInfo, Data, DigestValues, NvPublicBuilder, Signature, SignatureScheme,
+    Attest, AttestInfo, Data, DigestValues, MaxNvBuffer, NvPublicBuilder, Signature,
+    SignatureScheme,
 };
 use tss_esapi::tcti_ldr::{DeviceConfig, TctiNameConf};
 use tss_esapi::traits::{Marshall, UnMarshall};
 
@@ -97,7 +97,8 @@ pub enum ReportError {
 
 /// Get a HCL report from an nvindex
 pub fn get_report() -> Result<Vec<u8>, ReportError> {
-    let (nv_index, mut context) = get_session_context()?;
+    let nv_index = NvIndexTpmHandle::new(VTPM_HCL_REPORT_NV_INDEX)?;
+    let mut context = get_session_context()?;
     let report = nv::read_full(&mut context, NvAuth::Owner, nv_index)?;
     Ok(report)
 }
@@ -107,62 +108,103 @@ pub fn get_report() -> Result<Vec<u8>, ReportError> {
 /// in the HCL report in its user_data field and mixed into a hash in the TEE report's report_data.
 /// The Function contains a 3 seconds delay to avoid retrieving a stale report.
 pub fn get_report_with_report_data(report_data: &[u8]) -> Result<Vec<u8>, ReportError> {
-    let (nv_index, mut context) = get_session_context()?;
+    let mut context = get_session_context()?;
     let nv_index_report_data = NvIndexTpmHandle::new(INDEX_REPORT_DATA)?;
     write_nv_index(&mut context, nv_index_report_data, report_data)?;
     thread::sleep(Duration::new(3, 0));
+    let nv_index = NvIndexTpmHandle::new(VTPM_HCL_REPORT_NV_INDEX)?;
     let report = nv::read_full(&mut context, NvAuth::Owner, nv_index)?;
     Ok(report)
 }
 
-fn get_session_context() -> Result<(NvIndexTpmHandle, Context), ReportError> {
-    use tss_esapi::handles::NvIndexTpmHandle;
-    let nv_index = NvIndexTpmHandle::new(VTPM_HCL_REPORT_NV_INDEX)?;
-
+fn get_session_context() -> Result<Context, ReportError> {
     let conf: TctiNameConf = TctiNameConf::Device(DeviceConfig::default());
     let mut context = Context::new(conf)?;
     let auth_session = AuthSession::Password;
     context.set_sessions((Some(auth_session), None, None));
-    Ok((nv_index, context))
+    Ok(context)
 }
 
-fn write_nv_index(
+enum NvSearchResult {
+    Found,
+    NotFound,
+    SizeMismatch,
+}
+
+fn find_index(
     context: &mut Context,
     nv_index: NvIndexTpmHandle,
-    data: &[u8],
-) -> Result<(), ReportError> {
-    let owner_nv_index_attributes = NvIndexAttributesBuilder::new()
+    len: usize,
+) -> Result<NvSearchResult, ReportError> {
+    let list = nv::list(context)?;
+    let result = list
+        .iter()
+        .find(|(public, _)| public.nv_index() == nv_index);
+    let Some((public, _)) = result else {
+        return Ok(NvSearchResult::NotFound);
+    };
+    if public.data_size() != len {
+        return Ok(NvSearchResult::SizeMismatch);
+    }
+
+    Ok(NvSearchResult::Found)
+}
+
+fn create_index(
+    context: &mut Context,
+    handle: NvIndexTpmHandle,
+    len: usize,
+) -> Result<NvIndexHandle, ReportError> {
+    let attributes = NvIndexAttributesBuilder::new()
         .with_owner_write(true)
         .with_owner_read(true)
-        .with_pp_read(true)
         .build()?;
-    let owner_nv_public = NvPublicBuilder::new()
-        .with_nv_index(nv_index)
+    let owner = NvPublicBuilder::new()
+        .with_nv_index(handle)
         .with_index_name_algorithm(HashingAlgorithm::Sha256)
-        .with_index_attributes(owner_nv_index_attributes)
-        .with_data_area_size(data.len())
+        .with_index_attributes(attributes)
+        .with_data_area_size(len)
         .build()?;
-    let mut rw = match nv::list(context)?
-        .iter()
-        .find(|(public, _)| public.nv_index() == nv_index)
-    {
-        Some(_) => nv::NvOpenOptions::ExistingIndex {
-            nv_index_handle: nv_index,
-            auth_handle: NvAuth::Owner,
-        },
-        None => nv::NvOpenOptions::NewIndex {
-            nv_public: owner_nv_public,
-            auth_handle: NvAuth::Owner,
-        },
-    }
-    .open(context)?;
+    let index = context.nv_define_space(Provision::Owner, None, owner)?;
+    Ok(index)
+}
+
+fn resolve_handle(
+    context: &mut Context,
+    handle: NvIndexTpmHandle,
+) -> Result<NvIndexHandle, ReportError> {
+    let key_handle = context.execute_without_session(|c| c.tr_from_tpm_public(handle.into()))?;
+    Ok(key_handle.into())
+}
 
-    rw.write_all(data).map_err(|_| ReportError::NvWriteFailed)
+fn delete_index(context: &mut Context, handle: NvIndexTpmHandle) -> Result<(), ReportError> {
+    let index = resolve_handle(context, handle)?;
+    context.nv_undefine_space(Provision::Owner, index)?;
+    Ok(())
+}
+
+fn write_nv_index(
+    context: &mut Context,
+    handle: NvIndexTpmHandle,
+    data: &[u8],
+) -> Result<(), ReportError> {
+    let buffer = MaxNvBuffer::try_from(data)?;
+    let result = find_index(context, handle, data.len())?;
+    let index = match result {
+        NvSearchResult::NotFound => create_index(context, handle, data.len())?,
+        NvSearchResult::SizeMismatch => {
+            delete_index(context, handle)?;
+            create_index(context, handle, data.len())?
+        }
+        _ => resolve_handle(context, handle)?,
+    };
+    context.nv_write(NvAuth::Owner, index, buffer, 0)?;
+    Ok(())
 }
 
 #[derive(Error, Debug)]
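
Below is a minimal caller-side sketch of what this change enables: passing `report_data` of different lengths across calls. It assumes the `az-snp-vtpm` API exercised by the integration tests above (`vtpm::get_report_with_report_data`, `HclReport`, `AttestationReport`); the helper `attest_with_nonce` and the nonce lengths are illustrative and not part of the crate.

```rust
use az_snp_vtpm::{hcl::HclReport, report::AttestationReport, vtpm};
use std::error::Error;

// Illustrative helper (hypothetical, not part of the crate): fetch an HCL
// report bound to a caller-chosen report_data and check that the SNP report
// covers it. Must run on an SNP CVM with access to the vTPM (i.e. as root).
fn attest_with_nonce(nonce: &[u8]) -> Result<(), Box<dyn Error>> {
    // The nonce length is caller-defined; the patched write_nv_index()
    // recreates the report_data NV index if its size no longer matches.
    let bytes = vtpm::get_report_with_report_data(nonce)?;
    let hcl_report = HclReport::new(bytes)?;

    // The SNP report's report_data starts with the SHA-256 of the HCL VarData,
    // which embeds the hex-encoded user data (see the integration tests above).
    let var_data_hash = hcl_report.var_data_sha256();
    let snp_report: AttestationReport = hcl_report.try_into()?;
    assert_eq!(var_data_hash, snp_report.report_data[..32]);
    Ok(())
}

fn main() -> Result<(), Box<dyn Error>> {
    // Two calls with different report_data lengths, mirroring the
    // get_report_with_varying_report_data_len() integration test.
    attest_with_nonce(b"short-nonce")?;
    attest_with_nonce(&[7u8; 48])
}
```

Because `write_nv_index()` now recreates the NV index on a size mismatch, consecutive calls with different `report_data` lengths succeed without manual cleanup of the previously defined index.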