From 4179af6f8f929a855bb3b400d3d3055a7ddc40e0 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Mon, 29 Jul 2024 11:58:44 +0200 Subject: [PATCH 01/21] feat(flake): add cargo test to checkPhase --- flake.nix | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/flake.nix b/flake.nix index 7339447..a103db4 100644 --- a/flake.nix +++ b/flake.nix @@ -120,6 +120,13 @@ buildInputs = [ nixfmt-rfc-style ]; + doCheck = true; + checkPhase = '' + runHook preCheck + cargo test + runHook postCheck + ''; + meta = with lib; { homepage = "https://github.com/tsandrini/flake-parts-builder"; description = "Nix flakes interactive template builder based on flake-parts written in Rust."; From c4fdea4c9c3e248952a3ac2186428d52c8612c9b Mon Sep 17 00:00:00 2001 From: tsandrini Date: Mon, 29 Jul 2024 11:59:22 +0200 Subject: [PATCH 02/21] ci(cachix-push): push also flake-parts --- .github/workflows/cachix-push.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/cachix-push.yml b/.github/workflows/cachix-push.yml index 54cc20f..e08d1db 100644 --- a/.github/workflows/cachix-push.yml +++ b/.github/workflows/cachix-push.yml @@ -27,7 +27,10 @@ jobs: authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - name: "Building project packages..." - run: nix build + run: | + nix build .#builder + nix build .#flake-parts + nix build .#flake-parts-bootstrap - name: "Building project devshells..." run: nix develop --command echo OK From a17eb1b2ac03f665184d8ee483618591c95670b5 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Mon, 29 Jul 2024 11:59:48 +0200 Subject: [PATCH 03/21] refactor(builder): fix flake.nix.tmpl formatting --- src/assets/flake.nix.template | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/assets/flake.nix.template b/src/assets/flake.nix.template index 0764158..b35d7b1 100644 --- a/src/assets/flake.nix.template +++ b/src/assets/flake.nix.template @@ -1,6 +1,6 @@ # --- flake.nix { -description = "TODO Add description of your new project"; + description = "TODO Add description of your new project"; inputs = { # --- BASE DEPENDENCIES --- From e6cde20adb20bdb7d839cf8a335d89f142144d27 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Mon, 29 Jul 2024 12:00:38 +0200 Subject: [PATCH 04/21] test(builder): add main cli unit testing --- src/main.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main.rs b/src/main.rs index 7505165..7fd1b3f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -46,8 +46,13 @@ fn main() -> Result<()> { } } -#[test] -fn verify_cli() { - use clap::CommandFactory; - Cli::command().debug_assert(); +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn verify_cli() { + use clap::CommandFactory; + Cli::command().debug_assert(); + } } From 973a64feb64af24bc2b684b5ef4643c262b3759b Mon Sep 17 00:00:00 2001 From: tsandrini Date: Mon, 29 Jul 2024 12:00:57 +0200 Subject: [PATCH 05/21] test(builder): add templates.rs unit tests --- src/templates.rs | 230 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 230 insertions(+) diff --git a/src/templates.rs b/src/templates.rs index 6422b39..bb51431 100644 --- a/src/templates.rs +++ b/src/templates.rs @@ -94,3 +94,233 @@ impl FlakeContext { Ok(rendered) } } + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn test_flake_inputs_context_new() { + let inputs = json!({"input1": "value1", "input2": "value2"}); + let context = FlakeInputsContext::new(inputs.clone()); + assert_eq!(context.inputs, inputs); + } + + #[test] + fn 
test_flake_inputs_context_from_merged_metadata() { + let metadata1 = FlakePartMetadata { + description: "Metadata 1".to_string(), + inputs: json!({"input1": "value1"}), + dependencies: vec![], + conflicts: vec![], + extra_trusted_public_keys: vec![], + extra_substituters: vec![], + }; + let metadata2 = FlakePartMetadata { + description: "Metadata 2".to_string(), + inputs: json!({"input2": "value2"}), + dependencies: vec![], + conflicts: vec![], + extra_trusted_public_keys: vec![], + extra_substituters: vec![], + }; + let metadata = vec![&metadata1, &metadata2]; + + let context = FlakeInputsContext::from_merged_metadata(&metadata); + assert_eq!( + context.inputs, + json!({"input1": "value1", "input2": "value2"}) + ); + } + + #[test] + fn test_flake_inputs_context_render_with_simple_inputs() -> Result<()> { + let inputs = json!({"input1": { + "url": "github:org1/repo1", + }, "input2": { + "url": "github:org2/repo2", + }}); + let context = FlakeInputsContext::new(inputs); + let rendered = context.render()?; + let cleaned_rendered = rendered.split_whitespace().collect::(); + + let expected = r#" + input1.url = "github:org1/repo1"; + input2.url = "github:org2/repo2"; + "#; + + let cleaned_expected = expected.split_whitespace().collect::(); + + assert_eq!(cleaned_rendered, cleaned_expected); + Ok(()) + } + + #[test] + fn test_flake_inputs_context_render_with_complex_inputs() -> Result<()> { + let inputs = json!({"input1": { + "url": "github:org1/repo1", + "flake": false + }, "input2": { + "url": "github:org2/repo2", + "inputs": { + "input1": { + "follows": "input1" + } + } + }}); + let context = FlakeInputsContext::new(inputs); + let rendered = context.render()?; + let cleaned_rendered = rendered.split_whitespace().collect::(); + + let expected = r#" + input1 = { + url = "github:org1/repo1"; + flake = false; + }; + input2 = { + url = "github:org2/repo2"; + inputs.input1.follows = "input1"; + }; + "#; + + let cleaned_expected = expected.split_whitespace().collect::(); + + assert_eq!(cleaned_rendered, cleaned_expected); + Ok(()) + } + + #[test] + fn test_flake_context_new() { + let inputs_context = FlakeInputsContext::new(json!({})); + let trusted_keys = vec!["key1".to_string(), "key2".to_string()]; + let substituters = vec!["sub1".to_string(), "sub2".to_string()]; + let context = FlakeContext::new(inputs_context, trusted_keys.clone(), substituters.clone()); + + assert_eq!(context.extra_trusted_public_keys, trusted_keys); + assert_eq!(context.extra_substituters, substituters); + } + + #[test] + fn test_flake_context_from_merged_metadata() { + let metadata1 = FlakePartMetadata { + description: "Metadata 1".to_string(), + inputs: json!({"input1": "value1"}), + dependencies: vec![], + conflicts: vec![], + extra_trusted_public_keys: vec!["key1".to_string()], + extra_substituters: vec!["sub1".to_string()], + }; + let metadata2 = FlakePartMetadata { + description: "Metadata 2".to_string(), + inputs: json!({"input2": "value2"}), + dependencies: vec![], + conflicts: vec![], + extra_trusted_public_keys: vec!["key2".to_string()], + extra_substituters: vec!["sub2".to_string()], + }; + let metadata = vec![&metadata1, &metadata2]; + + let context = FlakeContext::from_merged_metadata(&metadata); + assert_eq!( + context.flake_inputs_context.inputs, + json!({"input1": "value1", "input2": "value2"}) + ); + assert_eq!( + context.extra_trusted_public_keys, + vec!["key1".to_string(), "key2".to_string()] + ); + assert_eq!( + context.extra_substituters, + vec!["sub1".to_string(), "sub2".to_string()] + ); + } + + 
#[test] + fn test_flake_context_render() -> Result<()> { + let inputs_context = FlakeInputsContext::new(json!({"input1": { + "url": "github:org1/repo1", + }, "input2": { + "url": "github:org2/repo2", + }})); + let trusted_keys = vec!["key1".to_string(), "key2".to_string()]; + let substituters = vec!["sub1".to_string(), "sub2".to_string()]; + let context = FlakeContext::new(inputs_context, trusted_keys, substituters); + + let rendered = context.render()?; + println!("{}", rendered); + let cleaned_rendered = rendered.split_whitespace().collect::(); + + let expected = r#" + # --- flake.nix + { + description = "TODO Add description of your new project"; + + inputs = { + # --- BASE DEPENDENCIES --- + nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; + flake-parts.url = "github:hercules-ci/flake-parts"; + + # --- YOUR DEPENDENCIES --- + input1.url = "github:org1/repo1"; + input2.url = "github:org2/repo2"; + }; + + # NOTE Here you can add additional binary cache substituers that you trust. + # There are also some sensible default caches commented out that you + # might consider using, however, you are advised to doublecheck the keys. + nixConfig = { + extra-trusted-public-keys = [ + # "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" + # "nix-community.cachix.org-1:mB9FSh9qf2dCimDSUo8Zy7bkq5CX+/rkCWyvRCYg3Fs=" + # "key1" + # "key2" + ]; + extra-substituters = [ + # "https://cache.nixos.org" + # "https://nix-community.cachix.org/" + # "sub1" + # "sub2" + ]; + }; + + outputs = + inputs@{ flake-parts, ... }: + let + inherit (inputs.nixpkgs) lib; + inherit (import ./flake-parts/_bootstrap.nix { inherit lib; }) loadParts; + in + flake-parts.lib.mkFlake { inherit inputs; } { + + # We recursively traverse all of the flakeModules in ./flake-parts and + # import only the final modules, meaning that you can have an arbitrary + # nested structure that suffices your needs. 
For example + # + # - ./flake-parts + # - modules/ + # - nixos/ + # - myNixosModule1.nix + # - myNixosModule2.nix + # - default.nix + # - home-manager/ + # - myHomeModule1.nix + # - myHomeModule2.nix + # - default.nix + # - sharedModules.nix + # - pkgs/ + # - myPackage1.nix + # - myPackage2.nix + # - default.nix + # - mySimpleModule.nix + # - _not_a_module.nix + imports = loadParts ./flake-parts; + }; + } + "#; + + let cleaned_expected = expected.split_whitespace().collect::(); + + assert_eq!(cleaned_rendered, cleaned_expected); + Ok(()) + } +} From 0c0844355e6090eef9e47e1e3fdacd28205f4e05 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Mon, 29 Jul 2024 12:01:15 +0200 Subject: [PATCH 06/21] test(builder): add fs_utils.rs unit tests --- src/fs_utils.rs | 167 ++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 139 insertions(+), 28 deletions(-) diff --git a/src/fs_utils.rs b/src/fs_utils.rs index df64be7..a0db100 100644 --- a/src/fs_utils.rs +++ b/src/fs_utils.rs @@ -10,6 +10,41 @@ use walkdir::WalkDir; use crate::config::META_FILE; +pub fn reset_permissions(path: &str) -> std::io::Result<()> { + for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) { + let path = entry.path(); + let metadata = fs::metadata(path)?; + + if metadata.is_dir() { + fs::set_permissions(path, Permissions::from_mode(0o755))?; + } else if metadata.is_file() { + fs::set_permissions(path, Permissions::from_mode(0o644))?; + } + } + Ok(()) +} + +pub fn regex_in_dir_recursive(dir: &str, pattern: &str, replacement: &str) -> io::Result<()> { + let re = Regex::new(pattern).unwrap(); + + for entry in WalkDir::new(dir).into_iter().filter_map(|e| e.ok()) { + if entry.file_type().is_file() { + let path = entry.path(); + let mut contents = String::new(); + { + let mut file = File::open(path)?; + file.read_to_string(&mut contents)?; + } + let new_contents = re.replace_all(&contents, replacement).to_string(); + if new_contents != contents { + let mut file = File::create(path)?; + file.write_all(new_contents.as_bytes())?; + } + } + } + Ok(()) +} + // TODO might implement a "merging" strategy instead of skipping/overwriting // but currently not entirely sure about its use case #[allow(dead_code)] @@ -81,37 +116,113 @@ pub fn merge_dirs(src: &Path, dst: &Path, options: &CopyOptions) -> Result<()> { Ok(()) } -pub fn reset_permissions(path: &str) -> std::io::Result<()> { - for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) { - let path = entry.path(); - let metadata = fs::metadata(path)?; +#[cfg(test)] +mod tests { + use super::*; + use std::fs::{create_dir_all, File}; + use tempfile::TempDir; - if metadata.is_dir() { - fs::set_permissions(path, Permissions::from_mode(0o755))?; - } else if metadata.is_file() { - fs::set_permissions(path, Permissions::from_mode(0o644))?; - } + #[test] + fn test_reset_permissions() -> io::Result<()> { + let temp_dir = TempDir::new()?; + let test_dir = temp_dir.path().join("test_dir"); + create_dir_all(&test_dir)?; + + // Create a test file with different permissions + let test_file = test_dir.join("test_file.txt"); + File::create(&test_file)?; + fs::set_permissions(&test_file, Permissions::from_mode(0o777))?; + + reset_permissions(test_dir.to_str().unwrap())?; + + let metadata = fs::metadata(&test_file)?; + assert_eq!(metadata.permissions().mode() & 0o777, 0o644); + + let dir_metadata = fs::metadata(&test_dir)?; + assert_eq!(dir_metadata.permissions().mode() & 0o777, 0o755); + + Ok(()) } - Ok(()) -} -pub fn regex_in_dir_recursive(dir: &str, pattern: &str, 
replacement: &str) -> io::Result<()> { - let re = Regex::new(pattern).unwrap(); + #[test] + fn test_reset_permissions_recursive() -> io::Result<()> { + let temp_dir = TempDir::new()?; + let root_dir = temp_dir.path().join("root"); + let nested_dir = root_dir.join("level1").join("level2"); + create_dir_all(&nested_dir)?; - for entry in WalkDir::new(dir).into_iter().filter_map(|e| e.ok()) { - if entry.file_type().is_file() { - let path = entry.path(); - let mut contents = String::new(); - { - let mut file = File::open(path)?; - file.read_to_string(&mut contents)?; - } - let new_contents = re.replace_all(&contents, replacement).to_string(); - if new_contents != contents { - let mut file = File::create(path)?; - file.write_all(new_contents.as_bytes())?; - } - } + // Create files at different levels with different permissions + let root_file = root_dir.join("root_file.txt"); + let nested_file = nested_dir.join("nested_file.txt"); + File::create(&root_file)?; + File::create(&nested_file)?; + + fs::set_permissions(&root_file, Permissions::from_mode(0o777))?; + fs::set_permissions(&nested_file, Permissions::from_mode(0o600))?; + fs::set_permissions(&nested_dir, Permissions::from_mode(0o700))?; + + reset_permissions(root_dir.to_str().unwrap())?; + + // Check permissions + assert_eq!( + fs::metadata(&root_file)?.permissions().mode() & 0o777, + 0o644 + ); + assert_eq!( + fs::metadata(&nested_file)?.permissions().mode() & 0o777, + 0o644 + ); + assert_eq!( + fs::metadata(&nested_dir)?.permissions().mode() & 0o777, + 0o755 + ); + assert_eq!(fs::metadata(&root_dir)?.permissions().mode() & 0o777, 0o755); + + Ok(()) + } + + #[test] + fn test_regex_in_dir_recursive() -> io::Result<()> { + let temp_dir = TempDir::new()?; + let test_file = temp_dir.path().join("test_file.txt"); + + let initial_content = "Hello, world! This is a test."; + fs::write(&test_file, initial_content)?; + + regex_in_dir_recursive(temp_dir.path().to_str().unwrap(), r"world", "universe")?; + + let new_content = fs::read_to_string(&test_file)?; + assert_eq!(new_content, "Hello, universe! This is a test."); + + Ok(()) + } + + #[test] + fn test_regex_in_dir_recursive_nested() -> io::Result<()> { + let temp_dir = TempDir::new()?; + let root_dir = temp_dir.path().join("root"); + let nested_dir = root_dir.join("level1").join("level2"); + create_dir_all(&nested_dir)?; + + // Create files at different levels with test content + let root_file = root_dir.join("root_file.txt"); + let nested_file = nested_dir.join("nested_file.txt"); + + fs::write(&root_file, "Hello, world! This is the root file.")?; + fs::write(&nested_file, "Goodbye, world! This is the nested file.")?; + + regex_in_dir_recursive(root_dir.to_str().unwrap(), r"world", "universe")?; + + // Check content of both files + let root_content = fs::read_to_string(&root_file)?; + let nested_content = fs::read_to_string(&nested_file)?; + + assert_eq!(root_content, "Hello, universe! This is the root file."); + assert_eq!( + nested_content, + "Goodbye, universe! This is the nested file." 
+ ); + + Ok(()) } - Ok(()) } From 341eb6699d8aadd056effd81e2b98bc34f2277cf Mon Sep 17 00:00:00 2001 From: tsandrini Date: Mon, 29 Jul 2024 23:37:43 +0200 Subject: [PATCH 07/21] test(builder): add nix.rs tests --- .github/workflows/cachix-push.yml | 6 +- src/nix.rs | 216 ++++++++++++++++++++++++++++-- src/parts.rs | 5 +- 3 files changed, 210 insertions(+), 17 deletions(-) diff --git a/.github/workflows/cachix-push.yml b/.github/workflows/cachix-push.yml index e08d1db..b410cb5 100644 --- a/.github/workflows/cachix-push.yml +++ b/.github/workflows/cachix-push.yml @@ -28,9 +28,9 @@ jobs: - name: "Building project packages..." run: | - nix build .#builder - nix build .#flake-parts - nix build .#flake-parts-bootstrap + nix build .#builder --show-trace --accept-flake-config + nix build .#flake-parts --show-trace --accept-flake-config + nix build .#flake-parts-bootstrap --show-trace --accept-flake-config - name: "Building project devshells..." run: nix develop --command echo OK diff --git a/src/nix.rs b/src/nix.rs index aa53660..91e27c5 100644 --- a/src/nix.rs +++ b/src/nix.rs @@ -1,8 +1,50 @@ use color_eyre::eyre::Result; use std::path::PathBuf; use std::process::Command; +use thiserror::Error; -use crate::config::META_FILE; +#[derive(Error, Debug)] +pub enum EvalNixFileError { + #[error("provided path is invalid: {0}")] + InvalidPathError(PathBuf), + + #[error("failed to run `nix eval --json --file` on: {0}")] + NixCommandError(#[from] std::io::Error), + + #[error("failed to convert output to utf8: {0}")] + UTF8ConversionError(#[from] std::string::FromUtf8Error), +} + +pub fn eval_nix_file(path: &PathBuf, to_json: bool) -> Result { + if path.exists() == false { + return Err(EvalNixFileError::InvalidPathError(path.clone())); + } + let path = path + .to_str() + .ok_or_else(|| EvalNixFileError::InvalidPathError(path.clone()))?; + + let mut command = Command::new("nix"); + command.arg("eval"); + command.arg("--file").arg(path); + if to_json { + command.arg("--json"); + } + + let output = command.output()?; + + if !output.status.success() { + return Err(EvalNixFileError::NixCommandError(std::io::Error::new( + std::io::ErrorKind::Other, + format!( + "nix eval command failed with status: {:?}", + output.status.code() + ), + ))); + } + + let stdout = String::from_utf8(output.stdout)?; + Ok(stdout.trim().to_string()) +} pub fn get_flake_store_path(flake_uri: &str) -> Result { let nix_info = Command::new("nix") @@ -13,20 +55,170 @@ pub fn get_flake_store_path(flake_uri: &str) -> Result { Ok(PathBuf::from(output.trim())) } -pub fn eval_meta_file(path: &PathBuf) -> Result { - let nix_eval = Command::new("nix") - .args(["eval", "--json", "--file", META_FILE]) - .current_dir(&path) - .output()?; - - let output = String::from_utf8(nix_eval.stdout)?; - Ok(output) -} - pub fn nixfmt_file(path: &PathBuf) -> Result<()> { - let path = path.to_str().unwrap(); // TODO + let path = path.to_str().ok_or(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Invalid path", + ))?; Command::new("nixfmt").args([&path]).output()?; Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + use std::fs::File; + use std::io::Write; + use tempfile::TempDir; + + fn clean_string(s: &str) -> String { + s.split_whitespace().collect::() + } + + #[test] + fn test_valid_nix_file() -> Result<()> { + let temp_dir = TempDir::new()?; + let file_path = temp_dir.path().join("test.nix"); + let mut file = File::create(&file_path)?; + write!( + file, + r#" + {{ + description = "Test description"; + inputs = {{ + test.url = "github:test/repo"; 
+ }}; + }} + "# + )?; + + let result = eval_nix_file(&file_path, true)?; + let expected = + r#"{"description":"Test description","inputs":{"test":{"url":"github:test/repo"}}}"#; + + assert_eq!(clean_string(&result), clean_string(expected)); + + Ok(()) + } + + #[test] + fn test_nonexistent_path() { + let invalid_path = PathBuf::from("/nonexistent/path"); + let result = eval_nix_file(&invalid_path, true); + assert!(matches!(result, Err(EvalNixFileError::InvalidPathError(_)))); + } + + #[test] + fn test_invalid_path() { + let invalid_path = PathBuf::from(""); + let result = eval_nix_file(&invalid_path, true); + assert!(matches!(result, Err(EvalNixFileError::InvalidPathError(_)))); + } + + #[test] + fn test_non_json_output() -> Result<()> { + let temp_dir = TempDir::new()?; + let file_path = temp_dir.path().join("test.nix"); + let mut file = File::create(&file_path)?; + write!(file, r#""Hello, World!""#)?; + + let result = eval_nix_file(&file_path, false)?; + assert_eq!(clean_string(&result), clean_string("\"Hello, World!\"")); + + Ok(()) + } + + #[test] + fn test_complex_nix_file() -> Result<()> { + let temp_dir = TempDir::new()?; + let file_path = temp_dir.path().join("test.nix"); + let mut file = File::create(&file_path)?; + write!( + file, + r#" + {{ + description = "Flake bindings for the `github:cachix/devenv` development environment."; + inputs = {{ + devenv.url = "github:cachix/devenv"; + devenv-root = {{ + url = "file+file:///dev/null"; + flake = false; + }}; + mk-shell-bin.url = "github:rrbutani/nix-mk-shell-bin"; + nix2container = {{ + url = "github:nlewo/nix2container"; + inputs.nixpkgs.follows = "nixpkgs"; + }}; + }}; + conflicts = [ "shells" ]; + extraTrustedPublicKeys = [ "https://devenv.cachix.org" ]; + extraSubstituters = [ "devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw=" ]; + }} + "# + )?; + + let result = eval_nix_file(&file_path, true)?; + let expected = r#" + { + "conflicts":["shells"], + "description":"Flake bindings for the `github:cachix/devenv` development environment.", + "extraSubstituters":["devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw="], + "extraTrustedPublicKeys":["https://devenv.cachix.org"], + "inputs":{ + "devenv":{"url":"github:cachix/devenv"}, + "devenv-root":{"flake":false,"url":"file+file:///dev/null"}, + "mk-shell-bin":{"url":"github:rrbutani/nix-mk-shell-bin"}, + "nix2container":{ + "inputs":{"nixpkgs":{"follows":"nixpkgs"}}, + "url":"github:nlewo/nix2container" + } + } + } + "#; + + assert_eq!(clean_string(&result), clean_string(expected)); + + Ok(()) + } + + #[test] + fn test_nix_command_error() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("invalid.nix"); + let mut file = File::create(&file_path).unwrap(); + write!(file, "this is not a valid nix expression").unwrap(); + + let result = eval_nix_file(&file_path, true); + assert!(matches!(result, Err(EvalNixFileError::NixCommandError(_)))); + } + + #[test] + fn test_json_vs_non_json_output() -> Result<()> { + let temp_dir = TempDir::new()?; + let file_path = temp_dir.path().join("test.nix"); + let mut file = File::create(&file_path)?; + write!( + file, + r#" + {{ + x = 42; + y = "Hello"; + }} + "# + )?; + + let json_result = eval_nix_file(&file_path, true)?; + let non_json_result = eval_nix_file(&file_path, false)?; + + let expected_json = r#"{"x":42,"y":"Hello"}"#; + assert_eq!(clean_string(&json_result), clean_string(expected_json)); + + // For non-JSON output, we can't predict the exact formatting, so we'll check for key 
elements + assert!(non_json_result.contains("x = 42")); + assert!(non_json_result.contains("y = \"Hello\"")); + + Ok(()) + } +} diff --git a/src/parts.rs b/src/parts.rs index 993432a..bfbd9ca 100644 --- a/src/parts.rs +++ b/src/parts.rs @@ -6,7 +6,8 @@ use std::fs; use std::path::PathBuf; use thiserror::Error; -use crate::nix::{eval_meta_file, get_flake_store_path}; +use crate::config::META_FILE; +use crate::nix::{eval_nix_file, get_flake_store_path}; #[derive(Debug, Clone)] pub struct FlakePart { @@ -171,7 +172,7 @@ impl FlakePart { .to_str() .ok_or(FlakePartParseError::InvalidPathError())?; - let eval_output = eval_meta_file(&nix_store_path)?; + let eval_output = eval_nix_file(&nix_store_path.join(META_FILE), true)?; let metadata: FlakePartMetadata = serde_json::from_str(&eval_output) .map_err(|e| FlakePartParseError::MetadataConversionError(e))?; From e28fb62d9c017dc075b3e7824b9a8de8bbede120 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Mon, 29 Jul 2024 23:38:03 +0200 Subject: [PATCH 08/21] ci(flake-check): add test builds to flake-check --- .github/workflows/flake-check.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/flake-check.yml b/.github/workflows/flake-check.yml index 793bf09..0dacbed 100644 --- a/.github/workflows/flake-check.yml +++ b/.github/workflows/flake-check.yml @@ -27,5 +27,11 @@ jobs: - name: "Running `nix flake check`..." run: nix flake check --show-trace --accept-flake-config + - name: "Running `nix build ...`..." + run: | + nix build .#builder --show-trace --accept-flake-config + nix build .#flake-parts --show-trace --accept-flake-config + nix build .#flake-parts-bootstrap --show-trace --accept-flake-config + - name: "Checking flake inputs for stale & insecure nixpkgs versions..." uses: DeterminateSystems/flake-checker-action@main From d7f6fb2d65e15e3fde6586318404f220f93f3e52 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Tue, 30 Jul 2024 13:29:17 +0200 Subject: [PATCH 09/21] test(builder): fix nix.rs tests, add NIX_BIN_PATH --- Cargo.lock | 34 +++++++++++++++++++++++++ Cargo.toml | 1 + flake.nix | 30 +++++++++++++++++++--- src/nix.rs | 73 ++++++++++++++++++++++++++++++++++-------------------- 4 files changed, 107 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c24dc02..b2c4424 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -187,6 +187,12 @@ version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + [[package]] name = "errno" version = "0.3.9" @@ -229,6 +235,7 @@ dependencies = [ "termcolor", "thiserror", "walkdir", + "which", ] [[package]] @@ -249,6 +256,15 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys", +] + [[package]] name = "indenter" version = "0.3.3" @@ -594,6 +610,18 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "which" +version = "6.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3d9c5ed668ee1f17edb3b627225343d210006a90bb1e3745ce1f30b1fb115075" +dependencies = [ + "either", + "home", + "rustix", + "winsafe", +] + [[package]] name = "winapi-util" version = "0.1.8" @@ -675,3 +703,9 @@ name = "windows_x86_64_msvc" version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" diff --git a/Cargo.toml b/Cargo.toml index 3339f4f..0efb092 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,3 +19,4 @@ tempfile = "3.10.1" termcolor = "1.4.1" thiserror = "1.0.61" walkdir = "2.5.0" +which = "6.0.2" diff --git a/flake.nix b/flake.nix index a103db4..2ee64f6 100644 --- a/flake.nix +++ b/flake.nix @@ -92,6 +92,7 @@ lib, rustPlatform, nixfmt-rfc-style, + nix, tsandrini, }: rustPlatform.buildRustPackage { @@ -116,14 +117,32 @@ runHook postUnpack ''; - cargoSha256 = "sha256-JYCiIbStvpmO4CO3Sp7tMHUdWpFMKiveE5ATIyK0UVo="; + cargoSha256 = "sha256-8LlvOf5llQE2NSAXYdRnoX5vob0WxcUGARO47eV1oYE="; - buildInputs = [ nixfmt-rfc-style ]; + buildInputs = [ + nixfmt-rfc-style + nix + ]; + + NIX_BIN_PATH = "${nix}/bin/nix"; doCheck = true; checkPhase = '' runHook preCheck - cargo test + dirs=(store var var/nix var/log/nix etc home) + + for dir in $dirs; do + mkdir -p "$TMPDIR/$dir" + done + + export NIX_STORE_DIR=$TMPDIR/store + export NIX_LOCALSTATE_DIR=$TMPDIR/var + export NIX_STATE_DIR=$TMPDIR/var/nix + export NIX_LOG_DIR=$TMPDIR/var/log/nix + export NIX_CONF_DIR=$TMPDIR/etc + export HOME=$TMPDIR/home + + cargo test --frozen --release runHook postCheck ''; @@ -137,7 +156,10 @@ }; }; in - pkgs.callPackage package { inherit tsandrini; }; + pkgs.callPackage package { + inherit tsandrini; + nix = pkgs.nixVersions.stable; + }; docs = let diff --git a/src/nix.rs b/src/nix.rs index 91e27c5..f0d30f6 100644 --- a/src/nix.rs +++ b/src/nix.rs @@ -4,26 +4,42 @@ use std::process::Command; use thiserror::Error; #[derive(Error, Debug)] -pub enum EvalNixFileError { +pub enum NixError { #[error("provided path is invalid: {0}")] InvalidPathError(PathBuf), - - #[error("failed to run `nix eval --json --file` on: {0}")] - NixCommandError(#[from] std::io::Error), - + #[error("failed to run nix command: {0}")] + NixCommandError(String), #[error("failed to convert output to utf8: {0}")] UTF8ConversionError(#[from] std::string::FromUtf8Error), + #[error("nix command not found. 
Please ensure 'nix' is installed and in your PATH.")] + NixNotFound, + #[error("IO error: {0}")] + IoError(#[from] std::io::Error), } -pub fn eval_nix_file(path: &PathBuf, to_json: bool) -> Result { - if path.exists() == false { - return Err(EvalNixFileError::InvalidPathError(path.clone())); - } +pub fn get_nix_binary() -> Option { + std::env::var_os("NIX_BIN_PATH") + .map(PathBuf::from) + .or_else(|| which::which("nix").ok()) +} + +pub fn nix_command() -> Command { + let mut cmd = Command::new(get_nix_binary().expect("Nix executable not found")); + cmd.args(&[ + "--extra-experimental-features", + "nix-command", + "--extra-experimental-features", + "flakes", + ]); + cmd +} + +pub fn eval_nix_file(path: &PathBuf, to_json: bool) -> Result { let path = path .to_str() - .ok_or_else(|| EvalNixFileError::InvalidPathError(path.clone()))?; + .ok_or_else(|| NixError::InvalidPathError(path.clone()))?; - let mut command = Command::new("nix"); + let mut command = nix_command(); command.arg("eval"); command.arg("--file").arg(path); if to_json { @@ -33,26 +49,29 @@ pub fn eval_nix_file(path: &PathBuf, to_json: bool) -> Result Result { - let nix_info = Command::new("nix") - .args(["build", "--no-link", "--print-out-paths", &flake_uri]) - .output()?; +pub fn get_flake_store_path(flake_uri: &str) -> Result { + let mut command = nix_command(); + command.args(["build", "--no-link", "--print-out-paths", flake_uri]); - let output = String::from_utf8(nix_info.stdout)?; - Ok(PathBuf::from(output.trim())) + let output = command.output()?; + + if !output.status.success() { + return Err(NixError::NixCommandError( + String::from_utf8_lossy(&output.stderr).to_string(), + )); + } + + let stdout = String::from_utf8(output.stdout)?; + Ok(PathBuf::from(stdout.trim())) } pub fn nixfmt_file(path: &PathBuf) -> Result<()> { @@ -107,14 +126,14 @@ mod tests { fn test_nonexistent_path() { let invalid_path = PathBuf::from("/nonexistent/path"); let result = eval_nix_file(&invalid_path, true); - assert!(matches!(result, Err(EvalNixFileError::InvalidPathError(_)))); + assert!(matches!(result, Err(NixError::NixCommandError(_)))); } #[test] fn test_invalid_path() { let invalid_path = PathBuf::from(""); let result = eval_nix_file(&invalid_path, true); - assert!(matches!(result, Err(EvalNixFileError::InvalidPathError(_)))); + assert!(matches!(result, Err(NixError::NixCommandError(_)))); } #[test] @@ -191,7 +210,7 @@ mod tests { write!(file, "this is not a valid nix expression").unwrap(); let result = eval_nix_file(&file_path, true); - assert!(matches!(result, Err(EvalNixFileError::NixCommandError(_)))); + assert!(matches!(result, Err(NixError::NixCommandError(_)))); } #[test] From ff2b13475112bc71fdb823aa76a42afcf57f7a95 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 14:05:28 +0200 Subject: [PATCH 10/21] test(builder): add nix db transaction lock --- Cargo.lock | 199 +++++++++++++++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 3 + src/nix.rs | 35 ++-------- 3 files changed, 209 insertions(+), 28 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b2c4424..e5ff3bc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -75,6 +75,12 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + [[package]] name = "backtrace" version = "0.3.71" @@ -231,6 +237,7 @@ dependencies = [ "regex", "serde", "serde_json", + "serial_test", "tempfile", "termcolor", 
"thiserror", @@ -244,6 +251,83 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + [[package]] name = "gimli" version = "0.28.1" @@ -301,6 +385,22 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + [[package]] name = "memchr" version = "2.7.4" @@ -346,12 +446,41 @@ version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets", +] + [[package]] name = "pin-project-lite" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + [[package]] name = "proc-macro2" version = "1.0.85" @@ -370,6 +499,15 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "redox_syscall" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" +dependencies = [ + "bitflags", +] + [[package]] name = "regex" version = "1.10.5" @@ -433,6 +571,27 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "scc" +version = "2.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05ccfb12511cdb770157ace92d7dda771e498445b78f9886e8cdbc5140a4eced" +dependencies = [ + "sdd", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sdd" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "177258b64c0faaa9ffd3c65cd3262c2bc7e2588dbbd9c1641d0346145c1bbda8" + [[package]] name = "serde" version = "1.0.203" @@ -464,6 +623,31 @@ dependencies = [ "serde", ] +[[package]] +name = "serial_test" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" +dependencies = [ + "futures", + "log", + "once_cell", + "parking_lot", + "scc", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "sharded-slab" version = "0.1.7" @@ -473,6 +657,21 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + [[package]] name = "strsim" version = "0.11.1" diff --git a/Cargo.toml b/Cargo.toml index 0efb092..797e9ae 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,3 +20,6 @@ termcolor = "1.4.1" thiserror = "1.0.61" walkdir = "2.5.0" which = "6.0.2" + +[dev-dependencies] +serial_test = "3.1.1" diff --git a/src/nix.rs b/src/nix.rs index f0d30f6..3ebef70 100644 --- a/src/nix.rs +++ b/src/nix.rs @@ -88,6 +88,7 @@ pub fn nixfmt_file(path: &PathBuf) -> Result<()> { #[cfg(test)] mod tests { use super::*; + use serial_test::serial; use std::fs::File; use std::io::Write; use tempfile::TempDir; @@ -97,6 +98,7 @@ mod tests { } #[test] + #[serial(nix_transaction)] 
fn test_valid_nix_file() -> Result<()> { let temp_dir = TempDir::new()?; let file_path = temp_dir.path().join("test.nix"); @@ -123,6 +125,7 @@ mod tests { } #[test] + #[serial(nix_transaction)] fn test_nonexistent_path() { let invalid_path = PathBuf::from("/nonexistent/path"); let result = eval_nix_file(&invalid_path, true); @@ -130,6 +133,7 @@ mod tests { } #[test] + #[serial(nix_transaction)] fn test_invalid_path() { let invalid_path = PathBuf::from(""); let result = eval_nix_file(&invalid_path, true); @@ -137,6 +141,7 @@ mod tests { } #[test] + #[serial(nix_transaction)] fn test_non_json_output() -> Result<()> { let temp_dir = TempDir::new()?; let file_path = temp_dir.path().join("test.nix"); @@ -150,6 +155,7 @@ mod tests { } #[test] + #[serial(nix_transaction)] fn test_complex_nix_file() -> Result<()> { let temp_dir = TempDir::new()?; let file_path = temp_dir.path().join("test.nix"); @@ -203,6 +209,7 @@ mod tests { } #[test] + #[serial(nix_transaction)] fn test_nix_command_error() { let temp_dir = TempDir::new().unwrap(); let file_path = temp_dir.path().join("invalid.nix"); @@ -212,32 +219,4 @@ mod tests { let result = eval_nix_file(&file_path, true); assert!(matches!(result, Err(NixError::NixCommandError(_)))); } - - #[test] - fn test_json_vs_non_json_output() -> Result<()> { - let temp_dir = TempDir::new()?; - let file_path = temp_dir.path().join("test.nix"); - let mut file = File::create(&file_path)?; - write!( - file, - r#" - {{ - x = 42; - y = "Hello"; - }} - "# - )?; - - let json_result = eval_nix_file(&file_path, true)?; - let non_json_result = eval_nix_file(&file_path, false)?; - - let expected_json = r#"{"x":42,"y":"Hello"}"#; - assert_eq!(clean_string(&json_result), clean_string(expected_json)); - - // For non-JSON output, we can't predict the exact formatting, so we'll check for key elements - assert!(non_json_result.contains("x = 42")); - assert!(non_json_result.contains("y = \"Hello\"")); - - Ok(()) - } } From e8a81d6b9b978d7a4929f92970350b69aed07dbb Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 14:07:26 +0200 Subject: [PATCH 11/21] feat(flake): move builder tests to a flake check --- flake.nix | 89 ++++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 61 insertions(+), 28 deletions(-) diff --git a/flake.nix b/flake.nix index 2ee64f6..33a334c 100644 --- a/flake.nix +++ b/flake.nix @@ -82,8 +82,8 @@ ... 
}: { - packages = rec { - default = builder; + packages = { + default = config.packages.builder; builder = let @@ -126,26 +126,6 @@ NIX_BIN_PATH = "${nix}/bin/nix"; - doCheck = true; - checkPhase = '' - runHook preCheck - dirs=(store var var/nix var/log/nix etc home) - - for dir in $dirs; do - mkdir -p "$TMPDIR/$dir" - done - - export NIX_STORE_DIR=$TMPDIR/store - export NIX_LOCALSTATE_DIR=$TMPDIR/var - export NIX_STATE_DIR=$TMPDIR/var/nix - export NIX_LOG_DIR=$TMPDIR/var/log/nix - export NIX_CONF_DIR=$TMPDIR/etc - export HOME=$TMPDIR/home - - cargo test --frozen --release - runHook postCheck - ''; - meta = with lib; { homepage = "https://github.com/tsandrini/flake-parts-builder"; description = "Nix flakes interactive template builder based on flake-parts written in Rust."; @@ -175,19 +155,17 @@ cargoSha256 = "sha256-Jsha+Aoe5R6g4H7KNX2VX62S+NGj1SrobeCakjgFw24="; - doCheck = false; - buildPhase = '' cargo doc --no-deps --release ''; meta = builder.meta // { - description = "Documentation for ${builder.meta.description}"; + description = "Documentation for the ${builder.meta.description}"; mainProgram = null; }; }; in - pkgs.callPackage package { inherit builder; }; + pkgs.callPackage package { inherit (config.packages) builder; }; flake-parts = let @@ -242,8 +220,63 @@ pkgs.callPackage package { inherit tsandrini mkFlakeParts; }; }; - devShells = rec { - default = dev; + checks = { + builder-tests = + let + package = + { + lib, + rustPlatform, + builder, + }: + rustPlatform.buildRustPackage { + inherit (builder) + src + unpackPhase + version + buildInputs + NIX_BIN_PATH + ; + name = "${builder.name}-tests"; + + cargoSha256 = "sha256-CPAaHaELJlWEsYgI8zkesLJQO5zJzLz17HINoIloa9c="; + + dontBuild = true; + dontInstall = true; + doCheck = true; + + checkPhase = '' + runHook preCheck + dirs=(store var var/nix var/log/nix etc home) + + for dir in $dirs; do + mkdir -p "$TMPDIR/$dir" + done + + export NIX_STORE_DIR=$TMPDIR/store + export NIX_LOCALSTATE_DIR=$TMPDIR/var + export NIX_STATE_DIR=$TMPDIR/var/nix + export NIX_LOG_DIR=$TMPDIR/var/log/nix + export NIX_CONF_DIR=$TMPDIR/etc + export HOME=$TMPDIR/home + + cargo test --frozen --release + mkdir -p $out && touch $out/test-success + + runHook postCheck + ''; + + meta = builder.meta // { + description = "Test suite for the ${builder.meta.description}"; + mainProgram = null; + }; + }; + in + pkgs.callPackage package { inherit (config.packages) builder; }; + }; + + devShells = { + default = config.devShells.dev; dev = let From 87271559e14dc87b8f165269fe4f69d57230f8c4 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 16:37:05 +0200 Subject: [PATCH 12/21] fix(flake): update builder cargoSha256 --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 33a334c..bff2ed2 100644 --- a/flake.nix +++ b/flake.nix @@ -117,7 +117,7 @@ runHook postUnpack ''; - cargoSha256 = "sha256-8LlvOf5llQE2NSAXYdRnoX5vob0WxcUGARO47eV1oYE="; + cargoSha256 = "sha256-ZuehJ7qF+7jyTHsvQLr7V1xfBhTw10OrlFdPk9CU9XE="; buildInputs = [ nixfmt-rfc-style From ef8daa881c18f1ee9809752446a080a56d410635 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 16:37:36 +0200 Subject: [PATCH 13/21] docs(README): update with new info --- README.md | 51 +++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 47 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index f5a462f..9c7b753 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,9 @@ such domain. And this is what `flake-parts-builder` solves! 
It serves as a **dynamic extension** to `nix flake init -t`, nothing more, nothing less! +So let's forget about `nix flake init -t` and embrace `nix run` instead :sunglasses: + +----- Okay, but what exactly does it do then? @@ -105,6 +108,7 @@ parts using `--disable-base` if you wish so) ```bash flake-parts-builder list ``` + ```md # github:tsandrini/flake-parts-builder#flake-parts - +github: (Collection) GitHub related parts @@ -220,14 +224,53 @@ an attrset with the following structure. - `extraSubstituters`: merged with all of the required parts and pasted into the final `flake.nix`, for security purposes they are all commented out -## FAQ 🗣️ +## Additional questions, issues 🗣️ + +### How can I use a custom version of the `nix` binary? + +If installed via the nix package manager, `flake-parts-builder` will use +an isolated version of `pkgs.nixVersions.stable` with +`--extra-experimental-features 'nix-command flakes'` enabled. However, if you'd +like to use a custom version instead, simply pass it via `$NIX_BIN_PATH`, +for example + +```bash +NIX_BIN_PATH=/bin/patched-nix flake-parts-builder init -p +home-manager,shells myNewProject +``` + +### Why not use `flake.templates` instead? + +The `flake.templates` flake output is a static property by design that needs +to point to a fixed path with fixed content known ahead of time, which makes +it heavily impractical for any kind of dynamic evaluation. One could, +given the set of parts, prepare all of the possible combination of templates +with some patching script and +directly update the source code of `flake.nix`, however ... At the time of +this writing there are currently $27+1$ flake parts provided by this flake in +the base collection of parts, which would result in + +```math +2^{28} - 1 = 268435455 +``` + +total combinations of templates and with an average part size of +$8.59 \pm 2.60$ KB this would result in $2.14$ total terabytes of data +with just one part per template. :skull: -### 1. Why not use `flake.templates` instead? +I hope this is enough of an answer. -### 2. Can't we just stuff this functionality into flakeModules? +### Can't we just stuff this functionality into `flakeModules`? I totally agree there is a fine line between a reusable piece of functionality and boilerplate template code and I personally can't think of a general enough definition that would discern them and also be somehow useful. However, I do -believe there is a practical clearly visible difference between them that most +believe there is a practical, clearly visible difference between them that most programmers can just simply look and see, let's for example take .... +[devenv/dev.nix](flake-parts/devenv/flake-parts/devenv/dev.nix) or +[nix-topology/topology.nix](flake-parts/nix-topology/flake-parts/nix-topology/topology.nix) +or even +[flake-check.yml](flake-parts/gh-actions-check/.github/workflows/flake-check.yml), +you can clearly **"see"** that this isn't a good candidate for a `flakeModule`, +they are too specific, they typically represent the end user options of some +existing `flakeModule`s. Wrapping this code into another layer of modularity +doesn't make sense, since this is meant to be a piece of configuration code. 
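
Note on the storage estimate introduced in the new "Why not use `flake.templates` instead?" README section above: the $2.14$ terabyte figure follows directly from the combination count and the quoted average part size, assuming binary (KiB/TiB) units — a back-of-the-envelope sketch, not part of the patch itself:

```math
(2^{28} - 1) \cdot 8.59\ \mathrm{KiB} \approx 2.31 \cdot 10^{9}\ \mathrm{KiB} \approx 2.15\ \mathrm{TiB}
```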
From 59a56e1a0554fa3a0410c75394bd084cb20d651a Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 19:54:32 +0200 Subject: [PATCH 14/21] feat(flake-parts): add parallelism to gh-actions --- .../.github/workflows/cachix-push.yml | 15 +++++++++++---- .../.github/workflows/flake-check.yml | 10 +++++++++- .../.github/workflows/update-flake-lock.yml | 3 +++ .../.github/workflows/flakehub-publish.yml | 3 +++ .../gh-actions-pages/.github/workflows/pages.yml | 3 +++ 5 files changed, 29 insertions(+), 5 deletions(-) diff --git a/flake-parts/gh-actions-cachix/.github/workflows/cachix-push.yml b/flake-parts/gh-actions-cachix/.github/workflows/cachix-push.yml index 751a690..42c175f 100644 --- a/flake-parts/gh-actions-cachix/.github/workflows/cachix-push.yml +++ b/flake-parts/gh-actions-cachix/.github/workflows/cachix-push.yml @@ -19,6 +19,9 @@ jobs: - name: "Setting up magic-nix-cache..." uses: DeterminateSystems/magic-nix-cache-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Settings up cachix binary cache..." uses: cachix/cachix-action@v15 @@ -29,9 +32,13 @@ jobs: # If you chose API tokens for write access OR if you have a private cache authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - name: "Building project packages..." - run: nix build + # NOTE Install any necessary packages here + - name: "Setting up packages..." + run: | + nix profile install nixpkgs#nix-fast-build # parallel nix builder - - name: "Building project devshells..." - run: nix develop --command echo OK + - name: "Building project packages..." + run: nix-fast-build -- --skip-cached --no-nom --flake ".#packages" + - name: "Building project devShells..." + run: nix-fast-build -- --skip-cached --no-nom --flake ".#devShells" diff --git a/flake-parts/gh-actions-check/.github/workflows/flake-check.yml b/flake-parts/gh-actions-check/.github/workflows/flake-check.yml index 793bf09..e1b3139 100644 --- a/flake-parts/gh-actions-check/.github/workflows/flake-check.yml +++ b/flake-parts/gh-actions-check/.github/workflows/flake-check.yml @@ -20,12 +20,20 @@ jobs: - name: "Installing and configuring the nix package manager..." uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Setting up magic-nix-cache..." uses: DeterminateSystems/magic-nix-cache-action@main + # NOTE Install any necessary packages here + - name: "Setting up packages..." + run: | + nix profile install nixpkgs#nix-fast-build # parallel nix builder + - name: "Running `nix flake check`..." - run: nix flake check --show-trace --accept-flake-config + run: nix-fast-build -- --skip-cached --no-nom - name: "Checking flake inputs for stale & insecure nixpkgs versions..." uses: DeterminateSystems/flake-checker-action@main diff --git a/flake-parts/gh-actions-flake-update/.github/workflows/update-flake-lock.yml b/flake-parts/gh-actions-flake-update/.github/workflows/update-flake-lock.yml index 9f6fd4f..c90a139 100644 --- a/flake-parts/gh-actions-flake-update/.github/workflows/update-flake-lock.yml +++ b/flake-parts/gh-actions-flake-update/.github/workflows/update-flake-lock.yml @@ -15,6 +15,9 @@ jobs: - name: "Installing and configuring the nix package manager..." uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Setting up magic-nix-cache..." 
uses: DeterminateSystems/magic-nix-cache-action@main diff --git a/flake-parts/gh-actions-flakehub/.github/workflows/flakehub-publish.yml b/flake-parts/gh-actions-flakehub/.github/workflows/flakehub-publish.yml index 8ec5374..e0e016a 100644 --- a/flake-parts/gh-actions-flakehub/.github/workflows/flakehub-publish.yml +++ b/flake-parts/gh-actions-flakehub/.github/workflows/flakehub-publish.yml @@ -19,6 +19,9 @@ jobs: - name: "Installing and configuring the nix package manager..." uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Setting up magic-nix-cache..." uses: DeterminateSystems/magic-nix-cache-action@main diff --git a/flake-parts/gh-actions-pages/.github/workflows/pages.yml b/flake-parts/gh-actions-pages/.github/workflows/pages.yml index 08c3086..1d60f88 100644 --- a/flake-parts/gh-actions-pages/.github/workflows/pages.yml +++ b/flake-parts/gh-actions-pages/.github/workflows/pages.yml @@ -28,6 +28,9 @@ jobs: - name: "Installing and configuring the nix package manager..." uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Setting up magic-nix-cache..." uses: DeterminateSystems/magic-nix-cache-action@main From 41a7827769400ff00e07942cf647a6a68eeb12e6 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 19:55:37 +0200 Subject: [PATCH 15/21] ci(workflows): add parallelism & better conf --- .github/workflows/cachix-push.yml | 16 ++++++++++------ .github/workflows/flake-check.yml | 15 ++++++++++----- .github/workflows/flakehub-publish.yml | 3 +++ .github/workflows/update-flake-lock.yml | 3 +++ 4 files changed, 26 insertions(+), 11 deletions(-) diff --git a/.github/workflows/cachix-push.yml b/.github/workflows/cachix-push.yml index b410cb5..6d2ea66 100644 --- a/.github/workflows/cachix-push.yml +++ b/.github/workflows/cachix-push.yml @@ -16,6 +16,9 @@ jobs: - name: "Installing and configuring the nix package manager..." uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Setting up magic-nix-cache..." uses: DeterminateSystems/magic-nix-cache-action@main @@ -26,12 +29,13 @@ jobs: name: tsandrini authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - name: "Building project packages..." + # NOTE Install any necessary packages here + - name: "Setting up packages..." run: | - nix build .#builder --show-trace --accept-flake-config - nix build .#flake-parts --show-trace --accept-flake-config - nix build .#flake-parts-bootstrap --show-trace --accept-flake-config + nix profile install nixpkgs#nix-fast-build # parallel nix builder - - name: "Building project devshells..." - run: nix develop --command echo OK + - name: "Building project packages..." + run: nix-fast-build -- --skip-cached --no-nom --flake ".#packages" + - name: "Building project devShells..." + run: nix-fast-build -- --skip-cached --no-nom --flake ".#devShells" diff --git a/.github/workflows/flake-check.yml b/.github/workflows/flake-check.yml index 0dacbed..acf1974 100644 --- a/.github/workflows/flake-check.yml +++ b/.github/workflows/flake-check.yml @@ -20,18 +20,23 @@ jobs: - name: "Installing and configuring the nix package manager..." uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Setting up magic-nix-cache..." uses: DeterminateSystems/magic-nix-cache-action@main + # NOTE Install any necessary packages here + - name: "Setting up packages..." 
+ run: | + nix profile install nixpkgs#nix-fast-build # parallel nix builder + - name: "Running `nix flake check`..." - run: nix flake check --show-trace --accept-flake-config + run: nix-fast-build -- --skip-cached --no-nom - name: "Running `nix build ...`..." - run: | - nix build .#builder --show-trace --accept-flake-config - nix build .#flake-parts --show-trace --accept-flake-config - nix build .#flake-parts-bootstrap --show-trace --accept-flake-config + run: nix-fast-build -- --skip-cached --no-nom --flake ".#packages" - name: "Checking flake inputs for stale & insecure nixpkgs versions..." uses: DeterminateSystems/flake-checker-action@main diff --git a/.github/workflows/flakehub-publish.yml b/.github/workflows/flakehub-publish.yml index bbbe349..4894ae8 100644 --- a/.github/workflows/flakehub-publish.yml +++ b/.github/workflows/flakehub-publish.yml @@ -19,6 +19,9 @@ jobs: - name: "Installing and configuring the nix package manager..." uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Setting up magic-nix-cache..." uses: DeterminateSystems/magic-nix-cache-action@main diff --git a/.github/workflows/update-flake-lock.yml b/.github/workflows/update-flake-lock.yml index 6728430..a16e8ec 100644 --- a/.github/workflows/update-flake-lock.yml +++ b/.github/workflows/update-flake-lock.yml @@ -15,6 +15,9 @@ jobs: - name: "Installing and configuring the nix package manager..." uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + accept-flake-config = true - name: "Setting up magic-nix-cache..." uses: DeterminateSystems/magic-nix-cache-action@main From 63173a061c2051cd5e7cbf0df811a3ea73021f10 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 19:59:34 +0200 Subject: [PATCH 16/21] test(flake): remove test check and use checkPhase --- flake.nix | 70 ++++++++++++------------------------------------------- 1 file changed, 15 insertions(+), 55 deletions(-) diff --git a/flake.nix b/flake.nix index bff2ed2..2b5c45b 100644 --- a/flake.nix +++ b/flake.nix @@ -117,6 +117,21 @@ runHook postUnpack ''; + preCheck = '' + dirs=(store var var/nix var/log/nix etc home) + + for dir in $dirs; do + mkdir -p "$TMPDIR/$dir" + done + + export NIX_STORE_DIR=$TMPDIR/store + export NIX_LOCALSTATE_DIR=$TMPDIR/var + export NIX_STATE_DIR=$TMPDIR/var/nix + export NIX_LOG_DIR=$TMPDIR/var/log/nix + export NIX_CONF_DIR=$TMPDIR/etc + export HOME=$TMPDIR/home + ''; + cargoSha256 = "sha256-ZuehJ7qF+7jyTHsvQLr7V1xfBhTw10OrlFdPk9CU9XE="; buildInputs = [ @@ -220,61 +235,6 @@ pkgs.callPackage package { inherit tsandrini mkFlakeParts; }; }; - checks = { - builder-tests = - let - package = - { - lib, - rustPlatform, - builder, - }: - rustPlatform.buildRustPackage { - inherit (builder) - src - unpackPhase - version - buildInputs - NIX_BIN_PATH - ; - name = "${builder.name}-tests"; - - cargoSha256 = "sha256-CPAaHaELJlWEsYgI8zkesLJQO5zJzLz17HINoIloa9c="; - - dontBuild = true; - dontInstall = true; - doCheck = true; - - checkPhase = '' - runHook preCheck - dirs=(store var var/nix var/log/nix etc home) - - for dir in $dirs; do - mkdir -p "$TMPDIR/$dir" - done - - export NIX_STORE_DIR=$TMPDIR/store - export NIX_LOCALSTATE_DIR=$TMPDIR/var - export NIX_STATE_DIR=$TMPDIR/var/nix - export NIX_LOG_DIR=$TMPDIR/var/log/nix - export NIX_CONF_DIR=$TMPDIR/etc - export HOME=$TMPDIR/home - - cargo test --frozen --release - mkdir -p $out && touch $out/test-success - - runHook postCheck - ''; - - meta = builder.meta // { - description = "Test suite for the 
${builder.meta.description}"; - mainProgram = null; - }; - }; - in - pkgs.callPackage package { inherit (config.packages) builder; }; - }; - devShells = { default = config.devShells.dev; From f8fff2c1df1be5245ce974a66fe34937f7d00eea Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 20:05:04 +0200 Subject: [PATCH 17/21] fix(gh-workflows): fix nix-fast-build args --- .github/workflows/cachix-push.yml | 4 ++-- .github/workflows/flake-check.yml | 4 ++-- .../gh-actions-cachix/.github/workflows/cachix-push.yml | 4 ++-- .../gh-actions-check/.github/workflows/flake-check.yml | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/cachix-push.yml b/.github/workflows/cachix-push.yml index 6d2ea66..0057b41 100644 --- a/.github/workflows/cachix-push.yml +++ b/.github/workflows/cachix-push.yml @@ -35,7 +35,7 @@ jobs: nix profile install nixpkgs#nix-fast-build # parallel nix builder - name: "Building project packages..." - run: nix-fast-build -- --skip-cached --no-nom --flake ".#packages" + run: nix-fast-build --skip-cached --no-nom --flake ".#packages" - name: "Building project devShells..." - run: nix-fast-build -- --skip-cached --no-nom --flake ".#devShells" + run: nix-fast-build --skip-cached --no-nom --flake ".#devShells" diff --git a/.github/workflows/flake-check.yml b/.github/workflows/flake-check.yml index acf1974..f12a76a 100644 --- a/.github/workflows/flake-check.yml +++ b/.github/workflows/flake-check.yml @@ -33,10 +33,10 @@ jobs: nix profile install nixpkgs#nix-fast-build # parallel nix builder - name: "Running `nix flake check`..." - run: nix-fast-build -- --skip-cached --no-nom + run: nix-fast-build --skip-cached --no-nom - name: "Running `nix build ...`..." - run: nix-fast-build -- --skip-cached --no-nom --flake ".#packages" + run: nix-fast-build --skip-cached --no-nom --flake ".#packages" - name: "Checking flake inputs for stale & insecure nixpkgs versions..." uses: DeterminateSystems/flake-checker-action@main diff --git a/flake-parts/gh-actions-cachix/.github/workflows/cachix-push.yml b/flake-parts/gh-actions-cachix/.github/workflows/cachix-push.yml index 42c175f..2672a8a 100644 --- a/flake-parts/gh-actions-cachix/.github/workflows/cachix-push.yml +++ b/flake-parts/gh-actions-cachix/.github/workflows/cachix-push.yml @@ -38,7 +38,7 @@ jobs: nix profile install nixpkgs#nix-fast-build # parallel nix builder - name: "Building project packages..." - run: nix-fast-build -- --skip-cached --no-nom --flake ".#packages" + run: nix-fast-build --skip-cached --no-nom --flake ".#packages" - name: "Building project devShells..." - run: nix-fast-build -- --skip-cached --no-nom --flake ".#devShells" + run: nix-fast-build --skip-cached --no-nom --flake ".#devShells" diff --git a/flake-parts/gh-actions-check/.github/workflows/flake-check.yml b/flake-parts/gh-actions-check/.github/workflows/flake-check.yml index e1b3139..287b054 100644 --- a/flake-parts/gh-actions-check/.github/workflows/flake-check.yml +++ b/flake-parts/gh-actions-check/.github/workflows/flake-check.yml @@ -33,7 +33,7 @@ jobs: nix profile install nixpkgs#nix-fast-build # parallel nix builder - name: "Running `nix flake check`..." - run: nix-fast-build -- --skip-cached --no-nom + run: nix-fast-build --skip-cached --no-nom - name: "Checking flake inputs for stale & insecure nixpkgs versions..." 
uses: DeterminateSystems/flake-checker-action@main From 9dab4e958b1c64bb4f6255a2d6b484de5a78d805 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Wed, 31 Jul 2024 21:22:40 +0200 Subject: [PATCH 18/21] refactor(flake): move docs directly to builder drv --- flake.nix | 35 +++++++++-------------------------- 1 file changed, 9 insertions(+), 26 deletions(-) diff --git a/flake.nix b/flake.nix index 2b5c45b..c31ce08 100644 --- a/flake.nix +++ b/flake.nix @@ -134,6 +134,15 @@ cargoSha256 = "sha256-ZuehJ7qF+7jyTHsvQLr7V1xfBhTw10OrlFdPk9CU9XE="; + postBuild = '' + cargo doc --no-deps --release + ''; + + postInstall = '' + mkdir -p $out/doc + cp -r target/doc $out/ + ''; + buildInputs = [ nixfmt-rfc-style nix @@ -156,32 +165,6 @@ nix = pkgs.nixVersions.stable; }; - docs = - let - package = - { - lib, - rustPlatform, - builder, - }: - rustPlatform.buildRustPackage { - inherit (builder) src unpackPhase version; - name = "${builder.name}-docs"; - - cargoSha256 = "sha256-Jsha+Aoe5R6g4H7KNX2VX62S+NGj1SrobeCakjgFw24="; - - buildPhase = '' - cargo doc --no-deps --release - ''; - - meta = builder.meta // { - description = "Documentation for the ${builder.meta.description}"; - mainProgram = null; - }; - }; - in - pkgs.callPackage package { inherit (config.packages) builder; }; - flake-parts = let package = From 30f6a5c0ad559e81e1f902dba54655c49cf72d48 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Thu, 1 Aug 2024 16:44:20 +0200 Subject: [PATCH 19/21] test(builder): NixCmdInterface, Nix/Mock-Executor --- src/cmd/add.rs | 6 +- src/cmd/init.rs | 34 ++- src/cmd/list.rs | 5 +- src/main.rs | 9 +- src/nix.rs | 608 ++++++++++++++++++++++++++++++++++------------- src/parts.rs | 110 ++++++++- src/templates.rs | 1 - 7 files changed, 570 insertions(+), 203 deletions(-) diff --git a/src/cmd/add.rs b/src/cmd/add.rs index 3e417ab..3f7d7f9 100644 --- a/src/cmd/add.rs +++ b/src/cmd/add.rs @@ -5,6 +5,7 @@ use tempfile::tempdir; use crate::cmd::init::{parse_required_parts_tuples, prepare_tmpdir, InitCommand}; use crate::config::{BASE_DERIVATION_NAME, SELF_FLAKE_URI}; +use crate::nix::NixCmdInterface; use crate::parts::FlakePartsStore; use crate::templates::FlakeInputsContext; @@ -26,7 +27,7 @@ pub struct AddCommand { pub init: InitCommand, } -pub fn add(mut cmd: AddCommand) -> Result<()> { +pub fn add(mut cmd: AddCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { if !cmd.init.shared_args.disable_base_parts { cmd.init .shared_args @@ -40,7 +41,7 @@ pub fn add(mut cmd: AddCommand) -> Result<()> { .shared_args .parts_stores .iter() - .map(|store| FlakePartsStore::from_flake_uri(&store)) + .map(|store| FlakePartsStore::from_flake_uri(&store, &nix_cmd)) .collect::>>()?; let parts_tuples = parse_required_parts_tuples(&cmd.init, &stores)?; @@ -58,6 +59,7 @@ pub fn add(mut cmd: AddCommand) -> Result<()> { let tmpdir = tempdir()?; prepare_tmpdir( + &nix_cmd, &tmpdir, &parts_tuples, path.to_str(), diff --git a/src/cmd/init.rs b/src/cmd/init.rs index a510ca6..94600e8 100644 --- a/src/cmd/init.rs +++ b/src/cmd/init.rs @@ -11,7 +11,7 @@ use crate::config::{ BASE_DERIVATION_NAME, BOOTSTRAP_DERIVATION_NAME, META_FILE, NAMEPLACEHOLDER, SELF_FLAKE_URI, }; use crate::fs_utils::{regex_in_dir_recursive, reset_permissions}; -use crate::nix::nixfmt_file; +use crate::nix::NixCmdInterface; use crate::parts::{FlakePartTuple, FlakePartsStore}; use crate::templates::FlakeContext; @@ -125,9 +125,6 @@ pub fn parse_required_parts_tuples<'a>( let user_req_flake_strings = cmd.parts.clone(); - // TODO remove - println!("User required parts: {:?}", 
user_req_flake_strings); - let (resolved_deps, unresolved_deps) = { let start_indices: Vec = all_parts_tuples .iter() @@ -155,9 +152,6 @@ pub fn parse_required_parts_tuples<'a>( .chain(resolved_deps.iter()) .collect::>(); - // TODO remove - println!("All required parts: {:?}", all_req_flake_strings); - let final_parts_tuples = all_parts_tuples .into_iter() .filter(|part_tuple| { @@ -168,14 +162,6 @@ pub fn parse_required_parts_tuples<'a>( }) .collect::>(); - let final_parts_uris = final_parts_tuples - .iter() - .map(|flake_part| flake_part.to_flake_uri(None)) - .collect::>(); - - // TODO remove - println!("Final parts: {:?}", final_parts_uris); - let missing_parts = FlakePartTuple::find_missing_parts_in(&final_parts_tuples, &user_req_flake_strings); @@ -204,6 +190,7 @@ pub fn parse_required_parts_tuples<'a>( } pub fn prepare_tmpdir( + nix_cmd: &impl NixCmdInterface, tmpdir: &TempDir, parts_tuples: &Vec, target_name: Option<&str>, @@ -225,7 +212,6 @@ pub fn prepare_tmpdir( // TODO fails if no META_FILE is present // check if meta exists and delete it if yes - std::fs::remove_file(tmp_path.join(META_FILE))?; reset_permissions(tmp_path.to_str().unwrap())?; @@ -240,7 +226,8 @@ pub fn prepare_tmpdir( let rendered = flake_context.render()?; fs::write(tmp_path.join("flake.nix"), rendered)?; - nixfmt_file(&tmp_path.join("flake.nix"))?; + nix_cmd.nixfmt_file(&tmp_path.join("flake.nix"))?; + // nixfmt_file(&tmp_path.join("flake.nix"))?; } // This becomes None when `.`, `../`,etc... is passed @@ -251,7 +238,7 @@ pub fn prepare_tmpdir( Ok(()) } -pub fn init(mut cmd: InitCommand) -> Result<()> { +pub fn init(mut cmd: InitCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { if !cmd.shared_args.disable_base_parts { cmd.shared_args .parts_stores @@ -272,7 +259,7 @@ pub fn init(mut cmd: InitCommand) -> Result<()> { .shared_args .parts_stores .iter() - .map(|store| FlakePartsStore::from_flake_uri(&store)) + .map(|store| FlakePartsStore::from_flake_uri(&store, &nix_cmd)) .collect::>>()?; let parts_tuples = parse_required_parts_tuples(&cmd, &stores)?; @@ -284,7 +271,14 @@ pub fn init(mut cmd: InitCommand) -> Result<()> { } let tmpdir = tempdir()?; - prepare_tmpdir(&tmpdir, &parts_tuples, path.to_str(), &cmd.strategy, true)?; + prepare_tmpdir( + &nix_cmd, + &tmpdir, + &parts_tuples, + path.to_str(), + &cmd.strategy, + true, + )?; dir::copy( &tmpdir, diff --git a/src/cmd/list.rs b/src/cmd/list.rs index 66cb71b..e47ec27 100644 --- a/src/cmd/list.rs +++ b/src/cmd/list.rs @@ -5,6 +5,7 @@ use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; use crate::cmd::SharedArgs; use crate::config::{BASE_DERIVATION_NAME, BOOTSTRAP_DERIVATION_NAME, SELF_FLAKE_URI}; +use crate::nix::NixCmdInterface; use crate::parts::FlakePartsStore; /// List all available flake-parts in all parts stores provided by the user. @@ -14,7 +15,7 @@ pub struct ListCommand { pub shared_args: SharedArgs, } -pub fn list(mut cmd: ListCommand) -> Result<()> { +pub fn list(mut cmd: ListCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { if !cmd.shared_args.disable_base_parts { cmd.shared_args .parts_stores @@ -36,7 +37,7 @@ pub fn list(mut cmd: ListCommand) -> Result<()> { writeln!(&mut stdout, " # {}", flake_uri)?; // TODO maybe some error message instead of unwrap? 
- FlakePartsStore::from_flake_uri(&flake_uri) + FlakePartsStore::from_flake_uri(&flake_uri, &nix_cmd) .unwrap() .parts .iter() diff --git a/src/main.rs b/src/main.rs index 7fd1b3f..4524a50 100644 --- a/src/main.rs +++ b/src/main.rs @@ -15,6 +15,7 @@ pub mod templates; use crate::cmd::add::{add, AddCommand}; use crate::cmd::init::{init, InitCommand}; use crate::cmd::list::{list, ListCommand}; +use crate::nix::NixExecutor; /// Nix flakes interactive template builder based on flake-parts written /// in Rust. @@ -33,16 +34,16 @@ enum Commands { } // TODO add logging -// TODO add tests // TODO better docs fn main() -> Result<()> { color_eyre::install()?; let cli = Cli::parse(); + let nix_cmd = NixExecutor::from_env()?; match cli.command { - Commands::Init(cmd) => init(cmd), - Commands::List(cmd) => list(cmd), - Commands::Add(cmd) => add(cmd), + Commands::List(cmd) => list(cmd, nix_cmd), + Commands::Init(cmd) => init(cmd, nix_cmd), + Commands::Add(cmd) => add(cmd, nix_cmd), } } diff --git a/src/nix.rs b/src/nix.rs index 3ebef70..227bb48 100644 --- a/src/nix.rs +++ b/src/nix.rs @@ -3,86 +3,149 @@ use std::path::PathBuf; use std::process::Command; use thiserror::Error; -#[derive(Error, Debug)] -pub enum NixError { - #[error("provided path is invalid: {0}")] - InvalidPathError(PathBuf), +#[derive(Error, Debug, Clone)] +pub enum NixCmdInterfaceError { + #[error("failed to run cmd due to invalid path: {0}")] + InvalidPath(PathBuf), + + #[error("failed to convert output to UTF-8: {0}")] + UTF8ConversionError(String), + #[error("failed to run nix command: {0}")] NixCommandError(String), - #[error("failed to convert output to utf8: {0}")] - UTF8ConversionError(#[from] std::string::FromUtf8Error), - #[error("nix command not found. Please ensure 'nix' is installed and in your PATH.")] - NixNotFound, - #[error("IO error: {0}")] - IoError(#[from] std::io::Error), } -pub fn get_nix_binary() -> Option { - std::env::var_os("NIX_BIN_PATH") - .map(PathBuf::from) - .or_else(|| which::which("nix").ok()) +pub trait NixCmdInterface { + // TODO figure out how to remove the static lifetime + type Error: From + std::error::Error + Send + Sync + 'static; + + fn eval_nix_file(&self, path: &PathBuf, to_json: bool) -> Result; + fn store_path_of_flake(&self, flake_uri: &str) -> Result; // TODO maybe rename + fn nixfmt_file(&self, path: &PathBuf) -> Result<(), Self::Error>; } -pub fn nix_command() -> Command { - let mut cmd = Command::new(get_nix_binary().expect("Nix executable not found")); - cmd.args(&[ - "--extra-experimental-features", - "nix-command", - "--extra-experimental-features", - "flakes", - ]); - cmd +pub struct NixExecutor { + nix_binary: PathBuf, } -pub fn eval_nix_file(path: &PathBuf, to_json: bool) -> Result { - let path = path - .to_str() - .ok_or_else(|| NixError::InvalidPathError(path.clone()))?; +#[derive(Error, Debug)] +pub enum NixExecutorError { + #[error("{0}")] + NixCmdInterfaceError(#[from] NixCmdInterfaceError), + + #[error("nix binary not found")] + NixBinaryNotFound, - let mut command = nix_command(); - command.arg("eval"); - command.arg("--file").arg(path); - if to_json { - command.arg("--json"); + #[error("nix command failed with nonzero status: {0}")] + NonzeroStatusError(String), +} + +impl NixExecutor { + pub fn new(nix_binary: PathBuf) -> Self { + Self { nix_binary } } - let output = command.output()?; + pub fn from_env() -> Result { + let nix_binary = std::env::var_os("NIX_BIN_PATH") + .map(PathBuf::from) + .or_else(|| which::which("nix").ok()) + 
.ok_or(NixExecutorError::NixBinaryNotFound)?; - if !output.status.success() { - return Err(NixError::NixCommandError( - String::from_utf8_lossy(&output.stderr).to_string(), - )); + Ok(Self::new(nix_binary)) } - let stdout = String::from_utf8(output.stdout)?; - Ok(stdout.trim().to_string()) + fn nix_command(&self) -> Command { + let mut cmd = Command::new(&self.nix_binary); + cmd.args(&[ + "--extra-experimental-features", + "nix-command", + "--extra-experimental-features", + "flakes", + ]); + cmd + } } -pub fn get_flake_store_path(flake_uri: &str) -> Result { - let mut command = nix_command(); - command.args(["build", "--no-link", "--print-out-paths", flake_uri]); - - let output = command.output()?; +impl NixCmdInterface for NixExecutor { + type Error = NixExecutorError; + + fn eval_nix_file(&self, path: &PathBuf, to_json: bool) -> Result { + let path = path.to_str().ok_or(NixExecutorError::NixCmdInterfaceError( + NixCmdInterfaceError::InvalidPath(path.clone()), + ))?; + + let mut command = self.nix_command(); + command.arg("eval"); + command.arg("--file").arg(path); + if to_json { + command.arg("--json"); + } + + let output = command.output().map_err(|e| { + NixExecutorError::NixCmdInterfaceError(NixCmdInterfaceError::NixCommandError( + e.to_string(), + )) + })?; + + if !output.status.success() { + return Err(NixExecutorError::NonzeroStatusError( + String::from_utf8_lossy(&output.stderr).to_string(), + )); + } + + let stdout = String::from_utf8(output.stdout).map_err(|e| { + NixExecutorError::NixCmdInterfaceError(NixCmdInterfaceError::UTF8ConversionError( + e.to_string(), + )) + })?; + + Ok(stdout.trim().to_string()) + } - if !output.status.success() { - return Err(NixError::NixCommandError( - String::from_utf8_lossy(&output.stderr).to_string(), - )); + fn store_path_of_flake(&self, flake_uri: &str) -> Result { + let mut command = self.nix_command(); + command.args(["build", "--no-link", "--print-out-paths", flake_uri]); + + let output = command.output().map_err(|e| { + NixExecutorError::NixCmdInterfaceError(NixCmdInterfaceError::NixCommandError( + e.to_string(), + )) + })?; + + if !output.status.success() { + return Err(NixExecutorError::NonzeroStatusError( + String::from_utf8_lossy(&output.stderr).to_string(), + )); + } + + let stdout = String::from_utf8(output.stdout).map_err(|e| { + NixExecutorError::NixCmdInterfaceError(NixCmdInterfaceError::UTF8ConversionError( + e.to_string(), + )) + })?; + + Ok(PathBuf::from(stdout.trim())) } - let stdout = String::from_utf8(output.stdout)?; - Ok(PathBuf::from(stdout.trim())) -} + fn nixfmt_file(&self, path: &PathBuf) -> Result<(), Self::Error> { + let path = path.to_str().ok_or(NixExecutorError::NixCmdInterfaceError( + NixCmdInterfaceError::InvalidPath(path.clone()), + ))?; -pub fn nixfmt_file(path: &PathBuf) -> Result<()> { - let path = path.to_str().ok_or(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "Invalid path", - ))?; + let output = Command::new("nixfmt").arg(&path).output().map_err(|e| { + NixExecutorError::NixCmdInterfaceError(NixCmdInterfaceError::NixCommandError( + e.to_string(), + )) + })?; - Command::new("nixfmt").args([&path]).output()?; + if !output.status.success() { + return Err(NixExecutorError::NonzeroStatusError( + String::from_utf8_lossy(&output.stderr).to_string(), + )); + } - Ok(()) + Ok(()) + } } #[cfg(test)] @@ -93,130 +156,341 @@ mod tests { use std::io::Write; use tempfile::TempDir; + // Helper function to clean strings for comparison fn clean_string(s: &str) -> String { s.split_whitespace().collect::() } - 
#[test] - #[serial(nix_transaction)] - fn test_valid_nix_file() -> Result<()> { - let temp_dir = TempDir::new()?; - let file_path = temp_dir.path().join("test.nix"); - let mut file = File::create(&file_path)?; - write!( - file, - r#" - {{ - description = "Test description"; - inputs = {{ - test.url = "github:test/repo"; - }}; - }} - "# - )?; - - let result = eval_nix_file(&file_path, true)?; - let expected = - r#"{"description":"Test description","inputs":{"test":{"url":"github:test/repo"}}}"#; - - assert_eq!(clean_string(&result), clean_string(expected)); + mod mock_tests { + use crate::nix::{NixCmdInterface, NixCmdInterfaceError}; + use std::collections::HashMap; + use std::path::{Path, PathBuf}; + + pub struct MockExecutor { + eval_results: HashMap>, + store_paths: HashMap>, + nixfmt_results: HashMap>, + } + + impl MockExecutor { + pub fn new() -> Self { + Self { + eval_results: HashMap::new(), + store_paths: HashMap::new(), + nixfmt_results: HashMap::new(), + } + } - Ok(()) - } + pub fn mock_eval>( + &mut self, + path: P, + result: Result, + ) { + self.eval_results + .insert(path.as_ref().to_path_buf(), result); + } - #[test] - #[serial(nix_transaction)] - fn test_nonexistent_path() { - let invalid_path = PathBuf::from("/nonexistent/path"); - let result = eval_nix_file(&invalid_path, true); - assert!(matches!(result, Err(NixError::NixCommandError(_)))); - } + pub fn mock_store_path( + &mut self, + flake_uri: String, + result: Result, + ) { + self.store_paths.insert(flake_uri, result); + } - #[test] - #[serial(nix_transaction)] - fn test_invalid_path() { - let invalid_path = PathBuf::from(""); - let result = eval_nix_file(&invalid_path, true); - assert!(matches!(result, Err(NixError::NixCommandError(_)))); - } + pub fn mock_nixfmt>( + &mut self, + path: P, + result: Result<(), NixCmdInterfaceError>, + ) { + self.nixfmt_results + .insert(path.as_ref().to_path_buf(), result); + } + } - #[test] - #[serial(nix_transaction)] - fn test_non_json_output() -> Result<()> { - let temp_dir = TempDir::new()?; - let file_path = temp_dir.path().join("test.nix"); - let mut file = File::create(&file_path)?; - write!(file, r#""Hello, World!""#)?; + impl NixCmdInterface for MockExecutor { + type Error = NixCmdInterfaceError; - let result = eval_nix_file(&file_path, false)?; - assert_eq!(clean_string(&result), clean_string("\"Hello, World!\"")); + fn eval_nix_file(&self, path: &PathBuf, _to_json: bool) -> Result { + self.eval_results + .get(path) + .cloned() + .unwrap_or(Err(NixCmdInterfaceError::InvalidPath(path.clone()))) + } - Ok(()) - } + fn store_path_of_flake(&self, flake_uri: &str) -> Result { + self.store_paths.get(flake_uri).cloned().unwrap_or(Err( + NixCmdInterfaceError::NixCommandError(format!( + "Flake URI not mocked: {}", + flake_uri + )), + )) + } - #[test] - #[serial(nix_transaction)] - fn test_complex_nix_file() -> Result<()> { - let temp_dir = TempDir::new()?; - let file_path = temp_dir.path().join("test.nix"); - let mut file = File::create(&file_path)?; - write!( - file, - r#" - {{ - description = "Flake bindings for the `github:cachix/devenv` development environment."; - inputs = {{ - devenv.url = "github:cachix/devenv"; - devenv-root = {{ - url = "file+file:///dev/null"; - flake = false; - }}; - mk-shell-bin.url = "github:rrbutani/nix-mk-shell-bin"; - nix2container = {{ - url = "github:nlewo/nix2container"; - inputs.nixpkgs.follows = "nixpkgs"; - }}; - }}; - conflicts = [ "shells" ]; - extraTrustedPublicKeys = [ "https://devenv.cachix.org" ]; - extraSubstituters = [ 
"devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw=" ]; - }} - "# - )?; - - let result = eval_nix_file(&file_path, true)?; - let expected = r#" - { - "conflicts":["shells"], - "description":"Flake bindings for the `github:cachix/devenv` development environment.", - "extraSubstituters":["devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw="], - "extraTrustedPublicKeys":["https://devenv.cachix.org"], - "inputs":{ - "devenv":{"url":"github:cachix/devenv"}, - "devenv-root":{"flake":false,"url":"file+file:///dev/null"}, - "mk-shell-bin":{"url":"github:rrbutani/nix-mk-shell-bin"}, - "nix2container":{ - "inputs":{"nixpkgs":{"follows":"nixpkgs"}}, - "url":"github:nlewo/nix2container" - } + fn nixfmt_file(&self, path: &PathBuf) -> Result<(), Self::Error> { + self.nixfmt_results + .get(path) + .cloned() + .unwrap_or(Err(NixCmdInterfaceError::InvalidPath(path.clone()))) + } + } + #[cfg(test)] + mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn test_mock_eval_nix_file_valid() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/valid.nix"); + let expected_output = r#"{"description":"Test description","inputs":{"test":{"url":"github:test/repo"}}}"#.to_string(); + + mock.mock_eval(&path, Ok(expected_output.clone())); + + let result = mock.eval_nix_file(&path, true).unwrap(); + assert_eq!(result, expected_output); } - } - "#; - assert_eq!(clean_string(&result), clean_string(expected)); + #[test] + fn test_mock_eval_nix_file_error() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/error.nix"); + let error_message = "Nix evaluation error".to_string(); + + mock.mock_eval( + &path, + Err(NixCmdInterfaceError::NixCommandError(error_message.clone())), + ); + + let result = mock.eval_nix_file(&path, true); + assert!( + matches!(result, Err(NixCmdInterfaceError::NixCommandError(msg)) if msg == error_message) + ); + } - Ok(()) - } + #[test] + fn test_mock_eval_nix_file_utf8_error() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/utf8_error.nix"); + let error_message = "UTF-8 conversion error".to_string(); + + mock.mock_eval( + &path, + Err(NixCmdInterfaceError::UTF8ConversionError( + error_message.clone(), + )), + ); + + let result = mock.eval_nix_file(&path, true); + assert!( + matches!(result, Err(NixCmdInterfaceError::UTF8ConversionError(msg)) if msg == error_message) + ); + } + + #[test] + fn test_mock_eval_nix_file_not_mocked() { + let mock = MockExecutor::new(); + let path = PathBuf::from("/test/not_mocked.nix"); - #[test] - #[serial(nix_transaction)] - fn test_nix_command_error() { - let temp_dir = TempDir::new().unwrap(); - let file_path = temp_dir.path().join("invalid.nix"); - let mut file = File::create(&file_path).unwrap(); - write!(file, "this is not a valid nix expression").unwrap(); + let result = mock.eval_nix_file(&path, true); + assert!(matches!(result, Err(NixCmdInterfaceError::InvalidPath(p)) if p == path)); + } + + #[test] + fn test_mock_eval_nix_file_multiple_calls() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/multiple_calls.nix"); + let expected_output = "Test output".to_string(); + + mock.mock_eval(&path, Ok(expected_output.clone())); + + // First call should succeed + let result1 = mock.eval_nix_file(&path, true).unwrap(); + assert_eq!(result1, expected_output); + + // Second call should also succeed with the same result + let result2 = mock.eval_nix_file(&path, true).unwrap(); + assert_eq!(result2, expected_output); + } - let result = 
eval_nix_file(&file_path, true); - assert!(matches!(result, Err(NixError::NixCommandError(_)))); + #[test] + fn test_mock_eval_nix_file_different_paths() { + let mut mock = MockExecutor::new(); + let path1 = PathBuf::from("/test/path1.nix"); + let path2 = PathBuf::from("/test/path2.nix"); + let output1 = "Output 1".to_string(); + let output2 = "Output 2".to_string(); + + mock.mock_eval(&path1, Ok(output1.clone())); + mock.mock_eval(&path2, Ok(output2.clone())); + + let result1 = mock.eval_nix_file(&path1, true).unwrap(); + let result2 = mock.eval_nix_file(&path2, true).unwrap(); + + assert_eq!(result1, output1); + assert_eq!(result2, output2); + } + + #[test] + fn test_mock_eval_nix_file_to_json_ignored() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/json_ignored.nix"); + let expected_output = r#"{"key": "value"}"#.to_string(); + + mock.mock_eval(&path, Ok(expected_output.clone())); + + // The to_json parameter should be ignored in the mock + let result_true = mock.eval_nix_file(&path, true).unwrap(); + let result_false = mock.eval_nix_file(&path, false).unwrap(); + + assert_eq!(result_true, expected_output); + assert_eq!(result_false, expected_output); + } + } + } + + mod nix_executor_tests { + use super::*; + + #[test] + #[serial(nix_transaction)] + fn test_valid_nix_file() -> Result<()> { + let nix_cmd = NixExecutor::from_env()?; + let temp_dir = TempDir::new()?; + let file_path = temp_dir.path().join("test.nix"); + let mut file = File::create(&file_path)?; + write!( + file, + r#" + {{ + description = "Test description"; + inputs = {{ + test.url = "github:test/repo"; + }}; + }} + "# + )?; + + let result = nix_cmd.eval_nix_file(&file_path, true)?; + let expected = r#"{"description":"Test description","inputs":{"test":{"url":"github:test/repo"}}}"#; + + assert_eq!(clean_string(&result), clean_string(expected)); + + Ok(()) + } + + #[test] + #[serial(nix_transaction)] + fn test_nonexistent_path() -> Result<()> { + let nix_cmd = NixExecutor::from_env()?; + let invalid_path = PathBuf::from("/nonexistent/path"); + let result = nix_cmd.eval_nix_file(&invalid_path, true); + assert!(matches!( + result, + Err(NixExecutorError::NonzeroStatusError(_)) + )); + Ok(()) + } + + #[test] + #[serial(nix_transaction)] + fn test_invalid_path() -> Result<()> { + let nix_cmd = NixExecutor::from_env()?; + let invalid_path = PathBuf::from(""); + let result = nix_cmd.eval_nix_file(&invalid_path, true); + assert!(matches!( + result, + Err(NixExecutorError::NonzeroStatusError(_)) + )); + Ok(()) + } + + #[test] + #[serial(nix_transaction)] + fn test_non_json_output() -> Result<()> { + let nix_cmd = NixExecutor::from_env()?; + let temp_dir = TempDir::new()?; + let file_path = temp_dir.path().join("test.nix"); + let mut file = File::create(&file_path)?; + write!(file, r#""Hello, World!""#)?; + + let result = nix_cmd.eval_nix_file(&file_path, false)?; + assert_eq!(clean_string(&result), clean_string("\"Hello, World!\"")); + + Ok(()) + } + + #[test] + #[serial(nix_transaction)] + fn test_complex_nix_file() -> Result<()> { + let nix_cmd = NixExecutor::from_env()?; + let temp_dir = TempDir::new()?; + let file_path = temp_dir.path().join("test.nix"); + let mut file = File::create(&file_path)?; + write!( + file, + r#" + {{ + description = "Flake bindings for the `github:cachix/devenv` development environment."; + inputs = {{ + devenv.url = "github:cachix/devenv"; + devenv-root = {{ + url = "file+file:///dev/null"; + flake = false; + }}; + mk-shell-bin.url = "github:rrbutani/nix-mk-shell-bin"; + 
nix2container = {{ + url = "github:nlewo/nix2container"; + inputs.nixpkgs.follows = "nixpkgs"; + }}; + }}; + conflicts = [ "shells" ]; + extraTrustedPublicKeys = [ "https://devenv.cachix.org" ]; + extraSubstituters = [ "devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw=" ]; + }} + "# + )?; + + let result = nix_cmd.eval_nix_file(&file_path, true)?; + let expected = r#" + { + "conflicts":["shells"], + "description":"Flake bindings for the `github:cachix/devenv` development environment.", + "extraSubstituters":["devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw="], + "extraTrustedPublicKeys":["https://devenv.cachix.org"], + "inputs":{ + "devenv":{"url":"github:cachix/devenv"}, + "devenv-root":{"flake":false,"url":"file+file:///dev/null"}, + "mk-shell-bin":{"url":"github:rrbutani/nix-mk-shell-bin"}, + "nix2container":{ + "inputs":{"nixpkgs":{"follows":"nixpkgs"}}, + "url":"github:nlewo/nix2container" + } + } + } + "#; + + assert_eq!(clean_string(&result), clean_string(expected)); + + Ok(()) + } + + #[test] + #[serial(nix_transaction)] + fn test_nix_command_error() -> Result<()> { + let nix_cmd = NixExecutor::from_env()?; + let temp_dir = TempDir::new()?; + let file_path = temp_dir.path().join("invalid.nix"); + let mut file = File::create(&file_path)?; + write!(file, "this is not a valid nix expression")?; + + let result = nix_cmd.eval_nix_file(&file_path, true); + assert!(matches!( + result, + Err(NixExecutorError::NonzeroStatusError(_)) + )); + + Ok(()) + } } } diff --git a/src/parts.rs b/src/parts.rs index bfbd9ca..c739712 100644 --- a/src/parts.rs +++ b/src/parts.rs @@ -7,7 +7,7 @@ use std::path::PathBuf; use thiserror::Error; use crate::config::META_FILE; -use crate::nix::{eval_nix_file, get_flake_store_path}; +use crate::nix::NixCmdInterface; #[derive(Debug, Clone)] pub struct FlakePart { @@ -26,12 +26,13 @@ pub struct FlakePartTuple<'a> { } pub fn normalize_flake_string(target: &str, flake: &str, derivation: Option<&str>) -> String { + // TODO return error if empty flake if target.contains('#') { target.to_string() } else if let Some(derivation) = derivation { format!("{}#{}/{}", flake, derivation, target) } else { - format!("{}/{}", flake, target) + format!("{}#default/{}", flake, target) } } @@ -165,14 +166,14 @@ impl FlakePart { } } - pub fn from_path(nix_store_path: PathBuf) -> Result { + pub fn from_path(nix_store_path: PathBuf, nix_cmd: &impl NixCmdInterface) -> Result { let name = nix_store_path .file_name() .ok_or(FlakePartParseError::InvalidPathError())? .to_str() .ok_or(FlakePartParseError::InvalidPathError())?; - let eval_output = eval_nix_file(&nix_store_path.join(META_FILE), true)?; + let eval_output = nix_cmd.eval_nix_file(&nix_store_path.join(META_FILE), true)?; let metadata: FlakePartMetadata = serde_json::from_str(&eval_output) .map_err(|e| FlakePartParseError::MetadataConversionError(e))?; @@ -203,17 +204,112 @@ impl FlakePartsStore { } // TODO handle errors - pub fn from_flake_uri(flake_uri: &str) -> Result { - let nix_store_path = get_flake_store_path(flake_uri)?; + pub fn from_flake_uri(flake_uri: &str, nix_cmd: &impl NixCmdInterface) -> Result { + let nix_store_path = nix_cmd.store_path_of_flake(flake_uri)?; let parts = fs::read_dir(nix_store_path.join("flake-parts"))? .map(|entry| { let entry = entry?; - Ok(FlakePart::from_path(entry.path())?) + Ok(FlakePart::from_path(entry.path(), nix_cmd)?) 
}) .collect::>()?; Ok(Self::new(flake_uri.to_string(), nix_store_path, parts)) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_normalize_flake_string_with_output() { + let result = normalize_flake_string("github:user/repo#output", "unused", None); + assert_eq!(result, "github:user/repo#output"); + } + + #[test] + fn test_normalize_flake_string_with_branch() { + let result = + normalize_flake_string("output", "github:user/repo/branch", Some("derivation")); + assert_eq!(result, "github:user/repo/branch#derivation/output"); + } + + #[test] + fn test_normalize_flake_string_with_derivation() { + let result = normalize_flake_string("target", "github:user/repo", Some("derivation")); + assert_eq!(result, "github:user/repo#derivation/target"); + } + + #[test] + fn test_normalize_flake_string_without_derivation() { + let result = normalize_flake_string("target", "github:user/repo", None); + assert_eq!(result, "github:user/repo#default/target"); + } + + #[test] + fn test_normalize_flake_string_with_empty_target() { + let result = normalize_flake_string("", "github:user/repo", Some("derivation")); + assert_eq!(result, "github:user/repo#derivation/"); + } + + #[test] + fn test_normalize_flake_string_with_empty_target_and_no_derivation() { + let result = normalize_flake_string("", "github:user/repo", None); + assert_eq!(result, "github:user/repo#default/"); + } + + #[test] + fn test_normalize_flake_string_with_empty_flake() { + let result = normalize_flake_string("target", "", Some("derivation")); + assert_eq!(result, "#derivation/target"); + // Note: This might be an error case in the future, based on the TODO comment + } + + #[test] + fn test_normalize_flake_string_with_empty_flake_and_no_derivation() { + let result = normalize_flake_string("target", "", None); + assert_eq!(result, "#default/target"); + // Note: This might be an error case in the future, based on the TODO comment + } + + #[test] + fn test_normalize_flake_string_with_complex_target() { + let result = + normalize_flake_string("path/to/target", "github:user/repo", Some("derivation")); + assert_eq!(result, "github:user/repo#derivation/path/to/target"); + } + + #[test] + fn test_normalize_flake_string_with_branch_and_output() { + let result = normalize_flake_string("output", "github:user/repo/branch", None); + assert_eq!(result, "github:user/repo/branch#default/output"); + } + + #[test] + fn test_normalize_flake_string_with_commit_hash() { + let result = + normalize_flake_string("target", "github:user/repo/a1b2c3d", Some("derivation")); + assert_eq!(result, "github:user/repo/a1b2c3d#derivation/target"); + } + + #[test] + fn test_normalize_flake_string_with_local_path() { + let result = normalize_flake_string("target", "./local/path", Some("derivation")); + assert_eq!(result, "./local/path#derivation/target"); + } + + #[test] + fn test_normalize_flake_string_with_output_in_target() { + let result = + normalize_flake_string("target#output", "github:user/repo", Some("derivation")); + assert_eq!(result, "target#output"); + } + + #[test] + fn test_normalize_flake_string_with_output_in_target_and_no_derivation() { + let result = normalize_flake_string("target#output", "github:user/repo", None); + assert_eq!(result, "target#output"); + } +} diff --git a/src/templates.rs b/src/templates.rs index bb51431..779b901 100644 --- a/src/templates.rs +++ b/src/templates.rs @@ -248,7 +248,6 @@ mod tests { let context = FlakeContext::new(inputs_context, trusted_keys, substituters); let rendered = context.render()?; - println!("{}", 
rendered); let cleaned_rendered = rendered.split_whitespace().collect::(); let expected = r#" From 289d7e4a64f5f63c72cf9f4aa7d42367628a1152 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Thu, 1 Aug 2024 17:28:06 +0200 Subject: [PATCH 20/21] test(nix.rs): add nixmft_file, store_path_of_flake --- src/nix.rs | 330 ++++++++++++++++++++++++++++++++++------------------- 1 file changed, 215 insertions(+), 115 deletions(-) diff --git a/src/nix.rs b/src/nix.rs index 227bb48..e3aa3b0 100644 --- a/src/nix.rs +++ b/src/nix.rs @@ -164,20 +164,26 @@ mod tests { mod mock_tests { use crate::nix::{NixCmdInterface, NixCmdInterfaceError}; use std::collections::HashMap; + use std::fs::File; + use std::io::Write; use std::path::{Path, PathBuf}; + use std::thread::sleep; + use std::time::Duration; + use std::time::SystemTime; + use tempfile::{tempdir, TempDir}; pub struct MockExecutor { eval_results: HashMap>, - store_paths: HashMap>, - nixfmt_results: HashMap>, + mocked_store: TempDir, + store_paths: HashMap, } impl MockExecutor { pub fn new() -> Self { Self { eval_results: HashMap::new(), + mocked_store: tempdir().expect("Failed to create temporary directory"), store_paths: HashMap::new(), - nixfmt_results: HashMap::new(), } } @@ -193,18 +199,15 @@ mod tests { pub fn mock_store_path( &mut self, flake_uri: String, - result: Result, - ) { - self.store_paths.insert(flake_uri, result); - } - - pub fn mock_nixfmt>( - &mut self, - path: P, - result: Result<(), NixCmdInterfaceError>, - ) { - self.nixfmt_results - .insert(path.as_ref().to_path_buf(), result); + ) -> Result { + let mock_path = self + .mocked_store + .path() + .join(format!("mock-{}", flake_uri.replace(':', "-"))); + std::fs::create_dir_all(&mock_path) + .map_err(|e| NixCmdInterfaceError::NixCommandError(e.to_string()))?; + self.store_paths.insert(flake_uri, mock_path.clone()); + Ok(mock_path) } } @@ -219,133 +222,230 @@ mod tests { } fn store_path_of_flake(&self, flake_uri: &str) -> Result { - self.store_paths.get(flake_uri).cloned().unwrap_or(Err( + self.store_paths.get(flake_uri).cloned().ok_or_else(|| { NixCmdInterfaceError::NixCommandError(format!( "Flake URI not mocked: {}", flake_uri - )), - )) + )) + }) } fn nixfmt_file(&self, path: &PathBuf) -> Result<(), Self::Error> { - self.nixfmt_results - .get(path) - .cloned() - .unwrap_or(Err(NixCmdInterfaceError::InvalidPath(path.clone()))) + if path.exists() { + // Touch the file by updating its modification time + File::open(path) + .and_then(|file| file.set_modified(SystemTime::now())) + .map_err(|e| NixCmdInterfaceError::NixCommandError(e.to_string())) + } else { + Err(NixCmdInterfaceError::InvalidPath(path.clone())) + } } } - #[cfg(test)] - mod tests { - use super::*; - use std::path::PathBuf; - #[test] - fn test_mock_eval_nix_file_valid() { - let mut mock = MockExecutor::new(); - let path = PathBuf::from("/test/valid.nix"); - let expected_output = r#"{"description":"Test description","inputs":{"test":{"url":"github:test/repo"}}}"#.to_string(); + #[test] + fn test_mock_eval_nix_file_valid() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/valid.nix"); + let expected_output = r#"{"description":"Test description","inputs":{"test":{"url":"github:test/repo"}}}"#.to_string(); - mock.mock_eval(&path, Ok(expected_output.clone())); + mock.mock_eval(&path, Ok(expected_output.clone())); - let result = mock.eval_nix_file(&path, true).unwrap(); - assert_eq!(result, expected_output); - } + let result = mock.eval_nix_file(&path, true).unwrap(); + assert_eq!(result, expected_output); + } 
- #[test] - fn test_mock_eval_nix_file_error() { - let mut mock = MockExecutor::new(); - let path = PathBuf::from("/test/error.nix"); - let error_message = "Nix evaluation error".to_string(); - - mock.mock_eval( - &path, - Err(NixCmdInterfaceError::NixCommandError(error_message.clone())), - ); - - let result = mock.eval_nix_file(&path, true); - assert!( - matches!(result, Err(NixCmdInterfaceError::NixCommandError(msg)) if msg == error_message) - ); - } + #[test] + fn test_mock_eval_nix_file_error() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/error.nix"); + let error_message = "Nix evaluation error".to_string(); + + mock.mock_eval( + &path, + Err(NixCmdInterfaceError::NixCommandError(error_message.clone())), + ); + + let result = mock.eval_nix_file(&path, true); + assert!( + matches!(result, Err(NixCmdInterfaceError::NixCommandError(msg)) if msg == error_message) + ); + } - #[test] - fn test_mock_eval_nix_file_utf8_error() { - let mut mock = MockExecutor::new(); - let path = PathBuf::from("/test/utf8_error.nix"); - let error_message = "UTF-8 conversion error".to_string(); - - mock.mock_eval( - &path, - Err(NixCmdInterfaceError::UTF8ConversionError( - error_message.clone(), - )), - ); - - let result = mock.eval_nix_file(&path, true); - assert!( - matches!(result, Err(NixCmdInterfaceError::UTF8ConversionError(msg)) if msg == error_message) - ); - } + #[test] + fn test_mock_eval_nix_file_utf8_error() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/utf8_error.nix"); + let error_message = "UTF-8 conversion error".to_string(); + + mock.mock_eval( + &path, + Err(NixCmdInterfaceError::UTF8ConversionError( + error_message.clone(), + )), + ); + + let result = mock.eval_nix_file(&path, true); + assert!( + matches!(result, Err(NixCmdInterfaceError::UTF8ConversionError(msg)) if msg == error_message) + ); + } - #[test] - fn test_mock_eval_nix_file_not_mocked() { - let mock = MockExecutor::new(); - let path = PathBuf::from("/test/not_mocked.nix"); + #[test] + fn test_mock_eval_nix_file_not_mocked() { + let mock = MockExecutor::new(); + let path = PathBuf::from("/test/not_mocked.nix"); - let result = mock.eval_nix_file(&path, true); - assert!(matches!(result, Err(NixCmdInterfaceError::InvalidPath(p)) if p == path)); - } + let result = mock.eval_nix_file(&path, true); + assert!(matches!(result, Err(NixCmdInterfaceError::InvalidPath(p)) if p == path)); + } - #[test] - fn test_mock_eval_nix_file_multiple_calls() { - let mut mock = MockExecutor::new(); - let path = PathBuf::from("/test/multiple_calls.nix"); - let expected_output = "Test output".to_string(); + #[test] + fn test_mock_eval_nix_file_multiple_calls() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/multiple_calls.nix"); + let expected_output = "Test output".to_string(); - mock.mock_eval(&path, Ok(expected_output.clone())); + mock.mock_eval(&path, Ok(expected_output.clone())); - // First call should succeed - let result1 = mock.eval_nix_file(&path, true).unwrap(); - assert_eq!(result1, expected_output); + // First call should succeed + let result1 = mock.eval_nix_file(&path, true).unwrap(); + assert_eq!(result1, expected_output); - // Second call should also succeed with the same result - let result2 = mock.eval_nix_file(&path, true).unwrap(); - assert_eq!(result2, expected_output); - } + // Second call should also succeed with the same result + let result2 = mock.eval_nix_file(&path, true).unwrap(); + assert_eq!(result2, expected_output); + } - #[test] - fn 
test_mock_eval_nix_file_different_paths() { - let mut mock = MockExecutor::new(); - let path1 = PathBuf::from("/test/path1.nix"); - let path2 = PathBuf::from("/test/path2.nix"); - let output1 = "Output 1".to_string(); - let output2 = "Output 2".to_string(); + #[test] + fn test_mock_eval_nix_file_different_paths() { + let mut mock = MockExecutor::new(); + let path1 = PathBuf::from("/test/path1.nix"); + let path2 = PathBuf::from("/test/path2.nix"); + let output1 = "Output 1".to_string(); + let output2 = "Output 2".to_string(); - mock.mock_eval(&path1, Ok(output1.clone())); - mock.mock_eval(&path2, Ok(output2.clone())); + mock.mock_eval(&path1, Ok(output1.clone())); + mock.mock_eval(&path2, Ok(output2.clone())); - let result1 = mock.eval_nix_file(&path1, true).unwrap(); - let result2 = mock.eval_nix_file(&path2, true).unwrap(); + let result1 = mock.eval_nix_file(&path1, true).unwrap(); + let result2 = mock.eval_nix_file(&path2, true).unwrap(); - assert_eq!(result1, output1); - assert_eq!(result2, output2); - } + assert_eq!(result1, output1); + assert_eq!(result2, output2); + } - #[test] - fn test_mock_eval_nix_file_to_json_ignored() { - let mut mock = MockExecutor::new(); - let path = PathBuf::from("/test/json_ignored.nix"); - let expected_output = r#"{"key": "value"}"#.to_string(); + #[test] + fn test_mock_eval_nix_file_to_json_ignored() { + let mut mock = MockExecutor::new(); + let path = PathBuf::from("/test/json_ignored.nix"); + let expected_output = r#"{"key": "value"}"#.to_string(); - mock.mock_eval(&path, Ok(expected_output.clone())); + mock.mock_eval(&path, Ok(expected_output.clone())); - // The to_json parameter should be ignored in the mock - let result_true = mock.eval_nix_file(&path, true).unwrap(); - let result_false = mock.eval_nix_file(&path, false).unwrap(); + // The to_json parameter should be ignored in the mock + let result_true = mock.eval_nix_file(&path, true).unwrap(); + let result_false = mock.eval_nix_file(&path, false).unwrap(); - assert_eq!(result_true, expected_output); - assert_eq!(result_false, expected_output); - } + assert_eq!(result_true, expected_output); + assert_eq!(result_false, expected_output); + } + + #[test] + fn test_mock_store_path_of_flake_valid() { + let mut mock = MockExecutor::new(); + let flake_uri = "github:user/repo"; + let mock_path = mock.mock_store_path(flake_uri.to_string()).unwrap(); + + let result = mock.store_path_of_flake(flake_uri).unwrap(); + assert_eq!(result, mock_path); + assert!(result.exists()); + assert!(result.is_dir()); + } + + #[test] + fn test_mock_store_path_of_flake_not_mocked() { + let mock = MockExecutor::new(); + let flake_uri = "github:user/not-mocked-repo"; + + let result = mock.store_path_of_flake(flake_uri); + assert!(matches!( + result, + Err(NixCmdInterfaceError::NixCommandError(_)) + )); + } + + #[test] + fn test_mock_store_path_of_flake_multiple_flakes() { + let mut mock = MockExecutor::new(); + let flake_uri1 = "github:user/repo1"; + let flake_uri2 = "github:user/repo2"; + let mock_path1 = mock.mock_store_path(flake_uri1.to_string()).unwrap(); + let mock_path2 = mock.mock_store_path(flake_uri2.to_string()).unwrap(); + + let result1 = mock.store_path_of_flake(flake_uri1).unwrap(); + let result2 = mock.store_path_of_flake(flake_uri2).unwrap(); + + assert_eq!(result1, mock_path1); + assert_eq!(result2, mock_path2); + assert_ne!(result1, result2); + } + + #[test] + fn test_mock_store_path_of_flake_overwrite() { + let mut mock = MockExecutor::new(); + let flake_uri = "github:user/repo"; + let mock_path1 = 
mock.mock_store_path(flake_uri.to_string()).unwrap(); + let mock_path2 = mock.mock_store_path(flake_uri.to_string()).unwrap(); + + assert_eq!(mock_path1, mock_path2); + + let result = mock.store_path_of_flake(flake_uri).unwrap(); + assert_eq!(result, mock_path2); + } + + #[test] + fn test_mock_store_path_of_flake_different_uris() { + let mut mock = MockExecutor::new(); + let flake_uri1 = "github:user/repo"; + let flake_uri2 = "gitlab:user/repo"; + let mock_path1 = mock.mock_store_path(flake_uri1.to_string()).unwrap(); + let mock_path2 = mock.mock_store_path(flake_uri2.to_string()).unwrap(); + + let result1 = mock.store_path_of_flake(flake_uri1).unwrap(); + let result2 = mock.store_path_of_flake(flake_uri2).unwrap(); + + assert_ne!(result1, result2); + assert_eq!(result1, mock_path1); + assert_eq!(result2, mock_path2); + } + + #[test] + fn test_mock_nixfmt_file_success() { + let mock = MockExecutor::new(); + let temp_dir = tempdir().unwrap(); + let file_path = temp_dir.path().join("test.nix"); + File::create(&file_path) + .unwrap() + .write_all(b"# Test Nix file") + .unwrap(); + + let original_modified = file_path.metadata().unwrap().modified().unwrap(); + sleep(Duration::from_secs(1)); // Ensure some time passes + + let result = mock.nixfmt_file(&file_path); + assert!(result.is_ok()); + + let new_modified = file_path.metadata().unwrap().modified().unwrap(); + assert!(new_modified > original_modified); + } + + #[test] + fn test_mock_nixfmt_file_not_exist() { + let mock = MockExecutor::new(); + let non_existent_path = PathBuf::from("/path/to/non/existent/file.nix"); + + let result = mock.nixfmt_file(&non_existent_path); + assert!(matches!(result, Err(NixCmdInterfaceError::InvalidPath(_)))); } } From d4fcc4e3002fd552aa1661d77038ca48d86e92c7 Mon Sep 17 00:00:00 2001 From: tsandrini Date: Thu, 1 Aug 2024 19:18:03 +0200 Subject: [PATCH 21/21] test(builder): update tests, add loger, README --- Cargo.lock | 31 ++++++++++++++++++++++++++ Cargo.toml | 2 ++ README.md | 12 ++++++++++ flake.nix | 2 +- src/cmd/add.rs | 24 +++++++++++++++++--- src/cmd/init.rs | 58 ++++++++++++++++++++++++++++++++++++------------- src/cmd/list.rs | 2 ++ src/main.rs | 18 ++++++++++++--- src/nix.rs | 2 +- src/parts.rs | 53 +++++++++++--------------------------------- 10 files changed, 141 insertions(+), 63 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e5ff3bc..e6b1bc2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -199,6 +199,29 @@ version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +[[package]] +name = "env_filter" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "humantime", + "log", +] + [[package]] name = "errno" version = "0.3.9" @@ -232,7 +255,9 @@ dependencies = [ "clap", "color-eyre", "diff", + "env_logger", "fs_extra", + "log", "minijinja", "regex", "serde", @@ -349,6 +374,12 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + [[package]] name = "indenter" version = "0.3.3" diff --git a/Cargo.toml b/Cargo.toml index 797e9ae..5d93883 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,9 @@ license = "MIT" clap = { version = "4.5.7", features = ["cargo", "derive"] } color-eyre = "0.6.3" diff = "0.1.13" +env_logger = "0.11.5" fs_extra = "1.3.0" +log = "0.4.22" minijinja = "2.0.2" regex = "1.10.5" serde = { version = "1.0.203", features = ["derive"] } diff --git a/README.md b/README.md index 9c7b753..f6f5bf7 100644 --- a/README.md +++ b/README.md @@ -274,3 +274,15 @@ you can clearly **"see"** that this isn't a good candidate for a `flakeModule`, they are too specific, they typically represent the end user options of some existing `flakeModule`s. Wrapping this code into another layer of modularity doesn't make sense, since this is meant to be a piece of configuration code. + +### Help! I'm experiencing an XYZ bug! + +I'm sorry for the inconvenience, please run whatever is producing said bug +with these `RUST_LOG=debug RUST_BACKTRACE=full` environment variables, +for example + +``` bash +RUST_LOG=debug RUST_BACKTRACE=full flake-parts-builder add shells ./myProject +``` + +and paste the output into a new bug issue. Thanks! :heart: diff --git a/flake.nix b/flake.nix index c31ce08..76d5b4a 100644 --- a/flake.nix +++ b/flake.nix @@ -132,7 +132,7 @@ export HOME=$TMPDIR/home ''; - cargoSha256 = "sha256-ZuehJ7qF+7jyTHsvQLr7V1xfBhTw10OrlFdPk9CU9XE="; + cargoSha256 = "sha256-vMnU9PVZz61Tp/W9Rz4FPePyXYqzPAKsk9XAFc4rfo8="; postBuild = '' cargo doc --no-deps --release diff --git a/src/cmd/add.rs b/src/cmd/add.rs index 3f7d7f9..7e6603c 100644 --- a/src/cmd/add.rs +++ b/src/cmd/add.rs @@ -29,6 +29,8 @@ pub struct AddCommand { pub fn add(mut cmd: AddCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { if !cmd.init.shared_args.disable_base_parts { + log::info!("Adding base parts store to `cmd.shared_args.parts_stores`"); + cmd.init .shared_args .parts_stores @@ -44,6 +46,14 @@ pub fn add(mut cmd: AddCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { .map(|store| FlakePartsStore::from_flake_uri(&store, &nix_cmd)) .collect::>>()?; + log::debug!( + "All parts stores: {:?}", + stores + .iter() + .map(|store| store.flake_uri.clone()) + .collect::>() + ); + let parts_tuples = parse_required_parts_tuples(&cmd.init, &stores)?; let path = cmd @@ -52,17 +62,22 @@ pub fn add(mut cmd: AddCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { .canonicalize() .unwrap_or_else(|_| cmd.init.path.clone()); - // TODO probably yield an error instead + log::debug!("Full user provided path: {:?}", path); + if !path.exists() { - dir::create_all(&path, false)?; + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + format!("Path {:?} does not exist", path), + ))?; } let tmpdir = tempdir()?; + log::info!("Preparing new additions in a tmpdir at {:?}", tmpdir.path()); prepare_tmpdir( &nix_cmd, &tmpdir, &parts_tuples, - path.to_str(), + path.file_name().map(|osstr| osstr.to_str().unwrap()), &cmd.init.strategy, false, )?; @@ -70,6 +85,7 @@ pub fn add(mut cmd: AddCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { // NOTE the flake.nix file shouldn't be present due to the strucutre of // flake-parts, but I am way tooo paranoid. 
if tmpdir.path().join("flake.nix").exists() { + log::warn!("Unexpected flake.nix file found in tmpdir, removing it."); std::fs::remove_file(tmpdir.path().join("flake.nix"))?; } @@ -78,12 +94,14 @@ pub fn add(mut cmd: AddCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { .map(|part_tuple| &part_tuple.part.metadata) .collect::>(); + log::info!("Rendering `flake-inputs.nix.template` inputs"); let flake_context = FlakeInputsContext::from_merged_metadata(&metadata); let rendered = flake_context.render()?; println!("Please add the following snippet to your `flake.nix` inputs:"); println!("{}", rendered); + log::info!("Addition succesfully prepared in tmpdir, now copying to target directory"); dir::copy( &tmpdir, &cmd.init.path, diff --git a/src/cmd/init.rs b/src/cmd/init.rs index 94600e8..b509346 100644 --- a/src/cmd/init.rs +++ b/src/cmd/init.rs @@ -99,16 +99,6 @@ pub enum PartsTuplesParsingError { UnresolvedDependenciesError(Vec), } -// NOTE -// 1. Load all FlakePartsStores -// 2. Create an iterator over all parts (don't collect them yet) -// 3. Construct a final vec of all parts that should be used -// a. First we parse the CLI parts -// b. Then we iterate over those to add potential dependencies -// c. Unique filter -// d. Combine these two -// 4. We finally can create a vec of all parts that should be used -// 5. Collect! (profit) pub fn parse_required_parts_tuples<'a>( cmd: &InitCommand, stores: &'a Vec, @@ -125,6 +115,8 @@ pub fn parse_required_parts_tuples<'a>( let user_req_flake_strings = cmd.parts.clone(); + log::debug!("User requested parts: {:?}", user_req_flake_strings); + let (resolved_deps, unresolved_deps) = { let start_indices: Vec = all_parts_tuples .iter() @@ -152,6 +144,10 @@ pub fn parse_required_parts_tuples<'a>( .chain(resolved_deps.iter()) .collect::>(); + log::debug!("Resolved dependencies: {:?}", resolved_deps); + log::debug!("Unresolved dependencies: {:?}", unresolved_deps); + log::debug!("All required parts: {:?}", all_req_flake_strings); + let final_parts_tuples = all_parts_tuples .into_iter() .filter(|part_tuple| { @@ -166,17 +162,18 @@ pub fn parse_required_parts_tuples<'a>( FlakePartTuple::find_missing_parts_in(&final_parts_tuples, &user_req_flake_strings); if missing_parts.len() > 0 { + log::error!("Missing parts: {:?}", missing_parts); return Err(PartsTuplesParsingError::MissingPartsError( missing_parts.into_iter().cloned().collect::>(), )); } - // TODO probably print that we are ignoring conflicts if !cmd.ignore_conflicts { // check_for_conflicts(&final_parts_tuples)?; let conflicts = FlakePartTuple::find_conflicting_parts_in(&final_parts_tuples); if conflicts.len() > 0 { + log::error!("Conflicting parts: {:?}", conflicts); return Err(PartsTuplesParsingError::ConflictingPartsError( conflicts .into_iter() @@ -184,6 +181,8 @@ pub fn parse_required_parts_tuples<'a>( .collect::>(), )); } + } else { + log::warn!("Ignoring conflicts"); } Ok(final_parts_tuples) @@ -200,6 +199,10 @@ pub fn prepare_tmpdir( // TODO MERGE STRATEGY let tmp_path = tmpdir.path(); for part_tuple in parts_tuples { + log::debug!( + "Copying the following part into tmpdir: {:?}", + part_tuple.part.name + ); dir::copy( &part_tuple.part.nix_store_path, &tmp_path, @@ -210,13 +213,15 @@ pub fn prepare_tmpdir( )?; } - // TODO fails if no META_FILE is present - // check if meta exists and delete it if yes + log::debug!("Removing meta file from tmpdir"); std::fs::remove_file(tmp_path.join(META_FILE))?; + log::info!("Resetting permissions in tmpdir"); reset_permissions(tmp_path.to_str().unwrap())?; 
if render_flake_nix { + log::info!("Rendering `flake.nix.template` in tmpdir"); + let metadata = parts_tuples .iter() .map(|part_tuple| &part_tuple.part.metadata) @@ -226,12 +231,19 @@ pub fn prepare_tmpdir( let rendered = flake_context.render()?; fs::write(tmp_path.join("flake.nix"), rendered)?; + log::info!("Running nixfmt on flake.nix in tmpdir"); nix_cmd.nixfmt_file(&tmp_path.join("flake.nix"))?; // nixfmt_file(&tmp_path.join("flake.nix"))?; + } else { + log::info!("Skipping rendering of `flake.nix.template`"); } // This becomes None when `.`, `../`,etc... is passed if let Some(name) = target_name { + log::info!( + "Globally replacing NAMEPLACEHOLDER in tmpdir to name: {}", + name + ); regex_in_dir_recursive(tmp_path.to_str().unwrap(), &NAMEPLACEHOLDER, name)?; } @@ -240,15 +252,19 @@ pub fn prepare_tmpdir( pub fn init(mut cmd: InitCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { if !cmd.shared_args.disable_base_parts { + log::info!("Adding base parts store to `cmd.shared_args.parts_stores`"); + cmd.shared_args .parts_stores .push(format!("{}#{}", SELF_FLAKE_URI, BASE_DERIVATION_NAME)); } - // NOTE this one is required even if you disable base store parts + log::info!("Adding bootstrap parts store to `cmd.shared_args.parts_stores`"); cmd.shared_args .parts_stores .push(format!("{}#{}", SELF_FLAKE_URI, BOOTSTRAP_DERIVATION_NAME)); + + log::info!("Adding _bootstrap to required `cmd.parts`"); cmd.parts.push(format!( "{}#{}/_bootstrap", SELF_FLAKE_URI, BOOTSTRAP_DERIVATION_NAME @@ -262,24 +278,36 @@ pub fn init(mut cmd: InitCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { .map(|store| FlakePartsStore::from_flake_uri(&store, &nix_cmd)) .collect::>>()?; + log::debug!( + "All parts stores: {:?}", + stores + .iter() + .map(|store| store.flake_uri.clone()) + .collect::>() + ); + let parts_tuples = parse_required_parts_tuples(&cmd, &stores)?; let path = cmd.path.canonicalize().unwrap_or_else(|_| cmd.path.clone()); + log::debug!("Full user provided path: {:?}", path); if !path.exists() { + log::info!("Provided path doesn't exist, creating it"); dir::create_all(&path, false)?; } let tmpdir = tempdir()?; + log::info!("Preparing new project in a tmpdir at {:?}", tmpdir.path()); prepare_tmpdir( &nix_cmd, &tmpdir, &parts_tuples, - path.to_str(), + path.file_name().map(|osstr| osstr.to_str().unwrap()), &cmd.strategy, true, )?; + log::info!("Project successfully prepared in tmpdir, now copying to target directory"); dir::copy( &tmpdir, &cmd.path, diff --git a/src/cmd/list.rs b/src/cmd/list.rs index e47ec27..d208984 100644 --- a/src/cmd/list.rs +++ b/src/cmd/list.rs @@ -17,12 +17,14 @@ pub struct ListCommand { pub fn list(mut cmd: ListCommand, nix_cmd: impl NixCmdInterface) -> Result<()> { if !cmd.shared_args.disable_base_parts { + log::info!("Adding base parts store to `cmd.shared_args.parts_stores`"); cmd.shared_args .parts_stores .push(format!("{}#{}", SELF_FLAKE_URI, BASE_DERIVATION_NAME)); } // NOTE this one is required even if you disable base store parts + log::info!("Adding bootstrap parts store to `cmd.shared_args.parts_stores`"); cmd.shared_args .parts_stores .push(format!("{}#{}", SELF_FLAKE_URI, BOOTSTRAP_DERIVATION_NAME)); diff --git a/src/main.rs b/src/main.rs index 4524a50..16be38a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -37,13 +37,25 @@ enum Commands { // TODO better docs fn main() -> Result<()> { color_eyre::install()?; + env_logger::init(); + log::debug!("color-eyre installed and logger initialized"); + let cli = Cli::parse(); let nix_cmd = NixExecutor::from_env()?; 
     match cli.command {
-        Commands::List(cmd) => list(cmd, nix_cmd),
-        Commands::Init(cmd) => init(cmd, nix_cmd),
-        Commands::Add(cmd) => add(cmd, nix_cmd),
+        Commands::List(cmd) => {
+            log::info!("Executing list command");
+            list(cmd, nix_cmd)
+        }
+        Commands::Init(cmd) => {
+            log::info!("Executing init command");
+            init(cmd, nix_cmd)
+        }
+        Commands::Add(cmd) => {
+            log::info!("Executing add command");
+            add(cmd, nix_cmd)
+        }
     }
 }
diff --git a/src/nix.rs b/src/nix.rs
index e3aa3b0..b0626ba 100644
--- a/src/nix.rs
+++ b/src/nix.rs
@@ -20,7 +20,7 @@ pub trait NixCmdInterface {
     type Error: From + std::error::Error + Send + Sync + 'static;
 
     fn eval_nix_file(&self, path: &PathBuf, to_json: bool) -> Result<String, Self::Error>;
-    fn store_path_of_flake(&self, flake_uri: &str) -> Result<PathBuf, Self::Error>; // TODO maybe rename
+    fn store_path_of_flake(&self, flake_uri: &str) -> Result<PathBuf, Self::Error>;
     fn nixfmt_file(&self, path: &PathBuf) -> Result<(), Self::Error>;
 }
diff --git a/src/parts.rs b/src/parts.rs
index c739712..5e70e27 100644
--- a/src/parts.rs
+++ b/src/parts.rs
@@ -26,9 +26,10 @@ pub struct FlakePartTuple<'a> {
 }
 
 pub fn normalize_flake_string(target: &str, flake: &str, derivation: Option<&str>) -> String {
-    // TODO return error if empty flake
     if target.contains('#') {
-        target.to_string()
+        target.to_string() // OK
+    } else if flake.contains('#') {
+        format!("{}/{}", flake, target)
     } else if let Some(derivation) = derivation {
         format!("{}#{}/{}", flake, derivation, target)
     } else {
@@ -224,16 +225,15 @@ mod tests {
     use super::*;
 
     #[test]
-    fn test_normalize_flake_string_with_output() {
+    fn test_normalize_flake_string_with_hash_in_target() {
         let result = normalize_flake_string("github:user/repo#output", "unused", None);
         assert_eq!(result, "github:user/repo#output");
     }
 
     #[test]
-    fn test_normalize_flake_string_with_branch() {
-        let result =
-            normalize_flake_string("output", "github:user/repo/branch", Some("derivation"));
-        assert_eq!(result, "github:user/repo/branch#derivation/output");
+    fn test_normalize_flake_string_with_hash_in_flake() {
+        let result = normalize_flake_string("output", "github:user/repo#derivation", None);
+        assert_eq!(result, "github:user/repo#derivation/output");
     }
 
     #[test]
@@ -260,20 +260,6 @@ mod tests {
         assert_eq!(result, "github:user/repo#default/");
     }
 
-    #[test]
-    fn test_normalize_flake_string_with_empty_flake() {
-        let result = normalize_flake_string("target", "", Some("derivation"));
-        assert_eq!(result, "#derivation/target");
-        // Note: This might be an error case in the future, based on the TODO comment
-    }
-
-    #[test]
-    fn test_normalize_flake_string_with_empty_flake_and_no_derivation() {
-        let result = normalize_flake_string("target", "", None);
-        assert_eq!(result, "#default/target");
-        // Note: This might be an error case in the future, based on the TODO comment
-    }
-
     #[test]
     fn test_normalize_flake_string_with_complex_target() {
         let result =
@@ -282,16 +268,10 @@ mod tests {
     }
 
     #[test]
-    fn test_normalize_flake_string_with_branch_and_output() {
-        let result = normalize_flake_string("output", "github:user/repo/branch", None);
-        assert_eq!(result, "github:user/repo/branch#default/output");
-    }
-
-    #[test]
-    fn test_normalize_flake_string_with_commit_hash() {
+    fn test_normalize_flake_string_with_hash_in_flake_and_derivation() {
         let result =
-            normalize_flake_string("target", "github:user/repo/a1b2c3d", Some("derivation"));
-        assert_eq!(result, "github:user/repo/a1b2c3d#derivation/target");
+            normalize_flake_string("output", "github:user/repo#derivation", Some("unused"));
+        assert_eq!(result, "github:user/repo#derivation/output");
"github:user/repo#derivation/output"); } #[test] @@ -301,15 +281,8 @@ mod tests { } #[test] - fn test_normalize_flake_string_with_output_in_target() { - let result = - normalize_flake_string("target#output", "github:user/repo", Some("derivation")); - assert_eq!(result, "target#output"); - } - - #[test] - fn test_normalize_flake_string_with_output_in_target_and_no_derivation() { - let result = normalize_flake_string("target#output", "github:user/repo", None); - assert_eq!(result, "target#output"); + fn test_normalize_flake_string_with_hash_in_flake_and_target() { + let result = normalize_flake_string("output#extra", "github:user/repo#branch", None); + assert_eq!(result, "output#extra"); } }