From 4c10a0cde977e1a6e346af5197eeabafdd0f60d3 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Wed, 19 Jun 2024 14:15:14 +0200
Subject: [PATCH] feat: serialize tests to YAML (#935)

---
 .pre-commit-config.yaml                       |   2 +-
 pixi.toml                                     |   2 +-
 rust-tests/src/lib.rs                         |  25 --
 src/build.rs                                  |   2 +-
 src/package_test/run_test.rs                  | 308 ++++++++----------
 src/package_test/serialize_test.rs            |  95 ++----
 src/packaging.rs                              |   9 +-
 src/packaging/metadata.rs                     |   5 +
 src/recipe/parser/script.rs                   |  22 +-
 ...__recipe__parser__test__test__parsing.snap |   9 +-
 ...e__parser__test__test__script_parsing.snap |  40 +++
 src/recipe/parser/test.rs                     |  65 +++-
 ...ecipe__parser__tests__complete_recipe.snap |  48 ++-
 ...recipe__parser__tests__recipe_windows.snap |  25 +-
 ...d__recipe__parser__tests__unix_recipe.snap |  27 +-
 src/script.rs                                 |  10 +-
 ...ild__metadata__test__rich_recipe.yaml.snap |   9 +-
 .../recipes/test-section/script-test.yaml     |  38 +++
 test-data/rendered_recipes/rich_recipe.yaml   |   9 +-
 .../end-to-end/__snapshots__/test_simple.ambr |  16 +
 test/end-to-end/test_simple.py                |  25 ++
 21 files changed, 454 insertions(+), 337 deletions(-)
 create mode 100644 src/recipe/parser/snapshots/rattler_build__recipe__parser__test__test__script_parsing.snap
 create mode 100644 test-data/recipes/test-section/script-test.yaml
 create mode 100644 test/end-to-end/__snapshots__/test_simple.ambr

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index dcc41bde0..5b0b40fb2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,4 +1,4 @@
-exclude: '(\.patch|\.diff|\.snap|test-data/recipes/test-parsing/.+)$'
+exclude: '(\.patch|\.diff|\.snap|\.ambr|test-data/recipes/test-parsing/.+)$'
 
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
diff --git a/pixi.toml b/pixi.toml
index 32acba583..bdcc66464 100644
--- a/pixi.toml
+++ b/pixi.toml
@@ -8,7 +8,7 @@ platforms = ["linux-64", "win-64", "osx-64", "osx-arm64"]
 [tasks]
 build = "cargo build --release"
 install = "cargo install --path . --locked"
-end_to_end_test = "pytest test/end-to-end --snapshot-warn-unused"
+end_to_end_test = "pytest test/end-to-end --snapshot-warn-unused --snapshot-details"
 test = "cargo test"
 lint = "pre-commit run --all"
 generate-cli-docs = "cargo run --bin generate-cli-docs --features generate-cli-docs > docs/reference/cli.md"
diff --git a/rust-tests/src/lib.rs b/rust-tests/src/lib.rs
index 7414613e7..5df8b1e39 100644
--- a/rust-tests/src/lib.rs
+++ b/rust-tests/src/lib.rs
@@ -463,31 +463,6 @@ mod tests {
         assert!(rattler_build.status.code().unwrap() == 1);
     }
 
-    #[test]
-    fn test_noarch_flask() {
-        let tmp = tmp("test_noarch_flask");
-        let rattler_build = rattler().build(recipes().join("flask"), tmp.as_dir(), None, None);
-
-        assert!(rattler_build.status.success());
-
-        let pkg = get_extracted_package(tmp.as_dir(), "flask");
-        // this is to ensure that the clone happens correctly
-        let license = pkg.join("info/licenses/LICENSE.rst");
-        assert!(license.exists());
-
-        assert!(pkg.join("info/tests/1/run_test.sh").exists());
-        assert!(pkg.join("info/tests/1/run_test.bat").exists());
-        assert!(pkg
-            .join("info/tests/1/test_time_dependencies.json")
-            .exists());
-
-        assert!(pkg.join("info/tests/0/python_test.json").exists());
-        // make sure that the entry point does not exist
-        assert!(!pkg.join("python-scripts/flask").exists());
-
-        assert!(pkg.join("info/link.json").exists())
-    }
-
     #[test]
     fn test_files_copy() {
         if cfg!(target_os = "windows") {
diff --git a/src/build.rs b/src/build.rs
index 778eeff4b..37fa5c861 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -149,7 +149,7 @@ pub async fn run_build(
     // We run all the package content tests
     for test in output.recipe.tests() {
         // TODO we could also run each of the (potentially multiple) test scripts and collect the errors
-        if let TestType::PackageContents(package_contents) = test {
+        if let TestType::PackageContents { package_contents } = test {
             package_contents
                 .run_test(&paths_json, &output.build_configuration.target_platform)
                 .into_diagnostic()?;
diff --git a/src/package_test/run_test.rs b/src/package_test/run_test.rs
index 034e8ab11..21280790d 100644
--- a/src/package_test/run_test.rs
+++ b/src/package_test/run_test.rs
@@ -25,13 +25,9 @@ use rattler_solve::{ChannelPriority, SolveStrategy};
 use url::Url;
 
 use crate::env_vars;
-use crate::recipe::parser::{Script, ScriptContent};
+use crate::recipe::parser::{CommandsTest, Script, ScriptContent, TestType};
 use crate::source::copy_dir::CopyDir;
-use crate::{
-    recipe::parser::{CommandsTestRequirements, PythonTest},
-    render::solver::create_environment,
-    tool_configuration,
-};
+use crate::{recipe::parser::PythonTest, render::solver::create_environment, tool_configuration};
 
 #[allow(missing_docs)]
 #[derive(thiserror::Error, Debug)]
@@ -66,6 +62,9 @@ pub enum TestError {
     #[error("failed to setup test environment: {0}")]
     TestEnvironmentActivation(#[from] ActivationError),
 
+    #[error("failed to parse tests from `info/tests/tests.yaml`: {0}")]
+    TestYamlParseError(#[from] serde_yaml::Error),
+
     #[error("failed to parse JSON from test files: {0}")]
     TestJSONParseError(#[from] serde_json::Error),
 
@@ -331,15 +330,24 @@ pub async fn run_test(package_file: &Path, config: &TestConfiguration) -> Result
         );
     }
 
-    if package_folder.join("info/tests").exists() {
-        // These are the new style tests
-        let test_folder = package_folder.join("info/tests");
-        let mut read_dir = tokio::fs::read_dir(&test_folder).await?;
+    if package_folder.join("info/tests/tests.yaml").exists() {
+        let tests = fs::read_to_string(package_folder.join("info/tests/tests.yaml"))?;
+        let tests: Vec<TestType> = serde_yaml::from_str(&tests)?;
 
-        // for each enumerated test, we load and run it
-        while let Some(entry) = read_dir.next_entry().await? {
-            tracing::info!("test {:?}", entry.path());
-            run_individual_test(&pkg, &entry.path(), &prefix, &config).await?
+        for test in tests {
+            match test {
+                TestType::Command(c) => c.run_test(&pkg, &package_folder, &prefix, &config).await?,
+                TestType::Python { python } => {
+                    python
+                        .run_test(&pkg, &package_folder, &prefix, &config)
+                        .await?
+                }
+                TestType::Downstream(_) => {
+                    tracing::warn!("Downstream tests are not yet implemented in rattler-build")
+                }
+                // This test already runs during the build process and we don't need to run it again
+                TestType::PackageContents { .. } => {}
+            }
         }
 
         tracing::info!(
@@ -353,104 +361,134 @@ pub async fn run_test(package_file: &Path, config: &TestConfiguration) -> Result
     Ok(())
 }
 
-async fn run_python_test(
-    pkg: &ArchiveIdentifier,
-    path: &Path,
-    prefix: &Path,
-    config: &TestConfiguration,
-) -> Result<(), TestError> {
-    let test_file = path.join("python_test.json");
-    let test: PythonTest = serde_json::from_reader(fs::File::open(test_file)?)?;
-
-    let match_spec = MatchSpec::from_str(
-        format!("{}={}={}", pkg.name, pkg.version, pkg.build_string).as_str(),
-        ParseStrictness::Lenient,
-    )?;
-    let mut dependencies = vec![match_spec];
-    if test.pip_check {
-        dependencies.push(MatchSpec::from_str("pip", ParseStrictness::Strict).unwrap());
-    }
-
-    create_environment(
-        &dependencies,
-        &Platform::current(),
-        prefix,
-        &config.channels,
-        &config.tool_configuration,
-        config.channel_priority,
-        config.solve_strategy,
-    )
-    .await
-    .map_err(TestError::TestEnvironmentSetup)?;
-
-    let mut imports = String::new();
-    for import in test.imports {
-        writeln!(imports, "import {}", import)?;
-    }
-
-    let script = Script {
-        content: ScriptContent::Command(imports),
-        interpreter: Some("python".into()),
-        ..Script::default()
-    };
+impl PythonTest {
+    /// Execute the Python test
+    pub async fn run_test(
+        &self,
+        pkg: &ArchiveIdentifier,
+        path: &Path,
+        prefix: &Path,
+        config: &TestConfiguration,
+    ) -> Result<(), TestError> {
+        let match_spec = MatchSpec::from_str(
+            format!("{}={}={}", pkg.name, pkg.version, pkg.build_string).as_str(),
+            ParseStrictness::Lenient,
+        )?;
+        let mut dependencies = vec![match_spec];
+        if self.pip_check {
+            dependencies.push(MatchSpec::from_str("pip", ParseStrictness::Strict).unwrap());
+        }
 
-    let tmp_dir = tempfile::tempdir()?;
-    script
-        .run_script(Default::default(), tmp_dir.path(), path, prefix, None)
+        create_environment(
+            &dependencies,
+            &Platform::current(),
+            prefix,
+            &config.channels,
+            &config.tool_configuration,
+            config.channel_priority,
+            config.solve_strategy,
+        )
         .await
-        .map_err(|e| TestError::TestFailed(e.to_string()))?;
+        .map_err(TestError::TestEnvironmentSetup)?;
 
-    tracing::info!(
-        "{} python imports test passed!",
-        console::style(console::Emoji("✔", "")).green()
-    );
+        let mut imports = String::new();
+        for import in &self.imports {
+            writeln!(imports, "import {}", import)?;
+        }
 
-    if test.pip_check {
         let script = Script {
-            content: ScriptContent::Command("pip check".into()),
+            content: ScriptContent::Command(imports),
+            interpreter: Some("python".into()),
             ..Script::default()
         };
+
+        let tmp_dir = tempfile::tempdir()?;
         script
-            .run_script(Default::default(), path, path, prefix, None)
+            .run_script(Default::default(), tmp_dir.path(), path, prefix, None)
             .await
             .map_err(|e| TestError::TestFailed(e.to_string()))?;
 
         tracing::info!(
-            "{} pip check passed!",
+            "{} python imports test passed!",
             console::style(console::Emoji("✔", "")).green()
         );
-    }
 
-    Ok(())
+        if self.pip_check {
+            let script = Script {
+                content: ScriptContent::Command("pip check".into()),
+                ..Script::default()
+            };
+            script
+                .run_script(Default::default(), path, path, prefix, None)
+                .await
+                .map_err(|e| TestError::TestFailed(e.to_string()))?;
+
+            tracing::info!(
+                "{} pip check passed!",
+                console::style(console::Emoji("✔", "")).green()
+            );
+        }
+
+        Ok(())
+    }
 }
 
-async fn run_shell_test(
-    pkg: &ArchiveIdentifier,
-    path: &Path,
-    prefix: &Path,
-    config: &TestConfiguration,
-) -> Result<(), TestError> {
-    let deps = if path.join("test_time_dependencies.json").exists() {
-        let test_dep_json = path.join("test_time_dependencies.json");
-        serde_json::from_str(&fs::read_to_string(test_dep_json)?)?
-    } else {
-        CommandsTestRequirements::default()
-    };
+impl CommandsTest {
+    /// Execute the command test
+    pub async fn run_test(
+        &self,
+        pkg: &ArchiveIdentifier,
+        path: &Path,
+        prefix: &Path,
+        config: &TestConfiguration,
+    ) -> Result<(), TestError> {
+        let deps = self.requirements.clone();
+
+        let build_env = if !deps.build.is_empty() {
+            tracing::info!("Installing build dependencies");
+            let build_prefix = prefix.join("bld");
+            let platform = Platform::current();
+            let build_dependencies = deps
+                .build
+                .iter()
+                .map(|s| MatchSpec::from_str(s, ParseStrictness::Lenient))
+                .collect::<Result<Vec<_>, _>>()?;
+
+            create_environment(
+                &build_dependencies,
+                &platform,
+                &build_prefix,
+                &config.channels,
+                &config.tool_configuration,
+                config.channel_priority,
+                config.solve_strategy,
+            )
+            .await
+            .map_err(TestError::TestEnvironmentSetup)?;
+            Some(build_prefix)
+        } else {
+            None
+        };
 
-    let build_env = if !deps.build.is_empty() {
-        tracing::info!("Installing build dependencies");
-        let build_prefix = prefix.join("bld");
-        let platform = Platform::current();
-        let build_dependencies = deps
-            .build
+        let mut dependencies = deps
+            .run
             .iter()
             .map(|s| MatchSpec::from_str(s, ParseStrictness::Lenient))
             .collect::<Result<Vec<_>, _>>()?;
 
+        // create environment with the test dependencies
+        dependencies.push(MatchSpec::from_str(
+            format!("{}={}={}", pkg.name, pkg.version, pkg.build_string).as_str(),
+            ParseStrictness::Lenient,
+        )?);
+
+        let platform = config.target_platform.unwrap_or_else(Platform::current);
+
+        let run_env = prefix.join("run");
         create_environment(
-            &build_dependencies,
+            &dependencies,
             &platform,
-            &build_prefix,
+            &run_env,
             &config.channels,
             &config.tool_configuration,
             config.channel_priority,
@@ -458,85 +496,27 @@
         )
         .await
         .map_err(TestError::TestEnvironmentSetup)?;
-        Some(build_prefix)
-    } else {
-        None
-    };
-
-    let mut dependencies = deps
-        .run
-        .iter()
-        .map(|s| MatchSpec::from_str(s, ParseStrictness::Lenient))
-        .collect::<Result<Vec<_>, _>>()?;
-
-    // create environment with the test dependencies
-    dependencies.push(MatchSpec::from_str(
-        format!("{}={}={}", pkg.name, pkg.version, pkg.build_string).as_str(),
-        ParseStrictness::Lenient,
-    )?);
-
-    let platform = config.target_platform.unwrap_or_else(Platform::current);
-
-    let run_env = prefix.join("run");
-    create_environment(
-        &dependencies,
-        &platform,
-        &run_env,
-        &config.channels,
-        &config.tool_configuration,
-        config.channel_priority,
-        config.solve_strategy,
-    )
-    .await
-    .map_err(TestError::TestEnvironmentSetup)?;
-
-    let platform = Platform::current();
-    let mut env_vars = env_vars::os_vars(prefix, &platform);
-    env_vars.retain(|key, _| key != ShellEnum::default().path_var(&platform));
-    env_vars.insert("PREFIX".to_string(), run_env.to_string_lossy().to_string());
-
-    let script = Script {
-        content: ScriptContent::Path(PathBuf::from("run_test")),
-        ..Default::default()
-    };
-
-    // copy all test files to a temporary directory and set it as the working directory
-    let tmp_dir = tempfile::tempdir()?;
-    CopyDir::new(path, tmp_dir.path()).run().map_err(|e| {
-        TestError::IoError(std::io::Error::new(
-            std::io::ErrorKind::Other,
-            format!("Failed to copy test files: {}", e),
-        ))
-    })?;
+        let platform = Platform::current();
+        let mut env_vars = env_vars::os_vars(prefix, &platform);
+        env_vars.retain(|key, _| key != ShellEnum::default().path_var(&platform));
+        env_vars.insert("PREFIX".to_string(), run_env.to_string_lossy().to_string());
 
-    tracing::info!("Testing commands:");
-    script
-        .run_script(env_vars, tmp_dir.path(), path, &run_env, build_env.as_ref())
-        .await
-        .map_err(|e| TestError::TestFailed(e.to_string()))?;
+        // copy all test files to a temporary directory and set it as the working directory
+        let tmp_dir = tempfile::tempdir()?;
+        CopyDir::new(path, tmp_dir.path()).run().map_err(|e| {
+            TestError::IoError(std::io::Error::new(
+                std::io::ErrorKind::Other,
+                format!("Failed to copy test files: {}", e),
+            ))
+        })?;
 
-    Ok(())
-}
+        tracing::info!("Testing commands:");
+        self.script
+            .run_script(env_vars, tmp_dir.path(), path, &run_env, build_env.as_ref())
+            .await
+            .map_err(|e| TestError::TestFailed(e.to_string()))?;
 
-async fn run_individual_test(
-    pkg: &ArchiveIdentifier,
-    path: &Path,
-    prefix: &Path,
-    config: &TestConfiguration,
-) -> Result<(), TestError> {
-    if path.join("python_test.json").exists() {
-        run_python_test(pkg, path, prefix, config).await?;
-    } else if path.join("run_test.sh").exists() || path.join("run_test.bat").exists() {
-        // run shell test
-        run_shell_test(pkg, path, prefix, config).await?;
-    } else {
-        // no test found
+        Ok(())
     }
-
-    tracing::info!(
-        "{} test passed!",
-        console::style(console::Emoji("✔", "")).green()
-    );
-
-    Ok(())
-}
 }
diff --git a/src/package_test/serialize_test.rs b/src/package_test/serialize_test.rs
index 2a1b70d2f..b8d710a65 100644
--- a/src/package_test/serialize_test.rs
+++ b/src/package_test/serialize_test.rs
@@ -1,61 +1,23 @@
+use std::path::{Path, PathBuf};
+
 use fs_err as fs;
-use fs_err::File;
-use rattler_conda_types::Platform;
-use std::{
-    io::Write,
-    path::{Path, PathBuf},
-};
 
 use crate::{
     metadata::Output,
     packaging::PackagingError,
-    recipe::parser::{CommandsTest, DownstreamTest, PythonTest, TestType},
+    recipe::parser::{CommandsTest, TestType},
 };
 
-impl DownstreamTest {
-    fn write_to_folder(&self, folder: &Path) -> Result<Vec<PathBuf>, PackagingError> {
-        fs::create_dir_all(folder)?;
-        let path = folder.join("downstream_test.json");
-        let mut file = File::create(&path)?;
-        file.write_all(serde_json::to_string(self)?.as_bytes())?;
-        Ok(vec![path])
-    }
-}
-
 impl CommandsTest {
     fn write_to_folder(
         &self,
         folder: &Path,
         output: &Output,
     ) -> Result<Vec<PathBuf>, PackagingError> {
-        let mut command_files = vec![];
-        let mut test_files = vec![];
-
-        fs::create_dir_all(folder)?;
+        let mut test_files = Vec::new();
 
-        let target_platform = &output.build_configuration.target_platform;
-        if target_platform.is_windows() || target_platform == &Platform::NoArch {
-            command_files.push(folder.join("run_test.bat"));
-        }
-
-        if target_platform.is_unix() || target_platform == &Platform::NoArch {
-            command_files.push(folder.join("run_test.sh"));
-        }
-
-        for cf in command_files {
-            let mut file = File::create(&cf)?;
-            for el in &self.script {
-                writeln!(file, "{}\n", el)?;
-            }
-            test_files.push(cf);
-        }
-
-        if !self.requirements.is_empty() {
-            let test_dependencies = &self.requirements;
-            let test_file = folder.join("test_time_dependencies.json");
-            let mut file = File::create(&test_file)?;
-            file.write_all(serde_json::to_string(&test_dependencies)?.as_bytes())?;
-            test_files.push(test_file);
+        if !self.files.recipe.is_empty() || !self.files.source.is_empty() {
+            fs::create_dir_all(folder)?;
         }
 
         if !self.files.recipe.is_empty() {
@@ -88,15 +50,6 @@ impl CommandsTest {
     }
 }
 
-impl PythonTest {
-    fn write_to_folder(&self, folder: &Path) -> Result<Vec<PathBuf>, PackagingError> {
-        fs::create_dir_all(folder)?;
-        let path = folder.join("python_test.json");
-        serde_json::to_writer(&File::create(&path)?, self)?;
-        Ok(vec![path])
-    }
-}
-
 /// Write out the test files for the final package
 pub(crate) fn write_test_files(
     output: &Output,
@@ -104,16 +57,34 @@ pub(crate) fn write_test_files(
 ) -> Result<Vec<PathBuf>, PackagingError> {
     let mut test_files = Vec::new();
 
-    for (idx, test) in output.recipe.tests().iter().enumerate() {
-        let folder = tmp_dir_path.join(format!("info/tests/{}", idx));
-        let files = match test {
-            TestType::Python(python_test) => python_test.write_to_folder(&folder)?,
-            TestType::Command(command_test) => command_test.write_to_folder(&folder, output)?,
-            TestType::Downstream(downstream_test) => downstream_test.write_to_folder(&folder)?,
-            TestType::PackageContents(_) => Vec::new(),
-        };
-        test_files.extend(files);
+    let name = output.name().as_normalized();
+
+    // extract test section from the original recipe
+    let mut tests = output.recipe.tests().clone();
+
+    // remove the package contents tests as they are not needed in the final package
+    tests.retain(|test| !matches!(test, TestType::PackageContents { .. }));
+
+    // For each `Command` test, we need to copy the test files to the package
+    for (idx, test) in tests.iter_mut().enumerate() {
+        if let TestType::Command(ref mut command_test) = test {
+            let cwd = PathBuf::from(format!("etc/conda/test-files/{name}/{idx}"));
+            let folder = tmp_dir_path.join(&cwd);
+            let files = command_test.write_to_folder(&folder, output)?;
+            if !files.is_empty() {
+                test_files.extend(files);
+                // store the cwd in the rendered test
+                command_test.script.cwd = Some(cwd);
+            }
+        }
     }
 
+    let test_file_dir = tmp_dir_path.join("info/tests");
+    fs::create_dir_all(&test_file_dir)?;
+
+    let test_file = test_file_dir.join("tests.yaml");
+    fs::write(&test_file, serde_yaml::to_string(&tests)?)?;
+    test_files.push(test_file);
+
     Ok(test_files)
 }
diff --git a/src/packaging.rs b/src/packaging.rs
index 42fb51035..6f3d02dfd 100644
--- a/src/packaging.rs
+++ b/src/packaging.rs
@@ -213,8 +213,11 @@ pub fn package_conda(
     let info_folder = tmp.temp_dir.path().join("info");
 
-    tracing::info!("Writing metadata for package");
+    tracing::info!("Writing test files");
+    let test_files = write_test_files(output, tmp.temp_dir.path())?;
+    tmp.add_files(test_files);
+
+    tracing::info!("Writing metadata for package");
     tmp.add_files(output.write_metadata(&tmp)?);
 
     // TODO move things below also to metadata.rs
@@ -229,10 +232,6 @@ pub fn package_conda(
         tmp.add_files(recipe_files);
     }
 
-    tracing::info!("Writing test files");
-    let test_files = write_test_files(output, tmp.temp_dir.path())?;
-    tmp.add_files(test_files);
-
     tracing::info!("Creating entry points");
     // create any entry points or link.json for noarch packages
     if output.recipe.build().noarch().is_python() {
diff --git a/src/packaging/metadata.rs b/src/packaging/metadata.rs
index c716d0460..22e4a27a3 100644
--- a/src/packaging/metadata.rs
+++ b/src/packaging/metadata.rs
@@ -343,6 +343,11 @@ impl Output {
             let relative_path = p.strip_prefix(temp_files.temp_dir.path())?.to_path_buf();
 
+            // skip any info files as they are not part of the paths.json
+            if relative_path.starts_with("info") {
+                continue;
+            }
+
             if !p.exists() {
                 if p.is_symlink() {
                     tracing::warn!(
diff --git a/src/recipe/parser/script.rs b/src/recipe/parser/script.rs
index 5dd6e1f03..4203bc18e 100644
--- a/src/recipe/parser/script.rs
+++ b/src/recipe/parser/script.rs
@@ -10,7 +10,7 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use std::{borrow::Cow, collections::BTreeMap, path::PathBuf};
 
 /// Defines the script to run to build the package.
-#[derive(Debug, Default, Clone)]
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
 pub struct Script {
     /// The interpreter to use for the script.
     pub interpreter: Option<String>,
@@ -22,6 +22,9 @@ pub struct Script {
     pub secrets: Vec<String>,
     /// The contents of the script, either a path or a list of commands.
     pub content: ScriptContent,
+
+    /// The current working directory for the script.
+    pub cwd: Option<PathBuf>,
 }
 
 impl Serialize for Script {
@@ -51,13 +54,18 @@ impl Serialize for Script {
                 secrets: &'a Vec<String>,
                 #[serde(skip_serializing_if = "Option::is_none", flatten)]
                 content: Option<RawScriptContent<'a>>,
+                #[serde(skip_serializing_if = "Option::is_none")]
+                cwd: Option<&'a PathBuf>,
             },
         }
 
        let raw_script = match &self.content {
            ScriptContent::CommandOrPath(content) => RawScript::CommandOrPath(content),
            ScriptContent::Commands(content)
-                if self.interpreter.is_none() && self.env.is_empty() && self.secrets.is_empty() =>
+                if self.interpreter.is_none()
+                    && self.env.is_empty()
+                    && self.secrets.is_empty()
+                    && self.cwd.is_none() =>
            {
                RawScript::Commands(content)
            }
@@ -65,6 +73,7 @@ impl Serialize for Script {
                interpreter: self.interpreter.as_ref(),
                env: &self.env,
                secrets: &self.secrets,
+                cwd: self.cwd.as_ref(),
                content: match &self.content {
                    ScriptContent::Command(content) => Some(RawScriptContent::Command { content }),
                    ScriptContent::Commands(content) => {
@@ -106,7 +115,10 @@ impl<'de> Deserialize<'de> for Script {
                env: BTreeMap<String, String>,
                #[serde(default)]
                secrets: Vec<String>,
+                #[serde(default, flatten)]
                content: Option<RawScriptContent>,
+                #[serde(default)]
+                cwd: Option<String>,
            },
        }
 
@@ -119,10 +131,12 @@ impl<'de> Deserialize<'de> for Script {
                env,
                secrets,
                content,
+                cwd,
            } => Self {
                interpreter,
                env,
                secrets,
+                cwd: cwd.map(PathBuf::from),
                content: match content {
                    Some(RawScriptContent::Command { content }) => ScriptContent::Command(content),
                    Some(RawScriptContent::Commands { content }) => {
@@ -180,6 +194,7 @@ impl From<ScriptContent> for Script {
            env: Default::default(),
            secrets: Default::default(),
            content: value,
+            cwd: None,
        }
    }
 }
@@ -287,12 +302,13 @@ impl TryConvertNode