feat(migrate): set up automatic test pipeline and make progress on the implementation

Federico Terzi 2021-05-27 22:06:39 +02:00
parent ae2da9b0fe
commit 6ee301c6e1
10 changed files with 191 additions and 62 deletions

Cargo.lock (generated)
View File

@@ -24,6 +24,15 @@ dependencies = [
"winapi",
]
[[package]]
name = "ansi_term"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
dependencies = [
"winapi",
]
[[package]]
name = "anyhow"
version = "1.0.38"
@@ -152,7 +161,7 @@ version = "2.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
dependencies = [
"ansi_term",
"ansi_term 0.11.0",
"atty",
"bitflags 1.2.1",
"strsim",
@@ -252,6 +261,16 @@ dependencies = [
"lazy_static",
]
[[package]]
name = "ctor"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e98e2ad1a782e33928b96fc3948e7c355e5af34ba4de7670fe8bac2a3b2006d"
dependencies = [
"quote 1.0.9",
"syn 1.0.67",
]
[[package]]
name = "dbus"
version = "0.9.1"
@@ -262,6 +281,12 @@ dependencies = [
"libdbus-sys",
]
[[package]]
name = "diff"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
[[package]]
name = "difference"
version = "2.0.0"
@@ -332,7 +357,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote 1.0.9",
"syn 1.0.60",
"syn 1.0.67",
]
[[package]]
@@ -489,6 +514,7 @@ dependencies = [
"lazy_static",
"log",
"path-slash",
"pretty_assertions",
"regex",
"tempdir",
"tempfile",
@@ -674,7 +700,7 @@ dependencies = [
"markup5ever",
"proc-macro2",
"quote 1.0.9",
"syn 1.0.60",
"syn 1.0.67",
]
[[package]]
@@ -698,7 +724,7 @@ dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote 1.0.9",
"syn 1.0.60",
"syn 1.0.67",
]
[[package]]
@@ -886,7 +912,7 @@ dependencies = [
"cfg-if",
"proc-macro2",
"quote 1.0.9",
"syn 1.0.60",
"syn 1.0.67",
]
[[package]]
@@ -978,6 +1004,15 @@ dependencies = [
"num-traits",
]
[[package]]
name = "output_vt100"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
dependencies = [
"winapi",
]
[[package]]
name = "path-slash"
version = "0.1.4"
@@ -1075,6 +1110,18 @@ dependencies = [
"treeline",
]
[[package]]
name = "pretty_assertions"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cab0e7c02cf376875e9335e0ba1da535775beb5450d21e1dffca068818ed98b"
dependencies = [
"ansi_term 0.12.1",
"ctor",
"diff",
"output_vt100",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.19"
@@ -1335,7 +1382,7 @@ checksum = "9391c295d64fc0abb2c556bad848f33cb8296276b1ad2677d1ae1ace4f258f31"
dependencies = [
"proc-macro2",
"quote 1.0.9",
"syn 1.0.60",
"syn 1.0.67",
]
[[package]]
@@ -1438,9 +1485,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.60"
version = "1.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081"
checksum = "6498a9efc342871f91cc2d0d694c674368b4ceb40f62b65a7a08c3792935e702"
dependencies = [
"proc-macro2",
"quote 1.0.9",
@@ -1509,7 +1556,7 @@ dependencies = [
"cfg-if",
"proc-macro2",
"quote 1.0.9",
"syn 1.0.60",
"syn 1.0.67",
"version_check",
]
@@ -1539,7 +1586,7 @@ checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
dependencies = [
"proc-macro2",
"quote 1.0.9",
"syn 1.0.60",
"syn 1.0.67",
]
[[package]]

View File

@@ -19,5 +19,6 @@ path-slash = "0.1.4"
[dev-dependencies]
tempdir = "0.3.7"
tempfile = "3.2.0"
include_dir = "0.6.0"
test-case = "1.1.0"
include_dir = { version = "0.6.0", features = ["search"] }
test-case = "1.1.0"
pretty_assertions = "0.7.2"

View File

@@ -39,7 +39,7 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
let yaml_name = yaml_get_string(yaml, "name");
let should_generate_match = yaml_matches.is_some() || yaml_global_vars.is_some();
if should_generate_match {
let match_file_path_if_unlisted = if should_generate_match {
let should_underscore = !input_path.starts_with("default") && yaml_parent != Some("default");
let match_output_path = calculate_output_match_path(&input_path, should_underscore);
if match_output_path.is_none() {
@@ -55,7 +55,7 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
config_names_to_path.insert(name.to_string(), match_output_path.clone());
}
let output_yaml = output_files.entry(match_output_path).or_insert(Hash::new());
let output_yaml = output_files.entry(match_output_path.clone()).or_insert(Hash::new());
if let Some(global_vars) = yaml_global_vars {
let output_global_vars = output_yaml
@@ -78,7 +78,15 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
eprintln!("unable to transform matches for file: {}", input_path);
}
}
}
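// Underscore-prefixed match files are excluded from the default includes,
// so return their path for explicit linking in the config below.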
if should_underscore {
Some(match_output_path)
} else {
None
}
} else {
None
};
let yaml_filter_class = yaml_get_string(yaml, "filter_class");
let yaml_filter_title = yaml_get_string(yaml, "filter_title");
@@ -97,11 +105,22 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
copy_field_if_present(yaml, "filter_title", &mut output_yaml, "filter_title");
copy_field_if_present(yaml, "filter_class", &mut output_yaml, "filter_class");
copy_field_if_present(yaml, "filter_exec", &mut output_yaml, "filter_exec");
copy_field_if_present(yaml, "enable_active", &mut output_yaml, "enable");
// TODO: warn if passive mode parameters are used
// TODO: copy other config fields: https://github.com/federico-terzi/espanso/blob/master/src/config/mod.rs#L169
// TODO: if a match file was created above of type "underscored", then explicitly include it here
// depending on whether "exclude_default_entries" is set, use "includes" or "extra_includes"
// Explicitly link any unlisted match file (the ones starting with an underscore, which are
// excluded by the default.yml config), if present.
if let Some(match_file_path) = match_file_path_if_unlisted {
let yaml_exclude_default_entries = yaml_get_bool(yaml, "exclude_default_entries").unwrap_or(false);
let key_name = if yaml_exclude_default_entries { "includes" } else { "extra_includes" };
let includes = vec![Yaml::String(format!("../{}", match_file_path))];
output_yaml.insert(Yaml::String(key_name.to_string()), Yaml::Array(includes));
}
output_files.insert(config_output_path, output_yaml);
}
@@ -114,16 +133,15 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
// TODO: resolve "parent: <name>" imports here
// TODO: remove these prints
for (file, content) in output_files {
let mut out_str = String::new();
{
let mut emitter = YamlEmitter::new(&mut out_str);
emitter.dump(&Yaml::Hash(content)).unwrap(); // dump the YAML object to a String
}
println!("\n------- {} ------------\n{}", file, out_str);
}
// for (file, content) in output_files {
// let mut out_str = String::new();
// {
// let mut emitter = YamlEmitter::new(&mut out_str);
// emitter.dump(&Yaml::Hash(content)).unwrap(); // dump the YAML object to a String
// }
// println!("\n------- {} ------------\n{}", file, out_str);
// }
todo!();
output_files
}
@@ -188,6 +206,12 @@ fn yaml_get_string<'a>(yaml: &'a Hash, name: &str) -> Option<&'a str> {
.and_then(|v| v.as_str())
}
fn yaml_get_bool(yaml: &Hash, name: &str) -> Option<bool> {
yaml
.get(&Yaml::String(name.to_string()))
.and_then(|v| v.as_bool())
}
fn copy_field_if_present(
input_yaml: &Hash,
input_field_name: &str,

View File

@@ -66,23 +66,58 @@ mod load;
#[cfg(test)]
mod tests {
use std::path::PathBuf;
use std::{fs::create_dir_all, path::Path};
use super::*;
use include_dir::{include_dir, Dir};
use tempdir::TempDir;
use test_case::test_case;
use pretty_assertions::assert_eq as assert_peq;
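// Materializes the embedded test fixture into a temporary directory, then invokes
// `action` with the paths of its "legacy" and "expected" sub-directories.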
fn run_with_temp_dir(test_data: &Dir, action: impl FnOnce(&Path, &Path)) {
let tmp_dir = TempDir::new("espanso-migration").unwrap();
let tmp_path = tmp_dir.path();
let legacy_path = tmp_dir.path().join("legacy");
let expected_path = tmp_dir.path().join("expected");
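// Recreate the embedded fixture tree inside the temporary directory.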
for entry in test_data.find("**/*").unwrap() {
let entry_path = entry.path();
let entry_path_str = entry_path.to_string_lossy().to_string();
if entry_path_str.is_empty() {
continue;
}
let target = tmp_path.join(entry_path);
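// Heuristic: entries without a file extension are treated as directories, everything else as a file.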
if entry_path.extension().is_none() {
create_dir_all(target).unwrap();
} else {
std::fs::write(target, test_data.get_file(entry_path).unwrap().contents()).unwrap();
}
}
action(&legacy_path, &expected_path);
}
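// Test fixtures are embedded at compile time; the "search" feature enabled in Cargo.toml
// provides the glob-based find() used above.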
static SIMPLE_CASE: Dir = include_dir!("test/simple");
static BASE_CASE: Dir = include_dir!("test/base");
#[test_case(&SIMPLE_CASE; "simple case")]
#[test_case(&BASE_CASE; "base case")]
fn test_migration(test_data: &Dir) {
let input_files = load::load(&PathBuf::from(
r"",
))
.unwrap();
convert::convert(input_files);
run_with_temp_dir(test_data, |legacy, expected| {
let legacy_files = load::load(legacy).unwrap();
let expected_files = load::load(expected).unwrap();
// TODO
assert!(false);
let converted_files = convert::convert(legacy_files);
assert_eq!(converted_files.len(), expected_files.len());
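// Compare each converted file against the expected output, using pretty_assertions for readable diffs.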
for (file, content) in converted_files {
assert_peq!(&content, expected_files.get(&file).unwrap());
}
});
}
}

View File

@@ -22,16 +22,16 @@ use path_slash::PathExt;
use std::{collections::HashMap, path::Path};
use thiserror::Error;
use walkdir::WalkDir;
use yaml_rust::{Yaml, YamlLoader, yaml::Hash};
use yaml_rust::{yaml::Hash, Yaml, YamlLoader};
pub fn load(legacy_config_dir: &Path) -> Result<HashMap<String, Hash>> {
if !legacy_config_dir.is_dir() {
pub fn load(config_dir: &Path) -> Result<HashMap<String, Hash>> {
if !config_dir.is_dir() {
return Err(LoadError::NotDirectory.into());
}
let mut input_files = HashMap::new();
for entry in WalkDir::new(legacy_config_dir) {
for entry in WalkDir::new(config_dir) {
match entry {
Ok(entry) => {
// Skip directories
@@ -50,7 +50,7 @@ pub fn load(legacy_config_dir: &Path) -> Result<HashMap<String, Hash>> {
continue;
}
match entry.path().strip_prefix(legacy_config_dir) {
match entry.path().strip_prefix(config_dir) {
Ok(relative_path) => {
let corrected_path = relative_path.to_slash_lossy();
@@ -59,30 +59,40 @@ pub fn load(legacy_config_dir: &Path) -> Result<HashMap<String, Hash>> {
}
match std::fs::read_to_string(entry.path()) {
Ok(content) => match YamlLoader::load_from_str(&content) {
Ok(mut yaml) => {
if !yaml.is_empty() {
let yaml = yaml.remove(0);
if let Yaml::Hash(hash) = yaml {
input_files.insert(corrected_path, hash);
} else {
eprintln!("yaml file does not have a valid format: {}", entry.path().display());
Ok(content) => {
// Empty files don't parse into a YAML document, but we still want to handle them
if content.trim().is_empty() {
input_files.insert(corrected_path, Hash::new());
} else {
match YamlLoader::load_from_str(&content) {
Ok(mut yaml) => {
if !yaml.is_empty() {
let yaml = yaml.remove(0);
if let Yaml::Hash(hash) = yaml {
input_files.insert(corrected_path, hash);
} else {
eprintln!(
"yaml file does not have a valid format: {}",
entry.path().display()
);
}
} else {
eprintln!(
"error, found empty document while reading entry: {}",
entry.path().display()
);
}
}
Err(err) => {
eprintln!(
"experienced error while parsing file: {}, error: {}",
entry.path().display(),
err
);
}
} else {
eprintln!(
"error, found empty document while reading entry: {}",
entry.path().display()
);
}
}
Err(err) => {
eprintln!(
"experienced error while parsing file: {}, error: {}",
entry.path().display(),
err
);
}
},
}
Err(err) => {
eprintln!(
"error while reading entry: {}, error: {}",

View File

@@ -1,3 +1,3 @@
filter_title: "Disabled program"
enabled: false
enable: false

View File

@@ -1,6 +1,6 @@
global_vars:
  - name: "name"
    type: "echo"
    type: "dummy"
    params:
      echo: "John"

View File

@@ -0,0 +1,6 @@
matches:
  - name: ":hi"
    trigger: "Hello"
  - name: ":greet"
    trigger: "Hi {{name}}"

View File

@@ -0,0 +1,6 @@
matches:
  - name: ":hi"
    trigger: "Hello"
  - name: ":greet"
    trigger: "Hi {{name}}"