feat(migrate): setting up automatic test pipeline and progress in the implementation
parent ae2da9b0fe
commit 6ee301c6e1
Cargo.lock: 67 changed lines (generated)

@@ -24,6 +24,15 @@ dependencies = [
  "winapi",
 ]
 
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
 [[package]]
 name = "anyhow"
 version = "1.0.38"
@@ -152,7 +161,7 @@ version = "2.33.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
 dependencies = [
- "ansi_term",
+ "ansi_term 0.11.0",
  "atty",
  "bitflags 1.2.1",
  "strsim",
@@ -252,6 +261,16 @@ dependencies = [
  "lazy_static",
 ]
 
+[[package]]
+name = "ctor"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e98e2ad1a782e33928b96fc3948e7c355e5af34ba4de7670fe8bac2a3b2006d"
+dependencies = [
+ "quote 1.0.9",
+ "syn 1.0.67",
+]
+
 [[package]]
 name = "dbus"
 version = "0.9.1"
@@ -262,6 +281,12 @@ dependencies = [
  "libdbus-sys",
 ]
 
+[[package]]
+name = "diff"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
+
 [[package]]
 name = "difference"
 version = "2.0.0"
@@ -332,7 +357,7 @@ dependencies = [
  "heck",
  "proc-macro2",
  "quote 1.0.9",
- "syn 1.0.60",
+ "syn 1.0.67",
 ]
 
 [[package]]
@@ -489,6 +514,7 @@ dependencies = [
  "lazy_static",
  "log",
  "path-slash",
+ "pretty_assertions",
  "regex",
  "tempdir",
  "tempfile",
@@ -674,7 +700,7 @@ dependencies = [
  "markup5ever",
  "proc-macro2",
  "quote 1.0.9",
- "syn 1.0.60",
+ "syn 1.0.67",
 ]
 
 [[package]]
@@ -698,7 +724,7 @@ dependencies = [
  "proc-macro-hack",
  "proc-macro2",
  "quote 1.0.9",
- "syn 1.0.60",
+ "syn 1.0.67",
 ]
 
 [[package]]
@@ -886,7 +912,7 @@ dependencies = [
  "cfg-if",
  "proc-macro2",
  "quote 1.0.9",
- "syn 1.0.60",
+ "syn 1.0.67",
 ]
 
 [[package]]
@@ -978,6 +1004,15 @@ dependencies = [
  "num-traits",
 ]
 
+[[package]]
+name = "output_vt100"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
+dependencies = [
+ "winapi",
+]
+
 [[package]]
 name = "path-slash"
 version = "0.1.4"
@@ -1075,6 +1110,18 @@ dependencies = [
  "treeline",
 ]
 
+[[package]]
+name = "pretty_assertions"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1cab0e7c02cf376875e9335e0ba1da535775beb5450d21e1dffca068818ed98b"
+dependencies = [
+ "ansi_term 0.12.1",
+ "ctor",
+ "diff",
+ "output_vt100",
+]
+
 [[package]]
 name = "proc-macro-hack"
 version = "0.5.19"
@@ -1335,7 +1382,7 @@ checksum = "9391c295d64fc0abb2c556bad848f33cb8296276b1ad2677d1ae1ace4f258f31"
 dependencies = [
  "proc-macro2",
  "quote 1.0.9",
- "syn 1.0.60",
+ "syn 1.0.67",
 ]
 
 [[package]]
@@ -1438,9 +1485,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "1.0.60"
+version = "1.0.67"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081"
+checksum = "6498a9efc342871f91cc2d0d694c674368b4ceb40f62b65a7a08c3792935e702"
 dependencies = [
  "proc-macro2",
  "quote 1.0.9",
@@ -1509,7 +1556,7 @@ dependencies = [
  "cfg-if",
  "proc-macro2",
  "quote 1.0.9",
- "syn 1.0.60",
+ "syn 1.0.67",
  "version_check",
 ]
 
@@ -1539,7 +1586,7 @@ checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
 dependencies = [
  "proc-macro2",
  "quote 1.0.9",
- "syn 1.0.60",
+ "syn 1.0.67",
 ]
 
 [[package]]

Cargo.toml

@@ -19,5 +19,6 @@ path-slash = "0.1.4"
 [dev-dependencies]
 tempdir = "0.3.7"
 tempfile = "3.2.0"
-include_dir = "0.6.0"
+include_dir = { version = "0.6.0", features = ["search"] }
 test-case = "1.1.0"
+pretty_assertions = "0.7.2"
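
Note on the manifest change above: it is what enables the new fixture-driven tests. In include_dir 0.6 the glob-based Dir::find lookup is only available with the "search" feature, and the test helper introduced further down walks the embedded fixture tree with it; pretty_assertions supplies the diff-style assert_eq used to compare converted and expected configs. A minimal standalone sketch of the find() usage (illustration only, not code from this commit; the fixture path mirrors the test/simple directory added below):

use include_dir::{include_dir, Dir};

// Embed a fixture tree at compile time, as the test module below does.
static FIXTURES: Dir = include_dir!("test/simple");

fn list_fixture_entries() {
  // Dir::find takes a glob pattern and requires features = ["search"];
  // "**/*" matches every file and directory in the embedded tree.
  for entry in FIXTURES.find("**/*").unwrap() {
    println!("{}", entry.path().display());
  }
}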

@@ -39,7 +39,7 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
     let yaml_name = yaml_get_string(yaml, "name");
 
     let should_generate_match = yaml_matches.is_some() || yaml_global_vars.is_some();
-    if should_generate_match {
+    let match_file_path_if_unlisted = if should_generate_match {
       let should_underscore = !input_path.starts_with("default") && yaml_parent != Some("default");
       let match_output_path = calculate_output_match_path(&input_path, should_underscore);
       if match_output_path.is_none() {
@@ -55,7 +55,7 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
         config_names_to_path.insert(name.to_string(), match_output_path.clone());
       }
 
-      let output_yaml = output_files.entry(match_output_path).or_insert(Hash::new());
+      let output_yaml = output_files.entry(match_output_path.clone()).or_insert(Hash::new());
 
       if let Some(global_vars) = yaml_global_vars {
         let output_global_vars = output_yaml
@@ -78,7 +78,15 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
           eprintln!("unable to transform matches for file: {}", input_path);
         }
       }
-    }
+
+      if should_underscore {
+        Some(match_output_path)
+      } else {
+        None
+      }
+    } else {
+      None
+    };
 
     let yaml_filter_class = yaml_get_string(yaml, "filter_class");
     let yaml_filter_title = yaml_get_string(yaml, "filter_title");
@@ -97,11 +105,22 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
     copy_field_if_present(yaml, "filter_title", &mut output_yaml, "filter_title");
     copy_field_if_present(yaml, "filter_class", &mut output_yaml, "filter_class");
     copy_field_if_present(yaml, "filter_exec", &mut output_yaml, "filter_exec");
+    copy_field_if_present(yaml, "enable_active", &mut output_yaml, "enable");
+
+    // TODO: warn if passive mode parameters are used
 
     // TODO: copy other config fields: https://github.com/federico-terzi/espanso/blob/master/src/config/mod.rs#L169
 
-    // TODO: if a match file was created above of type "underscored", then explicitly include it here
-    // depending on whether "exclude_default_entries" is set, use "includes" or "extra_includes"
+    // Link any unlisted match file (the ones starting with the _ underscore, which are excluded by the
+    // default.yml config) explicitly, if present.
+    if let Some(match_file_path) = match_file_path_if_unlisted {
+      let yaml_exclude_default_entries = yaml_get_bool(yaml, "exclude_default_entries").unwrap_or(false);
+      let key_name = if yaml_exclude_default_entries { "includes" } else { "extra_includes" };
+
+      let includes = vec![Yaml::String(format!("../{}", match_file_path))];
+
+      output_yaml.insert(Yaml::String(key_name.to_string()), Yaml::Array(includes));
+    }
 
     output_files.insert(config_output_path, output_yaml);
   }
@@ -114,16 +133,15 @@ pub fn convert(input_files: HashMap<String, Hash>) -> HashMap<String, Hash> {
   // TODO: here resolve parent: name imports
 
   // TODO: remove this prints
-  for (file, content) in output_files {
-    let mut out_str = String::new();
-    {
-      let mut emitter = YamlEmitter::new(&mut out_str);
-      emitter.dump(&Yaml::Hash(content)).unwrap(); // dump the YAML object to a String
-    }
-    println!("\n------- {} ------------\n{}", file, out_str);
-  }
+  // for (file, content) in output_files {
+  //   let mut out_str = String::new();
+  //   {
+  //     let mut emitter = YamlEmitter::new(&mut out_str);
+  //     emitter.dump(&Yaml::Hash(content)).unwrap(); // dump the YAML object to a String
+  //   }
+  //   println!("\n------- {} ------------\n{}", file, out_str);
+  // }
 
-  todo!();
 
   output_files
 }
@@ -188,6 +206,12 @@ fn yaml_get_string<'a>(yaml: &'a Hash, name: &str) -> Option<&'a str> {
     .and_then(|v| v.as_str())
 }
 
+fn yaml_get_bool<'a>(yaml: &'a Hash, name: &str) -> Option<bool> {
+  yaml
+    .get(&Yaml::String(name.to_string()))
+    .and_then(|v| v.as_bool())
+}
+
 fn copy_field_if_present(
   input_yaml: &Hash,
   input_field_name: &str,
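
To make the new includes/extra_includes handling above concrete: when a legacy file produces an unlisted (underscored) match file, the generated config now references it explicitly, one directory up, under extra_includes, or under includes when the legacy config sets exclude_default_entries. A rough standalone sketch of the emitted fragment, using the same yaml-rust types this module already uses; the match/_foo.yml path is only an example:

use yaml_rust::{yaml::Hash, Yaml, YamlEmitter};

fn demo_extra_includes() -> String {
  // The same key/value insertion convert() performs for an unlisted match file.
  let mut output_yaml = Hash::new();
  let includes = vec![Yaml::String("../match/_foo.yml".to_string())];
  output_yaml.insert(
    Yaml::String("extra_includes".to_string()),
    Yaml::Array(includes),
  );

  // Dump to a string, like the (now commented-out) debug print did.
  let mut out_str = String::new();
  YamlEmitter::new(&mut out_str)
    .dump(&Yaml::Hash(output_yaml))
    .unwrap();
  out_str // roughly: "---\nextra_includes:\n  - ../match/_foo.yml"
}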

@@ -66,23 +66,58 @@ mod load;
 
 #[cfg(test)]
 mod tests {
-  use std::path::PathBuf;
+  use std::{fs::create_dir_all, path::{Path}};
 
   use super::*;
   use include_dir::{include_dir, Dir};
+  use tempdir::TempDir;
   use test_case::test_case;
 
+  use pretty_assertions::{assert_eq as assert_peq};
+
+  fn run_with_temp_dir(test_data: &Dir, action: impl FnOnce(&Path, &Path)) {
+    let tmp_dir = TempDir::new("espanso-migration").unwrap();
+    let tmp_path = tmp_dir.path();
+    let legacy_path = tmp_dir.path().join("legacy");
+    let expected_path = tmp_dir.path().join("expected");
+
+    for entry in test_data.find("**/*").unwrap() {
+      let entry_path = entry.path();
+
+      let entry_path_str = entry_path.to_string_lossy().to_string();
+      if entry_path_str.is_empty() {
+        continue;
+      }
+
+      let target = tmp_path.join(entry_path);
+
+      if entry_path.extension().is_none() {
+        create_dir_all(target).unwrap();
+      } else {
+        std::fs::write(target, test_data.get_file(entry_path).unwrap().contents()).unwrap();
+      }
+    }
+
+    action(&legacy_path, &expected_path);
+  }
+
+  static SIMPLE_CASE: Dir = include_dir!("test/simple");
   static BASE_CASE: Dir = include_dir!("test/base");
 
+  #[test_case(&SIMPLE_CASE; "simple case")]
   #[test_case(&BASE_CASE; "base case")]
   fn test_migration(test_data: &Dir) {
-    let input_files = load::load(&PathBuf::from(
-      r"",
-    ))
-    .unwrap();
-    convert::convert(input_files);
+    run_with_temp_dir(test_data, |legacy, expected| {
+      let legacy_files = load::load(legacy).unwrap();
+      let expected_files = load::load(expected).unwrap();
 
-    // TODO
-    assert!(false);
+      let converted_files = convert::convert(legacy_files);
+
+      assert_eq!(converted_files.len(), expected_files.len());
+
+      for (file, content) in converted_files {
+        assert_peq!(&content, expected_files.get(&file).unwrap());
+      }
+    });
   }
 }
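
With the helper above in place, adding another migration scenario is mostly a data task: create a fixture directory with legacy/ and expected/ subtrees, embed it, and register it with a test_case attribute. Sketch only; test/other is a hypothetical fixture directory, not one added by this commit:

use include_dir::{include_dir, Dir};
use test_case::test_case;

// Hypothetical extra fixture; it would need the same legacy/ and expected/
// layout as test/simple and test/base.
static OTHER_CASE: Dir = include_dir!("test/other");

#[test_case(&OTHER_CASE; "other case")]
fn test_migration_other(test_data: &Dir) {
  // Would reuse the body of test_migration above:
  // run_with_temp_dir(test_data, |legacy, expected| { ... });
  let _ = test_data;
}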

@@ -22,16 +22,16 @@ use path_slash::PathExt;
 use std::{collections::HashMap, path::Path};
 use thiserror::Error;
 use walkdir::WalkDir;
-use yaml_rust::{Yaml, YamlLoader, yaml::Hash};
+use yaml_rust::{yaml::Hash, Yaml, YamlLoader};
 
-pub fn load(legacy_config_dir: &Path) -> Result<HashMap<String, Hash>> {
-  if !legacy_config_dir.is_dir() {
+pub fn load(config_dir: &Path) -> Result<HashMap<String, Hash>> {
+  if !config_dir.is_dir() {
     return Err(LoadError::NotDirectory.into());
   }
 
   let mut input_files = HashMap::new();
 
-  for entry in WalkDir::new(legacy_config_dir) {
+  for entry in WalkDir::new(config_dir) {
     match entry {
       Ok(entry) => {
         // Skip directories
@@ -50,7 +50,7 @@ pub fn load(legacy_config_dir: &Path) -> Result<HashMap<String, Hash>> {
           continue;
         }
 
-        match entry.path().strip_prefix(legacy_config_dir) {
+        match entry.path().strip_prefix(config_dir) {
           Ok(relative_path) => {
             let corrected_path = relative_path.to_slash_lossy();
 
@@ -59,30 +59,40 @@ pub fn load(legacy_config_dir: &Path) -> Result<HashMap<String, Hash>> {
             }
 
             match std::fs::read_to_string(entry.path()) {
-              Ok(content) => match YamlLoader::load_from_str(&content) {
-                Ok(mut yaml) => {
-                  if !yaml.is_empty() {
-                    let yaml = yaml.remove(0);
-                    if let Yaml::Hash(hash) = yaml {
-                      input_files.insert(corrected_path, hash);
-                    } else {
-                      eprintln!("yaml file does not have a valid format: {}", entry.path().display());
-                    }
-                  } else {
-                    eprintln!(
-                      "error, found empty document while reading entry: {}",
-                      entry.path().display()
-                    );
-                  }
-                }
-                Err(err) => {
-                  eprintln!(
-                    "experienced error while parsing file: {}, error: {}",
-                    entry.path().display(),
-                    err
-                  );
-                }
-              },
+              Ok(content) => {
+                // Empty files are not valid YAML, but we want to handle them anyway
+                if content.trim().is_empty() {
+                  input_files.insert(corrected_path, Hash::new());
+                } else {
+                  match YamlLoader::load_from_str(&content) {
+                    Ok(mut yaml) => {
+                      if !yaml.is_empty() {
+                        let yaml = yaml.remove(0);
+                        if let Yaml::Hash(hash) = yaml {
+                          input_files.insert(corrected_path, hash);
+                        } else {
+                          eprintln!(
+                            "yaml file does not have a valid format: {}",
+                            entry.path().display()
+                          );
+                        }
+                      } else {
+                        eprintln!(
+                          "error, found empty document while reading entry: {}",
+                          entry.path().display()
+                        );
+                      }
+                    }
+                    Err(err) => {
+                      eprintln!(
+                        "experienced error while parsing file: {}, error: {}",
+                        entry.path().display(),
+                        err
+                      );
+                    }
+                  }
+                }
+              }
              Err(err) => {
                eprintln!(
                  "error while reading entry: {}, error: {}",
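
One behavioural note on the load() rework above: an empty legacy YAML file is no longer reported as an empty document and skipped; it is recorded as an empty mapping, so it still appears in the returned map and can be migrated. A minimal standalone sketch of that decision, using the same yaml-rust types (not code from the commit; load() itself keeps the eprintln-based reporting so it can continue over the remaining files):

use yaml_rust::{yaml::Hash, Yaml, YamlLoader};

// Empty content becomes an empty Hash; non-empty content must parse to a
// YAML mapping to be kept, mirroring the branch added in load().
fn parse_legacy_file(content: &str) -> Option<Hash> {
  if content.trim().is_empty() {
    return Some(Hash::new());
  }
  let mut docs = YamlLoader::load_from_str(content).ok()?;
  if docs.is_empty() {
    return None;
  }
  match docs.remove(0) {
    Yaml::Hash(hash) => Some(hash),
    _ => None,
  }
}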

@@ -1,3 +1,3 @@
 filter_title: "Disabled program"
 
-enabled: false
+enable: false

@@ -1,6 +1,6 @@
 global_vars:
   - name: "name"
-    type: "echo"
+    type: "dummy"
     params:
       echo: "John"
 

espanso-migrate/test/simple/expected/match/base.yml: new file, 6 lines

@@ -0,0 +1,6 @@
+matches:
+  - name: ":hi"
+    trigger: "Hello"
+
+  - name: ":greet"
+    trigger: "Hi {{name}}"

espanso-migrate/test/simple/legacy/default.yml: new file, 6 lines

@@ -0,0 +1,6 @@
+matches:
+  - name: ":hi"
+    trigger: "Hello"
+
+  - name: ":greet"
+    trigger: "Hi {{name}}"