feat(config): refactor config handler to also return warnings and errors

This commit is contained in:
Federico Terzi 2021-07-18 12:10:56 +02:00
parent c0d08bf1bd
commit aec2425b0b
12 changed files with 682 additions and 287 deletions

View File

@ -30,6 +30,8 @@ pub(crate) mod store;
#[cfg(test)] #[cfg(test)]
use mockall::{automock, predicate::*}; use mockall::{automock, predicate::*};
use crate::error::NonFatalErrorSet;
#[cfg_attr(test, automock)] #[cfg_attr(test, automock)]
pub trait Config: Send { pub trait Config: Send {
fn id(&self) -> i32; fn id(&self) -> i32;
@ -141,7 +143,7 @@ pub enum ToggleKey {
LeftMeta, LeftMeta,
} }
pub fn load_store(config_dir: &Path) -> Result<impl ConfigStore> { pub fn load_store(config_dir: &Path) -> Result<(impl ConfigStore, Vec<NonFatalErrorSet>)> {
store::DefaultConfigStore::load(config_dir) store::DefaultConfigStore::load(config_dir)
} }

View File

@ -17,8 +17,10 @@
* along with espanso. If not, see <https://www.gnu.org/licenses/>. * along with espanso. If not, see <https://www.gnu.org/licenses/>.
*/ */
use crate::error::NonFatalErrorSet;
use super::{resolve::ResolvedConfig, Config, ConfigStore, ConfigStoreError}; use super::{resolve::ResolvedConfig, Config, ConfigStore, ConfigStoreError};
use anyhow::Result; use anyhow::{Context, Result};
use log::{debug, error}; use log::{debug, error};
use std::{collections::HashSet, path::Path}; use std::{collections::HashSet, path::Path};
@ -67,8 +69,7 @@ impl ConfigStore for DefaultConfigStore {
} }
impl DefaultConfigStore { impl DefaultConfigStore {
// TODO: test pub fn load(config_dir: &Path) -> Result<(Self, Vec<NonFatalErrorSet>)> {
pub fn load(config_dir: &Path) -> Result<Self> {
if !config_dir.is_dir() { if !config_dir.is_dir() {
return Err(ConfigStoreError::InvalidConfigDir().into()); return Err(ConfigStoreError::InvalidConfigDir().into());
} }
@ -78,7 +79,11 @@ impl DefaultConfigStore {
if !default_file.exists() || !default_file.is_file() { if !default_file.exists() || !default_file.is_file() {
return Err(ConfigStoreError::MissingDefault().into()); return Err(ConfigStoreError::MissingDefault().into());
} }
let default = ResolvedConfig::load(&default_file, None)?;
let mut non_fatal_errors = Vec::new();
let default =
ResolvedConfig::load(&default_file, None).context("failed to load default configuration")?;
debug!("loaded default config at path: {:?}", default_file); debug!("loaded default config at path: {:?}", default_file);
// Then the others // Then the others
@ -107,15 +112,19 @@ impl DefaultConfigStore {
"unable to load config at path: {:?}, with error: {}", "unable to load config at path: {:?}, with error: {}",
config_file, err config_file, err
); );
non_fatal_errors.push(NonFatalErrorSet::single_error(&config_file, err));
} }
} }
} }
} }
Ok(Self { Ok((
Self {
default: Box::new(default), default: Box::new(default),
customs, customs,
}) },
non_fatal_errors,
))
} }
pub fn from_configs(default: Box<dyn Config>, customs: Vec<Box<dyn Config>>) -> Result<Self> { pub fn from_configs(default: Box<dyn Config>, customs: Vec<Box<dyn Config>>) -> Result<Self> {

View File

@ -0,0 +1,71 @@
/*
* This file is part of espanso.
*
* Copyright (C) 2019-2021 Federico Terzi
*
* espanso is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* espanso is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with espanso. If not, see <https://www.gnu.org/licenses/>.
*/
use std::path::{Path, PathBuf};
use anyhow::Error;
/// A set of non-fatal errors (and warnings) collected while loading a single
/// configuration or match file. "Non-fatal" means loading continued despite
/// these problems, but they should be reported to the user.
#[derive(Debug)]
pub struct NonFatalErrorSet {
// Path of the file these errors refer to.
pub file: PathBuf,
// The individual error/warning records gathered for this file.
pub errors: Vec<ErrorRecord>,
}
impl NonFatalErrorSet {
pub fn new(file: &Path, non_fatal_errors: Vec<ErrorRecord>) -> Self {
Self {
file: file.to_owned(),
errors: non_fatal_errors,
}
}
pub fn single_error(file: &Path, error: Error) -> Self {
Self {
file: file.to_owned(),
errors: vec![ErrorRecord::error(error)],
}
}
}
/// A single reported problem, tagged with its severity level.
#[derive(Debug)]
pub struct ErrorRecord {
// Severity of this record (hard error vs. warning).
pub level: ErrorLevel,
// The underlying error value (an `anyhow::Error`).
pub error: Error,
}
impl ErrorRecord {
pub fn error(error: Error) -> Self {
Self {
level: ErrorLevel::Error,
error,
}
}
pub fn warn(error: Error) -> Self {
Self {
level: ErrorLevel::Warning,
error,
}
}
}
/// Severity attached to an `ErrorRecord`: errors usually prevent part of a
/// file from being loaded, warnings do not.
#[derive(Debug)]
pub enum ErrorLevel {
Error,
Warning,
}

View File

@ -18,14 +18,12 @@
*/ */
use anyhow::Result; use anyhow::Result;
use log::warn;
use regex::Regex; use regex::Regex;
use std::{collections::HashMap, path::Path}; use std::{collections::HashMap, path::Path};
use self::config::LegacyConfig; use self::config::LegacyConfig;
use crate::matches::{ use crate::matches::{MatchEffect, group::loader::yaml::{parse::{YAMLMatch, YAMLVariable}, try_convert_into_match, try_convert_into_variable}};
group::loader::yaml::parse::{YAMLMatch, YAMLVariable},
MatchEffect,
};
use crate::{config::store::DefaultConfigStore, counter::StructId}; use crate::{config::store::DefaultConfigStore, counter::StructId};
use crate::{ use crate::{
config::Config, config::Config,
@ -85,7 +83,10 @@ fn split_config(config: LegacyConfig) -> (LegacyInteropConfig, LegacyMatchGroup)
.iter() .iter()
.filter_map(|var| { .filter_map(|var| {
let var: YAMLVariable = serde_yaml::from_value(var.clone()).ok()?; let var: YAMLVariable = serde_yaml::from_value(var.clone()).ok()?;
let var: Variable = var.try_into().ok()?; let (var, warnings) = try_convert_into_variable(var).ok()?;
warnings.into_iter().for_each(|warning| {
warn!("{}", warning);
});
Some(var) Some(var)
}) })
.collect(); .collect();
@ -95,7 +96,10 @@ fn split_config(config: LegacyConfig) -> (LegacyInteropConfig, LegacyMatchGroup)
.iter() .iter()
.filter_map(|var| { .filter_map(|var| {
let m: YAMLMatch = serde_yaml::from_value(var.clone()).ok()?; let m: YAMLMatch = serde_yaml::from_value(var.clone()).ok()?;
let m: Match = m.try_into().ok()?; let (m, warnings) = try_convert_into_match(m).ok()?;
warnings.into_iter().for_each(|warning| {
warn!("{}", warning);
});
Some(m) Some(m)
}) })
.collect(); .collect();
@ -376,6 +380,10 @@ impl MatchStore for LegacyMatchStore {
} }
} }
} }
fn loaded_paths(&self) -> Vec<String> {
self.groups.keys().map(|key| key.clone()).collect()
}
} }
#[cfg(test)] #[cfg(test)]

View File

@ -28,22 +28,27 @@ extern crate lazy_static;
pub mod config; pub mod config;
mod counter; mod counter;
pub mod error;
mod legacy; mod legacy;
pub mod matches; pub mod matches;
mod util; mod util;
pub fn load(base_path: &Path) -> Result<(Box<dyn ConfigStore>, Box<dyn MatchStore>)> { pub fn load(base_path: &Path) -> Result<(Box<dyn ConfigStore>, Box<dyn MatchStore>, Vec<error::NonFatalErrorSet>)> {
let config_dir = base_path.join("config"); let config_dir = base_path.join("config");
if !config_dir.exists() || !config_dir.is_dir() { if !config_dir.exists() || !config_dir.is_dir() {
return Err(ConfigError::MissingConfigDir().into()); return Err(ConfigError::MissingConfigDir().into());
} }
let config_store = config::load_store(&config_dir)?; let (config_store, non_fatal_config_errors) = config::load_store(&config_dir)?;
let root_paths = config_store.get_all_match_paths(); let root_paths = config_store.get_all_match_paths();
let match_store = matches::store::new(&root_paths.into_iter().collect::<Vec<String>>()); let (match_store, non_fatal_match_errors) = matches::store::load(&root_paths.into_iter().collect::<Vec<String>>());
Ok((Box::new(config_store), Box::new(match_store))) let mut non_fatal_errors = Vec::new();
non_fatal_errors.extend(non_fatal_config_errors.into_iter());
non_fatal_errors.extend(non_fatal_match_errors.into_iter());
Ok((Box::new(config_store), Box::new(match_store), non_fatal_errors))
} }
pub fn load_legacy(config_dir: &Path, package_dir: &Path) -> Result<(Box<dyn ConfigStore>, Box<dyn MatchStore>)> { pub fn load_legacy(config_dir: &Path, package_dir: &Path) -> Result<(Box<dyn ConfigStore>, Box<dyn MatchStore>)> {
@ -120,8 +125,9 @@ mod tests {
) )
.unwrap(); .unwrap();
let (config_store, match_store) = load(&base).unwrap(); let (config_store, match_store, errors) = load(&base).unwrap();
assert_eq!(errors.len(), 0);
assert_eq!(config_store.default().match_paths().len(), 2); assert_eq!(config_store.default().match_paths().len(), 2);
assert_eq!( assert_eq!(
config_store config_store
@ -160,6 +166,130 @@ mod tests {
}); });
} }
#[test]
// Verifies that recoverable problems (YAML syntax errors in non-default
// config files and in match files) are surfaced as non-fatal error entries
// instead of aborting the whole load.
fn load_non_fatal_errors() {
use_test_directory(|base, match_dir, config_dir| {
// Match file with a YAML syntax error (stray token after the quoted scalar):
// should be skipped and reported.
let base_file = match_dir.join("base.yml");
std::fs::write(
&base_file,
r#"
matches:
- "invalid"invalid
"#,
)
.unwrap();
// Valid match file importing "_sub.yml": should load fine.
let another_file = match_dir.join("another.yml");
std::fs::write(
&another_file,
r#"
imports:
- "_sub.yml"
matches:
- trigger: "hello2"
replace: "world2"
"#,
)
.unwrap();
// Imported file with another YAML syntax error: skipped and reported.
let under_file = match_dir.join("_sub.yml");
std::fs::write(
&under_file,
r#"
matches:
- trigger: "hello3"
replace: "world3"invalid
"#,
)
.unwrap();
// Empty default config: valid, loads without errors.
let config_file = config_dir.join("default.yml");
std::fs::write(&config_file, r#""#).unwrap();
// Custom config with a YAML syntax error (unterminated quote): skipped
// and reported, but must not make load() fail.
let custom_config_file = config_dir.join("custom.yml");
std::fs::write(
&custom_config_file,
r#"
filter_title: "Chrome"
"
use_standard_includes: false
includes: ["../match/another.yml"]
"#,
)
.unwrap();
// One non-fatal error set per broken file: base.yml, _sub.yml, custom.yml.
let (config_store, match_store, errors) = load(&base).unwrap();
assert_eq!(errors.len(), 3);
// It shouldn't have loaded the "custom.yml" one because of the YAML error
assert_eq!(config_store.configs().len(), 1);
// It shouldn't load "base.yml" and "_sub.yml" due to YAML errors
assert_eq!(match_store.loaded_paths().len(), 1);
});
}
#[test]
// Verifies that a single invalid match inside an otherwise valid file is
// skipped and reported as a non-fatal error, while the valid matches in the
// same file are still loaded.
fn load_non_fatal_match_errors() {
use_test_directory(|base, match_dir, config_dir| {
// One valid match plus one match with no effect field (invalid).
let base_file = match_dir.join("base.yml");
std::fs::write(
&base_file,
r#"
matches:
- trigger: "hello"
replace: "world"
- trigger: "invalid because there is no action field"
"#,
)
.unwrap();
// Minimal valid default config.
let config_file = config_dir.join("default.yml");
std::fs::write(&config_file, r#""#).unwrap();
let (config_store, match_store, errors) = load(&base).unwrap();
// Exactly one error set, attributed to base.yml, with a single record.
assert_eq!(errors.len(), 1);
assert_eq!(errors[0].file, base_file);
assert_eq!(errors[0].errors.len(), 1);
// Only the valid match should have been loaded.
assert_eq!(
match_store
.query(config_store.default().match_paths())
.matches
.len(),
1
);
});
}
#[test]
// Verifies that a syntax error in the *default* config file is fatal:
// unlike custom configs and match files, default.yml cannot be skipped,
// so load() must return an Err.
fn load_fatal_errors() {
use_test_directory(|base, match_dir, config_dir| {
// Perfectly valid match file — not the cause of the failure.
let base_file = match_dir.join("base.yml");
std::fs::write(
&base_file,
r#"
matches:
- trigger: hello
replace: world
"#,
)
.unwrap();
// default.yml with broken YAML (unterminated quote).
let config_file = config_dir.join("default.yml");
std::fs::write(&config_file, r#"
invalid
"
"#).unwrap();
// A syntax error in the default.yml file cannot be handled gracefully
assert!(load(&base).is_err());
});
}
#[test] #[test]
fn load_without_valid_config_dir() { fn load_without_valid_config_dir() {
use_test_directory(|_, match_dir, _| { use_test_directory(|_, match_dir, _| {

View File

@ -21,6 +21,8 @@ use anyhow::Result;
use std::path::Path; use std::path::Path;
use thiserror::Error; use thiserror::Error;
use crate::error::NonFatalErrorSet;
use self::yaml::YAMLImporter; use self::yaml::YAMLImporter;
use super::MatchGroup; use super::MatchGroup;
@ -29,14 +31,14 @@ pub(crate) mod yaml;
trait Importer { trait Importer {
fn is_supported(&self, extension: &str) -> bool; fn is_supported(&self, extension: &str) -> bool;
fn load_group(&self, path: &Path) -> Result<MatchGroup>; fn load_group(&self, path: &Path) -> Result<(MatchGroup, Option<NonFatalErrorSet>)>;
} }
lazy_static! { lazy_static! {
static ref IMPORTERS: Vec<Box<dyn Importer + Sync + Send>> = vec![Box::new(YAMLImporter::new()),]; static ref IMPORTERS: Vec<Box<dyn Importer + Sync + Send>> = vec![Box::new(YAMLImporter::new()),];
} }
pub(crate) fn load_match_group(path: &Path) -> Result<MatchGroup> { pub(crate) fn load_match_group(path: &Path) -> Result<(MatchGroup, Option<NonFatalErrorSet>)> {
if let Some(extension) = path.extension() { if let Some(extension) = path.extension() {
let extension = extension.to_string_lossy().to_lowercase(); let extension = extension.to_string_lossy().to_lowercase();
@ -46,25 +48,25 @@ pub(crate) fn load_match_group(path: &Path) -> Result<MatchGroup> {
match importer { match importer {
Some(importer) => match importer.load_group(path) { Some(importer) => match importer.load_group(path) {
Ok(group) => Ok(group), Ok((group, non_fatal_error_set)) => Ok((group, non_fatal_error_set)),
Err(err) => Err(LoadError::ParsingError(err).into()), Err(err) => Err(LoadError::ParsingError(err).into()),
}, },
None => Err(LoadError::InvalidFormat().into()), None => Err(LoadError::InvalidFormat.into()),
} }
} else { } else {
Err(LoadError::MissingExtension().into()) Err(LoadError::MissingExtension.into())
} }
} }
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum LoadError { pub enum LoadError {
#[error("missing extension in match group file")] #[error("missing extension in match group file")]
MissingExtension(), MissingExtension,
#[error("invalid match group format")] #[error("invalid match group format")]
InvalidFormat(), InvalidFormat,
#[error("parser reported an error: `{0}`")] #[error(transparent)]
ParsingError(anyhow::Error), ParsingError(anyhow::Error),
} }
@ -84,7 +86,7 @@ mod tests {
.unwrap_err() .unwrap_err()
.downcast::<LoadError>() .downcast::<LoadError>()
.unwrap(), .unwrap(),
LoadError::InvalidFormat() LoadError::InvalidFormat
)); ));
}); });
} }
@ -100,7 +102,7 @@ mod tests {
.unwrap_err() .unwrap_err()
.downcast::<LoadError>() .downcast::<LoadError>()
.unwrap(), .unwrap(),
LoadError::MissingExtension() LoadError::MissingExtension
)); ));
}); });
} }
@ -135,7 +137,7 @@ mod tests {
) )
.unwrap(); .unwrap();
assert_eq!(load_match_group(&file).unwrap().matches.len(), 1); assert_eq!(load_match_group(&file).unwrap().0.matches.len(), 1);
}); });
} }
@ -153,7 +155,7 @@ mod tests {
) )
.unwrap(); .unwrap();
assert_eq!(load_match_group(&file).unwrap().matches.len(), 1); assert_eq!(load_match_group(&file).unwrap().0.matches.len(), 1);
}); });
} }
@ -171,7 +173,7 @@ mod tests {
) )
.unwrap(); .unwrap();
assert_eq!(load_match_group(&file).unwrap().matches.len(), 1); assert_eq!(load_match_group(&file).unwrap().0.matches.len(), 1);
}); });
} }
} }

View File

@ -19,17 +19,16 @@
use crate::{ use crate::{
counter::next_id, counter::next_id,
error::{ErrorRecord, NonFatalErrorSet},
matches::{ matches::{
group::{path::resolve_imports, MatchGroup}, group::{path::resolve_imports, MatchGroup},
ImageEffect, Match, Params, RegexCause, TextFormat, TextInjectMode, UpperCasingStyle, Value, ImageEffect, Match, Params, RegexCause, TextFormat, TextInjectMode, UpperCasingStyle, Value,
Variable, Variable,
}, },
}; };
use anyhow::Result; use anyhow::{anyhow, bail, Context, Result};
use log::{error, warn};
use parse::YAMLMatchGroup; use parse::YAMLMatchGroup;
use regex::{Captures, Regex}; use regex::{Captures, Regex};
use std::convert::{TryFrom, TryInto};
use self::{ use self::{
parse::{YAMLMatch, YAMLVariable}, parse::{YAMLMatch, YAMLVariable},
@ -46,6 +45,9 @@ lazy_static! {
static ref VAR_REGEX: Regex = Regex::new("\\{\\{\\s*(\\w+)(\\.\\w+)?\\s*\\}\\}").unwrap(); static ref VAR_REGEX: Regex = Regex::new("\\{\\{\\s*(\\w+)(\\.\\w+)?\\s*\\}\\}").unwrap();
} }
// Create an alias to make the meaning more explicit
type Warning = anyhow::Error;
pub(crate) struct YAMLImporter {} pub(crate) struct YAMLImporter {}
impl YAMLImporter { impl YAMLImporter {
@ -62,44 +64,68 @@ impl Importer for YAMLImporter {
fn load_group( fn load_group(
&self, &self,
path: &std::path::Path, path: &std::path::Path,
) -> anyhow::Result<crate::matches::group::MatchGroup> { ) -> anyhow::Result<(crate::matches::group::MatchGroup, Option<NonFatalErrorSet>)> {
let yaml_group = YAMLMatchGroup::parse_from_file(path)?; let yaml_group =
YAMLMatchGroup::parse_from_file(path).context("failed to parse YAML match group")?;
let global_vars: Result<Vec<Variable>> = yaml_group let mut non_fatal_errors = Vec::new();
.global_vars
.as_ref()
.cloned()
.unwrap_or_default()
.iter()
.map(|var| var.clone().try_into())
.collect();
let matches: Result<Vec<Match>> = yaml_group let mut global_vars = Vec::new();
.matches for yaml_global_var in yaml_group.global_vars.as_ref().cloned().unwrap_or_default() {
.as_ref() match try_convert_into_variable(yaml_global_var) {
.cloned() Ok((var, warnings)) => {
.unwrap_or_default() global_vars.push(var);
.iter() non_fatal_errors.extend(warnings.into_iter().map(ErrorRecord::warn));
.map(|m| m.clone().try_into()) }
.collect(); Err(err) => {
non_fatal_errors.push(ErrorRecord::error(err));
}
}
}
let mut matches = Vec::new();
for yaml_match in yaml_group.matches.as_ref().cloned().unwrap_or_default() {
match try_convert_into_match(yaml_match) {
Ok((m, warnings)) => {
matches.push(m);
non_fatal_errors.extend(warnings.into_iter().map(ErrorRecord::warn));
}
Err(err) => {
non_fatal_errors.push(ErrorRecord::error(err));
}
}
}
// Resolve imports // Resolve imports
let resolved_imports = resolve_imports(path, &yaml_group.imports.unwrap_or_default())?; let (resolved_imports, import_errors) =
resolve_imports(path, &yaml_group.imports.unwrap_or_default())
.context("failed to resolve YAML match group imports")?;
non_fatal_errors.extend(import_errors);
Ok(MatchGroup { let non_fatal_error_set = if !non_fatal_errors.is_empty() {
Some(NonFatalErrorSet::new(path, non_fatal_errors))
} else {
None
};
Ok((
MatchGroup {
imports: resolved_imports, imports: resolved_imports,
global_vars: global_vars?, global_vars: global_vars,
matches: matches?, matches: matches,
}) },
non_fatal_error_set,
))
} }
} }
impl TryFrom<YAMLMatch> for Match { pub fn try_convert_into_match(yaml_match: YAMLMatch) -> Result<(Match, Vec<Warning>)> {
type Error = anyhow::Error; let mut warnings = Vec::new();
fn try_from(yaml_match: YAMLMatch) -> Result<Self, Self::Error> {
if yaml_match.uppercase_style.is_some() && yaml_match.propagate_case.is_none() { if yaml_match.uppercase_style.is_some() && yaml_match.propagate_case.is_none() {
warn!("specifying the 'uppercase_style' option without 'propagate_case' has no effect"); warnings.push(anyhow!(
"specifying the 'uppercase_style' option without 'propagate_case' has no effect"
));
} }
let triggers = if let Some(trigger) = yaml_match.trigger { let triggers = if let Some(trigger) = yaml_match.trigger {
@ -119,10 +145,10 @@ impl TryFrom<YAMLMatch> for Match {
Some("capitalize") => UpperCasingStyle::Capitalize, Some("capitalize") => UpperCasingStyle::Capitalize,
Some("capitalize_words") => UpperCasingStyle::CapitalizeWords, Some("capitalize_words") => UpperCasingStyle::CapitalizeWords,
Some(style) => { Some(style) => {
error!( warnings.push(anyhow!(
"unrecognized uppercase_style: {:?}, falling back to the default", "unrecognized uppercase_style: {:?}, falling back to the default",
style style
); ));
TriggerCause::default().uppercase_style TriggerCause::default().uppercase_style
} }
_ => TriggerCause::default().uppercase_style, _ => TriggerCause::default().uppercase_style,
@ -165,8 +191,7 @@ impl TryFrom<YAMLMatch> for Match {
}; };
let effect = let effect =
if yaml_match.replace.is_some() || yaml_match.markdown.is_some() || yaml_match.html.is_some() if yaml_match.replace.is_some() || yaml_match.markdown.is_some() || yaml_match.html.is_some() {
{
// TODO: test markdown and html cases // TODO: test markdown and html cases
let (replace, format) = if let Some(plain) = yaml_match.replace { let (replace, format) = if let Some(plain) = yaml_match.replace {
(plain, TextFormat::Plain) (plain, TextFormat::Plain)
@ -178,16 +203,17 @@ impl TryFrom<YAMLMatch> for Match {
unreachable!(); unreachable!();
}; };
let vars: Result<Vec<Variable>> = yaml_match let mut vars: Vec<Variable> = Vec::new();
.vars for yaml_var in yaml_match.vars.unwrap_or_default() {
.unwrap_or_default() let (var, var_warnings) = try_convert_into_variable(yaml_var.clone())
.into_iter() .with_context(|| format!("failed to load variable: {:?}", yaml_var))?;
.map(|var| var.try_into()) warnings.extend(var_warnings);
.collect(); vars.push(var);
}
MatchEffect::Text(TextEffect { MatchEffect::Text(TextEffect {
replace, replace,
vars: vars?, vars,
format, format,
force_mode, force_mode,
}) })
@ -233,32 +259,33 @@ impl TryFrom<YAMLMatch> for Match {
}; };
if let MatchEffect::None = effect { if let MatchEffect::None = effect {
warn!( bail!(
"match caused by {:?} does not produce any effect. Did you forget the 'replace' field?", "match triggered by {:?} does not produce any effect. Did you forget the 'replace' field?",
cause cause.long_description()
); );
} }
Ok(Self { Ok((
Match {
cause, cause,
effect, effect,
label: None, label: None,
id: next_id(), id: next_id(),
}) },
} warnings,
))
} }
impl TryFrom<YAMLVariable> for Variable { pub fn try_convert_into_variable(yaml_var: YAMLVariable) -> Result<(Variable, Vec<Warning>)> {
type Error = anyhow::Error; Ok((
Variable {
fn try_from(yaml_var: YAMLVariable) -> Result<Self, Self::Error> {
Ok(Self {
name: yaml_var.name, name: yaml_var.name,
var_type: yaml_var.var_type, var_type: yaml_var.var_type,
params: convert_params(yaml_var.params)?, params: convert_params(yaml_var.params)?,
id: next_id(), id: next_id(),
}) },
} Vec::new(),
))
} }
#[cfg(test)] #[cfg(test)]
@ -270,9 +297,9 @@ mod tests {
}; };
use std::fs::create_dir_all; use std::fs::create_dir_all;
fn create_match(yaml: &str) -> Result<Match> { fn create_match_with_warnings(yaml: &str) -> Result<(Match, Vec<Warning>)> {
let yaml_match: YAMLMatch = serde_yaml::from_str(yaml)?; let yaml_match: YAMLMatch = serde_yaml::from_str(yaml)?;
let mut m: Match = yaml_match.try_into()?; let (mut m, warnings) = try_convert_into_match(yaml_match)?;
// Reset the IDs to correctly compare them // Reset the IDs to correctly compare them
m.id = 0; m.id = 0;
@ -280,6 +307,14 @@ mod tests {
e.vars.iter_mut().for_each(|v| v.id = 0); e.vars.iter_mut().for_each(|v| v.id = 0);
} }
Ok((m, warnings))
}
fn create_match(yaml: &str) -> Result<Match> {
let (m, warnings) = create_match_with_warnings(yaml)?;
if !warnings.is_empty() {
panic!("warnings were detected but not handled: {:?}", warnings);
}
Ok(m) Ok(m)
} }
@ -444,6 +479,7 @@ mod tests {
trigger: "Hello" trigger: "Hello"
replace: "world" replace: "world"
uppercase_style: "capitalize" uppercase_style: "capitalize"
propagate_case: true
"# "#
) )
.unwrap() .unwrap()
@ -460,6 +496,7 @@ mod tests {
trigger: "Hello" trigger: "Hello"
replace: "world" replace: "world"
uppercase_style: "capitalize_words" uppercase_style: "capitalize_words"
propagate_case: true
"# "#
) )
.unwrap() .unwrap()
@ -476,6 +513,7 @@ mod tests {
trigger: "Hello" trigger: "Hello"
replace: "world" replace: "world"
uppercase_style: "uppercase" uppercase_style: "uppercase"
propagate_case: true
"# "#
) )
.unwrap() .unwrap()
@ -486,21 +524,36 @@ mod tests {
UpperCasingStyle::Uppercase, UpperCasingStyle::Uppercase,
); );
// Invalid without propagate_case
let (m, warnings) = create_match_with_warnings(
r#"
trigger: "Hello"
replace: "world"
uppercase_style: "capitalize"
"#,
)
.unwrap();
assert_eq!( assert_eq!(
create_match( m.cause.into_trigger().unwrap().uppercase_style,
UpperCasingStyle::Capitalize,
);
assert_eq!(warnings.len(), 1);
// Invalid style
let (m, warnings) = create_match_with_warnings(
r#" r#"
trigger: "Hello" trigger: "Hello"
replace: "world" replace: "world"
uppercase_style: "invalid" uppercase_style: "invalid"
"# propagate_case: true
"#,
) )
.unwrap() .unwrap();
.cause assert_eq!(
.into_trigger() m.cause.into_trigger().unwrap().uppercase_style,
.unwrap()
.uppercase_style,
UpperCasingStyle::Uppercase, UpperCasingStyle::Uppercase,
); );
assert_eq!(warnings.len(), 1);
} }
#[test] #[test]
@ -612,7 +665,10 @@ mod tests {
std::fs::write(&sub_file, "").unwrap(); std::fs::write(&sub_file, "").unwrap();
let importer = YAMLImporter::new(); let importer = YAMLImporter::new();
let mut group = importer.load_group(&base_file).unwrap(); let (mut group, non_fatal_error_set) = importer.load_group(&base_file).unwrap();
// The invalid import path should be reported as error
assert_eq!(non_fatal_error_set.unwrap().errors.len(), 1);
// Reset the ids to compare them correctly // Reset the ids to compare them correctly
group.matches.iter_mut().for_each(|mut m| m.id = 0); group.matches.iter_mut().for_each(|mut m| m.id = 0);
group.global_vars.iter_mut().for_each(|mut v| v.id = 0); group.global_vars.iter_mut().for_each(|mut v| v.id = 0);
@ -644,4 +700,23 @@ mod tests {
) )
}); });
} }
#[test]
// Verifies that a top-level YAML syntax error (bad indentation in the
// imports list) makes the importer fail outright: the file cannot be
// parsed at all, so no MatchGroup or non-fatal error set is produced.
fn importer_invalid_syntax() {
use_test_directory(|_, match_dir, _| {
let base_file = match_dir.join("base.yml");
std::fs::write(
&base_file,
r#"
imports:
- invalid
- indentation
"#,
)
.unwrap();
let importer = YAMLImporter::new();
assert!(importer.load_group(&base_file).is_err());
})
}
} }

View File

@ -20,6 +20,8 @@
use anyhow::Result; use anyhow::Result;
use std::path::Path; use std::path::Path;
use crate::error::NonFatalErrorSet;
use super::{Match, Variable}; use super::{Match, Variable};
pub(crate) mod loader; pub(crate) mod loader;
@ -44,7 +46,7 @@ impl Default for MatchGroup {
impl MatchGroup { impl MatchGroup {
// TODO: test // TODO: test
pub fn load(group_path: &Path) -> Result<Self> { pub fn load(group_path: &Path) -> Result<(Self, Option<NonFatalErrorSet>)> {
loader::load_match_group(group_path) loader::load_match_group(group_path)
} }
} }

View File

@ -17,12 +17,16 @@
* along with espanso. If not, see <https://www.gnu.org/licenses/>. * along with espanso. If not, see <https://www.gnu.org/licenses/>.
*/ */
use anyhow::Result; use anyhow::{anyhow, Context, Result};
use log::error;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use thiserror::Error; use thiserror::Error;
pub fn resolve_imports(group_path: &Path, imports: &[String]) -> Result<Vec<String>> { use crate::error::ErrorRecord;
pub fn resolve_imports(
group_path: &Path,
imports: &[String],
) -> Result<(Vec<String>, Vec<ErrorRecord>)> {
let mut paths = Vec::new(); let mut paths = Vec::new();
// Get the containing directory // Get the containing directory
@ -42,6 +46,8 @@ pub fn resolve_imports(group_path: &Path, imports: &[String]) -> Result<Vec<Stri
group_path group_path
}; };
let mut non_fatal_errors = Vec::new();
for import in imports.iter() { for import in imports.iter() {
let import_path = PathBuf::from(import); let import_path = PathBuf::from(import);
@ -52,21 +58,21 @@ pub fn resolve_imports(group_path: &Path, imports: &[String]) -> Result<Vec<Stri
import_path import_path
}; };
match dunce::canonicalize(&full_path) { match dunce::canonicalize(&full_path)
.with_context(|| format!("unable to canonicalize import path: {:?}", full_path))
{
Ok(canonical_path) => { Ok(canonical_path) => {
if canonical_path.exists() && canonical_path.is_file() { if canonical_path.exists() && canonical_path.is_file() {
paths.push(canonical_path) paths.push(canonical_path)
} else { } else {
// Best effort imports // Best effort imports
error!("unable to resolve import at path: {:?}", canonical_path); non_fatal_errors.push(ErrorRecord::error(anyhow!(
"unable to resolve import at path: {:?}",
canonical_path
)))
} }
} }
Err(error) => { Err(error) => non_fatal_errors.push(ErrorRecord::error(error)),
error!(
"unable to canonicalize import path: {:?}, with error: {}",
full_path, error
);
}
} }
} }
@ -75,7 +81,7 @@ pub fn resolve_imports(group_path: &Path, imports: &[String]) -> Result<Vec<Stri
.map(|path| path.to_string_lossy().to_string()) .map(|path| path.to_string_lossy().to_string())
.collect(); .collect();
Ok(string_paths) Ok((string_paths, non_fatal_errors))
} }
#[derive(Error, Debug)] #[derive(Error, Debug)]
@ -115,14 +121,19 @@ pub mod tests {
"sub/invalid.yml".to_string(), // Should be skipped "sub/invalid.yml".to_string(), // Should be skipped
]; ];
let (resolved_imports, errors) = resolve_imports(&base_file, &imports).unwrap();
assert_eq!( assert_eq!(
resolve_imports(&base_file, &imports).unwrap(), resolved_imports,
vec![ vec![
another_file.to_string_lossy().to_string(), another_file.to_string_lossy().to_string(),
sub_file.to_string_lossy().to_string(), sub_file.to_string_lossy().to_string(),
absolute_file.to_string_lossy().to_string(), absolute_file.to_string_lossy().to_string(),
] ]
); );
// The "sub/invalid.yml" should generate an error
assert_eq!(errors.len(), 1);
}); });
} }
@ -140,10 +151,14 @@ pub mod tests {
let imports = vec!["../base.yml".to_string()]; let imports = vec!["../base.yml".to_string()];
let (resolved_imports, errors) = resolve_imports(&sub_file, &imports).unwrap();
assert_eq!( assert_eq!(
resolve_imports(&sub_file, &imports).unwrap(), resolved_imports,
vec![base_file.to_string_lossy().to_string(),] vec![base_file.to_string_lossy().to_string(),]
); );
assert_eq!(errors.len(), 0);
}); });
} }
} }

View File

@ -64,13 +64,7 @@ impl Match {
// TODO: test // TODO: test
pub fn cause_description<'a>(&'a self) -> Option<&'a str> { pub fn cause_description<'a>(&'a self) -> Option<&'a str> {
if let MatchCause::Trigger(trigger_cause) = &self.cause { self.cause.description()
trigger_cause.triggers.first().map(|s| s.as_str())
} else {
None
}
// TODO: insert rendering for hotkey/shortcut
// TODO: insert rendering for regex? I'm worried it might be too long
} }
} }
@ -84,6 +78,30 @@ pub enum MatchCause {
// TODO: shortcut // TODO: shortcut
} }
impl MatchCause {
  /// Short, human-readable description of what activates this match.
  ///
  /// Currently only trigger-based causes produce a description (the first
  /// trigger string); all other causes return `None`.
  // TODO: test
  pub fn description(&self) -> Option<&str> {
    // Explicit lifetimes removed: `fn(&self) -> Option<&str>` is fully
    // covered by Rust's lifetime elision rules.
    if let MatchCause::Trigger(trigger_cause) = self {
      trigger_cause.triggers.first().map(String::as_str)
    } else {
      None
    }
    // TODO: insert rendering for hotkey/shortcut
    // TODO: insert rendering for regex? I'm worried it might be too long
  }

  /// Longer description listing every trigger, suitable for error messages.
  // TODO: test
  pub fn long_description(&self) -> String {
    if let MatchCause::Trigger(trigger_cause) = self {
      format!("triggers: {:?}", trigger_cause.triggers)
    } else {
      "No description available".to_owned()
    }
    // TODO: insert rendering for hotkey/shortcut
    // TODO: insert rendering for regex? I'm worried it might be too long
  }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TriggerCause { pub struct TriggerCause {
pub triggers: Vec<String>, pub triggers: Vec<String>,

View File

@ -17,32 +17,33 @@
* along with espanso. If not, see <https://www.gnu.org/licenses/>. * along with espanso. If not, see <https://www.gnu.org/licenses/>.
*/ */
use log::error;
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
};
use super::{MatchSet, MatchStore}; use super::{MatchSet, MatchStore};
use crate::{ use crate::{
counter::StructId, counter::StructId,
error::NonFatalErrorSet,
matches::{group::MatchGroup, Match, Variable}, matches::{group::MatchGroup, Match, Variable},
}; };
use anyhow::Context;
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
};
pub(crate) struct DefaultMatchStore { pub(crate) struct DefaultMatchStore {
pub groups: HashMap<String, MatchGroup>, pub groups: HashMap<String, MatchGroup>,
} }
impl DefaultMatchStore { impl DefaultMatchStore {
pub fn new(paths: &[String]) -> Self { pub fn load(paths: &[String]) -> (Self, Vec<NonFatalErrorSet>) {
let mut groups = HashMap::new(); let mut groups = HashMap::new();
let mut non_fatal_error_sets = Vec::new();
// Because match groups can imports other match groups, // Because match groups can imports other match groups,
// we have to load them recursively starting from the // we have to load them recursively starting from the
// top-level ones. // top-level ones.
load_match_groups_recursively(&mut groups, paths); load_match_groups_recursively(&mut groups, paths, &mut non_fatal_error_sets);
Self { groups } (Self { groups }, non_fatal_error_sets)
} }
} }
@ -69,21 +70,35 @@ impl MatchStore for DefaultMatchStore {
global_vars, global_vars,
} }
} }
fn loaded_paths(&self) -> Vec<String> {
self.groups.keys().map(|key| key.clone()).collect()
}
} }
fn load_match_groups_recursively(groups: &mut HashMap<String, MatchGroup>, paths: &[String]) { fn load_match_groups_recursively(
groups: &mut HashMap<String, MatchGroup>,
paths: &[String],
non_fatal_error_sets: &mut Vec<NonFatalErrorSet>,
) {
for path in paths.iter() { for path in paths.iter() {
if !groups.contains_key(path) { if !groups.contains_key(path) {
let group_path = PathBuf::from(path); let group_path = PathBuf::from(path);
match MatchGroup::load(&group_path) { match MatchGroup::load(&group_path)
Ok(group) => { .with_context(|| format!("unable to load match group {:?}", group_path))
{
Ok((group, non_fatal_error_set)) => {
let imports = group.imports.clone(); let imports = group.imports.clone();
groups.insert(path.clone(), group); groups.insert(path.clone(), group);
load_match_groups_recursively(groups, &imports); if let Some(non_fatal_error_set) = non_fatal_error_set {
non_fatal_error_sets.push(non_fatal_error_set);
} }
Err(error) => {
error!("unable to load match group: {:?}", error); load_match_groups_recursively(groups, &imports, non_fatal_error_sets);
}
Err(err) => {
non_fatal_error_sets.push(NonFatalErrorSet::single_error(&group_path, err));
} }
} }
} }
@ -221,8 +236,9 @@ mod tests {
) )
.unwrap(); .unwrap();
let match_store = DefaultMatchStore::new(&[base_file.to_string_lossy().to_string()]); let (match_store, non_fatal_error_sets) =
DefaultMatchStore::load(&[base_file.to_string_lossy().to_string()]);
assert_eq!(non_fatal_error_sets.len(), 0);
assert_eq!(match_store.groups.len(), 3); assert_eq!(match_store.groups.len(), 3);
let base_group = &match_store let base_group = &match_store
@ -230,11 +246,14 @@ mod tests {
.get(&base_file.to_string_lossy().to_string()) .get(&base_file.to_string_lossy().to_string())
.unwrap() .unwrap()
.matches; .matches;
let base_group: Vec<Match> = base_group.iter().map(|m| { let base_group: Vec<Match> = base_group
.iter()
.map(|m| {
let mut copy = m.clone(); let mut copy = m.clone();
copy.id = 0; copy.id = 0;
copy copy
}).collect(); })
.collect();
assert_eq!(base_group, create_matches(&[("hello", "world")])); assert_eq!(base_group, create_matches(&[("hello", "world")]));
@ -243,11 +262,14 @@ mod tests {
.get(&another_file.to_string_lossy().to_string()) .get(&another_file.to_string_lossy().to_string())
.unwrap() .unwrap()
.matches; .matches;
let another_group: Vec<Match> = another_group.iter().map(|m| { let another_group: Vec<Match> = another_group
.iter()
.map(|m| {
let mut copy = m.clone(); let mut copy = m.clone();
copy.id = 0; copy.id = 0;
copy copy
}).collect(); })
.collect();
assert_eq!( assert_eq!(
another_group, another_group,
create_matches(&[("hello", "world2"), ("foo", "bar")]) create_matches(&[("hello", "world2"), ("foo", "bar")])
@ -258,11 +280,14 @@ mod tests {
.get(&sub_file.to_string_lossy().to_string()) .get(&sub_file.to_string_lossy().to_string())
.unwrap() .unwrap()
.matches; .matches;
let sub_group: Vec<Match> = sub_group.iter().map(|m| { let sub_group: Vec<Match> = sub_group
.iter()
.map(|m| {
let mut copy = m.clone(); let mut copy = m.clone();
copy.id = 0; copy.id = 0;
copy copy
}).collect(); })
.collect();
assert_eq!(sub_group, create_matches(&[("hello", "world3")])); assert_eq!(sub_group, create_matches(&[("hello", "world3")]));
}); });
} }
@ -317,9 +342,11 @@ mod tests {
) )
.unwrap(); .unwrap();
let match_store = DefaultMatchStore::new(&[base_file.to_string_lossy().to_string()]); let (match_store, non_fatal_error_sets) =
DefaultMatchStore::load(&[base_file.to_string_lossy().to_string()]);
assert_eq!(match_store.groups.len(), 3); assert_eq!(match_store.groups.len(), 3);
assert_eq!(non_fatal_error_sets.len(), 0);
}); });
} }
@ -378,7 +405,9 @@ mod tests {
) )
.unwrap(); .unwrap();
let match_store = DefaultMatchStore::new(&[base_file.to_string_lossy().to_string()]); let (match_store, non_fatal_error_sets) =
DefaultMatchStore::load(&[base_file.to_string_lossy().to_string()]);
assert_eq!(non_fatal_error_sets.len(), 0);
let match_set = match_store.query(&[base_file.to_string_lossy().to_string()]); let match_set = match_store.query(&[base_file.to_string_lossy().to_string()]);
@ -387,7 +416,10 @@ mod tests {
.matches .matches
.into_iter() .into_iter()
.cloned() .cloned()
.map(|mut m| { m.id = 0; m }) .map(|mut m| {
m.id = 0;
m
})
.collect::<Vec<Match>>(), .collect::<Vec<Match>>(),
create_matches(&[ create_matches(&[
("hello", "world3"), ("hello", "world3"),
@ -402,7 +434,10 @@ mod tests {
.global_vars .global_vars
.into_iter() .into_iter()
.cloned() .cloned()
.map(|mut v| { v.id = 0; v }) .map(|mut v| {
v.id = 0;
v
})
.collect::<Vec<Variable>>(), .collect::<Vec<Variable>>(),
create_vars(&["var2", "var1"]) create_vars(&["var2", "var1"])
); );
@ -467,7 +502,9 @@ mod tests {
) )
.unwrap(); .unwrap();
let match_store = DefaultMatchStore::new(&[base_file.to_string_lossy().to_string()]); let (match_store, non_fatal_error_sets) =
DefaultMatchStore::load(&[base_file.to_string_lossy().to_string()]);
assert_eq!(non_fatal_error_sets.len(), 0);
let match_set = match_store.query(&[base_file.to_string_lossy().to_string()]); let match_set = match_store.query(&[base_file.to_string_lossy().to_string()]);
@ -476,7 +513,10 @@ mod tests {
.matches .matches
.into_iter() .into_iter()
.cloned() .cloned()
.map(|mut m| { m.id = 0; m }) .map(|mut m| {
m.id = 0;
m
})
.collect::<Vec<Match>>(), .collect::<Vec<Match>>(),
create_matches(&[ create_matches(&[
("hello", "world3"), ("hello", "world3"),
@ -491,7 +531,10 @@ mod tests {
.global_vars .global_vars
.into_iter() .into_iter()
.cloned() .cloned()
.map(|mut v| { v.id = 0; v}) .map(|mut v| {
v.id = 0;
v
})
.collect::<Vec<Variable>>(), .collect::<Vec<Variable>>(),
create_vars(&["var2", "var1"]) create_vars(&["var2", "var1"])
); );
@ -550,10 +593,11 @@ mod tests {
) )
.unwrap(); .unwrap();
let match_store = DefaultMatchStore::new(&[ let (match_store, non_fatal_error_sets) = DefaultMatchStore::load(&[
base_file.to_string_lossy().to_string(), base_file.to_string_lossy().to_string(),
sub_file.to_string_lossy().to_string(), sub_file.to_string_lossy().to_string(),
]); ]);
assert_eq!(non_fatal_error_sets.len(), 0);
let match_set = match_store.query(&[ let match_set = match_store.query(&[
base_file.to_string_lossy().to_string(), base_file.to_string_lossy().to_string(),
@ -565,7 +609,10 @@ mod tests {
.matches .matches
.into_iter() .into_iter()
.cloned() .cloned()
.map(|mut m| { m.id = 0; m }) .map(|mut m| {
m.id = 0;
m
})
.collect::<Vec<Match>>(), .collect::<Vec<Match>>(),
create_matches(&[ create_matches(&[
("hello", "world2"), ("hello", "world2"),
@ -580,7 +627,10 @@ mod tests {
.global_vars .global_vars
.into_iter() .into_iter()
.cloned() .cloned()
.map(|mut v| { v.id = 0; v }) .map(|mut v| {
v.id = 0;
v
})
.collect::<Vec<Variable>>(), .collect::<Vec<Variable>>(),
create_vars(&["var1", "var2"]) create_vars(&["var1", "var2"])
); );
@ -642,7 +692,9 @@ mod tests {
) )
.unwrap(); .unwrap();
let match_store = DefaultMatchStore::new(&[base_file.to_string_lossy().to_string()]); let (match_store, non_fatal_error_sets) =
DefaultMatchStore::load(&[base_file.to_string_lossy().to_string()]);
assert_eq!(non_fatal_error_sets.len(), 0);
let match_set = match_store.query(&[ let match_set = match_store.query(&[
base_file.to_string_lossy().to_string(), base_file.to_string_lossy().to_string(),
@ -654,7 +706,10 @@ mod tests {
.matches .matches
.into_iter() .into_iter()
.cloned() .cloned()
.map(|mut m| { m.id = 0; m }) .map(|mut m| {
m.id = 0;
m
})
.collect::<Vec<Match>>(), .collect::<Vec<Match>>(),
create_matches(&[ create_matches(&[
("hello", "world3"), // This appears only once, though it appears 2 times ("hello", "world3"), // This appears only once, though it appears 2 times
@ -669,10 +724,15 @@ mod tests {
.global_vars .global_vars
.into_iter() .into_iter()
.cloned() .cloned()
.map(|mut v| { v.id = 0; v }) .map(|mut v| {
v.id = 0;
v
})
.collect::<Vec<Variable>>(), .collect::<Vec<Variable>>(),
create_vars(&["var2", "var1"]) create_vars(&["var2", "var1"])
); );
}); });
} }
// TODO: add fatal and non-fatal error cases
} }

View File

@ -17,12 +17,15 @@
* along with espanso. If not, see <https://www.gnu.org/licenses/>. * along with espanso. If not, see <https://www.gnu.org/licenses/>.
*/ */
use crate::error::NonFatalErrorSet;
use super::{Match, Variable}; use super::{Match, Variable};
mod default; mod default;
pub trait MatchStore: Send { pub trait MatchStore: Send {
fn query(&self, paths: &[String]) -> MatchSet; fn query(&self, paths: &[String]) -> MatchSet;
fn loaded_paths(&self) -> Vec<String>;
} }
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
@ -31,8 +34,8 @@ pub struct MatchSet<'a> {
pub global_vars: Vec<&'a Variable>, pub global_vars: Vec<&'a Variable>,
} }
pub fn new(paths: &[String]) -> impl MatchStore { pub fn load(paths: &[String]) -> (impl MatchStore, Vec<NonFatalErrorSet>) {
// TODO: here we can replace the DefaultMatchStore with a caching wrapper // TODO: here we can replace the DefaultMatchStore with a caching wrapper
// that returns the same response for the given "paths" query // that returns the same response for the given "paths" query
default::DefaultMatchStore::new(paths) default::DefaultMatchStore::load(paths)
} }