Implement the basics of the Y10n structure and the merging of values
This commit is contained in:
parent
de77a2321e
commit
8eef1b1f1d
|
@ -0,0 +1,3 @@
|
||||||
|
# Hier ist ein Beispiel mit ein paar Strings drinne
|
||||||
|
---
|
||||||
|
greeting: 'moin moin'
|
|
@ -0,0 +1,4 @@
|
||||||
|
# This is an example file with some strings in it for localization
|
||||||
|
---
|
||||||
|
greeting: 'hello world'
|
||||||
|
secret: 'pancakes'
|
166
src/lib.rs
166
src/lib.rs
|
@ -4,10 +4,100 @@
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate lazy_static;
|
extern crate lazy_static;
|
||||||
|
|
||||||
|
use glob::glob;
|
||||||
use log::*;
|
use log::*;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::fs::File;
|
||||||
|
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
static ref LANG_REGEX: regex::Regex = regex::Regex::new(r"(?P<code>\w+)-?(?P<region>\w+)?(;q=(?P<quality>([0-9]*[.])?[0-9]+)?)?").unwrap();
|
static ref LANG_REGEX: regex::Regex =
|
||||||
|
regex::Regex::new(r"(?P<code>\w+)-?(?P<region>\w+)?(;q=(?P<quality>([0-9]*[.])?[0-9]+)?)?")
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Y10n is a stateful struct that can be loaded with localization files
|
||||||
|
*/
|
||||||
|
pub struct Y10n {
|
||||||
|
translations: HashMap<String, serde_yaml::Value>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Y10n {
|
||||||
|
fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
translations: HashMap::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create and load a Y10n instance from the yml files in the given glob
|
||||||
|
*
|
||||||
|
* For example `"l10n/**/*.yml"` will load all the yml files in the `l10n` directory using each
|
||||||
|
* file's name (e.g. `en.yml`) to derive it's language key (`en`).
|
||||||
|
*/
|
||||||
|
fn from_glob(pattern: &str) -> Self {
|
||||||
|
let mut this = Self::new();
|
||||||
|
trace!(
|
||||||
|
"Attempting to load translations from glob pattern: {:?}",
|
||||||
|
pattern
|
||||||
|
);
|
||||||
|
|
||||||
|
for entry in glob(pattern).expect("Failed to read glob pattern") {
|
||||||
|
match entry {
|
||||||
|
Ok(path) => {
|
||||||
|
trace!("Loading translations from: {}", path.display());
|
||||||
|
|
||||||
|
if let Some(stem) = path.file_stem() {
|
||||||
|
let key = stem.to_string_lossy();
|
||||||
|
// TODO: Make this error handling more robust
|
||||||
|
let value = serde_yaml::from_reader(
|
||||||
|
File::open(&path).expect("Failed to load file"),
|
||||||
|
)
|
||||||
|
.expect("Failed to deserialize YAML");
|
||||||
|
|
||||||
|
this.translations.insert(key.to_string(), value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => warn!("{:?}", e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a Vec of all the names of languages that have been loaded
|
||||||
|
* These are conventionally just the file stems of the yml files loaded
|
||||||
|
*/
|
||||||
|
fn languages(&self) -> Vec<&String> {
|
||||||
|
self.translations.keys().collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the merged serde_yaml::Value for the given sets of languages.
|
||||||
|
*
|
||||||
|
* THis function is useful for managing language fallbacks to account for partial translations.
|
||||||
|
* FOr example if the German `de` translation file only has one string in it, but the English
|
||||||
|
* `en` file has 10, then this function could be called with a Vec of `Language` instances of
|
||||||
|
* `[de, en]` and the result would contain the one German string and 9 English strings.
|
||||||
|
*/
|
||||||
|
fn localize(&self, languages: &[Language]) -> serde_yaml::Value {
|
||||||
|
use serde_yaml::{Mapping, Value};
|
||||||
|
|
||||||
|
let mut values = vec![];
|
||||||
|
|
||||||
|
for lang in languages {
|
||||||
|
if let Some(value) = self.translations.get(&lang.code) {
|
||||||
|
values.push(value.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut map = Value::Mapping(Mapping::new());
|
||||||
|
|
||||||
|
for value in values.into_iter().rev() {
|
||||||
|
merge_yaml(&mut map, value);
|
||||||
|
}
|
||||||
|
map
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -38,16 +128,25 @@ pub struct Language {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Language {
|
impl Language {
|
||||||
|
/**
|
||||||
|
* Create a `Language` instance from a segment of an `Accepts-Language` header
|
||||||
|
*
|
||||||
|
* For example `en` or `de;q=0.5`.
|
||||||
|
*/
|
||||||
fn from(segment: &str) -> Result<Language, Error> {
|
fn from(segment: &str) -> Result<Language, Error> {
|
||||||
if let Some(captures) = LANG_REGEX.captures(segment) {
|
if let Some(captures) = LANG_REGEX.captures(segment) {
|
||||||
println!("caps: {:?}", captures);
|
|
||||||
Ok(Language {
|
Ok(Language {
|
||||||
code: captures.name("code").map_or("unknown".to_string(), |c| c.as_str().to_string()),
|
code: captures
|
||||||
region: captures.name("region").map_or(None, |c| Some(c.as_str().to_string())),
|
.name("code")
|
||||||
quality: captures.name("quality").map_or(1.0, |c| c.as_str().parse().unwrap_or(0.0)),
|
.map_or("unknown".to_string(), |c| c.as_str().to_string()),
|
||||||
|
region: captures
|
||||||
|
.name("region")
|
||||||
|
.map_or(None, |c| Some(c.as_str().to_string())),
|
||||||
|
quality: captures
|
||||||
|
.name("quality")
|
||||||
|
.map_or(1.0, |c| c.as_str().parse().unwrap_or(0.0)),
|
||||||
})
|
})
|
||||||
}
|
} else {
|
||||||
else {
|
|
||||||
Err(Error::Generic)
|
Err(Error::Generic)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -58,11 +157,63 @@ enum Error {
|
||||||
Generic,
|
Generic,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Merge a couple of serde_yaml together
|
||||||
|
*
|
||||||
|
* THis code courtesy of https://stackoverflow.com/a/67743348
|
||||||
|
*/
|
||||||
|
fn merge_yaml(a: &mut serde_yaml::Value, b: serde_yaml::Value) {
|
||||||
|
match (a, b) {
|
||||||
|
(a @ &mut serde_yaml::Value::Mapping(_), serde_yaml::Value::Mapping(b)) => {
|
||||||
|
let a = a.as_mapping_mut().unwrap();
|
||||||
|
for (k, v) in b {
|
||||||
|
if v.is_sequence() && a.contains_key(&k) && a[&k].is_sequence() {
|
||||||
|
let mut _b = a.get(&k).unwrap().as_sequence().unwrap().to_owned();
|
||||||
|
_b.append(&mut v.as_sequence().unwrap().to_owned());
|
||||||
|
a[&k] = serde_yaml::Value::from(_b);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if !a.contains_key(&k) {
|
||||||
|
a.insert(k.to_owned(), v.to_owned());
|
||||||
|
} else {
|
||||||
|
merge_yaml(&mut a[&k], v);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(a, b) => *a = b,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
fn y10n_from_valid_glob() {
    // Both fixture files (de.yml and en.yml) should be picked up by the glob
    let loaded = Y10n::from_glob("l10n/*.yml");
    assert_eq!(loaded.languages().len(), 2);
}
|
||||||
|
|
||||||
|
#[test]
fn y10n_localize() {
    use serde_yaml::Value;

    let y10n = Y10n::from_glob("l10n/*.yml");
    let en = Language::from("en").expect("Failed to parse!");
    let de = Language::from("de").expect("Failed to parse!");
    // German listed first, so its strings win; English fills in the gaps
    let value = y10n.localize(&[de, en]);
    if let Some(map) = value.as_mapping() {
        let key = "greeting".into();
        let greeting = map.get(&key).expect("Failed to find a greeting");
        assert_eq!(&Value::String("moin moin".to_string()), greeting);

        // "secret" only exists in en.yml, so it must come from the English fallback
        let secret = map.get(&"secret".into()).expect("Failed to find a secret");
        assert_eq!(&Value::String("pancakes".to_string()), secret);
    } else {
        // panic! over assert!(false, ..) — clippy: assertions_on_constants
        panic!("The value wasn't a map like I expected");
    }
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn language_from_segment() {
|
fn language_from_segment() {
|
||||||
let lang = Language::from("en-US");
|
let lang = Language::from("en-US");
|
||||||
|
@ -90,4 +241,3 @@ mod tests {
|
||||||
assert_eq!(0.3, de.quality);
|
assert_eq!(0.3, de.quality);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue