M Cargo.lock => Cargo.lock +235 -0
@@ 356,6 356,15 @@ dependencies = [
]
[[package]]
+name = "convert_case"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
name = "crossterm"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 406,10 415,14 @@ dependencies = [
"deunicode",
"dirs",
"figment",
+ "fixedstr",
"fs_extra",
"inquire",
"maud",
"miette",
+ "nanoid",
+ "nutype",
+ "rstest",
"serde",
"serde_json",
"time",
@@ 489,6 502,12 @@ dependencies = [
]
[[package]]
+name = "fixedstr"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3903955aba5314e4089fb47975345a580c88a746918cea1a59ee5e1418ba630"
+
+[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 510,12 529,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
[[package]]
+name = "futures"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
name = "futures-channel"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78"
dependencies = [
"futures-core",
+ "futures-sink",
]
[[package]]
@@ 525,6 560,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
[[package]]
+name = "futures-executor"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
+
+[[package]]
+name = "futures-macro"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
name = "futures-sink"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 537,15 600,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
[[package]]
+name = "futures-timer"
+version = "3.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24"
+
+[[package]]
name = "futures-util"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
dependencies = [
+ "futures-channel",
"futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
"futures-task",
+ "memchr",
"pin-project-lite",
"pin-utils",
+ "slab",
]
[[package]]
@@ 584,6 659,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
[[package]]
+name = "glob"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
+
+[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 730,6 811,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
[[package]]
+name = "kinded"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce4bdbb2f423660b19f0e9f7115182214732d8dd5f840cd0a3aee3e22562f34c"
+dependencies = [
+ "kinded_macros",
+]
+
+[[package]]
+name = "kinded_macros"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a13b4ddc5dcb32f45dac3d6f606da2a52fdb9964a18427e63cd5ef6c0d13288d"
+dependencies = [
+ "convert_case",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 893,6 995,15 @@ dependencies = [
]
[[package]]
+name = "nanoid"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ffa00dec017b5b1a8b7cf5e2c008bfda1aa7e0697ac1508b491fdf2622fb4d8"
+dependencies = [
+ "rand",
+]
+
+[[package]]
name = "newline-converter"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 947,6 1058,29 @@ dependencies = [
]
[[package]]
+name = "nutype"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "801187d4ee2f03db47daf0f5fc335a7b1b94f60f47942293060b762641b83f2e"
+dependencies = [
+ "nutype_macros",
+]
+
+[[package]]
+name = "nutype_macros"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e96467936d36285839340d692fcd974106d9bc203e36f55a477e0243737a8af7"
+dependencies = [
+ "cfg-if",
+ "kinded",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "urlencoding",
+]
+
+[[package]]
name = "object"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 1070,6 1204,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]]
+name = "ppv-lite86"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+
+[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 1124,6 1264,36 @@ dependencies = [
]
[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
name = "redox_syscall"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 1188,12 1358,56 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
[[package]]
+name = "relative-path"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2"
+
+[[package]]
+name = "rstest"
+version = "0.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d5316d2a1479eeef1ea21e7f9ddc67c191d497abc8fc3ba2467857abbb68330"
+dependencies = [
+ "futures",
+ "futures-timer",
+ "rstest_macros",
+ "rustc_version",
+]
+
+[[package]]
+name = "rstest_macros"
+version = "0.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04a9df72cc1f67020b0d63ad9bfe4a323e459ea7eb68e03bd9824db49f9a4c25"
+dependencies = [
+ "cfg-if",
+ "glob",
+ "proc-macro2",
+ "quote",
+ "regex",
+ "relative-path",
+ "rustc_version",
+ "syn",
+ "unicode-ident",
+]
+
+[[package]]
name = "rustc-demangle"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
+name = "rustc_version"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
+dependencies = [
+ "semver",
+]
+
+[[package]]
name = "rustix"
version = "0.38.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 1225,6 1439,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
+name = "semver"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+
+[[package]]
name = "serde"
version = "1.0.199"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 1317,6 1537,15 @@ dependencies = [
]
[[package]]
+name = "slab"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
name = "smallvec"
version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ 1702,6 1931,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6"
[[package]]
+name = "urlencoding"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
+
+[[package]]
name = "utf8parse"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
M Cargo.toml => Cargo.toml +11 -0
@@ 15,10 15,15 @@ categories = ["command-line-utilities", "games"]
[[bin]]
name = "ddd"
path = "src/bin/ddd/main.rs"
+
[[bin]]
name = "homepage"
path = "src/bin/homepage/main.rs"
+[[bin]]
+name = "windows-setup"
+path = "src/bin/windows-setup/main.rs"
+
[lib]
path = "src/lib.rs"
@@ 33,10 38,13 @@ comfy-table = "7.1.1"
deunicode = "1.4.4"
dirs = "5.0.1"
figment = { version = "0.10.18", features = ["env", "json"] }
+fixedstr = "0.5.6"
fs_extra = "1.3.0"
inquire = "0.7.4"
maud = { version = "0.26.0", features = ["axum"], optional = true }
miette = { version = "7.2.0", features = ["fancy"] }
+nanoid = "0.4.0"
+nutype = "0.4.2"
serde = { version = "1.0.198", features = ["derive"] }
serde_json = "1.0.116"
time = { version = "0.3.36", features = ["serde", "local-offset", "serde-well-known"] }
@@ 57,3 65,6 @@ homepage = ["dep:tokio", "dep:axum", "dep:maud", "dep:tower-http", "dep:tower"
[target.x86_64-pc-windows-gnu]
linker = "x86_64-w64-mingw32-gcc"
+
+[dev-dependencies]
+rstest = "0.19.0"
M src/bin/ddd/arguments.rs => src/bin/ddd/arguments.rs +5 -1
@@ 41,6 41,9 @@ pub enum Commands {
/// edit Items.
#[command(subcommand)]
Edit(EditCommands),
+
+ /// migrate Items.
+ Migrate,
}
#[derive(Clone, Debug, Subcommand)]
@@ 61,7 64,7 @@ pub enum NewCommands {
pub enum EditCommands {
Noun {
#[arg(short = 'i', long, value_name = "ID")]
- id: i128,
+ id: String,
#[arg(short = 'a', long, value_name = "ARTICLES")]
articles: Option<Vec<Article>>,
#[arg(short = 'w', long, value_name = "WORD")]
@@ 82,6 85,7 @@ pub enum ListCommands {
pub enum AskCommands {
Nouns,
Group,
+ Case,
}
#[derive(Clone, Debug, Subcommand)]
M src/bin/ddd/commands.rs => src/bin/ddd/commands.rs +285 -44
@@ 1,24 1,33 @@
use color_print::{cformat, cprintln};
use der_die_das::{
- attempt::{self, Attempt},
history::History,
- nouns::{self},
+ id::ID,
+ noun_attempt::{self, Conclusion, NounAttempt, NounAttemptSaved},
+ nouns::{self, SavedNoun},
+ storage::{nouns_attempts_repo::NounAttemptFile, nouns_repo::NounFile},
};
use inquire::{
validator::{ErrorMessage, StringValidator, Validation},
CustomUserError,
};
-use std::{fs, io::Read, path::PathBuf};
+use std::{
+ collections::HashMap,
+ fs::{self, read_to_string},
+ io::Read,
+ path::PathBuf,
+};
+use tracing::{debug, error, info, instrument, trace, warn};
use comfy_table::{Cell, Row};
-use der_die_das::nouns::{Article, Noun};
+use der_die_das::nouns::Article;
use deunicode::deunicode;
-use miette::{miette, IntoDiagnostic as _, Result};
+use miette::{miette, Context as _, IntoDiagnostic as _, Result};
use crate::{config::GroupConfiguration, importer};
pub fn new_noun(
- id: i128,
+ id: ID,
+ at: time::OffsetDateTime,
word: String,
meaning: String,
article: Vec<Article>,
@@ 31,20 40,26 @@ pub fn new_noun(
if meaning.is_empty() {
return Err(miette!("meaning must be at least 1 character"));
}
- storage.save_noun(der_die_das::nouns::Noun {
+ storage.save_noun(nouns::SavedNoun {
id,
- articles: article,
- word,
- meaning,
- group: group.unwrap_or("ungroup".to_owned()),
+ at,
+ noun: der_die_das::nouns::Noun {
+ articles: article,
+ word,
+ meaning,
+ group: group.unwrap_or("ungroup".to_owned()),
+ },
})?;
Ok(())
}
-pub fn list_nouns(noun_repo: impl nouns::Repo, attempt_repo: impl attempt::Repo) -> Result<()> {
+pub fn list_nouns(
+ noun_repo: impl nouns::Repo,
+ attempt_repo: impl noun_attempt::Repo,
+) -> Result<()> {
let h = History {
nouns: noun_repo.all_nouns()?,
- attempts: attempt_repo.all_attempts()?,
+ attempts: attempt_repo.all_noun_attempts()?,
};
h.grouped_confidence_map()
@@ 53,17 68,17 @@ pub fn list_nouns(noun_repo: impl nouns::Repo, attempt_repo: impl attempt::Repo)
let t = items
.iter()
.fold(comfy_table::Table::new(), |mut table, w| {
- let id_cell = Cell::new(w.0.id);
+ let id_cell = Cell::new(w.0.id.clone());
let article_cell =
- Cell::new(w.0.articles.iter().fold(String::new(), |s, a| {
+ Cell::new(w.0.noun.articles.iter().fold(String::new(), |s, a| {
if s.is_empty() {
a.to_string()
} else {
cformat!("<red,bold>{s}</red,bold>, {}", a)
}
}));
- let word_cell = Cell::new(&w.0.word);
- let meaning_cell = Cell::new(&w.0.meaning);
+ let word_cell = Cell::new(&w.0.noun.word);
+ let meaning_cell = Cell::new(&w.0.noun.meaning);
let confidence_cell = Cell::new(w.1);
let mut r = Row::new();
r.add_cell(id_cell)
@@ 147,14 162,16 @@ impl StringValidator for ValidateLastWordShouldMatchTheSpelling {
}
}
-fn ask_noun(attempt_repo: impl attempt::Repo, noun: &Noun) -> Result<()> {
- let prompt = cformat!("<reverse>___</> <reverse>{}</>\n>", noun.word);
+fn ask_noun(attempt_repo: impl noun_attempt::Repo, noun: &SavedNoun) -> Result<()> {
+ let prompt = cformat!("<reverse>___</> <reverse>{}</>\n>", noun.noun.word);
let resp = inquire::Text::new(&prompt)
- .with_help_message(&noun.meaning)
+ .with_help_message(&noun.noun.meaning)
.with_validators(&[
Box::new(ValidateShouldBeTwoWords {}),
Box::new(ValidateFirstWordShouldBeArticle {}),
- Box::new(ValidateLastWordShouldMatchTheSpelling(noun.word.clone())),
+ Box::new(ValidateLastWordShouldMatchTheSpelling(
+ noun.noun.word.clone(),
+ )),
])
.prompt()
.into_diagnostic()?
@@ 176,12 193,14 @@ fn ask_noun(attempt_repo: impl attempt::Repo, noun: &Noun) -> Result<()> {
}?;
let now = time::OffsetDateTime::now_utc();
- if noun.articles.contains(&article) {
- attempt_repo.save_attempt(Attempt {
- id: now.unix_timestamp_nanos(),
+ if noun.noun.articles.contains(&article) {
+ attempt_repo.save_noun_attempt(NounAttemptSaved {
+ id: ID::new(),
at: now,
- for_word: noun.id,
- what_happened: der_die_das::attempt::Conclusion::Success,
+ noun_attempt: noun_attempt::NounAttempt {
+ for_word: noun.id.clone(),
+ what_happened: der_die_das::noun_attempt::Conclusion::Success,
+ },
})?;
cprintln!("<green,bold>Correct</>");
Ok(())
@@ 189,18 208,21 @@ fn ask_noun(attempt_repo: impl attempt::Repo, noun: &Noun) -> Result<()> {
cprintln!("<red>Article did not match!</>");
cprintln!(
"correct article(s) is(are):\t<bold>{}</>",
- noun.articles
+ noun.noun
+ .articles
.iter()
.map(std::string::ToString::to_string)
.collect::<Vec<_>>()
.join(", ")
);
- attempt_repo.clone().save_attempt(Attempt {
- id: now.unix_timestamp_nanos(),
+ attempt_repo.clone().save_noun_attempt(NounAttemptSaved {
+ id: ID::new(),
at: now,
- for_word: noun.id,
- what_happened: der_die_das::attempt::Conclusion::WrongArticle(article),
+ noun_attempt: NounAttempt {
+ for_word: noun.id.clone(),
+ what_happened: der_die_das::noun_attempt::Conclusion::WrongArticle(article),
+ },
})?;
ask_noun(attempt_repo, noun)
}
@@ 208,25 230,25 @@ fn ask_noun(attempt_repo: impl attempt::Repo, noun: &Noun) -> Result<()> {
fn get_confidence(
noun_repo: impl nouns::Repo,
- attempt_repo: impl attempt::Repo,
- id: i128,
-) -> Result<(Noun, u8)> {
+ attempt_repo: impl noun_attempt::Repo,
+ id: &ID,
+) -> Result<(SavedNoun, u8)> {
let nouns = noun_repo.all_nouns()?;
- let attempts = attempt_repo.all_attempts()?;
+ let attempts = attempt_repo.all_noun_attempts()?;
let h = History { nouns, attempts };
h.confidence_map()
.into_iter()
- .find(|(n, _)| n.id == id)
+ .find(|(n, _)| n.id.eq(id))
.ok_or(miette!("Some how the word is not there any more!"))
}
pub fn ask_next(
noun_repo: impl nouns::Repo,
- attempt_repo: impl attempt::Repo,
+ attempt_repo: impl noun_attempt::Repo,
group_config: &GroupConfiguration,
) -> Result<()> {
let nouns = noun_repo.clone().all_nouns()?;
- let attempts = attempt_repo.clone().all_attempts()?;
+ let attempts = attempt_repo.clone().all_noun_attempts()?;
let h = History { nouns, attempts };
let chosen = if group_config.enable {
@@ 240,7 262,7 @@ pub fn ask_next(
ask_noun(attempt_repo.clone(), &chosen.0)?;
- let (_, conf) = get_confidence(noun_repo, attempt_repo, chosen.0.id)?;
+ let (_, conf) = get_confidence(noun_repo, attempt_repo, &chosen.0.id)?;
cprintln!(
"The confidence for the word is now at <bold,reverse>{}</>.",
@@ 252,11 274,11 @@ pub fn ask_next(
pub fn group_ask(
noun_repo: &impl nouns::Repo,
- attempt_repo: &impl attempt::Repo,
+ attempt_repo: &impl noun_attempt::Repo,
group_config: &GroupConfiguration,
) -> Result<()> {
let nouns = noun_repo.clone().all_nouns()?;
- let attempts = attempt_repo.clone().all_attempts()?;
+ let attempts = attempt_repo.clone().all_noun_attempts()?;
let h = History { nouns, attempts };
let gr = h
@@ 270,7 292,7 @@ pub fn group_ask(
ask_noun(attempt_repo.clone(), &noun)?;
- let (_, conf) = get_confidence(noun_repo.clone(), attempt_repo.clone(), noun.id)?;
+ let (_, conf) = get_confidence(noun_repo.clone(), attempt_repo.clone(), &noun.id)?;
cprintln!(
"The confidence for the word is now at <bold, reverse>{}</>.",
@@ 303,7 325,7 @@ pub fn import_nouns(noun_repo: &impl nouns::Repo, at: PathBuf) -> Result<()> {
imported_groups
.into_iter()
- .flat_map(Vec::<Noun>::from)
+ .flat_map(Vec::<SavedNoun>::from)
.try_for_each(|n| noun_repo.clone().save_noun(n))?;
Ok(())
@@ 311,7 333,7 @@ pub fn import_nouns(noun_repo: &impl nouns::Repo, at: PathBuf) -> Result<()> {
pub fn edit_noun(
noun_repo: impl nouns::Repo,
- id: i128,
+ id: ID,
article: Option<Vec<Article>>,
word: Option<String>,
meaning: Option<String>,
@@ 319,3 341,222 @@ pub fn edit_noun(
) -> Result<()> {
noun_repo.edit_noun_by_id(id, article, word, meaning, group)
}
+
+pub(crate) fn case_ask() -> Result<()> {
+ Err(miette!("Sorry, I have not implemented it, yet!"))
+}
+
+#[instrument]
+pub(crate) fn migrate(storage: &der_die_das::storage::Storage) -> Result<()> {
+ info!("starting migration");
+    /*
+    Gathering information
+    */
+
+ // nouns
+ let old_nouns_path = storage.0.join("words");
+
+ let new_nouns_path = storage.0.join("nouns");
+
+ if old_nouns_path.try_exists().into_diagnostic()? {
+ info!(
+ old_path = old_nouns_path.to_string_lossy().into_owned(),
+ "old nouns directory exist"
+ );
+ } else {
+ error!(
+ old_path = old_nouns_path.to_string_lossy().into_owned(),
+ "old nouns directory does not exist"
+ );
+ return Ok(());
+ };
+
+ if new_nouns_path.try_exists().into_diagnostic()? {
+ info!(
+ new_path = new_nouns_path.to_string_lossy().into_owned(),
+ "new nouns directory exist"
+ );
+ } else {
+ trace!(
+ new_path = new_nouns_path.to_string_lossy().into_owned(),
+ "Creating new nouns directory"
+ );
+ fs_extra::dir::create_all(&new_nouns_path, false).into_diagnostic()?;
+ debug!(
+ new_path = new_nouns_path.to_string_lossy().into_owned(),
+ "Created new nouns directory"
+ );
+ }
+
+ let old_nouns_files = std::fs::read_dir(old_nouns_path).into_diagnostic()?;
+ debug!("read old noun path");
+
+ let old_nouns = old_nouns_files
+ .filter_map(std::result::Result::ok)
+ .map(|f| f.path())
+ .map(read_to_string)
+ .try_fold(vec![], |mut accu, item| -> Result<_> {
+ accu.push(item.into_diagnostic()?);
+ Ok(accu)
+ })?;
+ debug!("read nouns to string");
+
+ let nouns_serialized =
+ old_nouns
+ .iter()
+ .map(String::as_str)
+ .try_fold(vec![], |mut accu, item| -> Result<_> {
+ let i = serde_json::from_str::<NounFile>(item)
+ .into_diagnostic()
+ .wrap_err_with(|| format!("item: {item:#?}"))?;
+ accu.push(i);
+ Ok(accu)
+ })?;
+
+ let nouns_with_new_id: HashMap<_, _> =
+ nouns_serialized
+ .iter()
+ .try_fold(HashMap::new(), |mut accu, i| -> Result<_> {
+ let old_id = match i {
+ NounFile::NounFileV1(item) => item.id,
+ NounFile::NounFileV2(item) => item.at.unix_timestamp_nanos(),
+ };
+
+ accu.insert(old_id, SavedNoun::from(i.to_owned()));
+
+ Ok(accu)
+ })?;
+
+ // attempts
+ let old_attempts_path = storage.0.join("attempts");
+
+ let new_attempts_path = storage.0.join("nouns_attempts");
+
+ if old_attempts_path.try_exists().into_diagnostic()? {
+ info!(
+ old_path = old_attempts_path.to_string_lossy().into_owned(),
+ "old noun attempts directory exist"
+ );
+ } else {
+ error!(
+ old_path = old_attempts_path.to_string_lossy().into_owned(),
+ "old noun attempts directory does not exist"
+ );
+ return Ok(());
+ };
+
+ let old_attempts_files = std::fs::read_dir(old_attempts_path).into_diagnostic()?;
+
+ let old_attempts = old_attempts_files
+ .filter_map(std::result::Result::ok)
+ .map(|f| f.path())
+ .map(read_to_string)
+ .try_fold(vec![], |mut accu, item| -> Result<_> {
+ accu.push(item.into_diagnostic()?);
+ Ok(accu)
+ })?;
+
+ let attempts_serialized = old_attempts
+ .iter()
+ .map(String::as_str)
+ .map(serde_json::from_str::<NounAttemptFile>)
+ .try_fold(vec![], |mut accu, item| -> Result<_> {
+ accu.push(item.into_diagnostic()?);
+ Ok(accu)
+ })?;
+
+ /*
+ Modernize
+ */
+ let attempts_modernized: Vec<_> =
+ attempts_serialized
+ .into_iter()
+ .try_fold(vec![], |mut accu, item| -> Result<_> {
+ let saved = match item {
+ NounAttemptFile::V1(item) => NounAttemptSaved {
+ id: ID::new(),
+ at: item.at,
+ noun_attempt: NounAttempt {
+ for_word: nouns_with_new_id
+ .get(&item.for_word)
+ .ok_or(miette!("word for attempt not found"))?
+ .id
+ .clone(),
+ what_happened: Conclusion::from(item.what_happened),
+ },
+ },
+ NounAttemptFile::V2(item) => NounAttemptSaved {
+ id: item.id,
+ at: item.at,
+ noun_attempt: NounAttempt {
+ for_word: item.for_word,
+ what_happened: Conclusion::from(item.what_happened),
+ },
+ },
+ };
+ accu.push(saved);
+ Ok(accu)
+ })?;
+
+ /*
+ Writing Files
+ */
+
+ if new_nouns_path.try_exists().into_diagnostic()? {
+ info!(
+ new_path = new_nouns_path.to_string_lossy().into_owned(),
+ "new noun directory exist"
+ );
+ } else {
+ trace!(
+ new_path = new_nouns_path.to_string_lossy().into_owned(),
+ "creating new noun directory"
+ );
+ fs_extra::dir::create_all(&new_nouns_path, false).into_diagnostic()?;
+ debug!(
+ new_path = new_nouns_path.to_string_lossy().into_owned(),
+ "created new noun directory"
+ );
+ }
+
+ nouns_with_new_id
+ .into_values()
+ .try_for_each(|a| -> Result<_> {
+ let file_path = new_nouns_path.join(format!("{}.json", a.id));
+ let new_content =
+ serde_json::to_string_pretty(&NounFile::from(a.clone())).into_diagnostic()?;
+ fs_extra::file::write_all(file_path, &new_content).into_diagnostic()?;
+ info!(file_id = a.id.as_ref(), "wrote the new noun file");
+
+ Ok(())
+ })?;
+
+ // noun attempts
+ if new_attempts_path.try_exists().into_diagnostic()? {
+ info!(
+ new_path = new_attempts_path.to_string_lossy().into_owned(),
+ "new noun attempts directory exist"
+ );
+ } else {
+ trace!(
+ new_path = new_attempts_path.to_string_lossy().into_owned(),
+ "creating new noun attempts directory"
+ );
+ fs_extra::dir::create_all(&new_attempts_path, false).into_diagnostic()?;
+ debug!(
+ new_path = new_attempts_path.to_string_lossy().into_owned(),
+ "created new noun attempts directory"
+ );
+ }
+ attempts_modernized.iter().try_for_each(|a| -> Result<_> {
+ let file_path = new_attempts_path.join(format!("{}.json", a.id));
+ let new_content =
+ serde_json::to_string_pretty(&NounAttemptFile::from(a.to_owned())).into_diagnostic()?;
+ fs_extra::file::write_all(file_path, &new_content).into_diagnostic()?;
+ info!(file_id = a.id.as_ref(), "wrote the new noun attempt file");
+
+ Ok(())
+ })?;
+
+ Ok(())
+}
M src/bin/ddd/importer.rs => src/bin/ddd/importer.rs +12 -8
@@ 1,5 1,6 @@
use der_die_das::{
- nouns::{Article, Noun},
+ id::ID,
+ nouns::{Article, Noun, SavedNoun},
storage::ArticleFileV1,
};
use serde::{Deserialize, Serialize};
@@ 17,19 18,22 @@ pub struct Group {
words: Vec<WordSet>,
}
-impl From<Group> for Vec<Noun> {
+impl From<Group> for Vec<SavedNoun> {
fn from(value: Group) -> Self {
let group_name = value.name;
value
.words
.into_iter()
- .map(|w| Noun {
- id: time::OffsetDateTime::now_utc().unix_timestamp_nanos(),
- articles: w.articles.into_iter().map(Article::from).collect(),
- word: w.word,
- meaning: w.meaning,
- group: group_name.clone(),
+ .map(|w| SavedNoun {
+ noun: Noun {
+ articles: w.articles.into_iter().map(Article::from).collect(),
+ word: w.word,
+ meaning: w.meaning,
+ group: group_name.clone(),
+ },
+ id: ID::new(),
+ at: time::OffsetDateTime::now_utc(),
})
.collect()
}
M src/bin/ddd/main.rs => src/bin/ddd/main.rs +17 -5
@@ 7,6 7,7 @@ use std::env;
use arguments::{Args, Commands, NewCommands};
use clap::{CommandFactory, Parser};
+use der_die_das::id::ID;
use miette::{miette, Error, Result};
use tracing::{debug, level_filters::LevelFilter};
use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
@@ 47,7 48,8 @@ fn main() -> Result<()> {
return Err(miette!("You should enter at least one article."));
}
commands::new_noun(
- time::OffsetDateTime::now_utc().unix_timestamp_nanos(),
+ ID::new(),
+ time::OffsetDateTime::now_utc(),
word,
meaning,
articles,
@@ 69,6 71,7 @@ fn main() -> Result<()> {
arguments::AskCommands::Group => {
commands::group_ask(&&storage, &&storage, &configs.group)
}
+ arguments::AskCommands::Case => commands::case_ask(),
},
Commands::Edit(edit_commands) => match edit_commands {
arguments::EditCommands::Noun {
@@ 77,17 80,24 @@ fn main() -> Result<()> {
meaning,
group,
id,
- } => commands::edit_noun(&storage, id, articles, word, meaning, group),
+ } => {
+ commands::edit_noun(&storage, ID::try_from(id)?, articles, word, meaning, group)
+ }
},
+ Commands::Migrate => commands::migrate(&storage),
},
- None => todo!(),
+ None => Err(miette!("This should not happen.")),
}?;
Ok(())
}
fn trace_install() {
- let fmt_layer = fmt::layer().with_target(false);
+ let fmt_layer = fmt::layer()
+ .pretty()
+ .without_time()
+ .with_file(false)
+ .with_target(false);
let (filter_layer, current_filter) = match EnvFilter::try_from_default_env() {
Err(_) => (EnvFilter::try_new("warn").unwrap(), LevelFilter::OFF),
@@ 100,7 110,9 @@ fn trace_install() {
let registry = tracing_subscriber::registry().with(filter_layer);
if current_filter == LevelFilter::OFF {
- registry.with(fmt_layer.without_time()).init();
+ registry
+ .with(fmt_layer.with_file(false).with_target(false).without_time())
+ .init();
} else {
registry.with(fmt_layer).init();
}
M src/bin/homepage/web.rs => src/bin/homepage/web.rs +2 -2
@@ 1,4 1,4 @@
-use der_die_das::{attempt, nouns};
+use der_die_das::{noun_attempt, nouns};
use crate::config::GroupConfiguration;
pub(crate) mod components;
@@ 6,7 6,7 @@ pub(crate) mod errors;
pub(crate) mod routes;
#[derive(Debug)]
-pub struct AppState<A: attempt::Repo, N: nouns::Repo> {
+pub struct AppState<A: noun_attempt::Repo, N: nouns::Repo> {
pub nouns_repo: N,
pub attempts_repo: A,
pub group_config: GroupConfiguration,
M src/bin/homepage/web/components.rs => src/bin/homepage/web/components.rs +20 -17
@@ 1,3 1,4 @@
+use der_die_das::id::ID;
use maud::{html, Markup, DOCTYPE};
use time::OffsetDateTime;
use tracing::instrument;
@@ 29,51 30,55 @@ pub fn body(header: &Markup, main: &Markup, footer: &Markup) -> Markup {
pub fn super_mark() -> Markup {
html! {
- div id="res" {
+ // div id="res" {
div id="super" {
- "SUPER!"
+ p {
+ "SUPER!"
+ }
}
- }
+ // }
}
}
pub fn sorry_mark() -> Markup {
html! {
- div id="res" {
+ // div id="res" {
div id="sorry" {
+ p {
"sorry! :("
+ }
}
- }
+ // }
}
}
pub fn new_day_new_word() -> Markup {
html! {
- div id="res" {
+ // div id="res" {
div id="new-day-new-word" {
- p id="new-day" {
- "new day"
- }
- p id="new-word" {
- "new word"
+ div id="x" {
+ p id="new-day" {
+ "new day"
+ }
+ p id="new-word" {
+ "new word"
+ }
}
}
- }
+ // }
}
}
pub fn quiz(question_box: &Markup, res_mark: &Markup) -> Markup {
html!(
- main {
div id="quiz" {
(question_box)
(res_mark)
}
- }
)
}
#[instrument]
-pub fn question_box(id: i128, word: &str, meaning: &str) -> Markup {
+pub fn question_box(id: ID, word: &str, meaning: &str) -> Markup {
tracing::debug!("haha");
html!(
form id="question-box"{
@@ 92,7 97,6 @@ pub fn question_box(id: i128, word: &str, meaning: &str) -> Markup {
pub fn header(date: OffsetDateTime, confidence: u8, how_many_words_today: usize) -> Markup {
html!(
- header {
div id="header-wrapper" {
div id="logo-wrapper" {
img id="logo" src="/logo.svg" width="350" alt="Der Die Das" {}
@@ 102,7 106,6 @@ pub fn header(date: OffsetDateTime, confidence: u8, how_many_words_today: usize)
(info_box(date, confidence,how_many_words_today ))
}
- }
)
}
M src/bin/homepage/web/errors.rs => src/bin/homepage/web/errors.rs +1 -1
@@ 7,7 7,7 @@ use maud::{html, Markup};
pub enum ErrorMessage {
Internal(miette::Report),
NoWordsAvailable(miette::Report),
- WordNotFound(miette::Report, i128),
+ WordNotFound(miette::Report, String),
}
fn error_message(input: &str) -> Markup {
M src/bin/homepage/web/routes.rs => src/bin/homepage/web/routes.rs +24 -21
@@ 6,8 6,9 @@ use axum::{
response::IntoResponse,
};
use der_die_das::{
- attempt,
history::History,
+ id::ID,
+ noun_attempt::{self, NounAttempt, NounAttemptSaved},
nouns::{self, Article},
};
use maud::{html, Markup};
@@ 54,7 55,7 @@ pub async fn htmx() -> impl IntoResponse {
}
#[instrument]
-pub async fn home<A: attempt::Repo, N: nouns::Repo>(
+pub async fn home<A: noun_attempt::Repo, N: nouns::Repo>(
State(storage): State<Arc<AppState<A, N>>>,
) -> Result<impl IntoResponse, ErrorMessage> {
let nouns = storage
@@ 66,7 67,7 @@ pub async fn home<A: attempt::Repo, N: nouns::Repo>(
let attempts = storage
.attempts_repo
.clone()
- .all_attempts()
+ .all_noun_attempts()
.map_err(ErrorMessage::Internal)?;
let h = History { nouns, attempts };
@@ 86,10 87,10 @@ pub async fn home<A: attempt::Repo, N: nouns::Repo>(
let body_generated = components::body(
&header_generated,
&components::quiz(
- &question_box(chosen.0.id, &chosen.0.word, &chosen.0.meaning),
+ &question_box(chosen.0.id, &chosen.0.noun.word, &chosen.0.noun.meaning),
&components::new_day_new_word(),
),
- &html!(footer {}),
+ &html!(),
);
let page_generated = components::page("DerDieDas", &body_generated);
@@ 100,11 101,11 @@ pub async fn home<A: attempt::Repo, N: nouns::Repo>(
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct Answer {
attempt: Article,
- word: i128,
+ word: String,
}
#[instrument]
-pub async fn answer<A: attempt::Repo, N: nouns::Repo>(
+pub async fn answer<A: noun_attempt::Repo, N: nouns::Repo>(
State(storage): State<Arc<AppState<A, N>>>,
Path(payload): Path<Answer>,
) -> Result<impl IntoResponse, ErrorMessage> {
@@ 118,21 119,21 @@ pub async fn answer<A: attempt::Repo, N: nouns::Repo>(
attempts: storage
.attempts_repo
.clone()
- .all_attempts()
+ .all_noun_attempts()
.map_err(ErrorMessage::Internal)?,
};
let words = h.confidence_map();
let (noun, confidence) = words
.iter()
- .find(|(n, _)| n.id == payload.word)
+ .find(|(n, _)| n.id.to_string() == payload.word)
.ok_or_else(|| ErrorMessage::WordNotFound(miette!("not found"), payload.word))?;
debug!("found noun: {:#?}", noun);
debug!(
"here is the result of contains: {}, here is the articles:{:#?}, here is the article: {:#?}",
- noun.articles.contains(&payload.attempt),
- noun.articles,
+ noun.noun.articles.contains(&payload.attempt),
+ noun.noun.articles,
payload.attempt,
);
let today_start = {
@@ 149,14 150,14 @@ pub async fn answer<A: attempt::Repo, N: nouns::Repo>(
.number_of_words_at(today_start, time::Duration::hours(24))
.map_err(ErrorMessage::Internal)?;
- if !noun.articles.contains(&payload.attempt) {
+ if !noun.noun.articles.contains(&payload.attempt) {
return Ok(body(
&components::header(time::OffsetDateTime::now_utc(), *confidence, num),
&components::quiz(
- &question_box(noun.id, &noun.word, &noun.meaning),
+ &question_box(noun.id.clone(), &noun.noun.word, &noun.noun.meaning),
&sorry_mark(),
),
- &html!(footer {}),
+ &html!(),
));
}
@@ 166,11 167,13 @@ pub async fn answer<A: attempt::Repo, N: nouns::Repo>(
storage
.attempts_repo
.clone()
- .save_attempt(attempt::Attempt {
- id: now.unix_timestamp_nanos(),
+ .save_noun_attempt(NounAttemptSaved {
+ id: ID::new(),
at: now,
- for_word: noun.id,
- what_happened: attempt::Conclusion::Success,
+ noun_attempt: NounAttempt {
+ for_word: noun.id.clone(),
+ what_happened: noun_attempt::Conclusion::Success,
+ },
})
.map_err(ErrorMessage::Internal)?;
@@ 183,7 186,7 @@ pub async fn answer<A: attempt::Repo, N: nouns::Repo>(
attempts: storage
.attempts_repo
.clone()
- .all_attempts()
+ .all_noun_attempts()
.map_err(ErrorMessage::Internal)?,
};
@@ 200,9 203,9 @@ pub async fn answer<A: attempt::Repo, N: nouns::Repo>(
Ok(body(
&components::header(time::OffsetDateTime::now_utc(), n.1, num),
&components::quiz(
- &question_box(n.0.id, &n.0.word, &n.0.meaning),
+ &question_box(n.0.id, &n.0.noun.word, &n.0.noun.meaning),
&super_mark(),
),
- &html!(footer {}),
+ &html!(),
))
}
M src/bin/homepage/web/style.css => src/bin/homepage/web/style.css +329 -166
@@ 1,197 1,360 @@
-body {
- display: flex;
- justify-content: center;
- align-content: center;
- align-items: center;
- flex-direction: column;
-}
+@media (max-width: 700px) {
+ body {
+ display: flex;
+ flex-direction: column;
+ gap: 5rem;
-header,
-main,
-footer {
- padding: 1em;
- width: 90%;
- display: flex;
- flex-direction: column;
- justify-content: center;
- align-content: center;
- align-items: center;
-}
+ header {
+ #header-wrapper {
+ border: 1px solid rgba(139, 42, 0, 1);
+ border-radius: 1rem;
+ box-shadow: -0.1rem 0.1rem 0;
+ padding: 1rem;
-#header-wrapper {
- width: 70em;
- display: flex;
- flex-direction: row;
- justify-content: space-between;
- align-content: center;
- align-items: center;
- border: 1px solid rgba(139, 42, 0, 1);
- padding: 1em;
- border-radius: 1em;
- max-height: 50em;
- box-shadow: -0.1em 0.1em 0;
-}
+ #logo-wrapper {
+ display: flex;
+ justify-items: center;
+ justify-content: center;
-#statistics-box {
- display: flex;
- flex-direction: column;
- justify-content: stretch;
- align-content: center;
- align-items: center;
- padding: 1em;
- border: 1px solid rgba(139, 42, 0, 1);
- border-radius: 1em;
- font-size: 1.5em;
- font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
- color: rgba(139, 42, 0, 1);
- box-shadow: -0.1em 0.1em 0;
-}
+ #logo {
+ display: flex;
+ object-fit: contain;
+ height: 100px;
+ }
+ }
-#statistics-box hr {
- width: 90%;
+ #statistics-box {
+ display: flex;
+ flex-direction: row;
+ color: rgba(139, 42, 0, 1);
+      font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ font-weight: 700;
+ }
+ }
+ }
+ main {
+ #quiz {
+ width: 100%;
- color: rgba(139, 42, 0, 1);
-}
+ #question-box {
+ border: 1px solid rgba(139, 42, 0, 1);
+ box-shadow: -0.1rem 0.1rem 0;
+ border-radius: 1rem;
+ padding: 1rem;
-#logo-wrapper {
- padding: 1em;
-}
+ .buttons {
+ display: flex;
+ flex-direction: row;
+ justify-items: center;
+ justify-content: space-between;
+ /* padding: 1rem; */
+ column-gap: 1rem;
+ gap: 1rem;
-#logo {
- display: flex;
- object-fit: contain;
- max-height: 340px;
+ .article-button {
+ font-size: 1rem;
+ padding: 1rem;
+ border: 1px solid rgba(139, 42, 0, 1);
+ background-color: rgba(221, 233, 212, 1);
+ color: rgba(139, 42, 0, 1);
-}
+ border-radius: 1rem;
+ box-shadow: -0.1rem 0.1rem 0;
+ transition: all ease 0.5s;
+ min-width: 10rem;
-#quiz {
- width: 72em;
- max-width: none;
- display: flex;
- flex-direction: row;
- justify-content: space-between;
- align-content: center;
- align-items: center;
- transition: all ease 0.5s;
-}
+ }
-#new-day-new-word {
- flex-grow: 102;
- display: flex;
- flex-direction: column;
- justify-items: stretch;
- align-items: center;
- column-gap: 0px;
- padding: 0.7em;
- font-size: 3.5em;
- font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
- color: rgba(139, 42, 0, 1);
-}
+ .article-button:hover {
+ font-weight: bold;
+ border-style: solid;
+ box-shadow: -0.5rem 0.5rem 0;
+ transition: all ease 0.5s;
+ }
+ }
-#new-day {
- margin-block-start: 0em;
- margin-block-end: 0em;
-}
+ .word-box {
-#new-word {
- margin-block-start: 0em;
- margin-block-end: 0em;
- transition: all ease 0.5s;
-}
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ align-content: center;
+ align-items: center;
+ padding: 2rem;
+ margin-block-end: 0;
+ margin-block-start: 0;
-#new-word:hover {
- transition: all ease 0.5s;
- transform: rotate(10deg);
-}
+ #meaning {
+ font-size: 2rem;
+          font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ color: rgba(139, 42, 0, 1);
+ margin-block-end: 0;
+ margin-block-start: 0;
+ }
-#sorry,
-#super {
- display: flex;
- flex-direction: column;
- justify-items: stretch;
- align-items: center;
- font-size: 4em;
- padding: 1em;
- font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
- color: rgba(139, 42, 0, 1);
-}
+ #word {
+ font-size: 4rem;
+ font-weight: 700;
+          font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ color: rgba(139, 42, 0, 1);
+ margin-block-end: 0;
+ margin-block-start: 0;
-#question-box {
- display: flex;
- border: 1px solid rgba(139, 42, 0, 1);
- border-radius: 1em;
- align-content: center;
- align-items: center;
- max-width: none;
- min-width: 50%;
- justify-content: space-evenly;
- box-shadow: -0.1em 0.1em 0;
+ :hover {
-}
+ text-decoration-style: dashed;
+ text-decoration-line: underline;
+ transition: all ease 0.5s;
+ }
+ }
+ }
+ }
-.buttons {
- display: flex;
- flex-direction: column;
- padding: 1em;
- column-gap: 1em;
- gap: 1em;
-}
-.article-button {
- font-size: 1em;
- padding: 1em;
+ }
- border: 1px solid rgba(139, 42, 0, 1);
- background-color: rgba(221, 233, 212, 1);
- color: rgba(139, 42, 0, 1);
+ #new-day-new-word {
+ #x {
+ display: flex;
+ flex-direction: row;
+ justify-items: center;
+ justify-content: space-between;
+ text-align: center;
- border-radius: 1em;
- box-shadow: -0.1em 0.1em 0;
- transition: all ease 0.5s;
- min-width: 10em;
+ #new-day,
+ #new-word {
+        font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
-}
+ padding: 0.7rem;
+ font-size: 3.5rem;
+ color: rgba(139, 42, 0, 1);
+ margin-block-end: 0;
+ margin-block-start: 0;
+ }
-.article-button:hover {
- font-weight: bold;
- border-style: solid;
- box-shadow: -0.5em 0.5em 0;
- transition: all ease 0.5s;
-}
+ }
-.word-box {
- display: flex;
- flex-direction: column;
- justify-content: center;
- align-content: center;
- align-items: center;
- padding: 2em;
- margin-block-end: 0;
- margin-block-start: 0;
-}
+ }
-#word {
- font-size: 4em;
- font-weight: 700;
- font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
- color: rgba(139, 42, 0, 1);
- margin-block-end: 0;
- margin-block-start: 0;
-}
-#meaning {
- font-size: 2em;
- font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
- color: rgba(139, 42, 0, 1);
- margin-block-end: 0;
- margin-block-start: 0;
+ }
+ }
+
+ footer {}
}
-#word:hover {
- text-decoration-style: dashed;
- text-decoration-line: underline;
- transition: all ease 0.5s;
+
+
+
+@media (min-width: 700px) {
+ body {
+ padding: 0 15rem 15rem;
+ display: flex;
+ justify-content: center;
+ overflow-x: hidden;
+ align-content: center;
+ align-items: center;
+ flex-direction: column;
+ }
+
+ header,
+ main,
+ footer {
+ width: 100%;
+ padding: 0;
+ margin: 0;
+ display: flex;
+ flex-direction: column;
+ gap: 0 20px;
+ /* justify-content: center; */
+ /* align-content: center; */
+    /* align-items: center; */
+
+ }
+
+ #header-wrapper {
+ width: 100%;
+ justify-content: space-between;
+ align-items: center;
+ display: flex;
+ padding: 1rem;
+ flex-wrap: wrap;
+ border: 1px solid rgba(139, 42, 0, 1);
+ border-radius: 1rem;
+ box-shadow: -0.1rem 0.1rem 0;
+ }
+
+ #statistics-box {
+ display: flex;
+ flex-direction: column;
+ justify-content: stretch;
+ align-content: center;
+ align-items: center;
+ padding: 1rem;
+ border: 1px solid rgba(139, 42, 0, 1);
+ border-radius: 1rem;
+ font-size: 1.5rem;
+    font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ color: rgba(139, 42, 0, 1);
+ box-shadow: -0.1rem 0.1rem 0;
+
+
+ hr {
+
+ width: 90%;
+ color: rgba(139, 42, 0, 1);
+ }
+ }
+
+
+
+
+
+ #logo-wrapper {
+ #logo {
+ display: flex;
+ object-fit: contain;
+ max-height: 340px;
+ }
+ }
+
+ #quiz {
+ width: 100%;
+ display: flex;
+ flex-direction: row;
+ justify-content: space-between;
+ align-content: center;
+ align-items: center;
+ transition: all ease 0.5s;
+ margin-top: 1rem;
+ }
+
+ #new-day-new-word {
+ /* background: red; */
+ display: flex;
+ width: 100%;
+ /* flex-direction: column; */
+    /* justify-items: stretch; */
+ justify-content: center;
+ align-items: center;
+ text-align: center;
+
+ /* column-gap: 0px; */
+    font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ padding: 0.7rem;
+ font-size: 3.5rem;
+ color: rgba(139, 42, 0, 1);
+ }
+
+ #new-day {
+ margin-block-start: 0rem;
+ margin-block-end: 0rem;
+ }
+
+ #new-word {
+ margin-block-start: 0rem;
+ margin-block-end: 0rem;
+ transition: all ease 0.5s;
+ }
+
+ #new-word:hover {
+ transition: all ease 0.5s;
+ transform: rotate(10deg);
+ }
+
+ #sorry,
+ #super {
+ justify-content: center;
+ align-items: center;
+ text-align: center;
+ font-size: 4rem;
+ width: 100%;
+ /* padding: 1rem; */
+    font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ color: rgba(139, 42, 0, 1);
+ }
+
+ #question-box {
+ display: flex;
+ border: 1px solid rgba(139, 42, 0, 1);
+ box-shadow: -0.1rem 0.1rem 0;
+ border-radius: 1rem;
+ align-content: center;
+ align-items: center;
+ max-width: none;
+ min-width: 50%;
+ justify-content: space-evenly;
+
+ }
+
+ .buttons {
+ display: flex;
+ flex-direction: column;
+ padding: 1rem;
+ column-gap: 1rem;
+ gap: 1rem;
+ }
+
+ .article-button {
+ font-size: 1rem;
+ padding: 1rem;
+
+ border: 1px solid rgba(139, 42, 0, 1);
+ background-color: rgba(221, 233, 212, 1);
+ color: rgba(139, 42, 0, 1);
+
+ border-radius: 1rem;
+ box-shadow: -0.1rem 0.1rem 0;
+ transition: all ease 0.5s;
+ /* min-width: 10rem; */
+
+ }
+
+ .article-button:hover {
+ font-weight: bold;
+ border-style: solid;
+ box-shadow: -0.5rem 0.5rem 0;
+ transition: all ease 0.5s;
+ }
+
+ .word-box {
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ align-content: center;
+ align-items: center;
+ padding: 2rem;
+ margin-block-end: 0;
+ margin-block-start: 0;
+
+ #meaning {
+ font-size: 2rem;
+      font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ color: rgba(139, 42, 0, 1);
+ margin-block-end: 0;
+ margin-block-start: 0;
+ }
+
+ #word {
+ font-size: 4rem;
+ font-weight: 700;
+      font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ color: rgba(139, 42, 0, 1);
+ margin-block-end: 0;
+ margin-block-start: 0;
+
+ :hover {
+
+ text-decoration-style: dashed;
+ text-decoration-line: underline;
+ transition: all ease 0.5s;
+ }
+ }
+ }
+
+
+
}=
\ No newline at end of file
A src/bin/windows-setup/main.rs => src/bin/windows-setup/main.rs +75 -0
@@ 0,0 1,75 @@
+use std::{
+ io::Write as _,
+ path::{Path, PathBuf},
+};
+
+use fs_extra::{
+ dir::create_all,
+ file::{copy, CopyOptions},
+};
+use miette::{IntoDiagnostic as _, Result};
+
+const PROGRAM_FILES_LOCATION: &str = "C:/Program Files/ddd/";
+const STARTUP_FOLDER_LOCATION: &str =
+ "C:/ProgramData/Microsoft/Windows/Start Menu/Programs/StartUp";
+
+/// Installer entry point.
+///
+/// Refuses to run on anything but Windows, then copies the bundled
+/// `homepage.exe` (found next to this installer) into Program Files and
+/// registers a startup script so it launches at login.
+fn main() -> Result<()> {
+    // `std::env::consts::OS` is a compile-time constant for the target OS.
+    if std::env::consts::OS != "windows" {
+        return Err(miette::miette!(
+            "This installation file is not designed for anything but microsoft Windows."
+        ));
+    }
+
+    let exe_path = std::env::current_exe().into_diagnostic()?;
+    println!(
+        "current exe location is: {}",
+        exe_path.to_string_lossy()
+    );
+
+    let installer_dir = exe_path
+        .parent()
+        .ok_or(miette::miette!("where is this exe located at?!"))?;
+
+    let working_dir = std::env::current_dir().into_diagnostic()?;
+    println!(
+        "current working directory is: {}",
+        working_dir.to_string_lossy()
+    );
+
+    let installed_homepage = setup_program_files(installer_dir)?;
+    setup_startup(&installed_homepage)?;
+
+    Ok(())
+}
+
+/// Copies `homepage.exe` from the installer's directory into the `ddd`
+/// folder under Program Files, creating that folder first if needed.
+///
+/// Returns the full path of the installed executable.
+///
+/// # Errors
+///
+/// Fails if the destination directory cannot be created or the copy fails
+/// (missing source file, insufficient permissions, ...).
+fn setup_program_files(exe_dir: &Path) -> Result<PathBuf> {
+    let program_files_ddd = PathBuf::from(PROGRAM_FILES_LOCATION);
+    let homepage_path = program_files_ddd.join("homepage.exe");
+
+    create_all(&program_files_ddd, false).into_diagnostic()?;
+    copy(
+        exe_dir.join("homepage.exe"),
+        &homepage_path,
+        &CopyOptions {
+            // Re-running the installer should refresh the binary, so we
+            // overwrite. `skip_exist: true` together with `overwrite: true`
+            // was contradictory (overwrite wins in fs_extra); make the
+            // intent explicit.
+            overwrite: true,
+            skip_exist: false,
+            buffer_size: 1024,
+        },
+    )
+    .into_diagnostic()?;
+    Ok(homepage_path)
+}
+
+/// Registers the installed `homepage.exe` to run at login by writing a
+/// `homepage.bat` wrapper into the machine-wide Startup folder.
+///
+/// # Errors
+///
+/// Fails when the batch file cannot be created or written.
+fn setup_startup(home_page_path: &Path) -> Result<()> {
+    let batch_path = PathBuf::from(STARTUP_FOLDER_LOCATION).join("homepage.bat");
+    let batch_contents = format!(
+        "start \"homepage\" \"{}\"",
+        home_page_path.to_string_lossy()
+    );
+    let mut batch_file = std::fs::File::create(batch_path).into_diagnostic()?;
+    batch_file
+        .write_all(batch_contents.as_bytes())
+        .into_diagnostic()
+}
A src/cases.rs => src/cases.rs +90 -0
@@ 0,0 1,90 @@
+use std::fmt::Display;
+
+/// The six word forms a German definite article can take.
+#[derive(
+    Debug, Clone, Copy, clap::ValueEnum, PartialEq, Eq, Hash, serde::Deserialize, serde::Serialize,
+)]
+#[serde(rename_all = "lowercase")]
+pub enum ArticleWords {
+    Der,
+    Die,
+    Das,
+    Dem,
+    Den,
+    Des,
+}
+
+/// The four German grammatical cases.
+#[derive(
+    Debug, Clone, Copy, clap::ValueEnum, PartialEq, Eq, Hash, serde::Deserialize, serde::Serialize,
+)]
+#[serde(rename_all = "lowercase")]
+pub enum Case {
+    Nominative,
+    Accusative,
+    Dative,
+    Genitive,
+}
+
+/// Grammatical gender of a noun. `Plural` is included as a fourth column,
+/// mirroring how German article tables are usually laid out.
+#[derive(
+    Debug, Clone, Copy, clap::ValueEnum, PartialEq, Eq, Hash, serde::Deserialize, serde::Serialize,
+)]
+#[serde(rename_all = "lowercase")]
+pub enum Gender {
+    Masculine,
+    Feminine,
+    Neuter,
+    Plural,
+}
+
+/// One cell of the German definite-article table: which article word is
+/// used for a given (gender, case) pair.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Deserialize, serde::Serialize)]
+pub struct Articles {
+    pub article: ArticleWords,
+    pub gender: Gender,
+    pub case: Case,
+}
+
+impl Articles {
+    /// Builds a table entry from its three coordinates.
+    #[must_use]
+    pub const fn new(article: ArticleWords, gender: Gender, case: Case) -> Self {
+        Self {
+            article,
+            gender,
+            case,
+        }
+    }
+}
+
+/// Renders the article in lowercase, exactly as it appears in German text.
+impl Display for ArticleWords {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let word = match self {
+            Self::Der => "der",
+            Self::Die => "die",
+            Self::Das => "das",
+            Self::Dem => "dem",
+            Self::Den => "den",
+            Self::Des => "des",
+        };
+        f.write_str(word)
+    }
+}
+
+/// The complete definite-article table: 4 cases x 4 gender columns
+/// (masculine, feminine, neuter, plural), 16 entries in total.
+pub const ALL_ARTICLES: [Articles; 16] = [
+    // Nominative row
+    Articles::new(ArticleWords::Der, Gender::Masculine, Case::Nominative),
+    Articles::new(ArticleWords::Die, Gender::Feminine, Case::Nominative),
+    Articles::new(ArticleWords::Das, Gender::Neuter, Case::Nominative),
+    Articles::new(ArticleWords::Die, Gender::Plural, Case::Nominative),
+    // Accusative row
+    Articles::new(ArticleWords::Den, Gender::Masculine, Case::Accusative),
+    Articles::new(ArticleWords::Die, Gender::Feminine, Case::Accusative),
+    Articles::new(ArticleWords::Das, Gender::Neuter, Case::Accusative),
+    Articles::new(ArticleWords::Die, Gender::Plural, Case::Accusative),
+    // Dative row
+    Articles::new(ArticleWords::Dem, Gender::Masculine, Case::Dative),
+    Articles::new(ArticleWords::Der, Gender::Feminine, Case::Dative),
+    Articles::new(ArticleWords::Dem, Gender::Neuter, Case::Dative),
+    Articles::new(ArticleWords::Den, Gender::Plural, Case::Dative),
+    // Genitive row
+    Articles::new(ArticleWords::Des, Gender::Masculine, Case::Genitive),
+    Articles::new(ArticleWords::Der, Gender::Feminine, Case::Genitive),
+    Articles::new(ArticleWords::Des, Gender::Neuter, Case::Genitive),
+    Articles::new(ArticleWords::Der, Gender::Plural, Case::Genitive),
+];
A src/db.rs => src/db.rs +0 -0
M src/history.rs => src/history.rs +27 -25
@@ 2,16 2,16 @@ use std::collections::HashMap;
use time::{Duration, OffsetDateTime};
-use crate::{attempt::Attempt, nouns::Noun};
+use crate::{noun_attempt::NounAttemptSaved, nouns::SavedNoun};
pub struct History {
- pub nouns: Vec<Noun>,
- pub attempts: Vec<Attempt>,
+ pub nouns: Vec<SavedNoun>,
+ pub attempts: Vec<NounAttemptSaved>,
}
impl History {
#[must_use]
- pub fn attempt_per_noun(&self) -> Vec<(Noun, Vec<Attempt>)> {
+ pub fn attempt_per_noun(&self) -> Vec<(SavedNoun, Vec<NounAttemptSaved>)> {
let n: Vec<_> = self
.nouns
.iter()
@@ 20,7 20,7 @@ impl History {
.attempts
.iter()
.filter_map(|s| {
- if s.for_word == n.id {
+ if s.noun_attempt.for_word == n.id {
Some(s.clone())
} else {
None
@@ 34,33 34,35 @@ impl History {
}
#[must_use]
- pub fn confidence_map(&self) -> Vec<(Noun, u8)> {
+ pub fn confidence_map(&self) -> Vec<(SavedNoun, u8)> {
self.attempt_per_noun()
.iter()
.map(|s| {
- let confidence = s.1.iter().fold(0u8, |confidence, a| match a.what_happened {
- crate::attempt::Conclusion::Success => match confidence {
- u8::MAX => confidence,
- _ => confidence + 1,
- },
- crate::attempt::Conclusion::WrongArticle(_) => match confidence {
- u8::MIN => confidence,
- _ => confidence - 1,
- },
- });
+ let confidence =
+ s.1.iter()
+ .fold(0u8, |confidence, a| match a.noun_attempt.what_happened {
+ crate::noun_attempt::Conclusion::Success => match confidence {
+ u8::MAX => confidence,
+ _ => confidence + 1,
+ },
+ crate::noun_attempt::Conclusion::WrongArticle(_) => match confidence {
+ u8::MIN => confidence,
+ _ => confidence - 1,
+ },
+ });
(s.0.clone(), confidence)
})
.collect()
}
#[must_use]
- pub fn grouped_confidence_map(&self) -> HashMap<String, Vec<(Noun, u8)>> {
+ pub fn grouped_confidence_map(&self) -> HashMap<String, Vec<(SavedNoun, u8)>> {
self.confidence_map().into_iter().fold(
HashMap::new(),
- |mut acc: HashMap<String, Vec<(Noun, u8)>>, (n, c)| {
- match acc.get_mut(&n.group) {
+ |mut acc: HashMap<String, Vec<(SavedNoun, u8)>>, (n, c)| {
+ match acc.get_mut(&n.noun.group) {
Some(cu) => cu.push((n, c)),
- None => _ = acc.insert(n.group.clone(), vec![(n, c)]),
+ None => _ = acc.insert(n.noun.group.clone(), vec![(n, c)]),
};
acc
},
@@ 68,7 70,7 @@ impl History {
}
#[must_use]
- pub fn next(&self) -> Option<(Noun, u8)> {
+ pub fn next(&self) -> Option<(SavedNoun, u8)> {
let mut sets = self.confidence_map();
sets.sort_unstable_by_key(|s| s.1);
@@ 77,7 79,7 @@ impl History {
}
#[must_use]
- pub fn next_group(&self, threshold: u8) -> Option<Vec<(Noun, u8)>> {
+ pub fn next_group(&self, threshold: u8) -> Option<Vec<(SavedNoun, u8)>> {
let sets = self.grouped_confidence_map();
if sets.is_empty() {
return None;
@@ 98,7 100,7 @@ impl History {
}
#[must_use]
- pub fn next_with_group(&self, threshold: u8) -> Option<(Noun, u8)> {
+ pub fn next_with_group(&self, threshold: u8) -> Option<(SavedNoun, u8)> {
let sets = self.grouped_confidence_map();
if sets.is_empty() {
return None;
@@ 140,8 142,8 @@ impl History {
.iter()
.filter(|a| a.at.ge(&from_time) && a.at.lt(&to_time))
.fold(vec![], |mut accu, att| {
- if !accu.contains(&att.for_word) {
- accu.push(att.for_word);
+ if !accu.contains(&att.noun_attempt.for_word) {
+ accu.push(att.noun_attempt.for_word.clone());
}
accu
});
A src/id.rs => src/id.rs +106 -0
@@ 0,0 1,106 @@
+use std::fmt::Display;
+
+/// Characters an [`ID`] may contain: ASCII digits and lowercase letters.
+const ALPHABET: [char; 36] = [
+    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
+    'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
+];
+
+/// Exact number of characters in every [`ID`].
+const LENGTH: usize = 5;
+
+/// A short random identifier: exactly [`LENGTH`] characters drawn from
+/// [`ALPHABET`].
+#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize, Hash)]
+pub struct ID(String);
+
+impl AsRef<String> for ID {
+    fn as_ref(&self) -> &String {
+        &self.0
+    }
+}
+
+impl Display for ID {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.0)
+    }
+}
+
+impl TryFrom<&str> for ID {
+    type Error = miette::Report;
+
+    /// Validates that `id` is exactly [`LENGTH`] characters long and that
+    /// every character is in [`ALPHABET`].
+    fn try_from(id: &str) -> Result<Self, Self::Error> {
+        if id.len() != LENGTH {
+            return Err(miette::miette!(
+                "the length of an id should be {LENGTH} characters, instead it was {}. the id was {id}.",
+                id.len()
+            ));
+        }
+
+        id.chars().try_for_each(|ch| {
+            if !ALPHABET.contains(&ch) {
+                return Err(miette::miette!(
+                    "The id is invalid. It should not have this character: {ch}, the full id {id}",
+                ));
+            }
+            Ok(())
+        })?;
+        Ok(Self(id.to_string()))
+    }
+}
+
+impl TryFrom<String> for ID {
+    type Error = miette::Report;
+
+    fn try_from(id: String) -> Result<Self, Self::Error> {
+        ID::try_from(id.as_str())
+    }
+}
+
+impl ID {
+    /// Generates a fresh random id using `nanoid` over [`ALPHABET`].
+    #[must_use]
+    pub fn new() -> Self {
+        Self(nanoid::nanoid!(LENGTH, &ALPHABET))
+    }
+}
+
+impl Default for ID {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use rstest::rstest;
+
+    use super::*;
+
+    #[test]
+    fn new_id_test() {
+        for i in 0..=1 {
+            let ids: Vec<_> = (0..5_000).map(|_| ID::new()).collect();
+
+            // `Vec::dedup` only drops *consecutive* duplicates, which random
+            // ids essentially never are, so the old dedup-based check was
+            // vacuous. A set catches a duplicate anywhere in the batch.
+            let unique: std::collections::HashSet<_> = ids.iter().collect();
+
+            assert_eq!(ids.len(), unique.len(), "{i}");
+        }
+    }
+
+    #[rstest]
+    #[case::no_letter("", false)]
+    #[case::short_length("hell", false)]
+    #[case::long_length("hell00", false)]
+    #[case::invalid_characters1("Hell0", false)]
+    #[case::invalid_characters2("@ell0", false)]
+    #[case::invalid_characters3("_ell0", false)]
+    #[case::valid_mixed("hell0", true)]
+    #[case::valid_letters("hello", true)]
+    #[case::valid_digits("12384", true)]
+    fn validate_id(#[case] input: &str, #[case] should_be_valid: bool) {
+        assert_eq!(ID::try_from(input).is_ok(), should_be_valid);
+    }
+}
M src/lib.rs => src/lib.rs +5 -1
@@ 1,4 1,8 @@
-pub mod attempt;
+pub mod cases;
pub mod history;
+pub mod id;
+pub mod noun_attempt;
pub mod nouns;
+pub mod object;
+pub mod sentences;
pub mod storage;
R src/attempt.rs => src/noun_attempt.rs +14 -9
@@ 1,12 1,17 @@
use miette::Result;
-use crate::nouns::Article;
+use crate::{id::ID, nouns::Article};
#[derive(Debug, Clone)]
-pub struct Attempt {
- pub id: i128,
+pub struct NounAttemptSaved {
+ pub id: ID,
pub at: time::OffsetDateTime,
- pub for_word: i128,
+ pub noun_attempt: NounAttempt,
+}
+
+#[derive(Debug, Clone)]
+pub struct NounAttempt {
+ pub for_word: ID,
pub what_happened: Conclusion,
}
@@ 23,14 28,14 @@ pub trait Repo: Clone + Send + Sync + std::fmt::Debug {
///
/// This function will return an error if the IO fails or the item already
/// Exists.
- fn save_attempt(self, attempt: Attempt) -> Result<()>;
+ fn save_noun_attempt(self, attempt: NounAttemptSaved) -> Result<()>;
/// Returns all the attempts
///
/// # Errors
///
/// When IO failed.
- fn all_attempts(self) -> Result<Vec<Attempt>>;
+ fn all_noun_attempts(self) -> Result<Vec<NounAttemptSaved>>;
/// Find an attempt by its ID.
///
@@ 38,7 43,7 @@ pub trait Repo: Clone + Send + Sync + std::fmt::Debug {
///
/// This function will return an error if the item was not found or there
/// was an IO failure .
- fn find_attempt_by_id(self, id: i128) -> Result<Attempt>;
+ fn find_noun_attempt_by_id(self, id: ID) -> Result<NounAttemptSaved>;
/// Finds the attempts for an specific noun.
///
@@ 46,7 51,7 @@ pub trait Repo: Clone + Send + Sync + std::fmt::Debug {
///
/// This function will return an error if the item was not found or there
/// was an IO failure .
- fn find_attempt_by_noun_id(self, id: i128) -> Result<Vec<Attempt>>;
+ fn find_noun_attempt_by_noun_id(self, id: ID) -> Result<Vec<NounAttemptSaved>>;
/// Deletes an attempt by its ID.
///
@@ 54,5 59,5 @@ pub trait Repo: Clone + Send + Sync + std::fmt::Debug {
///
/// This function will return an error if the item was not found or there
/// was an IO failure .
- fn delete_attempt_by_id(self, id: i128) -> Result<()>;
+ fn delete_noun_attempt_by_id(self, id: ID) -> Result<()>;
}
M src/nouns.rs => src/nouns.rs +25 -7
@@ 1,16 1,34 @@
use std::fmt::Display;
use miette::Result;
+use serde::{Deserialize, Serialize};
-#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+use crate::id::ID;
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub struct Noun {
- pub id: i128,
pub articles: Vec<Article>,
pub word: String,
pub meaning: String,
pub group: String,
}
+/// A [`Noun`] as persisted in storage: the noun itself plus its generated
+/// id and the time it was added.
+// NOTE(review): unlike `Noun`, this does not derive Serialize/Deserialize —
+// confirm the storage layer (de)serializes the pieces separately.
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct SavedNoun {
+    pub id: ID,
+    pub at: time::OffsetDateTime,
+    pub noun: Noun,
+}
+impl From<Noun> for SavedNoun {
+    /// Stamps a bare [`Noun`] with a fresh random [`ID`] and the current
+    /// UTC timestamp, making it ready for persistence.
+    fn from(noun: Noun) -> Self {
+        SavedNoun {
+            id: ID::new(),
+            at: time::OffsetDateTime::now_utc(),
+            noun,
+        }
+    }
+}
+
#[derive(
Debug, Clone, Copy, clap::ValueEnum, PartialEq, Eq, Hash, serde::Deserialize, serde::Serialize,
)]
@@ 42,14 60,14 @@ pub trait Repo: Clone + Send + Sync + std::fmt::Debug {
///
/// This function will return an error if the noun with the same ID already
/// exist or there is an IO failure.
- fn save_noun(self, noun: Noun) -> Result<()>;
+ fn save_noun(self, noun: SavedNoun) -> Result<()>;
/// Returns all the nouns
///
/// # Errors
///
/// This function will return an error if there is an IO Failure.
- fn all_nouns(self) -> Result<Vec<Noun>>;
+ fn all_nouns(self) -> Result<Vec<SavedNoun>>;
/// Find noun by specific ID
///
@@ 57,7 75,7 @@ pub trait Repo: Clone + Send + Sync + std::fmt::Debug {
///
/// This function will return an error if a noun by that ID does not exist
/// or there is an IO failure.
- fn find_noun_by_id(self, id: i128) -> Result<Noun>;
+ fn find_noun_by_id(self, id: ID) -> Result<SavedNoun>;
/// Delete the noun by its ID
///
@@ 65,7 83,7 @@ pub trait Repo: Clone + Send + Sync + std::fmt::Debug {
///
/// This function will return an error if a noun by that ID does not exist
/// or there is an IO failure.
- fn delete_noun_by_id(self, id: i128) -> Result<()>;
+ fn delete_noun_by_id(self, id: ID) -> Result<()>;
/// Edit noun
///
@@ 75,7 93,7 @@ pub trait Repo: Clone + Send + Sync + std::fmt::Debug {
/// or there is an IO failure.
fn edit_noun_by_id(
self,
- id: i128,
+ id: ID,
article: Option<Vec<Article>>,
word: Option<String>,
meaning: Option<String>,
A src/object.rs => src/object.rs +1 -0
A src/sentences.rs => src/sentences.rs +43 -0
@@ 0,0 1,43 @@
+use miette::Result;
+
+use crate::{cases::Articles, id::ID};
+
+/// A fill-in-the-blank sentence: the text `before` the gap, the text
+/// `after` it, and the article/gender/case combination that fills the gap.
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct Sentence {
+    pub before: String,
+    pub after: String,
+    pub article_cases: Articles,
+}
+
+/// A [`Sentence`] as persisted in storage, together with its generated id
+/// and the time it was added.
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct SavedSentence {
+    pub id: ID,
+    pub added: time::OffsetDateTime,
+    pub sentence: Sentence,
+}
+
+/// Stamps a bare [`Sentence`] with a fresh random id and the current UTC
+/// timestamp, making it ready for persistence.
+impl From<Sentence> for SavedSentence {
+    fn from(sentence: Sentence) -> Self {
+        let id = ID::new();
+        let added = time::OffsetDateTime::now_utc();
+        Self {
+            id,
+            added,
+            sentence,
+        }
+    }
+}
+
+/// Persistence boundary for sentences.
+// NOTE(review): unlike `nouns::Repo` and `noun_attempt::Repo`, this trait
+// has no `Clone + Send + Sync + std::fmt::Debug` supertraits — confirm
+// whether that is intentional before putting it behind shared web state.
+pub trait Repo {
+    /// Saves a sentence.
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if there are IO errors.
+    fn save_sentence(self, sentence: SavedSentence) -> Result<()>;
+
+    /// Returns all the sentences found.
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if there was an IO error.
+    fn all_sentences(self) -> Result<Vec<SavedSentence>>;
+}
M src/storage.rs => src/storage.rs +39 -407
@@ 1,38 1,46 @@
-use std::{path::PathBuf, sync::Arc};
+pub mod nouns_attempts_repo;
+pub mod nouns_repo;
+pub mod sentence_attempts_repo;
+pub mod sentence_repo;
+use std::path::PathBuf;
-use miette::{miette, Context as _, IntoDiagnostic as _, Result};
+use miette::{miette, Context as _, IntoDiagnostic as _, Report, Result};
use serde::{Deserialize, Serialize};
-use tracing::debug;
+use tracing::{info, instrument};
-use crate::{
- attempt::{self, Attempt, Conclusion},
- nouns::{self, Article, Noun},
-};
+use crate::nouns::Article;
#[derive(Debug, Clone)]
-pub struct Storage(PathBuf);
+pub struct Storage(pub PathBuf);
impl Storage {
- fn attempts_path(&self) -> PathBuf {
- self.0.join("attempts")
+ fn nouns_attempts_path(&self) -> PathBuf {
+ self.0.join("nouns_attempts")
}
- fn words_path(&self) -> PathBuf {
- self.0.join("words")
+ fn nouns_path(&self) -> PathBuf {
+ self.0.join("nouns")
+ }
+ fn sentence_path(&self) -> PathBuf {
+ self.0.join("sentences")
+ }
+ #[allow(unused)]
+ fn sentence_attempts_path(&self) -> PathBuf {
+ self.0.join("sentence_attempts")
}
}
impl TryFrom<PathBuf> for Storage {
- type Error = miette::Report;
-
+ type Error = Report;
+ #[instrument]
fn try_from(at: PathBuf) -> Result<Self> {
if !at
.try_exists()
.into_diagnostic()
.wrap_err_with(|| "somethig good!")?
{
- println!(
- "{} does not exist yet. I will create a directory, there.",
- at.to_string_lossy()
+ info!(
+ directory = at.to_string_lossy().into_owned(),
+ "storage directory does not exist yet. creating it right now."
);
fs_extra::dir::create_all(&at, false).into_diagnostic()?;
};
@@ 45,206 53,30 @@ impl TryFrom<PathBuf> for Storage {
};
let storage = Storage(at);
- if !storage.words_path().try_exists().into_diagnostic()? {
- println!(
- "{} does not exist yet. I will create a directory, there.",
- storage.words_path().to_string_lossy()
+ if !storage.nouns_path().try_exists().into_diagnostic()? {
+ info!(
+ directory = storage.nouns_path().to_string_lossy().into_owned(),
+ "storage nouns directory does not exist yet. creating it right now."
);
- fs_extra::dir::create_all(storage.words_path(), false).into_diagnostic()?;
- }
-
- if !storage.attempts_path().try_exists().into_diagnostic()? {
- println!(
- "{} does not exist yet. I will create a directory, there.",
- storage.attempts_path().to_string_lossy()
- );
- fs_extra::dir::create_all(storage.attempts_path(), false).into_diagnostic()?;
- }
-
- Ok(storage)
- }
-}
-impl nouns::Repo for Storage {
- fn save_noun(self, noun: Noun) -> Result<()> {
- (&self).save_noun(noun)
- }
-
- fn all_nouns(self) -> Result<Vec<Noun>> {
- (&self).all_nouns()
- }
-
- fn find_noun_by_id(self, id: i128) -> Result<Noun> {
- (&self).find_noun_by_id(id)
- }
-
- fn delete_noun_by_id(self, id: i128) -> Result<()> {
- (&self).delete_noun_by_id(id)
- }
-
- fn edit_noun_by_id(
- self,
- id: i128,
- article: Option<Vec<Article>>,
- word: Option<String>,
- meaning: Option<String>,
- group: Option<String>,
- ) -> Result<()> {
- (&self).edit_noun_by_id(id, article, word, meaning, group)
- }
-}
-impl nouns::Repo for Arc<Storage> {
- fn save_noun(self, noun: Noun) -> Result<()> {
- self.as_ref().save_noun(noun)
- }
-
- fn all_nouns(self) -> Result<Vec<Noun>> {
- self.as_ref().all_nouns()
- }
-
- fn find_noun_by_id(self, id: i128) -> Result<Noun> {
- self.as_ref().find_noun_by_id(id)
- }
-
- fn delete_noun_by_id(self, id: i128) -> Result<()> {
- self.as_ref().delete_noun_by_id(id)
- }
-
- fn edit_noun_by_id(
- self,
- id: i128,
- article: Option<Vec<Article>>,
- word: Option<String>,
- meaning: Option<String>,
- group: Option<String>,
- ) -> Result<()> {
- self.as_ref()
- .edit_noun_by_id(id, article, word, meaning, group)
- }
-}
-
-impl nouns::Repo for &Storage {
- fn save_noun(self, noun: nouns::Noun) -> Result<()> {
- let file_name = format!("{}.json", noun.id);
-
- let file_path = self.words_path().join(&file_name);
-
- if file_path.try_exists().into_diagnostic()? {
- return Err(miette!(format!("a file with at {} exist", &file_name)));
+ fs_extra::dir::create_all(storage.nouns_path(), false).into_diagnostic()?;
}
- let json_content = serde_json::to_string_pretty(&NounFile::from(noun)).into_diagnostic()?;
-
- fs_extra::file::write_all(file_path, &json_content).into_diagnostic()?;
-
- Ok(())
- }
-
- fn all_nouns(self) -> Result<Vec<Noun>> {
- Ok(std::fs::read_dir(self.words_path())
+ if !storage
+ .nouns_attempts_path()
+ .try_exists()
.into_diagnostic()?
- .filter_map(std::result::Result::ok)
- .filter_map(|e| {
- let at = e.path();
- match at.extension().map(|ext| ext.to_str()) {
- Some(Some("json")) => Some(at),
-
- _ => None,
- }
- })
- .map(std::fs::read)
- .filter_map(core::result::Result::ok)
- .map(|content| serde_json::from_slice::<NounFile>(&content))
- .filter_map(core::result::Result::ok)
- .map(Noun::from)
- .collect::<Vec<_>>())
- }
-
- fn find_noun_by_id(self, id: i128) -> Result<nouns::Noun> {
- let file = self.words_path().join(format!("{id}.json"));
- if !file.try_exists().into_diagnostic()? {
- return Err(miette!("could not find a word with that id."));
- }
- let content = std::fs::read(file).into_diagnostic()?;
- let noun = Noun::from(serde_json::from_slice::<NounFile>(&content).into_diagnostic()?);
-
- Ok(noun)
- }
-
- fn delete_noun_by_id(self, id: i128) -> Result<()> {
- let file = self.words_path().join(format!("{id}.json"));
- if !file.try_exists().into_diagnostic()? {
- return Err(miette!("could not find a word with that id."));
- }
- let content = std::fs::read(file.clone()).into_diagnostic()?;
- let noun = Noun::from(serde_json::from_slice::<NounFile>(&content).into_diagnostic()?);
- tracing::debug!("deleting: {noun:#?}");
-
- std::fs::remove_file(&file).into_diagnostic()?;
- Ok(())
- }
-
- fn edit_noun_by_id(
- self,
- id: i128,
- article: Option<Vec<Article>>,
- word: Option<String>,
- meaning: Option<String>,
- group: Option<String>,
- ) -> Result<()> {
- let mut n = self.clone().find_noun_by_id(id)?;
-
- if let Some(x) = article {
- debug!(
- "editing article. previous: {:#?}, current: {:#?}",
- n.articles, x
- );
- n.articles = x;
- };
-
- if let Some(x) = word {
- debug!("editing word. previous: {:#?}, current: {:#?}", n.word, x);
- n.word = x;
- };
-
- if let Some(x) = meaning {
- debug!(
- "editing meaning. previous: {:#?}, current: {:#?}",
- n.meaning, x
+ {
+ info!(
+ directory = storage.nouns_attempts_path().to_string_lossy().into_owned(),
+ "storage nouns attempts directory does not exist yet. creating it right now."
);
- n.meaning = x;
- };
- if let Some(x) = group {
- debug!("editing group. previous: {:#?}, current: {:#?}", n.group, x);
- n.group = x;
- };
-
- let file = self.words_path().join(format!("{id}.json"));
-
- if !file.try_exists().into_diagnostic()? {
- return Err(miette!("could not find a word with that id."));
+ fs_extra::dir::create_all(storage.nouns_attempts_path(), false).into_diagnostic()?;
}
- let json_content = serde_json::to_string_pretty(&NounFile::from(n)).into_diagnostic()?;
-
- fs_extra::file::write_all(file, &json_content).into_diagnostic()?;
- Ok(())
+ Ok(storage)
}
}
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum NounFile {
- NounFileV1(NounFileV1),
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct NounFileV1 {
- pub id: i128,
- pub article: Vec<ArticleFileV1>,
- pub word: String,
- pub meaning: String,
- pub group: String,
-}
-
#[derive(Debug, Clone, Copy, clap::ValueEnum, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum ArticleFileV1 {
@@ 271,206 103,6 @@ impl From<Article> for ArticleFileV1 {
}
}
}
-impl From<Noun> for NounFile {
- fn from(value: Noun) -> Self {
- NounFile::NounFileV1(NounFileV1 {
- id: value.id,
- article: value
- .articles
- .into_iter()
- .map(ArticleFileV1::from)
- .collect(),
- word: value.word,
- meaning: value.meaning,
- group: value.group,
- })
- }
-}
-impl From<NounFile> for Noun {
- fn from(value: NounFile) -> Self {
- match value {
- NounFile::NounFileV1(n) => Noun {
- id: n.id,
- articles: n.article.into_iter().map(Article::from).collect(),
- word: n.word,
- meaning: n.meaning,
- group: n.group,
- },
- }
- }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct AttemptFileV1 {
- id: i128,
- at: time::OffsetDateTime,
- for_word: i128,
- what_happened: AttemptResultFileV1,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum AttemptResultFileV1 {
- Success,
- WrongArticle(ArticleFileV1),
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum AttemptFile {
- V1(AttemptFileV1),
-}
-
-impl From<AttemptFile> for Attempt {
- fn from(a: AttemptFile) -> Self {
- match a {
- AttemptFile::V1(att) => Self {
- id: att.id,
- at: att.at,
- for_word: att.for_word,
- what_happened: att.what_happened.into(),
- },
- }
- }
-}
-
-impl From<AttemptResultFileV1> for Conclusion {
- fn from(ar: AttemptResultFileV1) -> Self {
- match ar {
- AttemptResultFileV1::Success => Conclusion::Success,
- AttemptResultFileV1::WrongArticle(art) => Conclusion::WrongArticle(art.into()),
- }
- }
-}
-
-impl From<Attempt> for AttemptFile {
- fn from(a: Attempt) -> Self {
- Self::V1(AttemptFileV1 {
- id: a.id,
- at: a.at,
- for_word: a.for_word,
- what_happened: a.what_happened.into(),
- })
- }
-}
-
-impl From<Conclusion> for AttemptResultFileV1 {
- fn from(ar: Conclusion) -> Self {
- match ar {
- Conclusion::Success => AttemptResultFileV1::Success,
- Conclusion::WrongArticle(art) => AttemptResultFileV1::WrongArticle(art.into()),
- }
- }
-}
-
-impl attempt::Repo for Arc<Storage> {
- fn save_attempt(self, attempt: Attempt) -> Result<()> {
- self.as_ref().save_attempt(attempt)
- }
-
- fn all_attempts(self) -> Result<Vec<Attempt>> {
- self.as_ref().all_attempts()
- }
-
- fn find_attempt_by_id(self, id: i128) -> Result<Attempt> {
- self.as_ref().find_attempt_by_id(id)
- }
-
- fn find_attempt_by_noun_id(self, id: i128) -> Result<Vec<Attempt>> {
- self.as_ref().find_attempt_by_noun_id(id)
- }
-
- fn delete_attempt_by_id(self, id: i128) -> Result<()> {
- self.as_ref().delete_attempt_by_id(id)
- }
-}
-
-impl attempt::Repo for Storage {
- fn save_attempt(self, attempt: Attempt) -> Result<()> {
- (&self).save_attempt(attempt)
- }
-
- fn all_attempts(self) -> Result<Vec<Attempt>> {
- (&self).all_attempts()
- }
-
- fn find_attempt_by_id(self, id: i128) -> Result<Attempt> {
- (&self).find_attempt_by_id(id)
- }
-
- fn find_attempt_by_noun_id(self, id: i128) -> Result<Vec<Attempt>> {
- (&self).find_attempt_by_noun_id(id)
- }
-
- fn delete_attempt_by_id(self, id: i128) -> Result<()> {
- (&self).delete_attempt_by_id(id)
- }
-}
-impl attempt::Repo for &Storage {
- fn save_attempt(self, a: Attempt) -> Result<()> {
- let file_name = format!("{}.json", a.id);
-
- let file_path = self.attempts_path().join(&file_name);
-
- if file_path.try_exists().into_diagnostic()? {
- return Err(miette!("a file with at {} exist", &file_name));
- }
-
- let json_content = serde_json::to_string_pretty(&AttemptFile::from(a)).into_diagnostic()?;
-
- fs_extra::file::write_all(file_path, &json_content).into_diagnostic()?;
-
- Ok(())
- }
-
- fn all_attempts(self) -> Result<Vec<Attempt>> {
- Ok(std::fs::read_dir(self.attempts_path())
- .into_diagnostic()?
- .filter_map(std::result::Result::ok)
- .filter_map(|e| {
- let at = e.path();
- match at.extension().map(|ext| ext.to_str()) {
- Some(Some("json")) => Some(at),
-
- _ => None,
- }
- })
- .map(std::fs::read)
- .filter_map(core::result::Result::ok)
- .map(|content| serde_json::from_slice::<AttemptFile>(&content))
- .filter_map(core::result::Result::ok)
- .map(Attempt::from)
- .collect::<Vec<_>>())
- }
-
- fn find_attempt_by_id(self, id: i128) -> Result<Attempt> {
- let file = self.attempts_path().join(format!("{id}.json"));
- if !file.try_exists().into_diagnostic()? {
- return Err(miette!("could not find an attempt with that id."));
- }
- let content = std::fs::read(file).into_diagnostic()?;
- let a = Attempt::from(serde_json::from_slice::<AttemptFile>(&content).into_diagnostic()?);
-
- Ok(a)
- }
-
- fn find_attempt_by_noun_id(self, id: i128) -> Result<Vec<Attempt>> {
- let a = self.all_attempts()?;
- let s = a.into_iter().filter(|t| t.for_word.eq(&id)).collect();
- Ok(s)
- }
-
- fn delete_attempt_by_id(self, id: i128) -> Result<()> {
- let file = self.attempts_path().join(format!("{id}.json"));
- if !file.try_exists().into_diagnostic()? {
- return Err(miette!("could not find a attempt with that id."));
- }
- let content = std::fs::read(file.clone()).into_diagnostic()?;
- let a = Attempt::from(serde_json::from_slice::<AttemptFile>(&content).into_diagnostic()?);
- tracing::debug!("deleting: {a:#?}");
-
- std::fs::remove_file(&file).into_diagnostic()?;
- Ok(())
- }
-}
#[cfg(test)]
mod tests {
A src/storage/nouns_attempts_repo.rs => src/storage/nouns_attempts_repo.rs +210 -0
@@ 0,0 1,210 @@
+use std::sync::Arc;
+
+use miette::{miette, IntoDiagnostic as _, Result};
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ id::ID,
+ noun_attempt::{self, Conclusion, NounAttempt, NounAttemptSaved},
+};
+
+use super::{ArticleFileV1, Storage};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct NounAttemptFileV2 {
+ pub id: ID,
+ pub at: time::OffsetDateTime,
+ pub for_word: ID,
+ pub what_happened: NounAttemptResultFileV1,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct NounAttemptFileV1 {
+ pub id: i128,
+ pub at: time::OffsetDateTime,
+ pub for_word: i128,
+ pub what_happened: NounAttemptResultFileV1,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub enum NounAttemptResultFileV1 {
+ Success,
+ WrongArticle(ArticleFileV1),
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub enum NounAttemptFile {
+ V1(NounAttemptFileV1),
+ V2(NounAttemptFileV2),
+}
+
+impl From<NounAttemptFile> for NounAttemptSaved {
+ fn from(a: NounAttemptFile) -> Self {
+ match a {
+ NounAttemptFile::V1(att) => Self {
+ id: ID::new(),
+ at: att.at,
+ noun_attempt: NounAttempt {
+ for_word: ID::new(), //TODO
+ what_happened: att.what_happened.into(),
+ },
+ },
+ NounAttemptFile::V2(att) => Self {
+ id: att.id,
+ at: att.at,
+ noun_attempt: NounAttempt {
+ for_word: att.for_word,
+ what_happened: att.what_happened.into(),
+ },
+ },
+ }
+ }
+}
+
+impl From<NounAttemptResultFileV1> for Conclusion {
+ fn from(ar: NounAttemptResultFileV1) -> Self {
+ match ar {
+ NounAttemptResultFileV1::Success => Conclusion::Success,
+ NounAttemptResultFileV1::WrongArticle(art) => Conclusion::WrongArticle(art.into()),
+ }
+ }
+}
+
+impl From<NounAttemptSaved> for NounAttemptFile {
+ fn from(a: NounAttemptSaved) -> Self {
+ Self::V2(NounAttemptFileV2 {
+ id: a.id,
+ at: a.at,
+ for_word: a.noun_attempt.for_word,
+ what_happened: a.noun_attempt.what_happened.into(),
+ })
+ }
+}
+
+impl From<Conclusion> for NounAttemptResultFileV1 {
+ fn from(ar: Conclusion) -> Self {
+ match ar {
+ Conclusion::Success => NounAttemptResultFileV1::Success,
+ Conclusion::WrongArticle(art) => NounAttemptResultFileV1::WrongArticle(art.into()),
+ }
+ }
+}
+
+impl noun_attempt::Repo for Arc<Storage> {
+ fn save_noun_attempt(self, attempt: NounAttemptSaved) -> Result<()> {
+ self.as_ref().save_noun_attempt(attempt)
+ }
+
+ fn all_noun_attempts(self) -> Result<Vec<NounAttemptSaved>> {
+ self.as_ref().all_noun_attempts()
+ }
+
+ fn find_noun_attempt_by_id(self, id: ID) -> Result<NounAttemptSaved> {
+ self.as_ref().find_noun_attempt_by_id(id)
+ }
+
+ fn find_noun_attempt_by_noun_id(self, id: ID) -> Result<Vec<NounAttemptSaved>> {
+ self.as_ref().find_noun_attempt_by_noun_id(id)
+ }
+
+ fn delete_noun_attempt_by_id(self, id: ID) -> Result<()> {
+ self.as_ref().delete_noun_attempt_by_id(id)
+ }
+}
+
+impl noun_attempt::Repo for Storage {
+ fn save_noun_attempt(self, attempt: NounAttemptSaved) -> Result<()> {
+ (&self).save_noun_attempt(attempt)
+ }
+
+ fn all_noun_attempts(self) -> Result<Vec<NounAttemptSaved>> {
+ (&self).all_noun_attempts()
+ }
+
+ fn find_noun_attempt_by_id(self, id: ID) -> Result<NounAttemptSaved> {
+ (&self).find_noun_attempt_by_id(id)
+ }
+
+ fn find_noun_attempt_by_noun_id(self, id: ID) -> Result<Vec<NounAttemptSaved>> {
+ (&self).find_noun_attempt_by_noun_id(id)
+ }
+
+ fn delete_noun_attempt_by_id(self, id: ID) -> Result<()> {
+ (&self).delete_noun_attempt_by_id(id)
+ }
+}
+impl noun_attempt::Repo for &Storage {
+ fn save_noun_attempt(self, a: NounAttemptSaved) -> Result<()> {
+ let file_name = format!("{}.json", a.id);
+
+ let file_path = self.nouns_attempts_path().join(&file_name);
+
+ if file_path.try_exists().into_diagnostic()? {
+            return Err(miette!("a file at {} already exists", &file_name));
+ }
+
+ let json_content =
+ serde_json::to_string_pretty(&NounAttemptFile::from(a)).into_diagnostic()?;
+
+ fs_extra::file::write_all(file_path, &json_content).into_diagnostic()?;
+
+ Ok(())
+ }
+
+ fn all_noun_attempts(self) -> Result<Vec<NounAttemptSaved>> {
+ Ok(std::fs::read_dir(self.nouns_attempts_path())
+ .into_diagnostic()?
+ .filter_map(std::result::Result::ok)
+ .filter_map(|e| {
+ let at = e.path();
+ match at.extension().map(|ext| ext.to_str()) {
+ Some(Some("json")) => Some(at),
+
+ _ => None,
+ }
+ })
+ .map(std::fs::read)
+ .filter_map(core::result::Result::ok)
+ .map(|content| serde_json::from_slice::<NounAttemptFile>(&content))
+ .filter_map(core::result::Result::ok)
+ .map(NounAttemptSaved::from)
+ .collect::<Vec<_>>())
+ }
+
+ fn find_noun_attempt_by_id(self, id: ID) -> Result<NounAttemptSaved> {
+ let file = self.nouns_attempts_path().join(format!("{id}.json"));
+ if !file.try_exists().into_diagnostic()? {
+ return Err(miette!("could not find an attempt with that id."));
+ }
+ let content = std::fs::read(file).into_diagnostic()?;
+ let a = NounAttemptSaved::from(
+ serde_json::from_slice::<NounAttemptFile>(&content).into_diagnostic()?,
+ );
+
+ Ok(a)
+ }
+
+ fn find_noun_attempt_by_noun_id(self, id: ID) -> Result<Vec<NounAttemptSaved>> {
+ let a = self.all_noun_attempts()?;
+ let s = a
+ .into_iter()
+ .filter(|t| t.noun_attempt.for_word.eq(&id))
+ .collect();
+ Ok(s)
+ }
+
+ fn delete_noun_attempt_by_id(self, id: ID) -> Result<()> {
+ let file = self.nouns_attempts_path().join(format!("{id}.json"));
+ if !file.try_exists().into_diagnostic()? {
+            return Err(miette!("could not find an attempt with that id."));
+ }
+ let content = std::fs::read(file.clone()).into_diagnostic()?;
+ let a = NounAttemptSaved::from(
+ serde_json::from_slice::<NounAttemptFile>(&content).into_diagnostic()?,
+ );
+ tracing::debug!("deleting: {a:#?}");
+
+ std::fs::remove_file(&file).into_diagnostic()?;
+ Ok(())
+ }
+}
A src/storage/nouns_repo.rs => src/storage/nouns_repo.rs +254 -0
@@ 0,0 1,254 @@
+use std::sync::Arc;
+
+use miette::{miette, IntoDiagnostic as _, Result};
+use serde::{Deserialize, Serialize};
+use tracing::debug;
+
+use crate::{
+ id::ID,
+ nouns::{self, Article, Noun, SavedNoun},
+};
+
+use super::{ArticleFileV1, Storage};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub enum NounFile {
+ NounFileV1(NounFileV1),
+ NounFileV2(NounFileV2),
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct NounFileV1 {
+ pub id: i128,
+ pub article: Vec<ArticleFileV1>,
+ pub word: String,
+ pub meaning: String,
+ pub group: String,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct NounFileV2 {
+ pub id: ID,
+ pub at: time::OffsetDateTime,
+ pub article: Vec<ArticleFileV1>,
+ pub word: String,
+ pub meaning: String,
+ pub group: String,
+}
+
+impl From<SavedNoun> for NounFile {
+ fn from(value: SavedNoun) -> Self {
+ NounFile::NounFileV2(NounFileV2 {
+ id: value.id,
+ article: value
+ .noun
+ .articles
+ .into_iter()
+ .map(ArticleFileV1::from)
+ .collect(),
+ word: value.noun.word,
+ meaning: value.noun.meaning,
+ group: value.noun.group,
+ at: value.at,
+ })
+ }
+}
+impl From<NounFile> for SavedNoun {
+ fn from(value: NounFile) -> Self {
+ match value {
+ NounFile::NounFileV2(n) => SavedNoun {
+ noun: Noun {
+ articles: n.article.into_iter().map(Article::from).collect(),
+ word: n.word,
+ meaning: n.meaning,
+ group: n.group,
+ },
+ id: n.id,
+ at: n.at,
+ },
+ NounFile::NounFileV1(n) => SavedNoun {
+ noun: Noun {
+ articles: n.article.into_iter().map(Article::from).collect(),
+ word: n.word,
+ meaning: n.meaning,
+ group: n.group,
+ },
+ id: ID::new(),
+                at: time::OffsetDateTime::from_unix_timestamp_nanos(n.id)
+                    .expect("corrupt noun file: legacy V1 id is not a valid unix timestamp in nanoseconds"),
+ },
+ }
+ }
+}
+
+impl nouns::Repo for Storage {
+ fn save_noun(self, noun: SavedNoun) -> Result<()> {
+ (&self).save_noun(noun)
+ }
+
+ fn all_nouns(self) -> Result<Vec<SavedNoun>> {
+ (&self).all_nouns()
+ }
+
+ fn find_noun_by_id(self, id: ID) -> Result<SavedNoun> {
+ (&self).find_noun_by_id(id)
+ }
+
+ fn delete_noun_by_id(self, id: ID) -> Result<()> {
+ (&self).delete_noun_by_id(id)
+ }
+
+ fn edit_noun_by_id(
+ self,
+ id: ID,
+ article: Option<Vec<Article>>,
+ word: Option<String>,
+ meaning: Option<String>,
+ group: Option<String>,
+ ) -> Result<()> {
+ (&self).edit_noun_by_id(id, article, word, meaning, group)
+ }
+}
+impl nouns::Repo for Arc<Storage> {
+ fn save_noun(self, noun: SavedNoun) -> Result<()> {
+ self.as_ref().save_noun(noun)
+ }
+
+ fn all_nouns(self) -> Result<Vec<SavedNoun>> {
+ self.as_ref().all_nouns()
+ }
+
+ fn find_noun_by_id(self, id: ID) -> Result<SavedNoun> {
+ self.as_ref().find_noun_by_id(id)
+ }
+
+ fn delete_noun_by_id(self, id: ID) -> Result<()> {
+ self.as_ref().delete_noun_by_id(id)
+ }
+
+ fn edit_noun_by_id(
+ self,
+ id: ID,
+ article: Option<Vec<Article>>,
+ word: Option<String>,
+ meaning: Option<String>,
+ group: Option<String>,
+ ) -> Result<()> {
+ self.as_ref()
+ .edit_noun_by_id(id, article, word, meaning, group)
+ }
+}
+
+impl nouns::Repo for &Storage {
+ fn save_noun(self, noun: nouns::SavedNoun) -> Result<()> {
+ let file_name = format!("{}.json", noun.id);
+
+ let file_path = self.nouns_path().join(&file_name);
+
+ if file_path.try_exists().into_diagnostic()? {
+            return Err(miette!("a file at {} already exists", &file_name));
+ }
+
+ let json_content = serde_json::to_string_pretty(&NounFile::from(noun)).into_diagnostic()?;
+
+ fs_extra::file::write_all(file_path, &json_content).into_diagnostic()?;
+
+ Ok(())
+ }
+
+ fn all_nouns(self) -> Result<Vec<SavedNoun>> {
+ Ok(std::fs::read_dir(self.nouns_path())
+ .into_diagnostic()?
+ .filter_map(std::result::Result::ok)
+ .filter_map(|e| {
+ let at = e.path();
+ match at.extension().map(|ext| ext.to_str()) {
+ Some(Some("json")) => Some(at),
+
+ _ => None,
+ }
+ })
+ .map(std::fs::read)
+ .filter_map(core::result::Result::ok)
+ .map(|content| serde_json::from_slice::<NounFile>(&content))
+ .filter_map(core::result::Result::ok)
+ .map(SavedNoun::from)
+ .collect::<Vec<_>>())
+ }
+
+ fn find_noun_by_id(self, id: ID) -> Result<nouns::SavedNoun> {
+ let file = self.nouns_path().join(format!("{id}.json"));
+ if !file.try_exists().into_diagnostic()? {
+ return Err(miette!("could not find a word with that id."));
+ }
+ let content = std::fs::read(file).into_diagnostic()?;
+ let noun = SavedNoun::from(serde_json::from_slice::<NounFile>(&content).into_diagnostic()?);
+
+ Ok(noun)
+ }
+
+ fn delete_noun_by_id(self, id: ID) -> Result<()> {
+ let file = self.nouns_path().join(format!("{id}.json"));
+ if !file.try_exists().into_diagnostic()? {
+ return Err(miette!("could not find a word with that id."));
+ }
+ let content = std::fs::read(file.clone()).into_diagnostic()?;
+ let noun = SavedNoun::from(serde_json::from_slice::<NounFile>(&content).into_diagnostic()?);
+ tracing::debug!("deleting: {noun:#?}");
+
+ std::fs::remove_file(&file).into_diagnostic()?;
+ Ok(())
+ }
+
+ fn edit_noun_by_id(
+ self,
+ id: ID,
+ article: Option<Vec<Article>>,
+ word: Option<String>,
+ meaning: Option<String>,
+ group: Option<String>,
+ ) -> Result<()> {
+ let mut n = self.clone().find_noun_by_id(id.clone())?;
+
+ if let Some(x) = article {
+ debug!(
+ "editing article. previous: {:#?}, current: {:#?}",
+ n.noun.articles, x
+ );
+ n.noun.articles = x;
+ };
+
+ if let Some(x) = word {
+ debug!(
+ "editing word. previous: {:#?}, current: {:#?}",
+ n.noun.word, x
+ );
+ n.noun.word = x;
+ };
+
+ if let Some(x) = meaning {
+ debug!(
+ "editing meaning. previous: {:#?}, current: {:#?}",
+ n.noun.meaning, x
+ );
+ n.noun.meaning = x;
+ };
+ if let Some(x) = group {
+ debug!(
+ "editing group. previous: {:#?}, current: {:#?}",
+ n.noun.group, x
+ );
+ n.noun.group = x;
+ };
+
+ let file = self.nouns_path().join(format!("{id}.json"));
+
+ if !file.try_exists().into_diagnostic()? {
+ return Err(miette!("could not find a word with that id."));
+ }
+
+ let json_content = serde_json::to_string_pretty(&NounFile::from(n)).into_diagnostic()?;
+
+ fs_extra::file::write_all(file, &json_content).into_diagnostic()?;
+ Ok(())
+ }
+}
A src/storage/sentence_attempts_repo.rs => src/storage/sentence_attempts_repo.rs +0 -0
A src/storage/sentence_repo.rs => src/storage/sentence_repo.rs +107 -0
@@ 0,0 1,107 @@
+use miette::{miette, Error, IntoDiagnostic as _, Result};
+use serde::{Deserialize, Serialize};
+
+use crate::{
+ cases::{ArticleWords, Case, Gender},
+ id::ID,
+ sentences::{self, SavedSentence, Sentence},
+};
+
+use super::Storage;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+enum SentenceFile {
+ V1(SentenceFileV1),
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+struct SentenceFileV1 {
+ id: String,
+ at: time::OffsetDateTime,
+ pub before: String,
+ pub after: String,
+ pub article: ArticleWords,
+ pub gender: Gender,
+ pub case: Case,
+}
+impl TryFrom<SentenceFile> for SavedSentence {
+ type Error = Error;
+
+ fn try_from(value: SentenceFile) -> Result<Self> {
+ match value {
+ SentenceFile::V1(v) => {
+ let id = ID::try_from(v.id)?;
+ let sentence_file = SavedSentence {
+ id,
+ added: v.at,
+ sentence: Sentence {
+ before: v.before,
+ after: v.after,
+ article_cases: crate::cases::Articles {
+ article: v.article,
+ gender: v.gender,
+ case: v.case,
+ },
+ },
+ };
+ Ok(sentence_file)
+ }
+ }
+ }
+}
+
+impl From<SavedSentence> for SentenceFile {
+ fn from(ss: SavedSentence) -> Self {
+ Self::V1(SentenceFileV1 {
+ id: ss.id.to_string(),
+ at: ss.added,
+ before: ss.sentence.before,
+ after: ss.sentence.after,
+ article: ss.sentence.article_cases.article,
+ gender: ss.sentence.article_cases.gender,
+ case: ss.sentence.article_cases.case,
+ })
+ }
+}
+
+impl sentences::Repo for Storage {
+ fn save_sentence(self, sentence: sentences::SavedSentence) -> Result<()> {
+ let file_name = format!("{}.json", sentence.id);
+
+ let file_path = self.sentence_path().join(&file_name);
+
+ if file_path.try_exists().into_diagnostic()? {
+            return Err(miette!("a file at {} already exists", &file_name));
+ }
+
+ let json_content =
+ serde_json::to_string_pretty(&SentenceFile::from(sentence)).into_diagnostic()?;
+
+ fs_extra::file::write_all(file_path, &json_content).into_diagnostic()?;
+
+ Ok(())
+ }
+
+ fn all_sentences(self) -> Result<Vec<sentences::SavedSentence>> {
+ std::fs::read_dir(self.sentence_path())
+ .into_diagnostic()?
+ .filter_map(std::result::Result::ok)
+ .filter_map(|e| {
+ let at = e.path();
+ match at.extension().map(|ext| ext.to_str()) {
+ Some(Some("json")) => Some(at),
+
+ _ => None,
+ }
+ })
+ .map(std::fs::read)
+ .filter_map(core::result::Result::ok)
+ .map(|content| serde_json::from_slice::<SentenceFile>(&content))
+ .filter_map(core::result::Result::ok)
+ .map(SavedSentence::try_from)
+ .try_fold(vec![], |mut accu, item| -> Result<_> {
+ accu.push(item?);
+ Ok(accu)
+ })
+ }
+}