Merge remote-tracking branch 'upstream/main' into x-axis

Luc Mcgrady 2025-08-06 20:38:13 +01:00
commit 908778dd17
24 changed files with 240 additions and 106 deletions

View file

@ -236,6 +236,7 @@ Marvin Kopf <marvinkopf@outlook.com>
Kevin Nakamura <grinkers@grinkers.net>
Bradley Szoke <bradleyszoke@gmail.com>
jcznk <https://github.com/jcznk>
Thomas Rixen <thomas.rixen@student.uclouvain.be>
********************

Cargo.lock generated
View file

@ -130,7 +130,7 @@ dependencies = [
"prost",
"prost-reflect",
"pulldown-cmark 0.13.0",
"rand 0.9.1",
"rand 0.9.2",
"rayon",
"regex",
"reqwest 0.12.20",
@ -144,7 +144,7 @@ dependencies = [
"serde_tuple",
"sha1",
"snafu",
"strum 0.27.1",
"strum 0.27.2",
"syn 2.0.103",
"tempfile",
"tokio",
@ -220,7 +220,7 @@ dependencies = [
"prost-types",
"serde",
"snafu",
"strum 0.27.1",
"strum 0.27.2",
]
[[package]]
@ -706,7 +706,7 @@ dependencies = [
"log",
"num-traits",
"portable-atomic-util",
"rand 0.9.1",
"rand 0.9.2",
"rmp-serde",
"serde",
"serde_json",
@ -732,7 +732,7 @@ dependencies = [
"hashbrown 0.15.4",
"log",
"num-traits",
"rand 0.9.1",
"rand 0.9.2",
"serde",
"spin 0.10.0",
"text_placeholder",
@ -762,12 +762,12 @@ dependencies = [
"csv",
"derive-new 0.7.0",
"dirs 6.0.0",
"rand 0.9.1",
"rand 0.9.2",
"rmp-serde",
"sanitize-filename 0.6.0",
"serde",
"serde_json",
"strum 0.27.1",
"strum 0.27.2",
"tempfile",
"thiserror 2.0.12",
]
@ -817,7 +817,7 @@ dependencies = [
"num-traits",
"paste",
"portable-atomic-util",
"rand 0.9.1",
"rand 0.9.2",
"seq-macro",
"spin 0.10.0",
]
@ -865,7 +865,7 @@ dependencies = [
"half",
"hashbrown 0.15.4",
"num-traits",
"rand 0.9.1",
"rand 0.9.2",
"rand_distr",
"serde",
"serde_bytes",
@ -959,7 +959,7 @@ dependencies = [
"memmap2",
"num-traits",
"num_cpus",
"rand 0.9.1",
"rand 0.9.2",
"rand_distr",
"rayon",
"safetensors",
@ -1403,7 +1403,7 @@ dependencies = [
"log",
"num-traits",
"portable-atomic",
"rand 0.9.1",
"rand 0.9.2",
"sanitize-filename 0.5.0",
"serde",
"serde_json",
@ -2214,20 +2214,20 @@ dependencies = [
[[package]]
name = "fsrs"
version = "5.0.0"
version = "5.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f590cfcbe25079bb54a39900f45e6e308935bd6067249ce00d265b280465cde2"
checksum = "04954cc67c3c11ee342a2ee1f5222bf76d73f7772df08d37dc9a6cdd73c467eb"
dependencies = [
"burn",
"itertools 0.14.0",
"log",
"ndarray",
"priority-queue",
"rand 0.9.1",
"rand 0.9.2",
"rayon",
"serde",
"snafu",
"strum 0.27.1",
"strum 0.27.2",
]
[[package]]
@ -2804,7 +2804,7 @@ dependencies = [
"cfg-if",
"crunchy",
"num-traits",
"rand 0.9.1",
"rand 0.9.2",
"rand_distr",
"serde",
]
@ -3658,7 +3658,7 @@ dependencies = [
"linkcheck",
"regex",
"reqwest 0.12.20",
"strum 0.27.1",
"strum 0.27.2",
"tokio",
]
@ -5096,7 +5096,7 @@ dependencies = [
"bytes",
"getrandom 0.3.3",
"lru-slab",
"rand 0.9.1",
"rand 0.9.2",
"ring",
"rustc-hash 2.1.1",
"rustls",
@ -5150,9 +5150,9 @@ dependencies = [
[[package]]
name = "rand"
version = "0.9.1"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.3",
@ -5203,7 +5203,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8615d50dcf34fa31f7ab52692afec947c4dd0ab803cc87cb3b0b4570ff7463"
dependencies = [
"num-traits",
"rand 0.9.1",
"rand 0.9.2",
]
[[package]]
@ -6113,9 +6113,9 @@ dependencies = [
[[package]]
name = "strum"
version = "0.27.1"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
dependencies = [
"strum_macros 0.27.1",
]

View file

@ -33,10 +33,8 @@ git = "https://github.com/ankitects/linkcheck.git"
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"
[workspace.dependencies.fsrs]
version = "5.0.0"
version = "5.1.0"
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
# branch = "Refactor/expected_workload_via_dp"
# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
# path = "../open-spaced-repetition/fsrs-rs"
[workspace.dependencies]

View file

@ -1450,7 +1450,7 @@
},
{
"name": "fsrs",
"version": "5.0.0",
"version": "5.1.0",
"authors": "Open Spaced Repetition",
"repository": "https://github.com/open-spaced-repetition/fsrs-rs",
"license": "BSD-3-Clause",
@ -3322,7 +3322,7 @@
},
{
"name": "rand",
"version": "0.9.1",
"version": "0.9.2",
"authors": "The Rand Project Developers|The Rust Project Developers",
"repository": "https://github.com/rust-random/rand",
"license": "Apache-2.0 OR MIT",
@ -4132,7 +4132,7 @@
},
{
"name": "strum",
"version": "0.27.1",
"version": "0.27.2",
"authors": "Peter Glotfelty <peter.glotfelty@microsoft.com>",
"repository": "https://github.com/Peternator7/strum",
"license": "MIT",

View file

@ -5,6 +5,11 @@ database-check-card-properties =
[one] Fixed { $count } invalid card property.
*[other] Fixed { $count } invalid card properties.
}
database-check-card-last-review-time-empty =
{ $count ->
[one] Added last review time to { $count } card.
*[other] Added last review time to { $count } cards.
}
database-check-missing-templates =
{ $count ->
[one] Deleted { $count } card with missing template.

View file

@ -407,6 +407,8 @@ message SimulateFsrsReviewRequest {
deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
optional uint32 suspend_after_lapse_count = 12;
float historical_retention = 13;
uint32 learning_step_count = 14;
uint32 relearning_step_count = 15;
}
message SimulateFsrsReviewResponse {
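These two new fields let the simulator use the deck's actual number of learning and relearning steps rather than the counts taken from the optimal-retention parameters: the deck options screen fills them from $config.learnSteps.length and $config.relearnSteps.length, and the simulator reads them back as req.learning_step_count / req.relearning_step_count (see the later hunks in this commit).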

View file

@ -246,7 +246,7 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
return BackendError(err.message, help_page, context, backtrace)
elif val == kind.SEARCH_ERROR:
return SearchError(markdown(err.message), help_page, context, backtrace)
return SearchError(err.message, help_page, context, backtrace)
elif val == kind.UNDO_EMPTY:
return UndoEmpty(err.message, help_page, context, backtrace)

View file

@ -10,6 +10,8 @@ import re
from collections.abc import Callable, Sequence
from typing import Any, cast
from markdown import markdown
import aqt
import aqt.browser
import aqt.editor
@ -20,7 +22,7 @@ from anki.cards import Card, CardId
from anki.collection import Collection, Config, OpChanges, SearchNode
from anki.consts import *
from anki.decks import DeckId
from anki.errors import NotFoundError
from anki.errors import NotFoundError, SearchError
from anki.lang import without_unicode_isolation
from anki.models import NotetypeId
from anki.notes import NoteId
@ -498,6 +500,8 @@ class Browser(QMainWindow):
text = self.current_search()
try:
normed = self.col.build_search_string(text)
except SearchError as err:
showWarning(markdown(str(err)))
except Exception as err:
showWarning(str(err))
else:

View file

@ -126,8 +126,9 @@ impl Card {
}
}
/// This uses card.due and card.ivl to infer the elapsed time. If 'set due
/// date' or an add-on has changed the due date, this won't be accurate.
/// If last_review_date isn't stored in the card, this uses card.due and
/// card.ivl to infer the elapsed time, which won't be accurate if
/// 'set due date' or an add-on has changed the due date.
pub(crate) fn seconds_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
if let Some(last_review_time) = self.last_review_time {
Some(timing.now.elapsed_secs_since(last_review_time) as u32)

View file

@ -24,6 +24,7 @@ use crate::notetype::NotetypeId;
use crate::notetype::NotetypeKind;
use crate::prelude::*;
use crate::progress::ThrottlingProgressHandler;
use crate::storage::card::CardFixStats;
use crate::timestamp::TimestampMillis;
use crate::timestamp::TimestampSecs;
@ -40,6 +41,7 @@ pub struct CheckDatabaseOutput {
notetypes_recovered: usize,
invalid_utf8: usize,
invalid_ids: usize,
card_last_review_time_empty: usize,
}
#[derive(Debug, Clone, Copy, Default)]
@ -69,6 +71,11 @@ impl CheckDatabaseOutput {
if self.card_properties_invalid > 0 {
probs.push(tr.database_check_card_properties(self.card_properties_invalid));
}
if self.card_last_review_time_empty > 0 {
probs.push(
tr.database_check_card_last_review_time_empty(self.card_last_review_time_empty),
);
}
if self.cards_missing_note > 0 {
probs.push(tr.database_check_card_missing_note(self.cards_missing_note));
}
@ -158,14 +165,25 @@ impl Collection {
fn check_card_properties(&mut self, out: &mut CheckDatabaseOutput) -> Result<()> {
let timing = self.timing_today()?;
let (new_cnt, other_cnt) = self.storage.fix_card_properties(
let CardFixStats {
new_cards_fixed,
other_cards_fixed,
last_review_time_fixed,
} = self.storage.fix_card_properties(
timing.days_elapsed,
TimestampSecs::now(),
self.usn()?,
self.scheduler_version() == SchedulerVersion::V1,
)?;
out.card_position_too_high = new_cnt;
out.card_properties_invalid += other_cnt;
out.card_position_too_high = new_cards_fixed;
out.card_properties_invalid += other_cards_fixed;
out.card_last_review_time_empty = last_review_time_fixed;
// Trigger one-way sync if last_review_time was updated to avoid conflicts
if last_review_time_fixed > 0 {
self.set_schema_modified()?;
}
Ok(())
}

View file

@ -3,6 +3,8 @@
use std::collections::HashMap;
use anki_proto::generic;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
use crate::collection::Collection;
use crate::deckconfig::DeckConfSchema11;
@ -11,6 +13,7 @@ use crate::deckconfig::DeckConfigId;
use crate::deckconfig::UpdateDeckConfigsRequest;
use crate::error::Result;
use crate::scheduler::fsrs::params::ignore_revlogs_before_date_to_ms;
use crate::scheduler::fsrs::simulator::is_included_card;
impl crate::services::DeckConfigService for Collection {
fn add_or_update_deck_config_legacy(
@ -103,6 +106,7 @@ impl crate::services::DeckConfigService for Collection {
&mut self,
input: anki_proto::deck_config::GetRetentionWorkloadRequest,
) -> Result<anki_proto::deck_config::GetRetentionWorkloadResponse> {
let days_elapsed = self.timing_today().unwrap().days_elapsed as i32;
let guard =
self.search_cards_into_table(&input.search, crate::search::SortMode::NoOrder)?;
@ -112,12 +116,26 @@ impl crate::services::DeckConfigService for Collection {
.get_revlog_entries_for_searched_cards_in_card_order()?;
let config = guard.col.get_optimal_retention_parameters(revlogs)?;
let cards = guard
.col
.storage
.all_searched_cards()?
.into_iter()
.filter(is_included_card)
.filter_map(|c| crate::card::Card::convert(c.clone(), days_elapsed, c.memory_state?))
.collect::<Vec<fsrs::Card>>();
let costs = (70u32..=99u32)
.into_par_iter()
.map(|dr| {
Ok((
dr,
fsrs::expected_workload(&input.w, dr as f32 / 100., &config)?,
fsrs::expected_workload_with_existing_cards(
&input.w,
dr as f32 / 100.,
&config,
&cards,
)?,
))
})
.collect::<Result<HashMap<_, _>>>()?;
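The resulting map is keyed by desired-retention percentages 70–99. The deck options screen (further down in this commit) reports a workload factor by dividing costs[after] by costs[previous]; with purely illustrative numbers, costs[90] = 120.0 and costs[85] = 100.0 would present a change of desired retention from 0.85 to 0.90 as a 1.2× workload factor.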

View file

@ -84,6 +84,42 @@ impl RevlogEntry {
})
.unwrap()
}
/// Returns true if this entry represents a reset operation.
/// These entries are created when a card is reset using
/// [`Collection::reschedule_cards_as_new`].
/// The 0 value of `ease_factor` differentiates it
/// from an entry created by [`Collection::set_due_date`], which has
/// `RevlogReviewKind::Manual` but a non-zero `ease_factor`.
pub(crate) fn is_reset(&self) -> bool {
self.review_kind == RevlogReviewKind::Manual && self.ease_factor == 0
}
/// Returns true if this entry represents a cramming operation.
/// These entries are created when a card is reviewed in a
/// filtered deck with "Reschedule cards based on my answers
/// in this deck" disabled.
/// [`crate::scheduler::answering::CardStateUpdater::apply_preview_state`].
/// The 0 value of `ease_factor` distinguishes it from the entry
/// created when a card is reviewed before its due date in a
/// filtered deck with reschedule enabled or using Grade Now.
pub(crate) fn is_cramming(&self) -> bool {
self.review_kind == RevlogReviewKind::Filtered && self.ease_factor == 0
}
pub(crate) fn has_rating(&self) -> bool {
self.button_chosen > 0
}
/// Returns true if the review entry is not manually rescheduled and not
/// cramming. Used to filter out entries that shouldn't be considered
/// for statistics and scheduling.
pub(crate) fn has_rating_and_affects_scheduling(&self) -> bool {
// not rescheduled/set due date/reset
self.has_rating()
// not cramming
&& !self.is_cramming()
}
}
impl Collection {
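A minimal sketch of how these helpers classify entries (illustrative values only; 2500 stands in for any non-zero stored ease factor, and RevlogEntry is assumed to implement Default, as the test helpers later in this commit rely on):
// Hypothetical in-crate example, not part of this commit.
let reset = RevlogEntry { review_kind: RevlogReviewKind::Manual, ease_factor: 0, ..Default::default() };
let set_due = RevlogEntry { review_kind: RevlogReviewKind::Manual, ease_factor: 2500, ..Default::default() };
let cram = RevlogEntry { review_kind: RevlogReviewKind::Filtered, ease_factor: 0, button_chosen: 3, ..Default::default() };
let review = RevlogEntry { review_kind: RevlogReviewKind::Review, button_chosen: 3, ..Default::default() };
assert!(reset.is_reset() && !set_due.is_reset()); // reset vs. 'set due date'
assert!(cram.is_cramming()); // filtered review without rescheduling
assert!(!reset.has_rating_and_affects_scheduling()); // no rating recorded
assert!(!cram.has_rating_and_affects_scheduling()); // rated, but cramming
assert!(review.has_rating_and_affects_scheduling()); // normal graded review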

View file

@ -306,15 +306,15 @@ pub(crate) fn fsrs_items_for_memory_states(
.collect()
}
struct LastRevlogInfo {
pub(crate) struct LastRevlogInfo {
/// Used to determine the actual elapsed time between the last time the user
/// reviewed the card and now, so that we can determine an accurate period
/// when the card has subsequently been rescheduled to a different day.
last_reviewed_at: Option<TimestampSecs>,
pub(crate) last_reviewed_at: Option<TimestampSecs>,
}
/// Return a map of cards to info about last review/reschedule.
fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
/// Return a map of cards to info about last review.
pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
let mut out = HashMap::new();
revlogs
.iter()
@ -323,8 +323,10 @@ fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogIn
.for_each(|(card_id, group)| {
let mut last_reviewed_at = None;
for e in group.into_iter() {
if e.button_chosen >= 1 {
if e.has_rating_and_affects_scheduling() {
last_reviewed_at = Some(e.id.as_secs());
} else if e.is_reset() {
last_reviewed_at = None;
}
}
out.insert(card_id, LastRevlogInfo { last_reviewed_at });
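In effect, per card this means a history of review → reset → review reports the last review's timestamp, while review → reset with nothing graded afterwards leaves last_reviewed_at as None; cramming and other manual entries are simply skipped and keep whatever value was already accumulated.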

View file

@ -394,13 +394,13 @@ pub(crate) fn reviews_for_fsrs(
let mut revlogs_complete = false;
// Working backwards from the latest review...
for (index, entry) in entries.iter().enumerate().rev() {
if entry.review_kind == RevlogReviewKind::Filtered && entry.ease_factor == 0 {
if entry.is_cramming() {
continue;
}
// For incomplete review histories, initial memory state is based on the first
// user-graded review after the cutoff date with interval >= 1d.
let within_cutoff = entry.id.0 > ignore_revlogs_before.0;
let user_graded = matches!(entry.button_chosen, 1..=4);
let user_graded = entry.has_rating();
let interday = entry.interval >= 1 || entry.interval <= -86400;
if user_graded && within_cutoff && interday {
first_user_grade_idx = Some(index);
@ -409,10 +409,7 @@ pub(crate) fn reviews_for_fsrs(
if user_graded && entry.review_kind == RevlogReviewKind::Learning {
first_of_last_learn_entries = Some(index);
revlogs_complete = true;
} else if matches!(
(entry.review_kind, entry.ease_factor),
(RevlogReviewKind::Manual, 0)
) {
} else if entry.is_reset() {
// Ignore entries prior to a `Reset` if a learning step has come after,
// but consider revlogs complete.
if first_of_last_learn_entries.is_some() {
@ -472,16 +469,7 @@ pub(crate) fn reviews_for_fsrs(
}
// Filter out unwanted entries
entries.retain(|entry| {
!(
// set due date, reset or rescheduled
(entry.review_kind == RevlogReviewKind::Manual || entry.button_chosen == 0)
|| // cram
(entry.review_kind == RevlogReviewKind::Filtered && entry.ease_factor == 0)
|| // rescheduled
(entry.review_kind == RevlogReviewKind::Rescheduled)
)
});
entries.retain(|entry| entry.has_rating_and_affects_scheduling());
// Compute delta_t for each entry
let delta_ts = iter::once(0)
@ -560,10 +548,14 @@ pub(crate) mod tests {
}
pub(crate) fn revlog(review_kind: RevlogReviewKind, days_ago: i64) -> RevlogEntry {
let button_chosen = match review_kind {
RevlogReviewKind::Manual | RevlogReviewKind::Rescheduled => 0,
_ => 3,
};
RevlogEntry {
review_kind,
id: days_ago_ms(days_ago).into(),
button_chosen: 3,
button_chosen,
interval: 1,
..Default::default()
}

View file

@ -121,6 +121,12 @@ fn create_review_priority_fn(
}
}
pub(crate) fn is_included_card(c: &Card) -> bool {
c.queue != CardQueue::Suspended
&& c.queue != CardQueue::PreviewRepeat
&& c.ctype != CardType::New
}
impl Collection {
pub fn simulate_request_to_config(
&mut self,
@ -133,11 +139,6 @@ impl Collection {
.get_revlog_entries_for_searched_cards_in_card_order()?;
let mut cards = guard.col.storage.all_searched_cards()?;
drop(guard);
fn is_included_card(c: &Card) -> bool {
c.queue != CardQueue::Suspended
&& c.queue != CardQueue::PreviewRepeat
&& c.ctype != CardType::New
}
// calculate any missing memory state
for c in &mut cards {
if is_included_card(c) && c.memory_state.is_none() {
@ -237,8 +238,8 @@ impl Collection {
learning_step_transitions: p.learning_step_transitions,
relearning_step_transitions: p.relearning_step_transitions,
state_rating_costs: p.state_rating_costs,
learning_step_count: p.learning_step_count,
relearning_step_count: p.relearning_step_count,
learning_step_count: req.learning_step_count as usize,
relearning_step_count: req.relearning_step_count as usize,
};
Ok((config, converted_cards))
@ -306,7 +307,11 @@ impl Collection {
}
impl Card {
fn convert(card: Card, days_elapsed: i32, memory_state: FsrsMemoryState) -> Option<fsrs::Card> {
pub(crate) fn convert(
card: Card,
days_elapsed: i32,
memory_state: FsrsMemoryState,
) -> Option<fsrs::Card> {
match card.queue {
CardQueue::DayLearn | CardQueue::Review => {
let due = card.original_or_current_due();

View file

@ -57,10 +57,10 @@ const SECOND: f32 = 1.0;
const MINUTE: f32 = 60.0 * SECOND;
const HOUR: f32 = 60.0 * MINUTE;
const DAY: f32 = 24.0 * HOUR;
const MONTH: f32 = 30.417 * DAY; // 365/12 ≈ 30.417
const YEAR: f32 = 365.0 * DAY;
const MONTH: f32 = YEAR / 12.0;
#[derive(Clone, Copy)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) enum TimespanUnit {
Seconds,
Minutes,
@ -111,6 +111,13 @@ impl Timespan {
}
}
pub fn to_unit(self, unit: TimespanUnit) -> Timespan {
Timespan {
seconds: self.seconds,
unit,
}
}
/// Round seconds and days to integers, otherwise
/// truncates to one decimal place.
pub fn as_rounded_unit(self) -> f32 {

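Since to_unit keeps the underlying seconds and only relabels the unit, capping works as expected: assuming as_unit converts the stored seconds into the named unit (as the studied-today code below uses it), Timespan::from_secs(5400.0).to_unit(TimespanUnit::Minutes).as_unit() yields 90.0 where natural_span alone would report 1.5 hours. The new PartialEq/Ord derives exist so callers can take std::cmp::min of two TimespanUnit values when applying that cap.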
View file

@ -30,14 +30,24 @@ impl Collection {
let (average_secs, total_secs) = average_and_total_secs_strings(&revlog);
let timing = self.timing_today()?;
let seconds_elapsed = if let Some(last_review_time) = card.last_review_time {
timing.now.elapsed_secs_since(last_review_time) as u32
let last_review_time = if let Some(last_review_time) = card.last_review_time {
last_review_time
} else {
self.storage
let mut new_card = card.clone();
let last_review_time = self
.storage
.time_of_last_review(card.id)?
.map(|ts| timing.now.elapsed_secs_since(ts))
.unwrap_or_default() as u32
.unwrap_or_default();
new_card.last_review_time = Some(last_review_time);
self.storage.update_card(&new_card)?;
last_review_time
};
let seconds_elapsed = timing.now.elapsed_secs_since(last_review_time) as u32;
let fsrs_retrievability = card
.memory_state
.zip(Some(seconds_elapsed))
@ -187,7 +197,7 @@ impl Collection {
}
fn average_and_total_secs_strings(revlog: &[RevlogEntry]) -> (f32, f32) {
let normal_answer_count = revlog.iter().filter(|r| r.button_chosen > 0).count();
let normal_answer_count = revlog.iter().filter(|r| r.has_rating()).count();
let total_secs: f32 = revlog
.iter()
.map(|entry| (entry.taken_millis as f32) / 1000.0)

View file

@ -53,10 +53,7 @@ impl GraphsContext {
self.revlog
.iter()
.filter(|review| {
// not rescheduled/set due date/reset
review.button_chosen > 0
// not cramming
&& (review.review_kind != RevlogReviewKind::Filtered || review.ease_factor != 0)
review.has_rating_and_affects_scheduling()
// cards with an interval ≥ 1 day
&& (review.review_kind == RevlogReviewKind::Review
|| review.last_interval <= -86400

View file

@ -5,17 +5,18 @@ use anki_i18n::I18n;
use crate::prelude::*;
use crate::scheduler::timespan::Timespan;
use crate::scheduler::timespan::TimespanUnit;
pub fn studied_today(cards: u32, secs: f32, tr: &I18n) -> String {
let span = Timespan::from_secs(secs).natural_span();
let amount = span.as_unit();
let unit = span.unit().as_str();
let unit = std::cmp::min(span.unit(), TimespanUnit::Minutes);
let amount = span.to_unit(unit).as_unit();
let secs_per_card = if cards > 0 {
secs / (cards as f32)
} else {
0.0
};
tr.statistics_studied_today(unit, secs_per_card, amount, cards)
tr.statistics_studied_today(unit.as_str(), secs_per_card, amount, cards)
.into()
}
@ -41,5 +42,9 @@ mod test {
&studied_today(3, 13.0, &tr).replace('\n', " "),
"Studied 3 cards in 13 seconds today (4.33s/card)"
);
assert_eq!(
&studied_today(300, 5400.0, &tr).replace('\n', " "),
"Studied 300 cards in 90 minutes today (18s/card)"
);
}
}

View file

@ -33,6 +33,7 @@ use crate::decks::DeckKind;
use crate::error::Result;
use crate::notes::NoteId;
use crate::scheduler::congrats::CongratsInfo;
use crate::scheduler::fsrs::memory_state::get_last_revlog_info;
use crate::scheduler::queue::BuryMode;
use crate::scheduler::queue::DueCard;
use crate::scheduler::queue::DueCardKind;
@ -42,6 +43,13 @@ use crate::timestamp::TimestampMillis;
use crate::timestamp::TimestampSecs;
use crate::types::Usn;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct CardFixStats {
pub new_cards_fixed: usize,
pub other_cards_fixed: usize,
pub last_review_time_fixed: usize,
}
impl FromSql for CardType {
fn column_result(value: ValueRef<'_>) -> result::Result<Self, FromSqlError> {
if let ValueRef::Integer(i) = value {
@ -365,7 +373,7 @@ impl super::SqliteStorage {
mtime: TimestampSecs,
usn: Usn,
v1_sched: bool,
) -> Result<(usize, usize)> {
) -> Result<CardFixStats> {
let new_cnt = self
.db
.prepare(include_str!("fix_due_new.sql"))?
@ -390,7 +398,24 @@ impl super::SqliteStorage {
.db
.prepare(include_str!("fix_ordinal.sql"))?
.execute(params![mtime, usn])?;
Ok((new_cnt, other_cnt))
let mut last_review_time_cnt = 0;
let revlog = self.get_all_revlog_entries_in_card_order()?;
let last_revlog_info = get_last_revlog_info(&revlog);
for (card_id, last_revlog_info) in last_revlog_info {
let card = self.get_card(card_id)?;
if let Some(mut card) = card {
if card.ctype != CardType::New && card.last_review_time.is_none() {
card.last_review_time = last_revlog_info.last_reviewed_at;
self.update_card(&card)?;
last_review_time_cnt += 1;
}
}
}
Ok(CardFixStats {
new_cards_fixed: new_cnt,
other_cards_fixed: other_cnt,
last_review_time_fixed: last_review_time_cnt,
})
}
pub(crate) fn delete_orphaned_cards(&self) -> Result<usize> {

View file

@ -89,7 +89,7 @@ export function naturalWholeUnit(secs: number): TimespanUnit {
}
export function studiedToday(cards: number, secs: number): string {
const unit = naturalUnit(secs);
const unit = Math.min(naturalUnit(secs), TimespanUnit.Minutes);
const amount = unitAmount(unit, secs);
const name = unitName(unit);

View file

@ -113,6 +113,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
easyDaysPercentages: $config.easyDaysPercentages,
reviewOrder: $config.reviewOrder,
historicalRetention: $config.historicalRetention,
learningStepCount: $config.learnSteps.length,
relearningStepCount: $config.relearnSteps.length,
});
const DESIRED_RETENTION_LOW_THRESHOLD = 0.8;
@ -128,7 +130,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
}
}
let retentionWorloadInfo: undefined | Promise<GetRetentionWorkloadResponse> =
let retentionWorkloadInfo: undefined | Promise<GetRetentionWorkloadResponse> =
undefined;
let lastParams = [...fsrsParams($config)];
@ -139,7 +141,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
}
if (
// If the cache is empty and a request has not yet been made to fill it
!retentionWorloadInfo ||
!retentionWorkloadInfo ||
// If the parameters have been changed
lastParams.toString() !== params.toString()
) {
@ -148,12 +150,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
search: defaultparamSearch,
});
lastParams = [...params];
retentionWorloadInfo = getRetentionWorkload(request);
retentionWorkloadInfo = getRetentionWorkload(request);
}
const previous = +startingDesiredRetention * 100;
const after = retention * 100;
const resp = await retentionWorloadInfo;
const resp = await retentionWorkloadInfo;
const factor = resp.costs[after] / resp.costs[previous];
desiredRetentionChangeInfo = tr.deckConfigWorkloadFactorChange({
@ -218,29 +220,34 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
healthCheck: $healthCheck,
});
const already_optimal =
const alreadyOptimal =
(params.length &&
params.every(
(n, i) => n.toFixed(4) === resp.params[i].toFixed(4),
)) ||
resp.params.length === 0;
let healthCheckMessage = "";
if (resp.healthCheckPassed !== undefined) {
if (resp.healthCheckPassed) {
setTimeout(() => alert(tr.deckConfigFsrsGoodFit()), 200);
} else {
setTimeout(
() => alert(tr.deckConfigFsrsBadFitWarning()),
200,
);
}
} else if (already_optimal) {
const msg = resp.fsrsItems
healthCheckMessage = resp.healthCheckPassed
? tr.deckConfigFsrsGoodFit()
: tr.deckConfigFsrsBadFitWarning();
}
let alreadyOptimalMessage = "";
if (alreadyOptimal) {
alreadyOptimalMessage = resp.fsrsItems
? tr.deckConfigFsrsParamsOptimal()
: tr.deckConfigFsrsParamsNoReviews();
setTimeout(() => alert(msg), 200);
}
if (!already_optimal) {
const message = [alreadyOptimalMessage, healthCheckMessage]
.filter((a) => a)
.join("\n\n");
if (message) {
setTimeout(() => alert(message), 200);
}
if (!alreadyOptimal) {
$config.fsrsParams6 = resp.params;
setTimeout(() => {
optimized = true;

View file

@ -627,7 +627,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
.svg-container {
width: 100%;
max-height: calc(100vh - 400px); /* Account for modal header, controls, etc */
/* Account for modal header, controls, etc */
max-height: max(calc(100vh - 400px), 200px);
aspect-ratio: 600 / 250;
display: flex;
align-items: center;

View file

@ -18,8 +18,8 @@ import {
bin,
cumsum,
curveBasis,
interpolateBlues,
interpolateGreens,
interpolateOranges,
interpolatePurples,
interpolateReds,
max,
@ -181,7 +181,7 @@ export function renderReviews(
const reds = scaleSequential((n) => interpolateReds(cappedRange(n)!)).domain(
x.domain() as any,
);
const blues = scaleSequential((n) => interpolateBlues(cappedRange(n)!)).domain(
const oranges = scaleSequential((n) => interpolateOranges(cappedRange(n)!)).domain(
x.domain() as any,
);
const purples = scaleSequential((n) => interpolatePurples(cappedRange(n)!)).domain(
@ -195,7 +195,7 @@ export function renderReviews(
case BinIndex.Young:
return lighterGreens;
case BinIndex.Learn:
return blues;
return oranges;
case BinIndex.Relearn:
return reds;
case BinIndex.Filtered: