Check required review count for FSRS after filtering (#3019)
* Check for required review count for FSRS after filtering
* Remove unreachable check
* Update minimum review count in optimal retention calculation
* Fix review check in optimal retention routine too
parent 0018f126ea
commit 6843d65ed1

5 changed files with 37 additions and 36 deletions
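
Note: the crux of the change is ordering. Previously the 400-review minimum was
checked against the raw revlog length, before entries were dropped during item
construction, so a collection could pass the check yet yield too few usable
FSRS items. The check now runs on the count that survives filtering. A minimal
sketch of the idea (hypothetical names, not the actual Anki code):

const MIN_REVIEWS: usize = 400;

// Stand-in for a revlog row; `ignored` represents entries dropped during
// filtering (manual reschedules, entries before the cutoff, etc.).
struct RevlogEntry {
    ignored: bool,
}

fn usable_review_count(revlogs: &[RevlogEntry]) -> usize {
    revlogs.iter().filter(|r| !r.ignored).count()
}

fn check_enough_reviews(revlogs: &[RevlogEntry]) -> Result<(), String> {
    // The old behaviour compared `revlogs.len()` against the minimum,
    // which over-counted; counting only usable reviews fixes that.
    let count = usable_review_count(revlogs);
    if count < MIN_REVIEWS {
        return Err(format!("only {count} usable reviews, need {MIN_REVIEWS}"));
    }
    Ok(())
}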
@@ -344,7 +344,6 @@ message ComputeFsrsWeightsRequest {

 message ComputeFsrsWeightsResponse {
   repeated float weights = 1;
-  // if less than 1000, should warn user
   uint32 fsrs_items = 2;
 }

@@ -296,7 +296,7 @@ pub(crate) fn single_card_revlog_to_item(
             } as f32
                 / 1000.0,
         });
-    if let Some((mut items, revlogs_complete)) =
+    if let Some((mut items, revlogs_complete, _)) =
         single_card_revlog_to_items(entries, next_day_at, false, ignore_revlogs_before)
     {
         let mut item = items.pop().unwrap();

@@ -75,12 +75,6 @@ impl Collection {
             .col
             .storage
             .get_revlog_entries_for_searched_cards_in_card_order()?;
-        if revlogs.len() < 1000 {
-            return Err(AnkiError::FsrsInsufficientReviews {
-                count: revlogs.len(),
-            });
-        }
-
         let first_rating_count = revlogs
             .iter()
             .group_by(|r| r.cid)
@@ -109,14 +103,19 @@ impl Collection {
             .iter()
             .filter(|r| r.review_kind == RevlogReviewKind::Review && r.button_chosen != 1)
             .counts_by(|r| r.button_chosen);
-        let total_reviews = review_rating_count.values().sum::<usize>() as f64;
-        let review_rating_prob = if total_reviews > 0.0 {
+        let total_reviews = review_rating_count.values().sum::<usize>();
+        if total_reviews < 400 {
+            return Err(AnkiError::FsrsInsufficientReviews {
+                count: total_reviews,
+            });
+        }
+        let review_rating_prob = if total_reviews as f64 > 0.0 {
             let mut arr = [0.0; 3];
             review_rating_count
                 .iter()
                 .filter(|(&button_chosen, ..)| button_chosen >= 2)
                 .for_each(|(button_chosen, count)| {
-                    arr[*button_chosen as usize - 2] = *count as f64 / total_reviews;
+                    arr[*button_chosen as usize - 2] = *count as f64 / total_reviews as f64;
                 });
             arr
         } else {
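
For reference, review-stage answer buttons 2/3/4 (Hard/Good/Easy) map to
indices 0..2 of the probability array; Again (button 1) is filtered out
beforehand. A self-contained sketch of this computation, using std collections
in place of itertools' counts_by:

use std::collections::HashMap;

fn review_rating_prob(buttons_chosen: &[u8]) -> [f64; 3] {
    // Count how often each button >= 2 was pressed.
    let mut counts: HashMap<u8, usize> = HashMap::new();
    for &b in buttons_chosen.iter().filter(|&&b| b >= 2) {
        *counts.entry(b).or_default() += 1;
    }
    let total: usize = counts.values().sum();
    let mut arr = [0.0; 3];
    if total > 0 {
        for (b, count) in counts {
            // index 0 = Hard, 1 = Good, 2 = Easy
            arr[b as usize - 2] = count as f64 / total as f64;
        }
    }
    arr
}

// e.g. review_rating_prob(&[3, 3, 2, 4]) == [0.25, 0.5, 0.25]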
@@ -60,13 +60,14 @@ impl Collection {
         let mut anki_progress = self.new_progress_handler::<ComputeWeightsProgress>();
         let timing = self.timing_today()?;
         let revlogs = self.revlog_for_srs(search)?;
-        if revlogs.len() < 400 {
+        let (items, review_count) =
+            fsrs_items_for_training(revlogs.clone(), timing.next_day_at, ignore_revlogs_before);
+
+        if review_count < 400 {
             return Err(AnkiError::FsrsInsufficientReviews {
-                count: revlogs.len(),
+                count: review_count,
             });
         }
-        let items =
-            fsrs_items_for_training(revlogs.clone(), timing.next_day_at, ignore_revlogs_before);
         let fsrs_items = items.len() as u32;
         anki_progress.update(false, |p| {
             p.fsrs_items = fsrs_items;
@@ -156,13 +157,14 @@ impl Collection {
             .col
             .storage
             .get_revlog_entries_for_searched_cards_in_card_order()?;
-        if revlogs.len() < 400 {
+        anki_progress.state.fsrs_items = revlogs.len() as u32;
+        let (items, review_count) =
+            fsrs_items_for_training(revlogs, timing.next_day_at, ignore_revlogs_before);
+        if review_count < 400 {
             return Err(AnkiError::FsrsInsufficientReviews {
-                count: revlogs.len(),
+                count: review_count,
             });
         }
-        anki_progress.state.fsrs_items = revlogs.len() as u32;
-        let items = fsrs_items_for_training(revlogs, timing.next_day_at, ignore_revlogs_before);
         let fsrs = FSRS::new(Some(weights))?;
         Ok(fsrs.evaluate(items, |ip| {
             anki_progress
@@ -191,7 +193,8 @@ fn fsrs_items_for_training(
     revlogs: Vec<RevlogEntry>,
     next_day_at: TimestampSecs,
     review_revlogs_before: TimestampMillis,
-) -> Vec<FSRSItem> {
+) -> (Vec<FSRSItem>, usize) {
+    let mut review_count: usize = 0;
     let mut revlogs = revlogs
         .into_iter()
         .group_by(|r| r.cid)
@@ -199,10 +202,14 @@ fn fsrs_items_for_training(
         .filter_map(|(_cid, entries)| {
             single_card_revlog_to_items(entries.collect(), next_day_at, true, review_revlogs_before)
         })
-        .flat_map(|i| i.0)
+        .flat_map(|i| {
+            review_count += i.2;
+
+            i.0
+        })
         .collect_vec();
     revlogs.sort_by_cached_key(|r| r.reviews.len());
-    revlogs
+    (revlogs, review_count)
 }

 /// Transform the revlog history for a card into a list of FSRSItems. FSRS
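
The per-card review totals (the third tuple field) are accumulated by a
mutable counter captured in the flat_map closure, which runs once per card
while the items themselves are flattened into one Vec. A minimal sketch of the
pattern, with u32 standing in for FSRSItem:

fn flatten_and_count(per_card: Vec<(Vec<u32>, bool, usize)>) -> (Vec<u32>, usize) {
    let mut review_count: usize = 0;
    let items: Vec<u32> = per_card
        .into_iter()
        .flat_map(|i| {
            review_count += i.2; // side effect: count reviews per card
            i.0
        })
        .collect();
    (items, review_count)
}

// e.g. flatten_and_count(vec![(vec![1, 2], true, 5), (vec![3], false, 2)])
//      == (vec![1, 2, 3], 7)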
@@ -211,16 +218,18 @@ fn fsrs_items_for_training(
 /// in training, and `[1]`, [1,2]` and `[1,2,3]` when calculating memory
 /// state.
 ///
-/// Returns (items, revlog_complete), the latter of which is assumed
-/// when the revlogs have a learning step, or start with manual scheduling. When
-/// revlogs are incomplete, the starting difficulty is later inferred from the
-/// SM2 data, instead of using the standard FSRS initial difficulty.
+/// Returns (items, revlog_complete, review_count).
+/// revlog_complete is assumed when the revlogs have a learning step, or start
+/// with manual scheduling. When revlogs are incomplete, the starting difficulty
+/// is later inferred from the SM2 data, instead of using the standard FSRS
+/// initial difficulty. review_count is the number of reviews used after
+/// filtering out unwanted ones.
 pub(crate) fn single_card_revlog_to_items(
     mut entries: Vec<RevlogEntry>,
     next_day_at: TimestampSecs,
     training: bool,
     ignore_revlogs_before: TimestampMillis,
-) -> Option<(Vec<FSRSItem>, bool)> {
+) -> Option<(Vec<FSRSItem>, bool, usize)> {
     let mut first_of_last_learn_entries = None;
     let mut revlogs_complete = false;
     for (index, entry) in entries.iter().enumerate().rev() {
@@ -318,7 +327,7 @@ pub(crate) fn single_card_revlog_to_items(
     let skip = if training { 1 } else { 0 };
     // Convert the remaining entries into separate FSRSItems, where each item
     // contains all reviews done until then.
-    let items = entries
+    let items: Vec<FSRSItem> = entries
         .iter()
         .enumerate()
         .skip(skip)
@@ -338,7 +347,7 @@ pub(crate) fn single_card_revlog_to_items(
     if items.is_empty() {
         None
     } else {
-        Some((items, revlogs_complete))
+        Some((items, revlogs_complete, entries.len()))
     }
 }

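Callers consume the widened return value differently: the training path reads
the new review_count field, while single_card_revlog_to_item (above) discards
it with `_`. A small sketch of both call sites, with simplified types in place
of the real FSRSItem/RevlogEntry signatures:

fn single_card_items() -> Option<(Vec<u32>, bool, usize)> {
    // (items, revlogs_complete, review_count after filtering)
    Some((vec![1, 2, 3], true, 3))
}

fn main() {
    // Training path: the count feeds the 400-review minimum check.
    if let Some((items, _complete, review_count)) = single_card_items() {
        assert_eq!((items.len(), review_count), (3, 3));
    }
    // Memory-state path: the count is irrelevant, so it is ignored.
    if let Some((mut items, _complete, _)) = single_card_items() {
        let _last = items.pop().unwrap();
    }
}
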
@@ -125,13 +125,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
                 if (computeWeightsProgress) {
                     computeWeightsProgress.current = computeWeightsProgress.total;
                 }
-                if (resp.fsrsItems < 400) {
-                    alert(
-                        tr.deckConfigMustHave400Reviews({ count: resp.fsrsItems }),
-                    );
-                } else {
-                    $config.fsrsWeights = resp.weights;
-                }
+                $config.fsrsWeights = resp.weights;
             },
             (progress) => {
                 if (progress.value.case === "computeWeights") {