V3 parent limits (#1638)

* avoid repinning Rust deps by default

* add id_tree dependency

* Respect intermediate child limits in v3

* Test new behaviour of v3 counts

* Rework v3 queue building to respect parent limits

* Add missing did field to SQL query

* Fix `LimitTreeMap::is_exhausted()`

* Rework tree building logic

https://github.com/ankitects/anki/pull/1638#discussion_r798328734

* Add timer for build_queues()

* `is_exhausted()` -> `limit_reached()`

* Move context and limits into `QueueBuilder`

This allows for moving more logic into QueueBuilder, so less passing
around of arguments. Unfortunately, some tests will require additional
work to set up.
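
For orientation, the reworked builder ends up roughly like this (a condensed sketch of the queue builder changes further down in this diff; visibility modifiers and several details omitted):

```rust
// Condensed sketch, not the full listing from this commit.
pub(super) struct QueueBuilder {
    new: Vec<NewCard>,
    review: Vec<DueCard>,
    learning: Vec<DueCard>,
    day_learning: Vec<DueCard>,
    limits: LimitTreeMap, // remaining per-deck limits, capped by parent decks
    context: Context,
}

/// Data container and helper for building queues.
struct Context {
    timing: SchedTimingToday,
    config_map: HashMap<DeckConfigId, DeckConfig>,
    root_deck: Deck,
    sort_options: QueueSortOptions,
    seen_note_ids: HashMap<NoteId, BuryMode>,
    deck_map: HashMap<DeckId, Deck>,
}
```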

* Fix stop condition in new_cards_by_position

* Fix gather order of new cards by deck

* Add scheduler/queue/builder/burying.rs

* Fix bad tree due to unsorted child decks

* Fix comment

* Fix `cap_new_to_review_rec()`

* Add test for new card gathering

* Always sort `child_decks()`

* Fix deck removal in `cap_new_to_review_rec()`

* Fix sibling ordering in new card gathering

* Remove limits for deck total count with children

* Add random gather order

* Remove bad sibling order handling

All routines ensure ascending order now.
Also do some other minor refactoring.

* Remove queue truncating

All routines stop now as soon as the root limit is reached.
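
The stop condition is implemented by returning `false` from the per-card storage callbacks once the selected (root) deck's limit is exhausted. Condensed from the new gathering code later in this diff:

```rust
// Each storage iterator hands cards to a closure and stops scanning as soon
// as the closure returns false.
col.storage
    .for_each_new_card_in_active_decks(order, |card| {
        if self.limits.root_limit_reached() {
            // the selected deck's limit is used up: stop the scan entirely
            false
        } else {
            if let Some(node_id) = self.limits.remaining_node_id(card.current_deck_id) {
                if self.add_new_card(card) {
                    self.limits.decrement_node_and_parent_limits(&node_id, true);
                }
            }
            // keep scanning; other decks may still have limit left
            true
        }
    })?;
```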

* Move deck fetching into `QueueBuilder::new()`

* Rework new card gather and sort options

https://github.com/ankitects/anki/pull/1638#issuecomment-1032173013

* Disable new sort order choices ...

depending on the selected gather order.

* Use enum instead of numbers

* Ensure valid sort order setting

* Update new gather and sort order tooltips

* Warn about random insertion order with v3

* Revert "Add timer for build_queues()"

This reverts commit c9f5fc6ebe.

* Update rslib/src/storage/card/mod.rs (dae)

* minor wording tweaks to the tooltips (dae)

+ move legacy strings to bottom
+ consistent capitalization (our leech action still needs fixing,
but that will require introducing a new 'suspend card' string as the
existing one is used elsewhere as well)
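
Independent of the implementation details in the diff below, the core rule this PR enforces is that a deck's daily limits also constrain every deck beneath it: a child's remaining limit is capped by its parent when the limit tree is built, and queuing a card consumes limit in its own deck and in every ancestor up to the selected deck. An illustrative, self-contained sketch of that rule (simplified types; not the actual `LimitTreeMap` code from this commit):

```rust
#[derive(Clone, Copy)]
struct Limits {
    new: u32,
    review: u32,
}

/// A child deck can never have more remaining limit than its parent.
fn cap_to(child: &mut Limits, parent: Limits) {
    child.new = child.new.min(parent.new);
    child.review = child.review.min(parent.review);
}

/// `chain` holds the limits of a deck and its ancestors up to the selected
/// root. A new card is only queued if every level still has limit left, and
/// taking it decrements the whole chain.
fn take_new_card(chain: &mut [Limits]) -> bool {
    if chain.iter().all(|l| l.new > 0) {
        for l in chain.iter_mut() {
            l.new -= 1;
        }
        true
    } else {
        false
    }
}
```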
RumovZ committed on 2022-02-10 00:55:43 +01:00 (via GitHub)
parent 4b9b923775
commit d55f080733
34 changed files with 1150 additions and 432 deletions

Cargo.lock (generated)

@ -63,6 +63,7 @@ dependencies = [
  "futures",
  "hex",
  "htmlescape",
+ "id_tree",
  "intl-memoizer",
  "itertools",
  "lazy_static",
@ -1062,6 +1063,15 @@ dependencies = [
  "tokio-native-tls",
 ]
 
+[[package]]
+name = "id_tree"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bcd9db8dd5be8bde5a2624ed4b2dfb74368fe7999eb9c4940fd3ca344b61071a"
+dependencies = [
+ "snowflake",
+]
+
 [[package]]
 name = "idna"
 version = "0.2.3"
@ -2541,6 +2551,12 @@ version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
 
+[[package]]
+name = "snowflake"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "27207bb65232eda1f588cf46db2fee75c0808d557f6b3cf19a75f5d6d7c94df1"
+
 [[package]]
 name = "socket2"
 version = "0.4.2"


@ -147,6 +147,15 @@ alias(
     ],
 )
 
+alias(
+    name = "id_tree",
+    actual = "@raze__id_tree__1_8_0//:id_tree",
+    tags = [
+        "cargo-raze",
+        "manual",
+    ],
+)
+
 alias(
     name = "inflections",
     actual = "@raze__inflections__1_1_1//:inflections",


@ -1,6 +1,6 @@
 This folder integrates Rust crates.io fetching into Bazel.
 
-To update dependencies, ensure a local Rust environment is available
+To add or update dependencies, ensure a local Rust environment is available
 (eg `source scripts/cargo-env`), then install cargo-raze:
 
 ```
@ -8,9 +8,18 @@ cargo install cargo-raze --version 0.14.1
 cargo install cargo-license
 ```
 
-After updating dependencies in ../rslib/Cargo.toml, change to this
-folder and run python update.py to update the external Bazel repositories
-to point to the updated deps.
+After adding/updating dependencies in ../rslib/Cargo.toml, change to this
+folder and run:
+
+$ python update.py
+
+or
+
+$ REPIN=1 python update.py
+
+The former will apply added crates and adjusted version numbers, while leaving
+most crate versions alone. The latter will also update pinned dependencies to their
+latest compatible versions.
 
 Note: cargo-raze does not currently work when run from Windows, and nobody
 has investigated why yet. For now, you'll need a Mac or Linux machine, or


@ -911,6 +911,16 @@ def raze_fetch_remote_crates():
         build_file = Label("//cargo/remote:BUILD.hyper-tls-0.5.0.bazel"),
     )
 
+    maybe(
+        http_archive,
+        name = "raze__id_tree__1_8_0",
+        url = "https://crates.io/api/v1/crates/id_tree/1.8.0/download",
+        type = "tar.gz",
+        sha256 = "bcd9db8dd5be8bde5a2624ed4b2dfb74368fe7999eb9c4940fd3ca344b61071a",
+        strip_prefix = "id_tree-1.8.0",
+        build_file = Label("//cargo/remote:BUILD.id_tree-1.8.0.bazel"),
+    )
+
     maybe(
         http_archive,
         name = "raze__idna__0_2_3",
@ -2291,6 +2301,16 @@ def raze_fetch_remote_crates():
         build_file = Label("//cargo/remote:BUILD.smallvec-1.8.0.bazel"),
     )
 
+    maybe(
+        http_archive,
+        name = "raze__snowflake__1_3_0",
+        url = "https://crates.io/api/v1/crates/snowflake/1.3.0/download",
+        type = "tar.gz",
+        sha256 = "27207bb65232eda1f588cf46db2fee75c0808d557f6b3cf19a75f5d6d7c94df1",
+        strip_prefix = "snowflake-1.3.0",
+        build_file = Label("//cargo/remote:BUILD.snowflake-1.3.0.bazel"),
+    )
+
     maybe(
         http_archive,
         name = "raze__socket2__0_4_2",


@ -737,6 +737,15 @@
         "license_file": null,
         "description": "Default TLS implementation for use with hyper"
     },
+    {
+        "name": "id_tree",
+        "version": "1.8.0",
+        "authors": "Ian Burns <iwburns8@gmail.com>",
+        "repository": "https://github.com/iwburns/id-tree",
+        "license": "MIT",
+        "license_file": null,
+        "description": "A library for creating and modifying Tree structures."
+    },
     {
         "name": "idna",
         "version": "0.2.3",
@ -1835,6 +1844,15 @@
         "license_file": null,
         "description": "'Small vector' optimization: store up to a small number of items on the stack"
     },
+    {
+        "name": "snowflake",
+        "version": "1.3.0",
+        "authors": "Steven Allen <steven@stebalien.com>",
+        "repository": "https://github.com/Stebalien/snowflake",
+        "license": "Apache-2.0 OR MIT",
+        "license_file": null,
+        "description": "A module for generating guaranteed process unique IDs."
+    },
     {
         "name": "socket2",
         "version": "0.4.2",

cargo/remote/BUILD.id_tree-1.8.0.bazel (vendored, new file)

@ -0,0 +1,59 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT"
])
# Generated Targets
# Unsupported target "basic" with type "example" omitted
rust_library(
name = "id_tree",
srcs = glob(["**/*.rs"]),
crate_features = [
],
crate_root = "src/lib.rs",
data = [],
edition = "2015",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=id_tree",
"manual",
],
version = "1.8.0",
# buildifier: leave-alone
deps = [
"@raze__snowflake__1_3_0//:snowflake",
],
)
# Unsupported target "error_tests" with type "test" omitted


@ -0,0 +1,55 @@
"""
@generated
cargo-raze crate build file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
# buildifier: disable=load
load("@bazel_skylib//lib:selects.bzl", "selects")
# buildifier: disable=load
load(
"@rules_rust//rust:defs.bzl",
"rust_binary",
"rust_library",
"rust_proc_macro",
"rust_test",
)
package(default_visibility = [
# Public for visibility by "@raze__crate__version//" targets.
#
# Prefer access through "//cargo", which limits external
# visibility to explicit Cargo.toml dependencies.
"//visibility:public",
])
licenses([
"notice", # MIT from expression "MIT OR Apache-2.0"
])
# Generated Targets
rust_library(
name = "snowflake",
srcs = glob(["**/*.rs"]),
crate_features = [
"default",
],
crate_root = "src/lib.rs",
data = [],
edition = "2015",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
"crate-name=snowflake",
"manual",
],
version = "1.3.0",
# buildifier: leave-alone
deps = [
],
)


@ -162,7 +162,8 @@ def stage_commit():
     )
 
-update_cargo_lock()
+if os.getenv("REPIN"):
+    update_cargo_lock()
 run_cargo_raze()
 write_licenses()
 update_crates_bzl()


@ -60,6 +60,9 @@ deck-config-new-insertion-order-tooltip =
     this option will automatically update the existing position of new cards.
 deck-config-new-insertion-order-sequential = Sequential (oldest cards first)
 deck-config-new-insertion-order-random = Random
+deck-config-new-insertion-order-random-with-v3 =
+    With the V3 scheduler, leaving this set to sequential, and adjusting the
+    order in the Display Order section is recommended.
 
 ## Lapses section
 
@ -94,29 +97,52 @@ deck-config-bury-tooltip =
 deck-config-ordering-title = Display Order
 deck-config-new-gather-priority = New card gather order
-deck-config-new-gather-priority-tooltip =
-    `Deck`: gathers cards from each subdeck in order, and stops when the
-    limit of the selected deck has been exceeded. This is faster, and allows you
-    to prioritize subdecks that are closer to the top.
-
-    `Position`: gathers cards from all decks before they are sorted. This
-    ensures cards appear in strict position (due #) order, even if the parent limit is
-    not high enough to see cards from all decks.
+deck-config-new-gather-priority-tooltip-2 =
+    `Deck`: gathers cards from each deck in order, starting from the top. Cards from each deck are
+    gathered in ascending position. If the daily limit of the selected deck is reached, gathering
+    may stop before all decks have been checked. This order is fastest in large collections, and
+    allows you to prioritize subdecks that are closer to the top.
+
+    `Ascending position`: gathers cards by ascending position (due #), which is typically
+    the oldest-added first.
+
+    `Descending position`: gathers cards by descending position (due #), which is typically
+    the latest-added first.
+
+    `Random notes`: gathers cards of randomly selected notes. When sibling burying is
+    disabled, this allows all cards of a note to be seen in a session (eg. both a front->back
+    and back->front card)
+
+    `Random cards`: gathers cards completely randomly.
 deck-config-new-gather-priority-deck = Deck
 deck-config-new-gather-priority-position-lowest-first = Ascending position
 deck-config-new-gather-priority-position-highest-first = Descending position
+deck-config-new-gather-priority-random-notes = Random notes
+deck-config-new-gather-priority-random-cards = Random cards
 deck-config-new-card-sort-order = New card sort order
-deck-config-new-card-sort-order-tooltip =
-    How cards are sorted after they have been gathered. By default, Anki sorts
-    by card template first, to avoid multiple cards of the same note from being
-    shown in succession.
+deck-config-new-card-sort-order-tooltip-2 =
+    `Card template`: Displays cards in card template order. If you have sibling burying
+    disabled, this will ensure all front->back cards are seen before any back->front cards.
+
+    `Order gathered`: Shows cards exactly as they were gathered. If sibling burying is disabled,
+    this will typically result in all cards of a note being seen one after the other.
+
+    `Card template, then random`: Like `Card template`, but shuffles the cards of each
+    template. When combined with an ascending position gather order, this can be used to show
+    the oldest cards in a random order for example.
+
+    `Random note, then card template`: Picks notes at random, then shows all of their siblings
+    in order.
+
+    `Random`: Fully shuffles the gathered cards.
 deck-config-sort-order-card-template-then-lowest-position = Card template, then ascending position
 deck-config-sort-order-card-template-then-highest-position = Card template, then descending position
 deck-config-sort-order-card-template-then-random = Card template, then random
+deck-config-sort-order-random-note-then-template = Random note, then card template
 deck-config-sort-order-lowest-position = Ascending position
 deck-config-sort-order-highest-position = Descending position
 deck-config-sort-order-random = Random
-deck-config-sort-order-template-then-gather = Card template, then order gathered
+deck-config-sort-order-template-then-gather = Card template
 deck-config-sort-order-gather = Order gathered
 deck-config-new-review-priority = New/review order
 deck-config-new-review-priority-tooltip = When to show new cards in relation to review cards.
@ -242,3 +268,18 @@ deck-config-relearning-steps-above-minimum-interval = The minimum lapse interval
 
 ## Selecting a deck
 
 deck-config-which-deck = Which deck would you like?
+
+## NO NEED TO TRANSLATE. These strings have been replaced with new versions, and will be removed in the future.
+
+deck-config-new-card-sort-order-tooltip =
+    How cards are sorted after they have been gathered. By default, Anki sorts
+    by card template first, to avoid multiple cards of the same note from being
+    shown in succession.
+deck-config-new-gather-priority-tooltip =
+    `Deck`: gathers cards from each subdeck in order, and stops when the
+    limit of the selected deck has been exceeded. This is faster, and allows you
+    to prioritize subdecks that are closer to the top.
+
+    `Position`: gathers cards from all decks before they are sorted. This
+    ensures cards appear in strict position (due #) order, even if the parent limit is
+    not high enough to see cards from all decks.


@ -32,21 +32,33 @@ message DeckConfig {
       NEW_CARD_INSERT_ORDER_RANDOM = 1;
     }
     enum NewCardGatherPriority {
+      // Decks in alphabetical order (preorder), then ascending position.
+      // Siblings are consecutive, provided they have the same position.
       NEW_CARD_GATHER_PRIORITY_DECK = 0;
+      // Ascending position.
+      // Siblings are consecutive, provided they have the same position.
       NEW_CARD_GATHER_PRIORITY_LOWEST_POSITION = 1;
+      // Descending position.
+      // Siblings are consecutive, provided they have the same position.
       NEW_CARD_GATHER_PRIORITY_HIGHEST_POSITION = 2;
+      // Siblings are consecutive.
+      NEW_CARD_GATHER_PRIORITY_RANDOM_NOTES = 3;
+      // Siblings are neither grouped nor ordered.
+      NEW_CARD_GATHER_PRIORITY_RANDOM_CARDS = 4;
     }
     enum NewCardSortOrder {
-      NEW_CARD_SORT_ORDER_TEMPLATE_THEN_LOWEST_POSITION = 0;
-      NEW_CARD_SORT_ORDER_TEMPLATE_THEN_HIGHEST_POSITION = 1;
+      // Ascending card template ordinal.
+      // For a given ordinal, cards appear in gather order.
+      NEW_CARD_SORT_ORDER_TEMPLATE = 0;
+      // Preserves original gather order (eg deck order).
+      NEW_CARD_SORT_ORDER_NO_SORT = 1;
+      // Ascending card template ordinal.
+      // For a given ordinal, cards appear in random order.
       NEW_CARD_SORT_ORDER_TEMPLATE_THEN_RANDOM = 2;
-      NEW_CARD_SORT_ORDER_LOWEST_POSITION = 3;
-      NEW_CARD_SORT_ORDER_HIGHEST_POSITION = 4;
-      NEW_CARD_SORT_ORDER_RANDOM = 5;
-      // Sorts by template, preserving original gather order.
-      NEW_CARD_SORT_ORDER_TEMPLATE_ONLY = 6;
-      // Preserves original gather order (eg deck order)
-      NEW_CARD_SORT_ORDER_NO_SORT = 7;
+      // Random note order. For a given note, cards appear in template order.
+      NEW_CARD_SORT_ORDER_RANDOM_NOTE_THEN_TEMPLATE = 3;
+      // Fully randomized order.
+      NEW_CARD_SORT_ORDER_RANDOM_CARD = 4;
     }
     enum ReviewCardOrder {
       REVIEW_CARD_ORDER_DAY = 0;


@ -147,6 +147,7 @@ message DeckTreeNode {
   uint32 review_uncapped = 12;
   uint32 total_in_deck = 13;
+  // with children, without any limits
   uint32 total_including_children = 14;
 
   bool filtered = 16;


@ -147,6 +147,15 @@ alias(
     ],
 )
 
+alias(
+    name = "id_tree",
+    actual = "@raze__id_tree__1_8_0//:id_tree",
+    tags = [
+        "cargo-raze",
+        "manual",
+    ],
+)
+
 alias(
     name = "inflections",
     actual = "@raze__inflections__1_1_1//:inflections",


@ -1146,18 +1146,11 @@ def test_deckFlow():
         col.addNote(note)
     col.reset()
     assert col.sched.counts() == (3, 0, 0)
-    if is_2021():
-        # cards arrive in position order by default
-        for i in "one", "two", "three":
-            c = col.sched.getCard()
-            assert c.note()["Front"] == i
-            col.sched.answerCard(c, 3)
-    else:
-        # should get top level one first, then ::1, then ::2
-        for i in "one", "three", "two":
-            c = col.sched.getCard()
-            assert c.note()["Front"] == i
-            col.sched.answerCard(c, 3)
+    # should get top level one first, then ::1, then ::2
+    for i in "one", "three", "two":
+        c = col.sched.getCard()
+        assert c.note()["Front"] == i
+        col.sched.answerCard(c, 3)
 
 
 def test_reorder():


@ -82,6 +82,7 @@ rust_library(
         "//rslib/cargo:futures",
         "//rslib/cargo:hex",
         "//rslib/cargo:htmlescape",
+        "//rslib/cargo:id_tree",
         "//rslib/cargo:intl_memoizer",
         "//rslib/cargo:itertools",
         "//rslib/cargo:lazy_static",


@ -96,3 +96,4 @@ strum = { version = "0.23.0", features = ["derive"] }
 tokio-util = { version = "0.6.8", features = ["io"] }
 pct-str = { git="https://github.com/timothee-haudebourg/pct-str.git", rev="4adccd8d4a222ab2672350a102f06ae832a0572d" }
 unic-ucd-category = "0.9.0"
+id_tree = "1.8.0"


@ -147,6 +147,15 @@ alias(
     ],
 )
 
+alias(
+    name = "id_tree",
+    actual = "@raze__id_tree__1_8_0//:id_tree",
+    tags = [
+        "cargo-raze",
+        "manual",
+    ],
+)
+
 alias(
     name = "inflections",
     actual = "@raze__inflections__1_1_1//:inflections",


@ -147,6 +147,15 @@ alias(
     ],
 )
 
+alias(
+    name = "id_tree",
+    actual = "@raze__id_tree__1_8_0//:id_tree",
+    tags = [
+        "cargo-raze",
+        "manual",
+    ],
+)
+
 alias(
     name = "inflections",
     actual = "@raze__inflections__1_1_1//:inflections",


@ -147,6 +147,15 @@ alias(
     ],
 )
 
+alias(
+    name = "id_tree",
+    actual = "@raze__id_tree__1_8_0//:id_tree",
+    tags = [
+        "cargo-raze",
+        "manual",
+    ],
+)
+
 alias(
     name = "inflections",
     actual = "@raze__inflections__1_1_1//:inflections",


@ -147,6 +147,15 @@ alias(
     ],
 )
 
+alias(
+    name = "id_tree",
+    actual = "@raze__id_tree__1_8_0//:id_tree",
+    tags = [
+        "cargo-raze",
+        "manual",
+    ],
+)
+
 alias(
     name = "inflections",
     actual = "@raze__inflections__1_1_1//:inflections",


@ -63,7 +63,7 @@ impl Default for DeckConfig {
                 graduating_interval_easy: 4,
                 new_card_insert_order: NewCardInsertOrder::Due as i32,
                 new_card_gather_priority: NewCardGatherPriority::Deck as i32,
-                new_card_sort_order: NewCardSortOrder::TemplateThenLowestPosition as i32,
+                new_card_sort_order: NewCardSortOrder::Template as i32,
                 review_order: ReviewCardOrder::Day as i32,
                 new_mix: ReviewMix::MixWithReviews as i32,
                 interday_learning_mix: ReviewMix::MixWithReviews as i32,


@ -1,7 +1,9 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use std::collections::HashMap;
+use std::{collections::HashMap, iter::Peekable};
+
+use id_tree::{InsertBehavior, Node, NodeId, Tree};
 
 use super::Deck;
 use crate::{
@ -67,3 +69,203 @@ pub(crate) fn remaining_limits_map<'a>(
         })
         .collect()
 }
/// Wrapper of [RemainingLimits] with some additional meta data.
#[derive(Debug, Clone, Copy)]
struct NodeLimits {
deck_id: DeckId,
/// absolute level in the deck hierarchy
level: usize,
limits: RemainingLimits,
}
impl NodeLimits {
fn new(deck: &Deck, config: &HashMap<DeckConfigId, DeckConfig>, today: u32) -> Self {
Self {
deck_id: deck.id,
level: deck.name.components().count(),
limits: RemainingLimits::new(
deck,
deck.config_id().and_then(|id| config.get(&id)),
today,
true,
),
}
}
}
#[derive(Debug, Clone)]
pub(crate) struct LimitTreeMap {
/// A tree representing the remaining limits of the active deck hierarchy.
//
// As long as we never (1) allow a tree without a root, (2) remove nodes,
// and (3) have more than 1 tree, it's safe to unwrap on Tree::get() and
// Tree::root_node_id(), even if we clone Nodes.
tree: Tree<NodeLimits>,
/// A map to access the tree node of a deck. Only decks with a remaining
/// limit above zero are included.
map: HashMap<DeckId, NodeId>,
}
impl LimitTreeMap {
/// Child [Deck]s must be sorted by name.
pub(crate) fn build(
root_deck: &Deck,
child_decks: Vec<Deck>,
config: &HashMap<DeckConfigId, DeckConfig>,
today: u32,
) -> Self {
let root_limits = NodeLimits::new(root_deck, config, today);
let mut tree = Tree::new();
let root_id = tree
.insert(Node::new(root_limits), InsertBehavior::AsRoot)
.unwrap();
let mut map = HashMap::new();
map.insert(root_deck.id, root_id.clone());
let mut limits = Self { tree, map };
let mut remaining_decks = child_decks.into_iter().peekable();
limits.add_child_nodes(root_id, &mut remaining_decks, config, today);
limits
}
/// Recursively appends descendants to the provided parent [Node], and adds
/// them to the [HashMap].
/// Given [Deck]s are assumed to arrive in depth-first order.
/// The tree-from-deck-list logic is taken from [crate::decks::tree::add_child_nodes].
fn add_child_nodes(
&mut self,
parent_node_id: NodeId,
remaining_decks: &mut Peekable<impl Iterator<Item = Deck>>,
config: &HashMap<DeckConfigId, DeckConfig>,
today: u32,
) {
let parent = *self.tree.get(&parent_node_id).unwrap().data();
while let Some(deck) = remaining_decks.peek() {
match deck.name.components().count() {
l if l <= parent.level => {
// next item is at a higher level
break;
}
l if l == parent.level + 1 => {
// next item is an immediate descendent of parent
self.insert_child_node(deck, parent_node_id.clone(), config, today);
remaining_decks.next();
}
_ => {
// next item is at a lower level
if let Some(last_child_node_id) = self
.tree
.get(&parent_node_id)
.unwrap()
.children()
.last()
.cloned()
{
self.add_child_nodes(last_child_node_id, remaining_decks, config, today)
} else {
// immediate parent is missing, skip the deck until a DB check is run
remaining_decks.next();
}
}
}
}
}
fn insert_child_node(
&mut self,
child_deck: &Deck,
parent_node_id: NodeId,
config: &HashMap<DeckConfigId, DeckConfig>,
today: u32,
) {
let mut child_limits = NodeLimits::new(child_deck, config, today);
child_limits
.limits
.cap_to(self.tree.get(&parent_node_id).unwrap().data().limits);
let child_node_id = self
.tree
.insert(
Node::new(child_limits),
InsertBehavior::UnderNode(&parent_node_id),
)
.unwrap();
if child_limits.limits.review > 0 {
self.map.insert(child_deck.id, child_node_id);
}
}
pub(crate) fn root_limit_reached(&self) -> bool {
self.map.is_empty()
}
pub(crate) fn limit_reached(&self, deck_id: DeckId) -> bool {
self.map.get(&deck_id).is_none()
}
pub(crate) fn active_decks(&self) -> Vec<DeckId> {
self.tree
.traverse_pre_order(self.tree.root_node_id().unwrap())
.unwrap()
.map(|node| node.data().deck_id)
.collect()
}
pub(crate) fn remaining_node_id(&self, deck_id: DeckId) -> Option<NodeId> {
self.map.get(&deck_id).map(Clone::clone)
}
pub(crate) fn decrement_node_and_parent_limits(&mut self, node_id: &NodeId, new: bool) {
let node = self.tree.get_mut(node_id).unwrap();
let parent = node.parent().cloned();
let limit = &mut node.data_mut().limits;
if if new {
limit.new = limit.new.saturating_sub(1);
limit.new
} else {
limit.review = limit.review.saturating_sub(1);
limit.review
} == 0
{
self.remove_node_and_descendants_from_map(node_id);
};
if let Some(parent_id) = parent {
self.decrement_node_and_parent_limits(&parent_id, new)
}
}
pub(crate) fn remove_node_and_descendants_from_map(&mut self, node_id: &NodeId) {
let node = self.tree.get(node_id).unwrap();
self.map.remove(&node.data().deck_id);
for child_id in node.children().clone() {
self.remove_node_and_descendants_from_map(&child_id);
}
}
pub(crate) fn cap_new_to_review(&mut self) {
self.cap_new_to_review_rec(&self.tree.root_node_id().unwrap().clone(), 9999);
}
fn cap_new_to_review_rec(&mut self, node_id: &NodeId, parent_limit: u32) {
let node = self.tree.get_mut(node_id).unwrap();
let mut limits = &mut node.data_mut().limits;
limits.new = limits.new.min(limits.review).min(parent_limit);
// clone because of borrowing rules
let node_limit = limits.new;
let children = node.children().clone();
if node_limit == 0 {
self.remove_node_and_descendants_from_map(node_id);
}
for child_id in children {
self.cap_new_to_review_rec(&child_id, node_limit);
}
}
}


@ -203,33 +203,30 @@ fn sum_counts_and_apply_limits_v3(
         .copied()
         .unwrap_or_default();
 
-    // cap current node's own cards
-    let this_node_uncapped = NodeCountsV3 {
+    // initialize with this node's values
+    let mut this_node_uncapped = NodeCountsV3 {
         new: node.new_count,
         review: node.review_count,
         intraday_learning: node.intraday_learning,
         interday_learning: node.interday_learning_uncapped,
         total: node.total_in_deck,
     };
-    let mut individually_capped_total = this_node_uncapped.capped(&remaining);
+    let mut total_including_children = node.total_in_deck;
 
-    // and add the capped values from child decks
+    // add capped child counts / uncapped total
     for child in &mut node.children {
-        individually_capped_total += sum_counts_and_apply_limits_v3(child, limits);
+        this_node_uncapped += sum_counts_and_apply_limits_v3(child, limits);
+        total_including_children += child.total_including_children;
     }
-    node.total_including_children = individually_capped_total.total;
 
-    // We already have a sum of the current deck's capped cards+its child decks'
-    // capped cards, which we'll return to the parent. But because clicking on a
-    // given deck imposes that deck's limits on the total number of cards shown,
-    // the sum we'll display needs to be capped again by the limits of the current
-    // deck.
-    let total_constrained_by_current_deck = individually_capped_total.capped(&remaining);
-    node.new_count = total_constrained_by_current_deck.new;
-    node.review_count = total_constrained_by_current_deck.review;
-    node.learn_count = total_constrained_by_current_deck.intraday_learning
-        + total_constrained_by_current_deck.interday_learning;
+    let this_node_capped = this_node_uncapped.capped(&remaining);
+    node.new_count = this_node_capped.new;
+    node.review_count = this_node_capped.review;
+    node.learn_count = this_node_capped.intraday_learning + this_node_capped.interday_learning;
+    node.total_including_children = total_including_children;
 
-    individually_capped_total
+    this_node_capped
 }
 
 fn hide_default_deck(node: &mut DeckTreeNode) {
@ -475,4 +472,50 @@ mod test {
         Ok(())
     }
 
#[test]
fn nested_counts_v3() -> Result<()> {
fn create_deck_with_new_limit(col: &mut Collection, name: &str, new_limit: u32) -> Deck {
let mut deck = col.get_or_create_normal_deck(name).unwrap();
let mut conf = DeckConfig::default();
conf.inner.new_per_day = new_limit;
col.add_or_update_deck_config(&mut conf).unwrap();
deck.normal_mut().unwrap().config_id = conf.id.0;
col.add_or_update_deck(&mut deck).unwrap();
deck
}
let mut col = open_test_collection();
col.set_config_bool(BoolKey::Sched2021, true, false)?;
let parent_deck = create_deck_with_new_limit(&mut col, "Default", 8);
let child_deck = create_deck_with_new_limit(&mut col, "Default::child", 4);
let grandchild_1 = create_deck_with_new_limit(&mut col, "Default::child::grandchild_1", 2);
let grandchild_2 = create_deck_with_new_limit(&mut col, "Default::child::grandchild_2", 1);
// add 2 new cards to each deck
let nt = col.get_notetype_by_name("Cloze")?.unwrap();
let mut note = nt.new_note();
note.set_field(0, "{{c1::}} {{c2::}}")?;
col.add_note(&mut note, parent_deck.id)?;
note.id.0 = 0;
col.add_note(&mut note, child_deck.id)?;
note.id.0 = 0;
col.add_note(&mut note, grandchild_1.id)?;
note.id.0 = 0;
col.add_note(&mut note, grandchild_2.id)?;
let parent = &col.deck_tree(Some(TimestampSecs::now()), None)?.children[0];
// grandchildren: own cards, limited by own new limits
assert_eq!(parent.children[0].children[0].new_count, 2);
assert_eq!(parent.children[0].children[1].new_count, 1);
// child: cards from self and children, limited by own new limit
assert_eq!(parent.children[0].new_count, 4);
// parent: cards from self and all subdecks, all limits in the hierarchy are respected
assert_eq!(parent.new_count, 6);
assert_eq!(parent.total_including_children, 8);
assert_eq!(parent.total_in_deck, 2);
Ok(())
}
} }


@ -0,0 +1,76 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{BuryMode, Context, DueCard, NewCard, QueueBuilder};
use crate::prelude::*;
pub(super) enum DueOrNewCard {
Due(DueCard),
New(NewCard),
}
impl DueOrNewCard {
fn original_deck_id(&self) -> DeckId {
match self {
Self::Due(card) => card.original_deck_id.or(card.current_deck_id),
Self::New(card) => card.original_deck_id.or(card.current_deck_id),
}
}
fn note_id(&self) -> NoteId {
match self {
Self::Due(card) => card.note_id,
Self::New(card) => card.note_id,
}
}
}
impl From<DueCard> for DueOrNewCard {
fn from(card: DueCard) -> DueOrNewCard {
DueOrNewCard::Due(card)
}
}
impl From<NewCard> for DueOrNewCard {
fn from(card: NewCard) -> DueOrNewCard {
DueOrNewCard::New(card)
}
}
impl Context {
pub(super) fn bury_mode(&self, deck_id: DeckId) -> BuryMode {
self.deck_map
.get(&deck_id)
.and_then(|deck| deck.config_id())
.and_then(|config_id| self.config_map.get(&config_id))
.map(|config| BuryMode {
bury_new: config.inner.bury_new,
bury_reviews: config.inner.bury_reviews,
})
.unwrap_or_default()
}
}
impl QueueBuilder {
/// If burying is enabled in `new_settings`, existing entry will be updated.
/// Returns a copy made before changing the entry, so that a card with burying
/// enabled will bury future siblings, but not itself.
pub(super) fn get_and_update_bury_mode_for_note(
&mut self,
card: DueOrNewCard,
) -> Option<BuryMode> {
let mut previous_mode = None;
let new_mode = self.context.bury_mode(card.original_deck_id());
self.context
.seen_note_ids
.entry(card.note_id())
.and_modify(|entry| {
previous_mode = Some(*entry);
entry.bury_new |= new_mode.bury_new;
entry.bury_reviews |= new_mode.bury_reviews;
})
.or_insert(new_mode);
previous_mode
}
}


@ -1,23 +1,127 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use super::{BuryMode, DueCard, NewCard, QueueBuilder};
-use crate::{prelude::*, scheduler::queue::DueCardKind};
+use super::{DueCard, NewCard, QueueBuilder};
+use crate::{
+    deckconfig::NewCardGatherPriority, prelude::*, scheduler::queue::DueCardKind,
+    storage::card::NewCardSorting,
+};
 
 impl QueueBuilder {
-    pub(in super::super) fn add_intraday_learning_card(
+    pub(super) fn gather_cards(&mut self, col: &mut Collection) -> Result<()> {
self.gather_intraday_learning_cards(col)?;
self.gather_due_cards(col, DueCardKind::Learning)?;
self.gather_due_cards(col, DueCardKind::Review)?;
self.limits.cap_new_to_review();
self.gather_new_cards(col)?;
Ok(())
}
fn gather_intraday_learning_cards(&mut self, col: &mut Collection) -> Result<()> {
col.storage.for_each_intraday_card_in_active_decks(
self.context.timing.next_day_at,
|card| {
self.get_and_update_bury_mode_for_note(card.into());
self.learning.push(card);
},
)?;
Ok(())
}
fn gather_due_cards(&mut self, col: &mut Collection, kind: DueCardKind) -> Result<()> {
if !self.limits.root_limit_reached() {
col.storage.for_each_due_card_in_active_decks(
self.context.timing.days_elapsed,
self.context.sort_options.review_order,
kind,
|card| {
if self.limits.root_limit_reached() {
false
} else {
if let Some(node_id) = self.limits.remaining_node_id(card.current_deck_id) {
if self.add_due_card(card) {
self.limits
.decrement_node_and_parent_limits(&node_id, false);
}
}
true
}
},
)?;
}
Ok(())
}
fn gather_new_cards(&mut self, col: &mut Collection) -> Result<()> {
match self.context.sort_options.new_gather_priority {
NewCardGatherPriority::Deck => self.gather_new_cards_by_deck(col),
NewCardGatherPriority::LowestPosition => {
self.gather_new_cards_sorted(col, NewCardSorting::LowestPosition)
}
NewCardGatherPriority::HighestPosition => {
self.gather_new_cards_sorted(col, NewCardSorting::HighestPosition)
}
NewCardGatherPriority::RandomNotes => self.gather_new_cards_sorted(
col,
NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
),
NewCardGatherPriority::RandomCards => self.gather_new_cards_sorted(
col,
NewCardSorting::RandomCards(self.context.timing.days_elapsed),
),
}
}
fn gather_new_cards_by_deck(&mut self, col: &mut Collection) -> Result<()> {
for deck_id in self.limits.active_decks() {
if self.limits.root_limit_reached() {
break;
}
if !self.limits.limit_reached(deck_id) {
col.storage.for_each_new_card_in_deck(deck_id, |card| {
if let Some(node_id) = self.limits.remaining_node_id(deck_id) {
if self.add_new_card(card) {
self.limits.decrement_node_and_parent_limits(&node_id, true);
}
true
} else {
false
}
})?;
}
}
Ok(())
}
fn gather_new_cards_sorted(
&mut self, &mut self,
card: DueCard, col: &mut Collection,
bury_mode: BuryMode, order: NewCardSorting,
) { ) -> Result<()> {
self.get_and_update_bury_mode_for_note(card.note_id, bury_mode); col.storage
self.learning.push(card); .for_each_new_card_in_active_decks(order, |card| {
if self.limits.root_limit_reached() {
false
} else {
if let Some(node_id) = self.limits.remaining_node_id(card.current_deck_id) {
if self.add_new_card(card) {
self.limits.decrement_node_and_parent_limits(&node_id, true);
}
}
true
}
})?;
Ok(())
} }
/// True if limit should be decremented. /// True if limit should be decremented.
pub(in super::super) fn add_due_card(&mut self, card: DueCard, bury_mode: BuryMode) -> bool { fn add_due_card(&mut self, card: DueCard) -> bool {
let bury_this_card = self let bury_this_card = self
.get_and_update_bury_mode_for_note(card.note_id, bury_mode) .get_and_update_bury_mode_for_note(card.into())
.map(|mode| mode.bury_reviews) .map(|mode| mode.bury_reviews)
.unwrap_or_default(); .unwrap_or_default();
if bury_this_card { if bury_this_card {
@ -33,157 +137,17 @@ impl QueueBuilder {
} }
// True if limit should be decremented. // True if limit should be decremented.
pub(in super::super) fn add_new_card(&mut self, card: NewCard, bury_mode: BuryMode) -> bool { fn add_new_card(&mut self, card: NewCard) -> bool {
let previous_bury_mode = self let bury_this_card = self
.get_and_update_bury_mode_for_note(card.note_id, bury_mode) .get_and_update_bury_mode_for_note(card.into())
.map(|mode| mode.bury_new); .map(|mode| mode.bury_new)
.unwrap_or_default();
// no previous siblings seen? // no previous siblings seen?
if previous_bury_mode.is_none() { if bury_this_card {
self.new.push(card); false
return true;
}
let bury_this_card = previous_bury_mode.unwrap();
// Cards will be arriving in (due, card_id) order, with all
// siblings sharing the same due number by default. In the
// common case, card ids will match template order, and nothing
// special is required. But if some cards have been generated
// after the initial note creation, they will have higher card
// ids, and the siblings will thus arrive in the wrong order.
// Sorting by ordinal in the DB layer is fairly costly, as it
// doesn't allow us to exit early when the daily limits have
// been met, so we want to enforce ordering as we add instead.
let previous_card_was_sibling_with_higher_ordinal = self
.new
.last()
.map(|previous| {
previous.note_id == card.note_id && previous.template_index > card.template_index
})
.unwrap_or(false);
if previous_card_was_sibling_with_higher_ordinal {
if bury_this_card {
// When burying is enabled, we replace the existing sibling
// with the lower ordinal one, and skip decrementing the limit.
*self.new.last_mut().unwrap() = card;
false
} else {
// When burying disabled, we'll want to add this card as well, but we
// need to insert it in front of the later-ordinal card(s).
let target_idx = self
.new
.iter()
.enumerate()
.rev()
.filter_map(|(idx, queued_card)| {
if queued_card.note_id != card.note_id
|| queued_card.template_index < card.template_index
{
Some(idx + 1)
} else {
None
}
})
.next()
.unwrap_or(0);
self.new.insert(target_idx, card);
true
}
} else { } else {
// card has arrived in expected order - add if burying disabled self.new.push(card);
if bury_this_card { true
false
} else {
self.new.push(card);
true
}
} }
} }
/// If burying is enabled in `new_settings`, existing entry will be updated.
/// Returns a copy made before changing the entry, so that a card with burying
/// enabled will bury future siblings, but not itself.
fn get_and_update_bury_mode_for_note(
&mut self,
note_id: NoteId,
new_mode: BuryMode,
) -> Option<BuryMode> {
let mut previous_mode = None;
self.seen_note_ids
.entry(note_id)
.and_modify(|entry| {
previous_mode = Some(*entry);
entry.bury_new |= new_mode.bury_new;
entry.bury_reviews |= new_mode.bury_reviews;
})
.or_insert(new_mode);
previous_mode
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn new_siblings() {
let mut builder = QueueBuilder::default();
let cards = vec![
NewCard {
id: CardId(1),
note_id: NoteId(1),
template_index: 0,
..Default::default()
},
NewCard {
id: CardId(2),
note_id: NoteId(2),
template_index: 1,
..Default::default()
},
NewCard {
id: CardId(3),
note_id: NoteId(2),
template_index: 2,
..Default::default()
},
NewCard {
id: CardId(4),
note_id: NoteId(2),
// lowest ordinal, should be used instead of card 2/3
template_index: 0,
..Default::default()
},
];
for card in &cards {
builder.add_new_card(
card.clone(),
BuryMode {
bury_new: true,
..Default::default()
},
);
}
assert_eq!(builder.new[0].id, CardId(1));
assert_eq!(builder.new[1].id, CardId(4));
assert_eq!(builder.new.len(), 2);
// with burying disabled, we should get all siblings in order
let mut builder = QueueBuilder::default();
for card in &cards {
builder.add_new_card(card.clone(), Default::default());
}
assert_eq!(builder.new[0].id, CardId(1));
assert_eq!(builder.new[1].id, CardId(4));
assert_eq!(builder.new[2].id, CardId(2));
assert_eq!(builder.new[3].id, CardId(3));
}
} }


@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+mod burying;
 mod gathering;
 pub(crate) mod intersperser;
 pub(crate) mod sized_chain;
@ -14,12 +15,13 @@ use sized_chain::SizedChain;
 use super::{CardQueues, Counts, LearningQueueEntry, MainQueueEntry, MainQueueEntryKind};
 use crate::{
     deckconfig::{NewCardGatherPriority, NewCardSortOrder, ReviewCardOrder, ReviewMix},
-    decks::limits::{remaining_limits_map, RemainingLimits},
+    decks::limits::LimitTreeMap,
     prelude::*,
+    scheduler::timing::SchedTimingToday,
 };
 
 /// Temporary holder for review cards that will be built into a queue.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Copy)]
 pub(crate) struct DueCard {
     pub id: CardId,
     pub note_id: NoteId,
@ -37,12 +39,12 @@ pub(crate) enum DueCardKind {
 }
 
 /// Temporary holder for new cards that will be built into a queue.
-#[derive(Debug, Default, Clone)]
+#[derive(Debug, Default, Clone, Copy)]
 pub(crate) struct NewCard {
     pub id: CardId,
     pub note_id: NoteId,
     pub mtime: TimestampSecs,
-    pub due: i32,
+    pub current_deck_id: DeckId,
     pub original_deck_id: DeckId,
     pub template_index: u32,
     pub hash: u64,
@ -98,30 +100,55 @@ pub(super) struct QueueSortOptions {
pub(super) new_review_mix: ReviewMix, pub(super) new_review_mix: ReviewMix,
} }
#[derive(Default)] #[derive(Debug, Clone)]
pub(super) struct QueueBuilder { pub(super) struct QueueBuilder {
pub(super) new: Vec<NewCard>, pub(super) new: Vec<NewCard>,
pub(super) review: Vec<DueCard>, pub(super) review: Vec<DueCard>,
pub(super) learning: Vec<DueCard>, pub(super) learning: Vec<DueCard>,
pub(super) day_learning: Vec<DueCard>, pub(super) day_learning: Vec<DueCard>,
pub(super) seen_note_ids: HashMap<NoteId, BuryMode>, limits: LimitTreeMap,
pub(super) sort_options: QueueSortOptions, context: Context,
}
/// Data container and helper for building queues.
#[derive(Debug, Clone)]
struct Context {
timing: SchedTimingToday,
config_map: HashMap<DeckConfigId, DeckConfig>,
root_deck: Deck,
sort_options: QueueSortOptions,
seen_note_ids: HashMap<NoteId, BuryMode>,
deck_map: HashMap<DeckId, Deck>,
} }
impl QueueBuilder { impl QueueBuilder {
pub(super) fn new(sort_options: QueueSortOptions) -> Self { pub(super) fn new(col: &mut Collection, deck_id: DeckId) -> Result<Self> {
QueueBuilder { let timing = col.timing_for_timestamp(TimestampSecs::now())?;
sort_options, let config_map = col.storage.get_deck_config_map()?;
..Default::default() let root_deck = col.storage.get_deck(deck_id)?.ok_or(AnkiError::NotFound)?;
} let child_decks = col.storage.child_decks(&root_deck)?;
let limits = LimitTreeMap::build(&root_deck, child_decks, &config_map, timing.days_elapsed);
let sort_options = sort_options(&root_deck, &config_map);
let deck_map = col.storage.get_decks_map()?;
Ok(QueueBuilder {
new: Vec::new(),
review: Vec::new(),
learning: Vec::new(),
day_learning: Vec::new(),
limits,
context: Context {
timing,
config_map,
root_deck,
sort_options,
seen_note_ids: HashMap::new(),
deck_map,
},
})
} }
pub(super) fn build( pub(super) fn build(mut self, learn_ahead_secs: i64) -> CardQueues {
mut self,
top_deck_limits: RemainingLimits,
learn_ahead_secs: i64,
current_day: u32,
) -> CardQueues {
self.sort_new(); self.sort_new();
// intraday learning and total learn count // intraday learning and total learn count
@ -134,22 +161,19 @@ impl QueueBuilder {
.count() .count()
+ self.day_learning.len(); + self.day_learning.len();
// cap and note down review + new counts
self.review.truncate(top_deck_limits.review as usize);
let review_count = self.review.len(); let review_count = self.review.len();
self.new.truncate(top_deck_limits.new as usize);
let new_count = self.new.len(); let new_count = self.new.len();
// merge interday and new cards into main // merge interday and new cards into main
let with_interday_learn = merge_day_learning( let with_interday_learn = merge_day_learning(
self.review, self.review,
self.day_learning, self.day_learning,
self.sort_options.day_learn_mix, self.context.sort_options.day_learn_mix,
); );
let main_iter = merge_new( let main_iter = merge_new(
with_interday_learn, with_interday_learn,
self.new, self.new,
self.sort_options.new_review_mix, self.context.sort_options.new_review_mix,
); );
CardQueues { CardQueues {
@ -161,13 +185,32 @@ impl QueueBuilder {
main: main_iter.collect(), main: main_iter.collect(),
intraday_learning, intraday_learning,
learn_ahead_secs, learn_ahead_secs,
current_day, current_day: self.context.timing.days_elapsed,
build_time: TimestampMillis::now(), build_time: TimestampMillis::now(),
current_learning_cutoff: now, current_learning_cutoff: now,
} }
} }
} }
fn sort_options(deck: &Deck, config_map: &HashMap<DeckConfigId, DeckConfig>) -> QueueSortOptions {
deck.config_id()
.and_then(|config_id| config_map.get(&config_id))
.map(|config| QueueSortOptions {
new_order: config.inner.new_card_sort_order(),
new_gather_priority: config.inner.new_card_gather_priority(),
review_order: config.inner.review_order(),
day_learn_mix: config.inner.interday_learning_mix(),
new_review_mix: config.inner.new_mix(),
})
.unwrap_or_else(|| {
// filtered decks do not space siblings
QueueSortOptions {
new_order: NewCardSortOrder::NoSort,
..Default::default()
}
})
}
fn merge_day_learning( fn merge_day_learning(
reviews: Vec<DueCard>, reviews: Vec<DueCard>,
day_learning: Vec<DueCard>, day_learning: Vec<DueCard>,
@ -204,134 +247,122 @@ fn sort_learning(mut learning: Vec<DueCard>) -> VecDeque<LearningQueueEntry> {
impl Collection { impl Collection {
pub(crate) fn build_queues(&mut self, deck_id: DeckId) -> Result<CardQueues> { pub(crate) fn build_queues(&mut self, deck_id: DeckId) -> Result<CardQueues> {
let now = TimestampSecs::now(); let mut queues = QueueBuilder::new(self, deck_id)?;
let timing = self.timing_for_timestamp(now)?;
let decks = self.storage.deck_with_children(deck_id)?;
// need full map, since filtered decks may contain cards from decks
// outside tree
let deck_map = self.storage.get_decks_map()?;
let config = self.storage.get_deck_config_map()?;
let sort_options = decks[0]
.config_id()
.and_then(|config_id| config.get(&config_id))
.map(|config| QueueSortOptions {
new_order: config.inner.new_card_sort_order(),
new_gather_priority: config.inner.new_card_gather_priority(),
review_order: config.inner.review_order(),
day_learn_mix: config.inner.interday_learning_mix(),
new_review_mix: config.inner.new_mix(),
})
.unwrap_or_else(|| {
// filtered decks do not space siblings
QueueSortOptions {
new_order: NewCardSortOrder::LowestPosition,
..Default::default()
}
});
// fetch remaining limits, and cap to selected deck limits so that we don't
// do more work than necessary
let mut remaining = remaining_limits_map(decks.iter(), &config, timing.days_elapsed, true);
let selected_deck_limits_at_start = *remaining.get(&deck_id).unwrap();
let mut selected_deck_limits = selected_deck_limits_at_start;
for limit in remaining.values_mut() {
limit.cap_to(selected_deck_limits);
}
self.storage.update_active_decks(&decks[0])?;
let mut queues = QueueBuilder::new(sort_options.clone());
let get_bury_mode = |home_deck: DeckId| {
deck_map
.get(&home_deck)
.and_then(|deck| deck.config_id())
.and_then(|config_id| config.get(&config_id))
.map(|config| BuryMode {
bury_new: config.inner.bury_new,
bury_reviews: config.inner.bury_reviews,
})
.unwrap_or_default()
};
// intraday cards first, noting down any notes that will need burying
self.storage self.storage
.for_each_intraday_card_in_active_decks(timing.next_day_at, |card| { .update_active_decks(&queues.context.root_deck)?;
let bury = get_bury_mode(card.current_deck_id);
queues.add_intraday_learning_card(card, bury)
})?;
// interday learning, then reviews queues.gather_cards(self)?;
let mut add_due_cards = |kind: DueCardKind| -> Result<()> {
if selected_deck_limits.review != 0 {
self.storage.for_each_due_card_in_active_decks(
timing.days_elapsed,
sort_options.review_order,
kind,
|card| {
if selected_deck_limits.review == 0 {
return false;
}
let bury = get_bury_mode(card.original_deck_id.or(card.current_deck_id));
let limits = remaining.get_mut(&card.current_deck_id).unwrap();
if limits.review != 0 && queues.add_due_card(card, bury) {
selected_deck_limits.review -= 1;
limits.review -= 1;
}
true let queues = queues.build(self.learn_ahead_secs() as i64);
},
)?;
}
Ok(())
};
add_due_cards(DueCardKind::Learning)?;
add_due_cards(DueCardKind::Review)?;
// cap new cards to the remaining review limit
for limit in remaining.values_mut() {
limit.new = limit.new.min(limit.review).min(selected_deck_limits.review);
}
selected_deck_limits.new = selected_deck_limits.new.min(selected_deck_limits.review);
// new cards last
let can_exit_early = sort_options.new_gather_priority == NewCardGatherPriority::Deck;
let reverse = sort_options.new_gather_priority == NewCardGatherPriority::HighestPosition;
for deck in &decks {
if can_exit_early && selected_deck_limits.new == 0 {
break;
}
let limit = remaining.get_mut(&deck.id).unwrap();
if limit.new > 0 {
self.storage
.for_each_new_card_in_deck(deck.id, reverse, |card| {
let bury = get_bury_mode(card.original_deck_id.or(deck.id));
if limit.new != 0 {
if queues.add_new_card(card, bury) {
limit.new -= 1;
selected_deck_limits.new =
selected_deck_limits.new.saturating_sub(1);
}
true
} else {
false
}
})?;
}
}
let final_limits = RemainingLimits {
new: selected_deck_limits_at_start
.new
.min(selected_deck_limits.review),
..selected_deck_limits_at_start
};
let queues = queues.build(
final_limits,
self.learn_ahead_secs() as i64,
timing.days_elapsed,
);
Ok(queues) Ok(queues)
} }
} }
#[cfg(test)]
mod test {
use super::*;
use crate::{
backend_proto::deck_config::config::{NewCardGatherPriority, NewCardSortOrder},
collection::open_test_collection,
};
impl Collection {
fn set_deck_gather_order(&mut self, deck: &mut Deck, order: NewCardGatherPriority) {
let mut conf = DeckConfig::default();
conf.inner.new_card_gather_priority = order as i32;
conf.inner.new_card_sort_order = NewCardSortOrder::NoSort as i32;
self.add_or_update_deck_config(&mut conf).unwrap();
deck.normal_mut().unwrap().config_id = conf.id.0;
self.add_or_update_deck(deck).unwrap();
}
fn set_deck_new_limit(&mut self, deck: &mut Deck, new_limit: u32) {
let mut conf = DeckConfig::default();
conf.inner.new_per_day = new_limit;
self.add_or_update_deck_config(&mut conf).unwrap();
deck.normal_mut().unwrap().config_id = conf.id.0;
self.add_or_update_deck(deck).unwrap();
}
fn queue_as_deck_and_template(&mut self, deck_id: DeckId) -> Vec<(DeckId, u16)> {
self.build_queues(deck_id)
.unwrap()
.iter()
.map(|entry| {
let card = self.storage.get_card(entry.card_id()).unwrap().unwrap();
(card.deck_id, card.template_idx)
})
.collect()
}
}
#[test]
fn queue_building() -> Result<()> {
let mut col = open_test_collection();
col.set_config_bool(BoolKey::Sched2021, true, false)?;
// parent
// ┣━━child━━grandchild
// ┗━━child_2
let mut parent = col.get_or_create_normal_deck("Default").unwrap();
let mut child = col.get_or_create_normal_deck("Default::child").unwrap();
let child_2 = col.get_or_create_normal_deck("Default::child_2").unwrap();
let grandchild = col
.get_or_create_normal_deck("Default::child::grandchild")
.unwrap();
// add 2 new cards to each deck
let nt = col.get_notetype_by_name("Cloze")?.unwrap();
let mut note = nt.new_note();
note.set_field(0, "{{c1::}} {{c2::}}")?;
for deck in [&parent, &child, &child_2, &grandchild] {
note.id.0 = 0;
col.add_note(&mut note, deck.id)?;
}
// set child's new limit to 3, which should affect grandchild
col.set_deck_new_limit(&mut child, 3);
// depth-first tree order
col.set_deck_gather_order(&mut parent, NewCardGatherPriority::Deck);
let cards = vec![
(parent.id, 0),
(parent.id, 1),
(child.id, 0),
(child.id, 1),
(grandchild.id, 0),
(child_2.id, 0),
(child_2.id, 1),
];
assert_eq!(col.queue_as_deck_and_template(parent.id), cards);
// insertion order
col.set_deck_gather_order(&mut parent, NewCardGatherPriority::LowestPosition);
let cards = vec![
(parent.id, 0),
(parent.id, 1),
(child.id, 0),
(child.id, 1),
(child_2.id, 0),
(child_2.id, 1),
(grandchild.id, 0),
];
assert_eq!(col.queue_as_deck_and_template(parent.id), cards);
// inverted insertion order, but sibling order is preserved
col.set_deck_gather_order(&mut parent, NewCardGatherPriority::HighestPosition);
let cards = vec![
(grandchild.id, 0),
(grandchild.id, 1),
(child_2.id, 0),
(child_2.id, 1),
(child.id, 0),
(parent.id, 0),
(parent.id, 1),
];
assert_eq!(col.queue_as_deck_and_template(parent.id), cards);
Ok(())
}
}


@ -9,57 +9,52 @@ use super::{NewCard, NewCardSortOrder, QueueBuilder};
 impl QueueBuilder {
     pub(super) fn sort_new(&mut self) {
-        match self.sort_options.new_order {
-            NewCardSortOrder::TemplateThenLowestPosition => {
-                self.new.sort_unstable_by(template_then_lowest_position);
-            }
-            NewCardSortOrder::TemplateThenHighestPosition => {
-                self.new.sort_unstable_by(template_then_highest_position);
-            }
-            NewCardSortOrder::TemplateThenRandom => {
-                self.new.iter_mut().for_each(NewCard::hash_id_and_mtime);
-                self.new.sort_unstable_by(template_then_random);
-            }
-            NewCardSortOrder::LowestPosition => self.new.sort_unstable_by(lowest_position),
-            NewCardSortOrder::HighestPosition => self.new.sort_unstable_by(highest_position),
-            NewCardSortOrder::Random => {
-                self.new.iter_mut().for_each(NewCard::hash_id_and_mtime);
-                self.new.sort_unstable_by(new_hash)
-            }
-            NewCardSortOrder::TemplateOnly => {
+        match self.context.sort_options.new_order {
+            // preserve gather order
+            NewCardSortOrder::NoSort => (),
+            NewCardSortOrder::Template => {
                 // stable sort to preserve gather order
                 self.new
                     .sort_by(|a, b| a.template_index.cmp(&b.template_index))
             }
-            NewCardSortOrder::NoSort => {
-                // preserve gather order
+            NewCardSortOrder::TemplateThenRandom => {
+                self.hash_new_cards_by_id();
+                self.new.sort_unstable_by(cmp_template_then_hash);
+            }
+            NewCardSortOrder::RandomNoteThenTemplate => {
+                self.hash_new_cards_by_note_id();
+                self.new.sort_unstable_by(cmp_hash_then_template);
+            }
+            NewCardSortOrder::RandomCard => {
+                self.hash_new_cards_by_id();
+                self.new.sort_unstable_by(cmp_hash)
             }
         }
     }
+
+    fn hash_new_cards_by_id(&mut self) {
+        self.new
+            .iter_mut()
+            .for_each(|card| card.hash_id_with_salt(self.context.timing.days_elapsed as i64));
+    }
+
+    fn hash_new_cards_by_note_id(&mut self) {
+        self.new
+            .iter_mut()
+            .for_each(|card| card.hash_note_id_with_salt(self.context.timing.days_elapsed as i64));
+    }
 }
 
-fn template_then_lowest_position(a: &NewCard, b: &NewCard) -> Ordering {
-    (a.template_index, a.due).cmp(&(b.template_index, b.due))
+fn cmp_hash(a: &NewCard, b: &NewCard) -> Ordering {
+    a.hash.cmp(&b.hash)
 }
 
-fn template_then_highest_position(a: &NewCard, b: &NewCard) -> Ordering {
-    (a.template_index, b.due).cmp(&(b.template_index, a.due))
-}
-
-fn template_then_random(a: &NewCard, b: &NewCard) -> Ordering {
+fn cmp_template_then_hash(a: &NewCard, b: &NewCard) -> Ordering {
     (a.template_index, a.hash).cmp(&(b.template_index, b.hash))
 }
 
-fn lowest_position(a: &NewCard, b: &NewCard) -> Ordering {
-    a.due.cmp(&b.due)
-}
-
-fn highest_position(a: &NewCard, b: &NewCard) -> Ordering {
-    b.due.cmp(&a.due)
-}
-
-fn new_hash(a: &NewCard, b: &NewCard) -> Ordering {
-    a.hash.cmp(&b.hash)
+fn cmp_hash_then_template(a: &NewCard, b: &NewCard) -> Ordering {
+    (a.hash, a.template_index).cmp(&(b.hash, b.template_index))
 }
 
 // We sort based on a hash so that if the queue is rebuilt, remaining
@ -67,10 +62,17 @@ fn new_hash(a: &NewCard, b: &NewCard) -> Ordering {
 // may still result in a different card)
 impl NewCard {
-    fn hash_id_and_mtime(&mut self) {
+    fn hash_id_with_salt(&mut self, salt: i64) {
         let mut hasher = FnvHasher::default();
         hasher.write_i64(self.id.0);
-        hasher.write_i64(self.mtime.0);
+        hasher.write_i64(salt);
+        self.hash = hasher.finish();
+    }
+
+    fn hash_note_id_with_salt(&mut self, salt: i64) {
+        let mut hasher = FnvHasher::default();
+        hasher.write_i64(self.note_id.0);
+        hasher.write_i64(salt);
         self.hash = hasher.finish();
     }
 }
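Aside (illustration only): replacing the `mtime` input with a salt makes the random orders reproducible — rebuilding the queues with the same `days_elapsed` salt yields the same shuffle, while a new day reshuffles. A standalone sketch, assuming the `fnv` crate that the code above already uses:

```rust
use std::hash::Hasher;

use fnv::FnvHasher;

// Same salted FNV hash as above: identical (card id, salt) pairs always
// produce the same value.
fn salted_hash(card_id: i64, salt: i64) -> u64 {
    let mut hasher = FnvHasher::default();
    hasher.write_i64(card_id);
    hasher.write_i64(salt);
    hasher.finish()
}

fn main() {
    let today = 123; // days_elapsed acts as the salt
    // Same card, same day: identical hash, so a rebuilt queue keeps its shuffle.
    assert_eq!(salted_hash(42, today), salted_hash(42, today));
    // A new day changes the salt and, in practice, the order.
    println!("{} vs {}", salted_hash(42, today), salted_hash(42, today + 1));
}
```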


@ -0,0 +1,12 @@
SELECT id,
nid,
ord,
cast(mod AS integer),
did,
odid
FROM cards
WHERE did IN (
SELECT id
FROM active_decks
)
AND queue = 0


@ -69,6 +69,18 @@ fn row_to_card(row: &Row) -> result::Result<Card, rusqlite::Error> {
     })
 }
 
+fn row_to_new_card(row: &Row) -> result::Result<NewCard, rusqlite::Error> {
+    Ok(NewCard {
+        id: row.get(0)?,
+        note_id: row.get(1)?,
+        template_index: row.get(2)?,
+        mtime: row.get(3)?,
+        current_deck_id: row.get(4)?,
+        original_deck_id: row.get(5)?,
+        hash: 0,
+    })
+}
+
 impl super::SqliteStorage {
     pub fn get_card(&self, cid: CardId) -> Result<Option<Card>> {
         self.db
@ -229,12 +241,31 @@ impl super::SqliteStorage {
         Ok(())
     }
 
-    /// Call func() for each new card, stopping when it returns false
-    /// or no more cards found.
-    pub(crate) fn for_each_new_card_in_deck<F>(
+    /// Call func() for each new card in the provided deck, stopping when it
+    /// returns false or no more cards found.
+    pub(crate) fn for_each_new_card_in_deck<F>(&self, deck: DeckId, mut func: F) -> Result<()>
+    where
+        F: FnMut(NewCard) -> bool,
+    {
+        let mut stmt = self.db.prepare_cached(&format!(
+            "{} ORDER BY due, ord ASC",
+            include_str!("new_cards.sql")
+        ))?;
+        let mut rows = stmt.query(params![deck])?;
+        while let Some(row) = rows.next()? {
+            if !func(row_to_new_card(row)?) {
+                break;
+            }
+        }
+        Ok(())
+    }
+
+    /// Call func() for each new card in the active decks, stopping when it
+    /// returns false or no more cards found.
+    pub(crate) fn for_each_new_card_in_active_decks<F>(
         &self,
-        deck: DeckId,
-        reverse: bool,
+        order: NewCardSorting,
         mut func: F,
     ) -> Result<()>
     where
@ -242,20 +273,12 @@ impl super::SqliteStorage {
     {
         let mut stmt = self.db.prepare_cached(&format!(
             "{} ORDER BY {}",
-            include_str!("new_cards.sql"),
-            if reverse { "due desc" } else { "due asc" }
+            include_str!("active_new_cards.sql"),
+            order.write(),
         ))?;
-        let mut rows = stmt.query(params![deck])?;
+        let mut rows = stmt.query(params![])?;
         while let Some(row) = rows.next()? {
-            if !func(NewCard {
-                id: row.get(0)?,
-                note_id: row.get(1)?,
-                due: row.get(2)?,
-                template_index: row.get(3)?,
-                mtime: row.get(4)?,
-                original_deck_id: row.get(5)?,
-                hash: 0,
-            }) {
+            if !func(row_to_new_card(row)?) {
                 break;
             }
         }
@ -619,6 +642,33 @@ fn review_order_sql(order: ReviewCardOrder) -> String {
     v.join(", ")
 }
 
+#[derive(Debug, Clone, Copy)]
+pub(crate) enum NewCardSorting {
+    /// Ascending position, consecutive siblings,
+    /// provided they have the same position.
+    LowestPosition,
+    /// Descending position, consecutive siblings,
+    /// provided they have the same position.
+    HighestPosition,
+    /// Random, but with consecutive siblings.
+    /// For some given salt the order is stable.
+    RandomNotes(u32),
+    /// Fully random.
+    /// For some given salt the order is stable.
+    RandomCards(u32),
+}
+
+impl NewCardSorting {
+    fn write(self) -> String {
+        match self {
+            NewCardSorting::LowestPosition => "due ASC, ord ASC".to_string(),
+            NewCardSorting::HighestPosition => "due DESC, ord ASC".to_string(),
+            NewCardSorting::RandomNotes(salt) => format!("fnvhash(nid, {salt}), ord ASC"),
+            NewCardSorting::RandomCards(salt) => format!("fnvhash(id, {salt})"),
+        }
+    }
+}
+
 #[cfg(test)]
 mod test {
     use std::path::Path;
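Aside (illustration only): `NewCardSorting::RandomNotes` orders by a hash of the note id, so all siblings of a note share the same primary key and stay adjacent, while `RandomCards` hashes the card id and scatters them. A standalone sketch using the standard library's hasher as a stand-in for the `fnvhash()` SQL function referenced above (presumably registered on the Rust side):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy replacement for fnvhash(value, salt); only the grouping behaviour matters here.
fn toy_hash(value: i64, salt: i64) -> u64 {
    let mut h = DefaultHasher::new();
    (value, salt).hash(&mut h);
    h.finish()
}

fn main() {
    let salt = 7;
    // (note id, template ord): the two cards of note 100 are siblings.
    let mut cards = vec![(100_i64, 0_u16), (200, 0), (100, 1), (300, 0)];

    // RandomNotes ≈ ORDER BY fnvhash(nid, salt), ord: siblings stay together.
    cards.sort_by_key(|&(nid, ord)| (toy_hash(nid, salt), ord));
    let sibling_positions: Vec<usize> = cards
        .iter()
        .enumerate()
        .filter(|&(_, &(nid, _))| nid == 100)
        .map(|(i, _)| i)
        .collect();
    assert_eq!(sibling_positions[1], sibling_positions[0] + 1);
}
```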


@ -1,8 +1,8 @@
 SELECT id,
   nid,
-  due,
   ord,
   cast(mod AS integer),
+  did,
   odid
 FROM cards
 WHERE did = ?


@ -198,13 +198,14 @@ impl SqliteStorage {
         .collect()
     }
 
+    /// Returns the descendants of the given [Deck] in preorder.
     pub(crate) fn child_decks(&self, parent: &Deck) -> Result<Vec<Deck>> {
         let prefix_start = format!("{}\x1f", parent.name);
         let prefix_end = format!("{}\x20", parent.name);
         self.db
             .prepare_cached(concat!(
                 include_str!("get_deck.sql"),
-                " where name >= ? and name < ?"
+                " where name >= ? and name < ? order by name"
             ))?
             .query_and_then([prefix_start, prefix_end], row_to_deck)?
             .collect()
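Aside (illustration only): the added `order by name` yields preorder because deck names store their components separated by `\x1f`, which sorts below any printable character. Plain lexicographic order over full names therefore places each parent directly before its descendants, matching the doc comment added above.

```rust
// Sketch: sorting full deck names with the '\x1f' separator gives a
// preorder traversal (parent, its subtree, then the next sibling).
fn main() {
    let mut names = vec![
        "Default\x1fchild_2",
        "Default\x1fchild",
        "Default\x1fchild\x1fgrandchild",
    ];
    names.sort();
    assert_eq!(
        names,
        vec![
            "Default\x1fchild",
            "Default\x1fchild\x1fgrandchild",
            "Default\x1fchild_2",
        ]
    );
}
```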


@ -6,6 +6,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     import DynamicallySlottable from "../components/DynamicallySlottable.svelte";
     import Item from "../components/Item.svelte";
     import * as tr from "../lib/ftl";
+    import { DeckConfig } from "../lib/proto";
     import EnumSelectorRow from "./EnumSelectorRow.svelte";
     import type { DeckOptionsState } from "./lib";
     import { reviewMixChoices } from "./strings";
@ -23,16 +24,15 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
         tr.deckConfigNewGatherPriorityDeck(),
         tr.deckConfigNewGatherPriorityPositionLowestFirst(),
         tr.deckConfigNewGatherPriorityPositionHighestFirst(),
+        tr.deckConfigNewGatherPriorityRandomNotes(),
+        tr.deckConfigNewGatherPriorityRandomCards(),
     ];
     const newSortOrderChoices = [
-        tr.deckConfigSortOrderCardTemplateThenLowestPosition(),
-        tr.deckConfigSortOrderCardTemplateThenHighestPosition(),
-        tr.deckConfigSortOrderCardTemplateThenRandom(),
-        tr.deckConfigSortOrderLowestPosition(),
-        tr.deckConfigSortOrderHighestPosition(),
-        tr.deckConfigSortOrderRandom(),
         tr.deckConfigSortOrderTemplateThenGather(),
         tr.deckConfigSortOrderGather(),
+        tr.deckConfigSortOrderCardTemplateThenRandom(),
+        tr.deckConfigSortOrderRandomNoteThenTemplate(),
+        tr.deckConfigSortOrderRandom(),
     ];
     const reviewOrderChoices = [
         tr.deckConfigSortOrderDueDateThenRandom(),
@ -43,6 +43,46 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
         tr.deckConfigSortOrderAscendingEase(),
         tr.deckConfigSortOrderDescendingEase(),
     ];
+
+    const GatherOrder = DeckConfig.DeckConfig.Config.NewCardGatherPriority;
+    const SortOrder = DeckConfig.DeckConfig.Config.NewCardSortOrder;
+    let disabledNewSortOrders: number[] = [];
+    $: {
+        switch ($config.newCardGatherPriority) {
+            case GatherOrder.NEW_CARD_GATHER_PRIORITY_RANDOM_NOTES:
+                disabledNewSortOrders = [
+                    // same as NEW_CARD_SORT_ORDER_TEMPLATE
+                    SortOrder.NEW_CARD_SORT_ORDER_TEMPLATE_THEN_RANDOM,
+                    // same as NEW_CARD_SORT_ORDER_NO_SORT
+                    SortOrder.NEW_CARD_SORT_ORDER_RANDOM_NOTE_THEN_TEMPLATE,
+                ];
+                break;
+            case GatherOrder.NEW_CARD_GATHER_PRIORITY_RANDOM_CARDS:
+                disabledNewSortOrders = [
+                    // same as NEW_CARD_SORT_ORDER_TEMPLATE
+                    SortOrder.NEW_CARD_SORT_ORDER_TEMPLATE_THEN_RANDOM,
+                    // not useful if siblings are not gathered together
+                    SortOrder.NEW_CARD_SORT_ORDER_RANDOM_NOTE_THEN_TEMPLATE,
+                    // same as NEW_CARD_SORT_ORDER_NO_SORT
+                    SortOrder.NEW_CARD_SORT_ORDER_RANDOM_CARD,
+                ];
+                break;
+            default:
+                disabledNewSortOrders = [];
+                break;
+        }
+
+        // disabled options aren't deselected automatically
+        if (disabledNewSortOrders.includes($config.newCardSortOrder)) {
+            // default option should never be disabled
+            $config.newCardSortOrder = 0;
+        }
+
+        // check for invalid index from previous version
+        if (!($config.newCardSortOrder in SortOrder)) {
+            $config.newCardSortOrder = 0;
+        }
+    }
 </script>
 
 <TitledContainer title={tr.deckConfigOrderingTitle()}>
@ -52,7 +92,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
             bind:value={$config.newCardGatherPriority}
             defaultValue={defaults.newCardGatherPriority}
             choices={newGatherPriorityChoices}
-            markdownTooltip={tr.deckConfigNewGatherPriorityTooltip() + currentDeck}
+            markdownTooltip={tr.deckConfigNewGatherPriorityTooltip_2() +
+                currentDeck}
         >
             {tr.deckConfigNewGatherPriority()}
         </EnumSelectorRow>
@ -63,7 +104,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
             bind:value={$config.newCardSortOrder}
             defaultValue={defaults.newCardSortOrder}
             choices={newSortOrderChoices}
-            markdownTooltip={tr.deckConfigNewCardSortOrderTooltip() + currentDeck}
+            disabled={disabledNewSortOrders}
+            markdownTooltip={tr.deckConfigNewCardSortOrderTooltip_2() + currentDeck}
         >
             {tr.deckConfigNewCardSortOrder()}
         </EnumSelectorRow>


@ -7,6 +7,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     export let choices: string[];
     export let value: number = 0;
+    export let disabled: number[] = [];
 </script>
 
 <select
@ -16,7 +17,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     class="enum-selector form-select"
 >
     {#each choices as choice, idx}
-        <option value={idx}>{choice}</option>
+        <option value={idx} disabled={disabled.includes(idx)}>{choice}</option>
     {/each}
 </select>


@ -14,6 +14,7 @@
     export let defaultValue: number;
     export let breakpoint: Breakpoint = "md";
     export let choices: string[];
+    export let disabled: number[] = [];
     export let markdownTooltip: string;
 </script>
@ -22,7 +23,7 @@
         <TooltipLabel {markdownTooltip}><slot /></TooltipLabel>
     </Col>
     <Col --col-size={5} {breakpoint}>
-        <EnumSelector bind:value {choices} />
+        <EnumSelector bind:value {choices} {disabled} />
         <RevertButton bind:value {defaultValue} />
     </Col>
 </Row>


@ -6,6 +6,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     import DynamicallySlottable from "../components/DynamicallySlottable.svelte";
     import Item from "../components/Item.svelte";
     import * as tr from "../lib/ftl";
+    import { DeckConfig } from "../lib/proto";
     import EnumSelectorRow from "./EnumSelectorRow.svelte";
     import type { DeckOptionsState } from "./lib";
     import SpinBoxRow from "./SpinBoxRow.svelte";
@ -39,6 +40,13 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
         $config.graduatingIntervalGood > $config.graduatingIntervalEasy
             ? tr.deckConfigGoodAboveEasy()
             : "";
+
+    $: insertionOrderRandom =
+        state.v3Scheduler &&
+        $config.newCardInsertOrder ==
+            DeckConfig.DeckConfig.Config.NewCardInsertOrder.NEW_CARD_INSERT_ORDER_RANDOM
+            ? tr.deckConfigNewInsertionOrderRandomWithV3()
+            : "";
 </script>
 
 <TitledContainer title={tr.schedulingNewCards()}>
@ -92,5 +100,9 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
                 {tr.deckConfigNewInsertionOrder()}
             </EnumSelectorRow>
         </Item>
+
+        <Item>
+            <Warning warning={insertionOrderRandom} />
+        </Item>
     </DynamicallySlottable>
 </TitledContainer>